diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index e132eca1e5..1bf6c80e40 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -14,7 +14,7 @@ body: ### Description diff --git a/.github/actionlint.yml b/.github/actionlint.yml index 0ee6af8a1d..6d8e0107e9 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -19,14 +19,27 @@ self-hosted-runner: - namespace-profile-16x32-ubuntu-2004-arm - namespace-profile-32x64-ubuntu-2004-arm # Namespace Ubuntu 22.04 (Everything else) - - namespace-profile-2x4-ubuntu-2204 - namespace-profile-4x8-ubuntu-2204 - namespace-profile-8x16-ubuntu-2204 - namespace-profile-16x32-ubuntu-2204 - namespace-profile-32x64-ubuntu-2204 + # Namespace Ubuntu 24.04 (like ubuntu-latest) + - namespace-profile-2x4-ubuntu-2404 # Namespace Limited Preview - namespace-profile-8x16-ubuntu-2004-arm-m4 - namespace-profile-8x32-ubuntu-2004-arm-m4 # Self Hosted Runners - self-mini-macos - self-32vcpu-windows-2022 + +# Disable shellcheck because it doesn't like powershell +# This should have been triggered with initial rollout of actionlint +# but https://github.com/zed-industries/zed/pull/36693 +# somehow caused actionlint to actually check those windows jobs +# where previously they were being skipped. Likely caused by an +# unknown bug in actionlint where parsing of `runs-on: [ ]` +# breaks something else. (yuck) +paths: + .github/workflows/{ci,release_nightly}.yml: + ignore: + - "shellcheck" diff --git a/.github/workflows/bump_collab_staging.yml b/.github/workflows/bump_collab_staging.yml index d8eaa6019e..d400905b4d 100644 --- a/.github/workflows/bump_collab_staging.yml +++ b/.github/workflows/bump_collab_staging.yml @@ -8,7 +8,7 @@ on: jobs: update-collab-staging-tag: if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 steps: - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f4ba227168..a34833d0fd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: run_nix: ${{ steps.filter.outputs.run_nix }} run_actionlint: ${{ steps.filter.outputs.run_actionlint }} runs-on: - - ubuntu-latest + - namespace-profile-2x4-ubuntu-2404 steps: - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 @@ -237,7 +237,7 @@ jobs: uses: ./.github/actions/build_docs actionlint: - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true' needs: [job_spec] steps: @@ -418,7 +418,7 @@ jobs: if: | github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_tests == 'true' - runs-on: [self-hosted, Windows, X64] + runs-on: [self-32vcpu-windows-2022] steps: - name: Environment Setup run: | @@ -458,7 +458,7 @@ jobs: tests_pass: name: Tests Pass - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 needs: - job_spec - style @@ -784,7 +784,7 @@ jobs: bundle-windows-x64: timeout-minutes: 120 name: Create a Windows installer - runs-on: [self-hosted, Windows, X64] + runs-on: [self-32vcpu-windows-2022] if: contains(github.event.pull_request.labels.*.name, 'run-bundling') # if: (startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling')) needs: [windows_tests] diff --git 
a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 15c82643ae..3f84179278 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -12,7 +12,7 @@ on: jobs: danger: if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 0cc6737a45..2026ee7b73 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -59,7 +59,7 @@ jobs: timeout-minutes: 60 name: Run tests on Windows if: github.repository_owner == 'zed-industries' - runs-on: [self-hosted, Windows, X64] + runs-on: [self-32vcpu-windows-2022] steps: - name: Checkout repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 @@ -206,9 +206,6 @@ jobs: runs-on: github-8vcpu-ubuntu-2404 needs: tests name: Build Zed on FreeBSD - # env: - # MYTOKEN : ${{ secrets.MYTOKEN }} - # MYTOKEN2: "value2" steps: - uses: actions/checkout@v4 - name: Build FreeBSD remote-server @@ -243,7 +240,6 @@ jobs: bundle-nix: name: Build and cache Nix package - if: false needs: tests secrets: inherit uses: ./.github/workflows/nix.yml @@ -252,7 +248,7 @@ jobs: timeout-minutes: 60 name: Create a Windows installer if: github.repository_owner == 'zed-industries' - runs-on: [self-hosted, Windows, X64] + runs-on: [self-32vcpu-windows-2022] needs: windows-tests env: AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} @@ -294,7 +290,7 @@ jobs: update-nightly-tag: name: Update nightly tag if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 needs: - bundle-mac - bundle-linux-x86 diff --git a/.github/workflows/script_checks.yml b/.github/workflows/script_checks.yml index c32a433e46..5dbfc9cb7f 100644 --- a/.github/workflows/script_checks.yml +++ b/.github/workflows/script_checks.yml @@ -12,7 +12,7 @@ jobs: shellcheck: name: "ShellCheck Scripts" if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest + runs-on: namespace-profile-2x4-ubuntu-2404 steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 diff --git a/Cargo.lock b/Cargo.lock index 206566e228..57dac2f3c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -39,6 +39,26 @@ dependencies = [ "workspace-hack", ] +[[package]] +name = "acp_tools" +version = "0.1.0" +dependencies = [ + "agent-client-protocol", + "collections", + "gpui", + "language", + "markdown", + "project", + "serde", + "serde_json", + "settings", + "theme", + "ui", + "util", + "workspace", + "workspace-hack", +] + [[package]] name = "action_log" version = "0.1.0" @@ -171,11 +191,12 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.0.26" +version = "0.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "160971bb53ca0b2e70ebc857c21e24eb448745f1396371015f4c59e9a9e51ed0" +checksum = "289eb34ee17213dadcca47eedadd386a5e7678094095414e475965d1bcca2860" dependencies = [ "anyhow", + "async-broadcast", "futures 0.3.31", "log", "parking_lot", @@ -244,6 +265,7 @@ dependencies = [ "terminal", "text", "theme", + "thiserror 2.0.12", "tree-sitter-rust", "ui", "unindent", @@ -263,16 +285,19 @@ name = "agent_servers" version = "0.1.0" dependencies = [ "acp_thread", + "acp_tools", "action_log", "agent-client-protocol", "agent_settings", - "agentic-coding-protocol", "anyhow", + "client", 
"collections", "context_server", "env_logger 0.11.8", + "fs", "futures 0.3.31", "gpui", + "gpui_tokio", "indoc", "itertools 0.14.0", "language", @@ -284,6 +309,7 @@ dependencies = [ "paths", "project", "rand 0.8.5", + "reqwest_client", "schemars", "semver", "serde", @@ -377,6 +403,7 @@ dependencies = [ "parking_lot", "paths", "picker", + "postage", "pretty_assertions", "project", "prompt_store", @@ -416,24 +443,6 @@ dependencies = [ "zed_actions", ] -[[package]] -name = "agentic-coding-protocol" -version = "0.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e6ae951b36fa2f8d9dd6e1af6da2fcaba13d7c866cf6a9e65deda9dc6c5fe4" -dependencies = [ - "anyhow", - "chrono", - "derive_more 2.0.1", - "futures 0.3.31", - "log", - "parking_lot", - "schemars", - "semver", - "serde", - "serde_json", -] - [[package]] name = "ahash" version = "0.7.8" @@ -849,7 +858,7 @@ dependencies = [ "anyhow", "async-trait", "collections", - "derive_more 0.99.19", + "derive_more", "extension", "futures 0.3.31", "gpui", @@ -912,7 +921,7 @@ dependencies = [ "clock", "collections", "ctor", - "derive_more 0.99.19", + "derive_more", "gpui", "icons", "indoc", @@ -949,7 +958,7 @@ dependencies = [ "cloud_llm_client", "collections", "component", - "derive_more 0.99.19", + "derive_more", "diffy", "editor", "feature_flags", @@ -1375,10 +1384,11 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.19", "gpui", - "parking_lot", "rodio", + "schemars", + "serde", + "settings", "util", "workspace-hack", ] @@ -3061,7 +3071,7 @@ dependencies = [ "cocoa 0.26.0", "collections", "credentials_provider", - "derive_more 0.99.19", + "derive_more", "feature_flags", "fs", "futures 0.3.31", @@ -3493,7 +3503,7 @@ name = "command_palette_hooks" version = "0.1.0" dependencies = [ "collections", - "derive_more 0.99.19", + "derive_more", "gpui", "workspace-hack", ] @@ -4044,6 +4054,7 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ + "bincode", "crash-handler", "log", "mach2 0.5.0", @@ -4053,6 +4064,7 @@ dependencies = [ "serde", "serde_json", "smol", + "system_specs", "workspace-hack", ] @@ -4654,27 +4666,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "derive_more" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "unicode-xid", -] - [[package]] name = "derive_refineable" version = "0.1.0" @@ -4695,7 +4686,6 @@ dependencies = [ "component", "ctor", "editor", - "futures 0.3.31", "gpui", "indoc", "language", @@ -5732,14 +5722,10 @@ dependencies = [ name = "feedback" version = "0.1.0" dependencies = [ - "client", "editor", "gpui", - "human_bytes", "menu", - "release_channel", - "serde", - "sysinfo", + "system_specs", "ui", "urlencoding", "util", @@ -6415,7 +6401,7 @@ dependencies = [ "askpass", "async-trait", "collections", - "derive_more 0.99.19", + "derive_more", "futures 0.3.31", "git2", "gpui", @@ -7445,7 +7431,7 @@ dependencies = [ "core-video", "cosmic-text", "ctor", - "derive_more 0.99.19", + "derive_more", "embed-resource", "env_logger 0.11.8", "etagere", @@ -7533,6 +7519,7 @@ dependencies = [ name = "gpui_tokio" version = 
"0.1.0" dependencies = [ + "anyhow", "gpui", "tokio", "util", @@ -7969,7 +7956,7 @@ version = "0.1.0" dependencies = [ "anyhow", "bytes 1.10.1", - "derive_more 0.99.19", + "derive_more", "futures 0.3.31", "http 1.3.1", "http-body 1.0.1", @@ -8481,6 +8468,7 @@ dependencies = [ "theme", "ui", "util", + "util_macros", "workspace", "workspace-hack", "zed_actions", @@ -9618,6 +9606,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "audio", "collections", "core-foundation 0.10.0", "core-video", @@ -9640,6 +9629,7 @@ dependencies = [ "scap", "serde", "serde_json", + "settings", "sha2", "simplelog", "smallvec", @@ -11626,6 +11616,12 @@ dependencies = [ "hmac", ] +[[package]] +name = "pciid-parser" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0008e816fcdaf229cdd540e9b6ca2dc4a10d65c31624abb546c6420a02846e61" + [[package]] name = "pem" version = "3.0.5" @@ -13526,6 +13522,7 @@ dependencies = [ "smol", "sysinfo", "telemetry_events", + "thiserror 2.0.12", "toml 0.8.20", "unindent", "util", @@ -14365,12 +14362,10 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe8c9d1c68d67dd9f97ecbc6f932b60eb289c5dbddd8aa1405484a8fd2fcd984" dependencies = [ - "chrono", "dyn-clone", "indexmap", "ref-cast", "schemars_derive", - "semver", "serde", "serde_json", ] @@ -16146,6 +16141,21 @@ dependencies = [ "winx", ] +[[package]] +name = "system_specs" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "gpui", + "human_bytes", + "pciid-parser", + "release_channel", + "serde", + "sysinfo", + "workspace-hack", +] + [[package]] name = "tab_switcher" version = "0.1.0" @@ -16439,7 +16449,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.19", + "derive_more", "fs", "futures 0.3.31", "gpui", @@ -19780,7 +19790,6 @@ dependencies = [ "any_vec", "anyhow", "async-recursion", - "bincode", "call", "client", "clock", @@ -19799,6 +19808,7 @@ dependencies = [ "node_runtime", "parking_lot", "postage", + "pretty_assertions", "project", "remote", "schemars", @@ -19954,7 +19964,6 @@ dependencies = [ "rustix 1.0.7", "rustls 0.23.26", "rustls-webpki 0.103.1", - "schemars", "scopeguard", "sea-orm", "sea-query-binder", @@ -20388,8 +20397,9 @@ dependencies = [ [[package]] name = "zed" -version = "0.201.0" +version = "0.202.0" dependencies = [ + "acp_tools", "activity_indicator", "agent", "agent_servers", @@ -20405,6 +20415,7 @@ dependencies = [ "auto_update", "auto_update_ui", "backtrace", + "bincode", "breadcrumbs", "call", "channel", @@ -20503,6 +20514,7 @@ dependencies = [ "supermaven", "svg_preview", "sysinfo", + "system_specs", "tab_switcher", "task", "tasks_ui", diff --git a/Cargo.toml b/Cargo.toml index a2de4aaaed..6ec243a9b9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] resolver = "2" members = [ + "crates/acp_tools", "crates/acp_thread", "crates/action_log", "crates/activity_indicator", @@ -155,6 +156,7 @@ members = [ "crates/streaming_diff", "crates/sum_tree", "crates/supermaven", + "crates/system_specs", "crates/supermaven_api", "crates/svg_preview", "crates/tab_switcher", @@ -226,6 +228,7 @@ edition = "2024" # Workspace member crates # +acp_tools = { path = "crates/acp_tools" } acp_thread = { path = "crates/acp_thread" } action_log = { path = "crates/action_log" } agent = { path = "crates/agent" } @@ -381,6 +384,7 @@ streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } supermaven = { path = "crates/supermaven" } 
supermaven_api = { path = "crates/supermaven_api" } +system_specs = { path = "crates/system_specs" } tab_switcher = { path = "crates/tab_switcher" } task = { path = "crates/task" } tasks_ui = { path = "crates/tasks_ui" } @@ -422,8 +426,7 @@ zlog_settings = { path = "crates/zlog_settings" } # External crates # -agentic-coding-protocol = "0.0.10" -agent-client-protocol = "0.0.26" +agent-client-protocol = "0.0.31" aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" } any_vec = "0.14" @@ -450,6 +453,7 @@ aws-sdk-bedrockruntime = { version = "1.80.0", features = [ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] } aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] } base64 = "0.22" +bincode = "1.2.1" bitflags = "2.6.0" blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" } blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" } @@ -493,6 +497,7 @@ handlebars = "4.3" heck = "0.5" heed = { version = "0.21.0", features = ["read-txn-no-tls"] } hex = "0.4.3" +human_bytes = "0.4.1" html5ever = "0.27.0" http = "1.1" http-body = "1.0" @@ -532,6 +537,7 @@ palette = { version = "0.7.5", default-features = false, features = ["std"] } parking_lot = "0.12.1" partial-json-fixer = "0.5.3" parse_int = "0.9" +pciid-parser = "0.8.0" pathdiff = "0.2" pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" } @@ -802,143 +808,32 @@ unexpected_cfgs = { level = "allow" } dbg_macro = "deny" todo = "deny" -# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so -# warning on this rule produces a lot of noise. -single_range_in_vec_init = "allow" - -redundant_clone = "warn" +# This is not a style lint, see https://github.com/rust-lang/rust-clippy/pull/15454 +# Remove when the lint gets promoted to `suspicious`. declare_interior_mutable_const = "deny" -# These are all of the rules that currently have violations in the Zed -# codebase. -# -# We'll want to drive this list down by either: -# 1. fixing violations of the rule and begin enforcing it -# 2. deciding we want to allow the rule permanently, at which point -# we should codify that separately above. -# -# This list shouldn't be added to; it should only get shorter. -# ============================================================================= +redundant_clone = "deny" -# There are a bunch of rules currently failing in the `style` group, so -# allow all of those, for now. +# We currently do not restrict any style rules +# as it slows down shipping code to Zed. +# +# Running ./script/clippy can take several minutes, and so it's +# common to skip that step and let CI do it. Any unexpected failures +# (which also take minutes to discover) thus require switching back +# to an old branch, manual fixing, and re-pushing. +# +# In the future we could improve this by either making sure +# Zed can surface clippy errors in diagnostics (in addition to the +# rust-analyzer errors), or by having CI fix style nits automatically. style = { level = "allow", priority = -1 } -# Temporary list of style lints that we've fixed so far. 
-# Progress is being tracked in #36577 -blocks_in_conditions = "warn" -bool_assert_comparison = "warn" -borrow_interior_mutable_const = "warn" -box_default = "warn" -builtin_type_shadow = "warn" -bytes_nth = "warn" -chars_next_cmp = "warn" -cmp_null = "warn" -collapsible_else_if = "warn" -collapsible_if = "warn" -comparison_to_empty = "warn" -default_instead_of_iter_empty = "warn" -disallowed_macros = "warn" -disallowed_methods = "warn" -disallowed_names = "warn" -disallowed_types = "warn" -doc_lazy_continuation = "warn" -doc_overindented_list_items = "warn" -duplicate_underscore_argument = "warn" -err_expect = "warn" -fn_to_numeric_cast = "warn" -fn_to_numeric_cast_with_truncation = "warn" -for_kv_map = "warn" -implicit_saturating_add = "warn" -implicit_saturating_sub = "warn" -inconsistent_digit_grouping = "warn" -infallible_destructuring_match = "warn" -inherent_to_string = "warn" -init_numbered_fields = "warn" -into_iter_on_ref = "warn" -io_other_error = "warn" -items_after_test_module = "warn" -iter_cloned_collect = "warn" -iter_next_slice = "warn" -iter_nth = "warn" -iter_nth_zero = "warn" -iter_skip_next = "warn" -just_underscores_and_digits = "warn" -len_zero = "warn" -let_and_return = "warn" -main_recursion = "warn" -manual_bits = "warn" -manual_dangling_ptr = "warn" -manual_is_ascii_check = "warn" -manual_is_finite = "warn" -manual_is_infinite = "warn" -manual_map = "warn" -manual_next_back = "warn" -manual_non_exhaustive = "warn" -manual_ok_or = "warn" -manual_pattern_char_comparison = "warn" -manual_rotate = "warn" -manual_slice_fill = "warn" -manual_while_let_some = "warn" -map_clone = "warn" -map_collect_result_unit = "warn" -match_like_matches_macro = "warn" -match_overlapping_arm = "warn" -mem_replace_option_with_none = "warn" -mem_replace_option_with_some = "warn" -missing_enforced_import_renames = "warn" -missing_safety_doc = "warn" -mixed_attributes_style = "warn" -mixed_case_hex_literals = "warn" -module_inception = "warn" -must_use_unit = "warn" -mut_mutex_lock = "warn" -needless_borrow = "warn" -needless_doctest_main = "warn" -needless_else = "warn" -needless_parens_on_range_literals = "warn" -needless_pub_self = "warn" -needless_return = "warn" -needless_return_with_question_mark = "warn" -ok_expect = "warn" -owned_cow = "warn" -print_literal = "warn" -print_with_newline = "warn" -ptr_eq = "warn" -question_mark = "warn" -redundant_closure = "warn" -redundant_field_names = "warn" -redundant_pattern_matching = "warn" -redundant_static_lifetimes = "warn" -result_map_or_into_option = "warn" -self_named_constructors = "warn" -single_match = "warn" -tabs_in_doc_comments = "warn" -to_digit_is_some = "warn" -toplevel_ref_arg = "warn" -unnecessary_fold = "warn" -unnecessary_map_or = "warn" -unnecessary_mut_passed = "warn" -unnecessary_owned_empty_strings = "warn" -unneeded_struct_pattern = "warn" -unsafe_removed_from_name = "warn" -unused_unit = "warn" -unusual_byte_groupings = "warn" -write_literal = "warn" -writeln_empty_string = "warn" -wrong_self_convention = "warn" -zero_ptr = "warn" - # Individual rules that have violations in the codebase: type_complexity = "allow" -# We often return trait objects from `new` functions. -new_ret_no_self = { level = "allow" } -# We have a few `next` functions that differ in lifetimes -# compared to Iterator::next. Yet, clippy complains about those. -should_implement_trait = { level = "allow" } let_underscore_future = "allow" -# It doesn't make sense to implement `Default` unilaterally. 
-new_without_default = "allow" + +# Motivation: We use `vec![a..b]` a lot when dealing with ranges in text, so +# warning on this rule produces a lot of noise. +single_range_in_vec_init = "allow" # in Rust it can be very tedious to reduce argument count without # running afoul of the borrow checker. @@ -947,10 +842,6 @@ too_many_arguments = "allow" # We often have large enum variants yet we rarely actually bother with splitting them up. large_enum_variant = "allow" -# `enum_variant_names` fires for all enums, even when they derive serde traits. -# Adhering to this lint would be a breaking change. -enum_variant_names = "allow" - [workspace.metadata.cargo-machete] ignored = [ "bindgen", diff --git a/Procfile.web b/Procfile.web new file mode 100644 index 0000000000..8140555144 --- /dev/null +++ b/Procfile.web @@ -0,0 +1,2 @@ +postgrest_llm: postgrest crates/collab/postgrest_llm.conf +website: cd ../zed.dev; npm run dev -- --port=3000 diff --git a/assets/icons/attach.svg b/assets/icons/attach.svg new file mode 100644 index 0000000000..f923a3c7c8 --- /dev/null +++ b/assets/icons/attach.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/copy.svg b/assets/icons/copy.svg index bca13f8d56..aba193930b 100644 --- a/assets/icons/copy.svg +++ b/assets/icons/copy.svg @@ -1 +1,4 @@ - + + + + diff --git a/assets/icons/menu_alt.svg b/assets/icons/menu_alt.svg index 87add13216..b9cc19e22f 100644 --- a/assets/icons/menu_alt.svg +++ b/assets/icons/menu_alt.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/pencil_unavailable.svg b/assets/icons/pencil_unavailable.svg new file mode 100644 index 0000000000..4241d766ac --- /dev/null +++ b/assets/icons/pencil_unavailable.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/tool_think.svg b/assets/icons/tool_think.svg index efd5908a90..773f5e7fa7 100644 --- a/assets/icons/tool_think.svg +++ b/assets/icons/tool_think.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/zed_agent.svg b/assets/icons/zed_agent.svg index b6e120a0b6..0c80e22c51 100644 --- a/assets/icons/zed_agent.svg +++ b/assets/icons/zed_agent.svg @@ -1,27 +1,27 @@ - + - - + + - - + + - + - - - - - - - - + + + + + + + + - + - - + + diff --git a/assets/icons/zed_assistant.svg b/assets/icons/zed_assistant.svg index 470eb0fede..812277a100 100644 --- a/assets/icons/zed_assistant.svg +++ b/assets/icons/zed_assistant.svg @@ -1,5 +1,5 @@ - - + + diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index b4efa70572..e84f4834af 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -16,7 +16,6 @@ "up": "menu::SelectPrevious", "enter": "menu::Confirm", "ctrl-enter": "menu::SecondaryConfirm", - "ctrl-escape": "menu::Cancel", "ctrl-c": "menu::Cancel", "escape": "menu::Cancel", "alt-shift-enter": "menu::Restart", @@ -138,7 +137,7 @@ "find": "buffer_search::Deploy", "ctrl-f": "buffer_search::Deploy", "ctrl-h": "buffer_search::DeployReplace", - "ctrl->": "assistant::QuoteSelection", + "ctrl->": "agent::QuoteSelection", "ctrl-<": "assistant::InsertIntoEditor", "ctrl-alt-e": "editor::SelectEnclosingSymbol", "ctrl-shift-backspace": "editor::GoToPreviousChange", @@ -241,7 +240,7 @@ "ctrl-shift-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "ctrl->": "assistant::QuoteSelection", + "ctrl->": "agent::QuoteSelection", "ctrl-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", @@ -856,7 +855,7 @@ 
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": false }], "ctrl-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-ctrl-r": "project_panel::RevealInFileManager", - "ctrl-shift-enter": "project_panel::OpenWithSystem", + "ctrl-shift-enter": "workspace::OpenWithSystem", "alt-d": "project_panel::CompareMarkedFiles", "shift-find": "project_panel::NewSearchInDirectory", "ctrl-alt-shift-f": "project_panel::NewSearchInDirectory", @@ -1195,9 +1194,16 @@ "ctrl-1": "onboarding::ActivateBasicsPage", "ctrl-2": "onboarding::ActivateEditingPage", "ctrl-3": "onboarding::ActivateAISetupPage", - "ctrl-escape": "onboarding::Finish", - "alt-tab": "onboarding::SignIn", + "ctrl-enter": "onboarding::Finish", + "alt-shift-l": "onboarding::SignIn", "alt-shift-a": "onboarding::OpenAccount" } + }, + { + "context": "InvalidBuffer", + "use_key_equivalents": true, + "bindings": { + "ctrl-shift-enter": "workspace::OpenWithSystem" + } } ] diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index ad2ab2ba89..e72f4174ff 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -162,7 +162,7 @@ "cmd-alt-f": "buffer_search::DeployReplace", "cmd-alt-l": ["buffer_search::Deploy", { "selection_search_enabled": true }], "cmd-e": ["buffer_search::Deploy", { "focus": false }], - "cmd->": "assistant::QuoteSelection", + "cmd->": "agent::QuoteSelection", "cmd-<": "assistant::InsertIntoEditor", "cmd-alt-e": "editor::SelectEnclosingSymbol", "alt-enter": "editor::OpenSelectionsInMultibuffer" @@ -281,7 +281,7 @@ "cmd-shift-i": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", "shift-alt-escape": "agent::ExpandMessageEditor", - "cmd->": "assistant::QuoteSelection", + "cmd->": "agent::QuoteSelection", "cmd-alt-e": "agent::RemoveAllContext", "cmd-shift-e": "project_panel::ToggleFocus", "cmd-ctrl-b": "agent::ToggleBurnMode", @@ -915,7 +915,7 @@ "cmd-backspace": ["project_panel::Trash", { "skip_prompt": true }], "cmd-delete": ["project_panel::Delete", { "skip_prompt": false }], "alt-cmd-r": "project_panel::RevealInFileManager", - "ctrl-shift-enter": "project_panel::OpenWithSystem", + "ctrl-shift-enter": "workspace::OpenWithSystem", "alt-d": "project_panel::CompareMarkedFiles", "cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }], "cmd-alt-shift-f": "project_panel::NewSearchInDirectory", @@ -1301,5 +1301,12 @@ "alt-tab": "onboarding::SignIn", "alt-shift-a": "onboarding::OpenAccount" } + }, + { + "context": "InvalidBuffer", + "use_key_equivalents": true, + "bindings": { + "ctrl-shift-enter": "workspace::OpenWithSystem" + } } ] diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index 1c381b0cf0..2e27158e11 100644 --- a/assets/keymaps/linux/cursor.json +++ b/assets/keymaps/linux/cursor.json @@ -17,8 +17,8 @@ "bindings": { "ctrl-i": "agent::ToggleFocus", "ctrl-shift-i": "agent::ToggleFocus", - "ctrl-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode - "ctrl-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode + "ctrl-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode + "ctrl-l": "agent::QuoteSelection", // In cursor uses "Agent" mode "ctrl-k": "assistant::InlineAssist", "ctrl-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index fdf9c437cf..1d723bd75b 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -17,8 +17,8 @@ "bindings": { 
"cmd-i": "agent::ToggleFocus", "cmd-shift-i": "agent::ToggleFocus", - "cmd-shift-l": "assistant::QuoteSelection", // In cursor uses "Ask" mode - "cmd-l": "assistant::QuoteSelection", // In cursor uses "Agent" mode + "cmd-shift-l": "agent::QuoteSelection", // In cursor uses "Ask" mode + "cmd-l": "agent::QuoteSelection", // In cursor uses "Agent" mode "cmd-k": "assistant::InlineAssist", "cmd-shift-k": "assistant::InsertIntoEditor" } diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index be6d34a134..67add61bd3 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -428,11 +428,13 @@ "g h": "vim::StartOfLine", "g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s" "g e": "vim::EndOfDocument", + "g .": "vim::HelixGotoLastModification", // go to last modification "g r": "editor::FindAllReferences", // zed specific "g t": "vim::WindowTop", "g c": "vim::WindowMiddle", "g b": "vim::WindowBottom", + "shift-r": "editor::Paste", "x": "editor::SelectLine", "shift-x": "editor::SelectLine", "%": "editor::SelectAll", @@ -819,7 +821,7 @@ "v": "project_panel::OpenPermanent", "p": "project_panel::Open", "x": "project_panel::RevealInFileManager", - "s": "project_panel::OpenWithSystem", + "s": "workspace::OpenWithSystem", "z d": "project_panel::CompareMarkedFiles", "] c": "project_panel::SelectNextGitEntry", "[ c": "project_panel::SelectPrevGitEntry", diff --git a/assets/settings/default.json b/assets/settings/default.json index c290baf003..f0b9e11e57 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -162,6 +162,12 @@ // 2. Always quit the application // "on_last_window_closed": "quit_app", "on_last_window_closed": "platform_default", + // Whether to show padding for zoomed panels. + // When enabled, zoomed center panels (e.g. code editor) will have padding all around, + // while zoomed bottom/left/right panels will have padding to the top/right/left (respectively). + // + // Default: true + "zoomed_padding": true, // Whether to use the system provided dialogs for Open and Save As. // When set to false, Zed will use the built-in keyboard-first pickers. "use_system_path_prompts": true, @@ -1133,11 +1139,6 @@ // The minimum severity of the diagnostics to show inline. // Inherits editor's diagnostics' max severity settings when `null`. "max_severity": null - }, - "cargo": { - // When enabled, Zed disables rust-analyzer's check on save and starts to query - // Cargo diagnostics separately. - "fetch_cargo_diagnostics": false } }, // Files or globs of files that will be excluded by Zed entirely. They will be skipped during file @@ -1503,6 +1504,11 @@ // // Default: fallback "words": "fallback", + // Minimum number of characters required to automatically trigger word-based completions. + // Before that value, it's still possible to trigger the words-based completion manually with the corresponding editor command. + // + // Default: 3 + "words_min_length": 3, // Whether to fetch LSP completions or not. 
// // Default: true @@ -1629,6 +1635,9 @@ "allowed": true } }, + "Kotlin": { + "language_servers": ["kotlin-language-server", "!kotlin-lsp", "..."] + }, "LaTeX": { "formatter": "language_server", "language_servers": ["texlab", "..."], @@ -1642,9 +1651,6 @@ "use_on_type_format": false, "allow_rewrap": "anywhere", "soft_wrap": "editor_width", - "completions": { - "words": "disabled" - }, "prettier": { "allowed": true } @@ -1658,9 +1664,6 @@ } }, "Plain Text": { - "completions": { - "words": "disabled" - }, "allow_rewrap": "anywhere" }, "Python": { diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index f9f8720729..0ffbb9f61e 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -93,7 +93,7 @@ "terminal.ansi.bright_cyan": "#4c806fff", "terminal.ansi.dim_cyan": "#cbf2e4ff", "terminal.ansi.white": "#bfbdb6ff", - "terminal.ansi.bright_white": "#bfbdb6ff", + "terminal.ansi.bright_white": "#fafafaff", "terminal.ansi.dim_white": "#787876ff", "link_text.hover": "#5ac1feff", "conflict": "#feb454ff", @@ -479,7 +479,7 @@ "terminal.ansi.bright_cyan": "#ace0cbff", "terminal.ansi.dim_cyan": "#2a5f4aff", "terminal.ansi.white": "#fcfcfcff", - "terminal.ansi.bright_white": "#fcfcfcff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#bcbec0ff", "link_text.hover": "#3b9ee5ff", "conflict": "#f1ad49ff", @@ -865,7 +865,7 @@ "terminal.ansi.bright_cyan": "#4c806fff", "terminal.ansi.dim_cyan": "#cbf2e4ff", "terminal.ansi.white": "#cccac2ff", - "terminal.ansi.bright_white": "#cccac2ff", + "terminal.ansi.bright_white": "#fafafaff", "terminal.ansi.dim_white": "#898a8aff", "link_text.hover": "#72cffeff", "conflict": "#fecf72ff", diff --git a/assets/themes/gruvbox/gruvbox.json b/assets/themes/gruvbox/gruvbox.json index 459825c733..f0f0358b76 100644 --- a/assets/themes/gruvbox/gruvbox.json +++ b/assets/themes/gruvbox/gruvbox.json @@ -94,7 +94,7 @@ "terminal.ansi.bright_cyan": "#45603eff", "terminal.ansi.dim_cyan": "#c7dfbdff", "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#83a598ff", "version_control.added": "#b7bb26ff", @@ -494,7 +494,7 @@ "terminal.ansi.bright_cyan": "#45603eff", "terminal.ansi.dim_cyan": "#c7dfbdff", "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#83a598ff", "version_control.added": "#b7bb26ff", @@ -894,7 +894,7 @@ "terminal.ansi.bright_cyan": "#45603eff", "terminal.ansi.dim_cyan": "#c7dfbdff", "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#83a598ff", "version_control.added": "#b7bb26ff", @@ -1294,7 +1294,7 @@ "terminal.ansi.bright_cyan": "#9fbca8ff", "terminal.ansi.dim_cyan": "#253e2eff", "terminal.ansi.white": "#fbf1c7ff", - "terminal.ansi.bright_white": "#fbf1c7ff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#0b6678ff", "version_control.added": "#797410ff", @@ -1694,7 +1694,7 @@ "terminal.ansi.bright_cyan": "#9fbca8ff", "terminal.ansi.dim_cyan": "#253e2eff", "terminal.ansi.white": "#f9f5d7ff", - "terminal.ansi.bright_white": "#f9f5d7ff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#0b6678ff", 
"version_control.added": "#797410ff", @@ -2094,7 +2094,7 @@ "terminal.ansi.bright_cyan": "#9fbca8ff", "terminal.ansi.dim_cyan": "#253e2eff", "terminal.ansi.white": "#f2e5bcff", - "terminal.ansi.bright_white": "#f2e5bcff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#b0a189ff", "link_text.hover": "#0b6678ff", "version_control.added": "#797410ff", diff --git a/assets/themes/one/one.json b/assets/themes/one/one.json index 23ebbcc67e..33f6d3c622 100644 --- a/assets/themes/one/one.json +++ b/assets/themes/one/one.json @@ -93,7 +93,7 @@ "terminal.ansi.bright_cyan": "#3a565bff", "terminal.ansi.dim_cyan": "#b9d9dfff", "terminal.ansi.white": "#dce0e5ff", - "terminal.ansi.bright_white": "#dce0e5ff", + "terminal.ansi.bright_white": "#fafafaff", "terminal.ansi.dim_white": "#575d65ff", "link_text.hover": "#74ade8ff", "version_control.added": "#27a657ff", @@ -468,7 +468,7 @@ "terminal.bright_foreground": "#242529ff", "terminal.dim_foreground": "#fafafaff", "terminal.ansi.black": "#242529ff", - "terminal.ansi.bright_black": "#242529ff", + "terminal.ansi.bright_black": "#747579ff", "terminal.ansi.dim_black": "#97979aff", "terminal.ansi.red": "#d36151ff", "terminal.ansi.bright_red": "#f0b0a4ff", @@ -489,7 +489,7 @@ "terminal.ansi.bright_cyan": "#a3bedaff", "terminal.ansi.dim_cyan": "#254058ff", "terminal.ansi.white": "#fafafaff", - "terminal.ansi.bright_white": "#fafafaff", + "terminal.ansi.bright_white": "#ffffffff", "terminal.ansi.dim_white": "#aaaaaaff", "link_text.hover": "#5c78e2ff", "version_control.added": "#27a657ff", diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index a1f9b32eba..4ded647a74 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -183,16 +183,15 @@ impl ToolCall { language_registry: Arc, cx: &mut App, ) -> Self { + let title = if let Some((first_line, _)) = tool_call.title.split_once("\n") { + first_line.to_owned() + "…" + } else { + tool_call.title + }; Self { id: tool_call.id, - label: cx.new(|cx| { - Markdown::new( - tool_call.title.into(), - Some(language_registry.clone()), - None, - cx, - ) - }), + label: cx + .new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)), kind: tool_call.kind, content: tool_call .content @@ -233,15 +232,30 @@ impl ToolCall { if let Some(title) = title { self.label.update(cx, |label, cx| { - label.replace(title, cx); + if let Some((first_line, _)) = title.split_once("\n") { + label.replace(first_line.to_owned() + "…", cx) + } else { + label.replace(title, cx); + } }); } if let Some(content) = content { - self.content = content - .into_iter() - .map(|chunk| ToolCallContent::from_acp(chunk, language_registry.clone(), cx)) - .collect(); + let new_content_len = content.len(); + let mut content = content.into_iter(); + + // Reuse existing content if we can + for (old, new) in self.content.iter_mut().zip(content.by_ref()) { + old.update_from_acp(new, language_registry.clone(), cx); + } + for new in content { + self.content.push(ToolCallContent::from_acp( + new, + language_registry.clone(), + cx, + )) + } + self.content.truncate(new_content_len); } if let Some(locations) = locations { @@ -498,7 +512,7 @@ impl ContentBlock { "`Image`".into() } - fn to_markdown<'a>(&'a self, cx: &'a App) -> &'a str { + pub fn to_markdown<'a>(&'a self, cx: &'a App) -> &'a str { match self { ContentBlock::Empty => "", ContentBlock::Markdown { markdown } => markdown.read(cx).source(), @@ -551,6 +565,28 @@ impl ToolCallContent { } } + pub fn 
update_from_acp( + &mut self, + new: acp::ToolCallContent, + language_registry: Arc, + cx: &mut App, + ) { + let needs_update = match (&self, &new) { + (Self::Diff(old_diff), acp::ToolCallContent::Diff { diff: new_diff }) => { + old_diff.read(cx).needs_update( + new_diff.old_text.as_deref().unwrap_or(""), + &new_diff.new_text, + cx, + ) + } + _ => true, + }; + + if needs_update { + *self = Self::from_acp(new, language_registry, cx); + } + } + pub fn to_markdown(&self, cx: &App) -> String { match self { Self::ContentBlock(content) => content.to_markdown(cx).to_string(), @@ -723,6 +759,8 @@ pub struct AcpThread { connection: Rc, session_id: acp::SessionId, token_usage: Option, + prompt_capabilities: acp::PromptCapabilities, + _observe_prompt_capabilities: Task>, } #[derive(Debug)] @@ -737,11 +775,12 @@ pub enum AcpThreadEvent { Stopped, Error, LoadError(LoadError), + PromptCapabilitiesUpdated, } impl EventEmitter for AcpThread {} -#[derive(PartialEq, Eq)] +#[derive(PartialEq, Eq, Debug)] pub enum ThreadStatus { Idle, WaitingForToolConfirmation, @@ -788,7 +827,20 @@ impl AcpThread { project: Entity, action_log: Entity, session_id: acp::SessionId, + mut prompt_capabilities_rx: watch::Receiver, + cx: &mut Context, ) -> Self { + let prompt_capabilities = *prompt_capabilities_rx.borrow(); + let task = cx.spawn::<_, anyhow::Result<()>>(async move |this, cx| { + loop { + let caps = prompt_capabilities_rx.recv().await?; + this.update(cx, |this, cx| { + this.prompt_capabilities = caps; + cx.emit(AcpThreadEvent::PromptCapabilitiesUpdated); + })?; + } + }); + Self { action_log, shared_buffers: Default::default(), @@ -800,9 +852,15 @@ impl AcpThread { connection, session_id, token_usage: None, + prompt_capabilities, + _observe_prompt_capabilities: task, } } + pub fn prompt_capabilities(&self) -> acp::PromptCapabilities { + self.prompt_capabilities + } + pub fn connection(&self) -> &Rc { &self.connection } @@ -987,10 +1045,19 @@ impl AcpThread { cx.emit(AcpThreadEvent::NewEntry); } - pub fn update_title(&mut self, title: SharedString, cx: &mut Context) -> Result<()> { - self.title = title; - cx.emit(AcpThreadEvent::TitleUpdated); - Ok(()) + pub fn can_set_title(&mut self, cx: &mut Context) -> bool { + self.connection.set_title(&self.session_id, cx).is_some() + } + + pub fn set_title(&mut self, title: SharedString, cx: &mut Context) -> Task> { + if title != self.title { + self.title = title.clone(); + cx.emit(AcpThreadEvent::TitleUpdated); + if let Some(set_title) = self.connection.set_title(&self.session_id, cx) { + return set_title.run(title, cx); + } + } + Task::ready(Ok(())) } pub fn update_token_usage(&mut self, usage: Option, cx: &mut Context) { @@ -1293,11 +1360,7 @@ impl AcpThread { }; let git_store = self.project.read(cx).git_store().clone(); - let message_id = if self - .connection - .session_editor(&self.session_id, cx) - .is_some() - { + let message_id = if self.connection.truncate(&self.session_id, cx).is_some() { Some(UserMessageId::new()) } else { None @@ -1335,6 +1398,10 @@ impl AcpThread { }) } + pub fn can_resume(&self, cx: &App) -> bool { + self.connection.resume(&self.session_id, cx).is_some() + } + pub fn resume(&mut self, cx: &mut Context) -> BoxFuture<'static, Result<()>> { self.run_turn(cx, async move |this, cx| { this.update(cx, |this, cx| { @@ -1381,7 +1448,7 @@ impl AcpThread { let canceled = matches!( result, Ok(Ok(acp::PromptResponse { - stop_reason: acp::StopReason::Canceled + stop_reason: acp::StopReason::Cancelled })) ); @@ -1443,7 +1510,7 @@ impl AcpThread { /// Rewinds 
this thread to before the entry at `index`, removing it and all /// subsequent entries while reverting any changes made from that point. pub fn rewind(&mut self, id: UserMessageId, cx: &mut Context) -> Task> { - let Some(session_editor) = self.connection.session_editor(&self.session_id, cx) else { + let Some(truncate) = self.connection.truncate(&self.session_id, cx) else { return Task::ready(Err(anyhow!("not supported"))); }; let Some(message) = self.user_message(&id) else { @@ -1463,8 +1530,7 @@ impl AcpThread { .await?; } - cx.update(|cx| session_editor.truncate(id.clone(), cx))? - .await?; + cx.update(|cx| truncate.run(id.clone(), cx))?.await?; this.update(cx, |this, cx| { if let Some((ix, _)) = this.user_message_mut(&id) { let range = ix..this.entries.len(); @@ -2558,13 +2624,19 @@ mod tests { .into(), ); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|_cx| { + let thread = cx.new(|cx| { AcpThread::new( "Test", self.clone(), project, action_log, session_id.clone(), + watch::Receiver::constant(acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }), + cx, ) }); self.sessions.lock().insert(session_id, thread.downgrade()); @@ -2611,11 +2683,11 @@ mod tests { .detach(); } - fn session_editor( + fn truncate( &self, session_id: &acp::SessionId, - _cx: &mut App, - ) -> Option> { + _cx: &App, + ) -> Option> { Some(Rc::new(FakeAgentSessionEditor { _session_id: session_id.clone(), })) @@ -2630,8 +2702,8 @@ mod tests { _session_id: acp::SessionId, } - impl AgentSessionEditor for FakeAgentSessionEditor { - fn truncate(&self, _message_id: UserMessageId, _cx: &mut App) -> Task> { + impl AgentSessionTruncate for FakeAgentSessionEditor { + fn run(&self, _message_id: UserMessageId, _cx: &mut App) -> Task> { Task::ready(Ok(())) } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index dc1a41c81e..af229b7545 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -41,18 +41,26 @@ pub trait AgentConnection { fn resume( &self, _session_id: &acp::SessionId, - _cx: &mut App, + _cx: &App, ) -> Option> { None } fn cancel(&self, session_id: &acp::SessionId, cx: &mut App); - fn session_editor( + fn truncate( &self, _session_id: &acp::SessionId, - _cx: &mut App, - ) -> Option> { + _cx: &App, + ) -> Option> { + None + } + + fn set_title( + &self, + _session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { None } @@ -77,14 +85,18 @@ impl dyn AgentConnection { } } -pub trait AgentSessionEditor { - fn truncate(&self, message_id: UserMessageId, cx: &mut App) -> Task>; +pub trait AgentSessionTruncate { + fn run(&self, message_id: UserMessageId, cx: &mut App) -> Task>; } pub trait AgentSessionResume { fn run(&self, cx: &mut App) -> Task>; } +pub trait AgentSessionSetTitle { + fn run(&self, title: SharedString, cx: &mut App) -> Task>; +} + pub trait AgentTelemetry { /// The name of the agent used for telemetry. 
fn agent_name(&self) -> String; @@ -315,13 +327,19 @@ mod test_support { ) -> Task>> { let session_id = acp::SessionId(self.sessions.lock().len().to_string().into()); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|_cx| { + let thread = cx.new(|cx| { AcpThread::new( "Test", self.clone(), project, action_log, session_id.clone(), + watch::Receiver::constant(acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }), + cx, ) }); self.sessions.lock().insert( @@ -410,15 +428,15 @@ mod test_support { .response_tx .take() { - end_turn_tx.send(acp::StopReason::Canceled).unwrap(); + end_turn_tx.send(acp::StopReason::Cancelled).unwrap(); } } - fn session_editor( + fn truncate( &self, _session_id: &agent_client_protocol::SessionId, - _cx: &mut App, - ) -> Option> { + _cx: &App, + ) -> Option> { Some(Rc::new(StubAgentSessionEditor)) } @@ -429,8 +447,8 @@ mod test_support { struct StubAgentSessionEditor; - impl AgentSessionEditor for StubAgentSessionEditor { - fn truncate(&self, _: UserMessageId, _: &mut App) -> Task> { + impl AgentSessionTruncate for StubAgentSessionEditor { + fn run(&self, _: UserMessageId, _: &mut App) -> Task> { Task::ready(Ok(())) } } diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 70367e340a..0fec6809e0 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -28,57 +28,46 @@ impl Diff { cx: &mut Context, ) -> Self { let multibuffer = cx.new(|_cx| MultiBuffer::without_headers(Capability::ReadOnly)); - let new_buffer = cx.new(|cx| Buffer::local(new_text, cx)); - let old_buffer = cx.new(|cx| Buffer::local(old_text.unwrap_or("".into()), cx)); - let new_buffer_snapshot = new_buffer.read(cx).text_snapshot(); - let buffer_diff = cx.new(|cx| BufferDiff::new(&new_buffer_snapshot, cx)); - + let base_text = old_text.clone().unwrap_or(String::new()).into(); let task = cx.spawn({ let multibuffer = multibuffer.clone(); let path = path.clone(); + let buffer = new_buffer.clone(); async move |_, cx| { let language = language_registry .language_for_file_path(&path) .await .log_err(); - new_buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?; + buffer.update(cx, |buffer, cx| buffer.set_language(language.clone(), cx))?; - let old_buffer_snapshot = old_buffer.update(cx, |buffer, cx| { - buffer.set_language(language, cx); - buffer.snapshot() - })?; - - buffer_diff - .update(cx, |diff, cx| { - diff.set_base_text( - old_buffer_snapshot, - Some(language_registry), - new_buffer_snapshot, - cx, - ) - })? 
- .await?; + let diff = build_buffer_diff( + old_text.unwrap_or("".into()).into(), + &buffer, + Some(language_registry.clone()), + cx, + ) + .await?; multibuffer .update(cx, |multibuffer, cx| { let hunk_ranges = { - let buffer = new_buffer.read(cx); - let diff = buffer_diff.read(cx); + let buffer = buffer.read(cx); + let diff = diff.read(cx); diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) .map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer)) .collect::>() }; multibuffer.set_excerpts_for_path( - PathKey::for_buffer(&new_buffer, cx), - new_buffer.clone(), + PathKey::for_buffer(&buffer, cx), + buffer.clone(), hunk_ranges, editor::DEFAULT_MULTIBUFFER_CONTEXT, cx, ); - multibuffer.add_diff(buffer_diff, cx); + multibuffer.add_diff(diff, cx); }) .log_err(); @@ -89,23 +78,26 @@ impl Diff { Self::Finalized(FinalizedDiff { multibuffer, path, + base_text, + new_buffer, _update_diff: task, }) } pub fn new(buffer: Entity, cx: &mut Context) -> Self { - let buffer_snapshot = buffer.read(cx).snapshot(); - let base_text = buffer_snapshot.text(); - let language_registry = buffer.read(cx).language_registry(); - let text_snapshot = buffer.read(cx).text_snapshot(); + let buffer_text_snapshot = buffer.read(cx).text_snapshot(); + let base_text_snapshot = buffer.read(cx).snapshot(); + let base_text = base_text_snapshot.text(); + debug_assert_eq!(buffer_text_snapshot.text(), base_text); let buffer_diff = cx.new(|cx| { - let mut diff = BufferDiff::new(&text_snapshot, cx); - let _ = diff.set_base_text( - buffer_snapshot.clone(), - language_registry, - text_snapshot, - cx, - ); + let mut diff = BufferDiff::new_unchanged(&buffer_text_snapshot, base_text_snapshot); + let snapshot = diff.snapshot(cx); + let secondary_diff = cx.new(|cx| { + let mut diff = BufferDiff::new(&buffer_text_snapshot, cx); + diff.set_snapshot(snapshot, &buffer_text_snapshot, cx); + diff + }); + diff.set_secondary_diff(secondary_diff); diff }); @@ -123,7 +115,7 @@ impl Diff { diff.update(cx); } }), - buffer, + new_buffer: buffer, diff: buffer_diff, revealed_ranges: Vec::new(), update_diff: Task::ready(Ok(())), @@ -158,9 +150,9 @@ impl Diff { .map(|buffer| buffer.read(cx).text()) .join("\n"); let path = match self { - Diff::Pending(PendingDiff { buffer, .. }) => { - buffer.read(cx).file().map(|file| file.path().as_ref()) - } + Diff::Pending(PendingDiff { + new_buffer: buffer, .. + }) => buffer.read(cx).file().map(|file| file.path().as_ref()), Diff::Finalized(FinalizedDiff { path, .. }) => Some(path.as_path()), }; format!( @@ -173,12 +165,33 @@ impl Diff { pub fn has_revealed_range(&self, cx: &App) -> bool { self.multibuffer().read(cx).excerpt_paths().next().is_some() } + + pub fn needs_update(&self, old_text: &str, new_text: &str, cx: &App) -> bool { + match self { + Diff::Pending(PendingDiff { + base_text, + new_buffer, + .. + }) => { + base_text.as_str() != old_text + || !new_buffer.read(cx).as_rope().chunks().equals_str(new_text) + } + Diff::Finalized(FinalizedDiff { + base_text, + new_buffer, + .. 
+ }) => { + base_text.as_str() != old_text + || !new_buffer.read(cx).as_rope().chunks().equals_str(new_text) + } + } + } } pub struct PendingDiff { multibuffer: Entity, base_text: Arc, - buffer: Entity, + new_buffer: Entity, diff: Entity, revealed_ranges: Vec>, _subscription: Subscription, @@ -187,7 +200,7 @@ pub struct PendingDiff { impl PendingDiff { pub fn update(&mut self, cx: &mut Context) { - let buffer = self.buffer.clone(); + let buffer = self.new_buffer.clone(); let buffer_diff = self.diff.clone(); let base_text = self.base_text.clone(); self.update_diff = cx.spawn(async move |diff, cx| { @@ -204,7 +217,10 @@ impl PendingDiff { ) .await?; buffer_diff.update(cx, |diff, cx| { - diff.set_snapshot(diff_snapshot, &text_snapshot, cx) + diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx); + diff.secondary_diff().unwrap().update(cx, |diff, cx| { + diff.set_snapshot(diff_snapshot.clone(), &text_snapshot, cx); + }); })?; diff.update(cx, |diff, cx| { if let Diff::Pending(diff) = diff { @@ -222,10 +238,10 @@ impl PendingDiff { fn finalize(&self, cx: &mut Context) -> FinalizedDiff { let ranges = self.excerpt_ranges(cx); let base_text = self.base_text.clone(); - let language_registry = self.buffer.read(cx).language_registry(); + let language_registry = self.new_buffer.read(cx).language_registry(); let path = self - .buffer + .new_buffer .read(cx) .file() .map(|file| file.path().as_ref()) @@ -234,12 +250,12 @@ impl PendingDiff { // Replace the buffer in the multibuffer with the snapshot let buffer = cx.new(|cx| { - let language = self.buffer.read(cx).language().cloned(); + let language = self.new_buffer.read(cx).language().cloned(); let buffer = TextBuffer::new_normalized( 0, cx.entity_id().as_non_zero_u64().into(), - self.buffer.read(cx).line_ending(), - self.buffer.read(cx).as_rope().clone(), + self.new_buffer.read(cx).line_ending(), + self.new_buffer.read(cx).as_rope().clone(), ); let mut buffer = Buffer::build(buffer, None, Capability::ReadWrite); buffer.set_language(language, cx); @@ -275,7 +291,9 @@ impl PendingDiff { FinalizedDiff { path, + base_text: self.base_text.clone(), multibuffer: self.multibuffer.clone(), + new_buffer: self.new_buffer.clone(), _update_diff: update_diff, } } @@ -284,8 +302,8 @@ impl PendingDiff { let ranges = self.excerpt_ranges(cx); self.multibuffer.update(cx, |multibuffer, cx| { multibuffer.set_excerpts_for_path( - PathKey::for_buffer(&self.buffer, cx), - self.buffer.clone(), + PathKey::for_buffer(&self.new_buffer, cx), + self.new_buffer.clone(), ranges, editor::DEFAULT_MULTIBUFFER_CONTEXT, cx, @@ -297,7 +315,7 @@ impl PendingDiff { } fn excerpt_ranges(&self, cx: &App) -> Vec> { - let buffer = self.buffer.read(cx); + let buffer = self.new_buffer.read(cx); let diff = self.diff.read(cx); let mut ranges = diff .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx) @@ -331,6 +349,8 @@ impl PendingDiff { pub struct FinalizedDiff { path: PathBuf, + base_text: Arc, + new_buffer: Entity, multibuffer: Entity, _update_diff: Task>, } @@ -384,3 +404,21 @@ async fn build_buffer_diff( diff }) } + +#[cfg(test)] +mod tests { + use gpui::{AppContext as _, TestAppContext}; + use language::Buffer; + + use crate::Diff; + + #[gpui::test] + async fn test_pending_diff(cx: &mut TestAppContext) { + let buffer = cx.new(|cx| Buffer::local("hello!", cx)); + let _diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| { + buffer.set_text("HELLO!", cx); + }); + cx.run_until_parked(); + } +} diff --git a/crates/acp_thread/src/mention.rs 
b/crates/acp_thread/src/mention.rs index a1e713cffa..6fa0887e22 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -5,7 +5,7 @@ use prompt_store::{PromptId, UserPromptId}; use serde::{Deserialize, Serialize}; use std::{ fmt, - ops::Range, + ops::RangeInclusive, path::{Path, PathBuf}, str::FromStr, }; @@ -17,13 +17,14 @@ pub enum MentionUri { File { abs_path: PathBuf, }, + PastedImage, Directory { abs_path: PathBuf, }, Symbol { - path: PathBuf, + abs_path: PathBuf, name: String, - line_range: Range, + line_range: RangeInclusive, }, Thread { id: acp::SessionId, @@ -38,8 +39,9 @@ pub enum MentionUri { name: String, }, Selection { - path: PathBuf, - line_range: Range, + #[serde(default, skip_serializing_if = "Option::is_none")] + abs_path: Option, + line_range: RangeInclusive, }, Fetch { url: Url, @@ -48,36 +50,44 @@ pub enum MentionUri { impl MentionUri { pub fn parse(input: &str) -> Result { + fn parse_line_range(fragment: &str) -> Result> { + let range = fragment + .strip_prefix("L") + .context("Line range must start with \"L\"")?; + let (start, end) = range + .split_once(":") + .context("Line range must use colon as separator")?; + let range = start + .parse::() + .context("Parsing line range start")? + .checked_sub(1) + .context("Line numbers should be 1-based")? + ..=end + .parse::() + .context("Parsing line range end")? + .checked_sub(1) + .context("Line numbers should be 1-based")?; + Ok(range) + } + let url = url::Url::parse(input)?; let path = url.path(); match url.scheme() { "file" => { let path = url.to_file_path().ok().context("Extracting file path")?; if let Some(fragment) = url.fragment() { - let range = fragment - .strip_prefix("L") - .context("Line range must start with \"L\"")?; - let (start, end) = range - .split_once(":") - .context("Line range must use colon as separator")?; - let line_range = start - .parse::() - .context("Parsing line range start")? - .checked_sub(1) - .context("Line numbers should be 1-based")? - ..end - .parse::() - .context("Parsing line range end")? - .checked_sub(1) - .context("Line numbers should be 1-based")?; + let line_range = parse_line_range(fragment)?; if let Some(name) = single_query_param(&url, "symbol")? { Ok(Self::Symbol { name, - path, + abs_path: path, line_range, }) } else { - Ok(Self::Selection { path, line_range }) + Ok(Self::Selection { + abs_path: Some(path), + line_range, + }) } } else if input.ends_with("/") { Ok(Self::Directory { abs_path: path }) @@ -105,6 +115,17 @@ impl MentionUri { id: rule_id.into(), name, }) + } else if path.starts_with("/agent/pasted-image") { + Ok(Self::PastedImage) + } else if path.starts_with("/agent/untitled-buffer") { + let fragment = url + .fragment() + .context("Missing fragment for untitled buffer selection")?; + let line_range = parse_line_range(fragment)?; + Ok(Self::Selection { + abs_path: None, + line_range, + }) } else { bail!("invalid zed url: {:?}", input); } @@ -121,13 +142,16 @@ impl MentionUri { .unwrap_or_default() .to_string_lossy() .into_owned(), + MentionUri::PastedImage => "Image".to_string(), MentionUri::Symbol { name, .. } => name.clone(), MentionUri::Thread { name, .. } => name.clone(), MentionUri::TextThread { name, .. } => name.clone(), MentionUri::Rule { name, .. } => name.clone(), MentionUri::Selection { - path, line_range, .. - } => selection_name(path, line_range), + abs_path: path, + line_range, + .. 
+ } => selection_name(path.as_deref(), line_range), MentionUri::Fetch { url } => url.to_string(), } } @@ -137,6 +161,7 @@ impl MentionUri { MentionUri::File { abs_path } => { FileIcons::get_icon(abs_path, cx).unwrap_or_else(|| IconName::File.path().into()) } + MentionUri::PastedImage => IconName::Image.path().into(), MentionUri::Directory { .. } => FileIcons::get_folder_icon(false, cx) .unwrap_or_else(|| IconName::Folder.path().into()), MentionUri::Symbol { .. } => IconName::Code.path().into(), @@ -157,29 +182,40 @@ impl MentionUri { MentionUri::File { abs_path } => { Url::from_file_path(abs_path).expect("mention path should be absolute") } + MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(), MentionUri::Directory { abs_path } => { Url::from_directory_path(abs_path).expect("mention path should be absolute") } MentionUri::Symbol { - path, + abs_path, name, line_range, } => { - let mut url = Url::from_file_path(path).expect("mention path should be absolute"); + let mut url = + Url::from_file_path(abs_path).expect("mention path should be absolute"); url.query_pairs_mut().append_pair("symbol", name); url.set_fragment(Some(&format!( "L{}:{}", - line_range.start + 1, - line_range.end + 1 + line_range.start() + 1, + line_range.end() + 1 ))); url } - MentionUri::Selection { path, line_range } => { - let mut url = Url::from_file_path(path).expect("mention path should be absolute"); + MentionUri::Selection { + abs_path: path, + line_range, + } => { + let mut url = if let Some(path) = path { + Url::from_file_path(path).expect("mention path should be absolute") + } else { + let mut url = Url::parse("zed:///").unwrap(); + url.set_path("/agent/untitled-buffer"); + url + }; url.set_fragment(Some(&format!( "L{}:{}", - line_range.start + 1, - line_range.end + 1 + line_range.start() + 1, + line_range.end() + 1 ))); url } @@ -191,7 +227,10 @@ impl MentionUri { } MentionUri::TextThread { path, name } => { let mut url = Url::parse("zed:///").unwrap(); - url.set_path(&format!("/agent/text-thread/{}", path.to_string_lossy())); + url.set_path(&format!( + "/agent/text-thread/{}", + path.to_string_lossy().trim_start_matches('/') + )); url.query_pairs_mut().append_pair("name", name); url } @@ -237,12 +276,14 @@ fn single_query_param(url: &Url, name: &'static str) -> Result> { } } -pub fn selection_name(path: &Path, line_range: &Range) -> String { +pub fn selection_name(path: Option<&Path>, line_range: &RangeInclusive) -> String { format!( "{} ({}:{})", - path.file_name().unwrap_or_default().display(), - line_range.start + 1, - line_range.end + 1 + path.and_then(|path| path.file_name()) + .unwrap_or("Untitled".as_ref()) + .display(), + *line_range.start() + 1, + *line_range.end() + 1 ) } @@ -302,14 +343,14 @@ mod tests { let parsed = MentionUri::parse(symbol_uri).unwrap(); match &parsed { MentionUri::Symbol { - path, + abs_path: path, name, line_range, } => { assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs")); assert_eq!(name, "MySymbol"); - assert_eq!(line_range.start, 9); - assert_eq!(line_range.end, 19); + assert_eq!(line_range.start(), &9); + assert_eq!(line_range.end(), &19); } _ => panic!("Expected Symbol variant"), } @@ -321,16 +362,39 @@ mod tests { let selection_uri = uri!("file:///path/to/file.rs#L5:15"); let parsed = MentionUri::parse(selection_uri).unwrap(); match &parsed { - MentionUri::Selection { path, line_range } => { - assert_eq!(path.to_str().unwrap(), path!("/path/to/file.rs")); - assert_eq!(line_range.start, 4); - assert_eq!(line_range.end, 14); + 
MentionUri::Selection { + abs_path: path, + line_range, + } => { + assert_eq!( + path.as_ref().unwrap().to_str().unwrap(), + path!("/path/to/file.rs") + ); + assert_eq!(line_range.start(), &4); + assert_eq!(line_range.end(), &14); } _ => panic!("Expected Selection variant"), } assert_eq!(parsed.to_uri().to_string(), selection_uri); } + #[test] + fn test_parse_untitled_selection_uri() { + let selection_uri = uri!("zed:///agent/untitled-buffer#L1:10"); + let parsed = MentionUri::parse(selection_uri).unwrap(); + match &parsed { + MentionUri::Selection { + abs_path: None, + line_range, + } => { + assert_eq!(line_range.start(), &0); + assert_eq!(line_range.end(), &9); + } + _ => panic!("Expected Selection variant without path"), + } + assert_eq!(parsed.to_uri().to_string(), selection_uri); + } + #[test] fn test_parse_thread_uri() { let thread_uri = "zed:///agent/thread/session123?name=Thread+name"; diff --git a/crates/acp_tools/Cargo.toml b/crates/acp_tools/Cargo.toml new file mode 100644 index 0000000000..7a6d8c21a0 --- /dev/null +++ b/crates/acp_tools/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "acp_tools" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + + +[lints] +workspace = true + +[lib] +path = "src/acp_tools.rs" +doctest = false + +[dependencies] +agent-client-protocol.workspace = true +collections.workspace = true +gpui.workspace = true +language.workspace= true +markdown.workspace = true +project.workspace = true +serde.workspace = true +serde_json.workspace = true +settings.workspace = true +theme.workspace = true +ui.workspace = true +util.workspace = true +workspace-hack.workspace = true +workspace.workspace = true diff --git a/crates/acp_tools/LICENSE-GPL b/crates/acp_tools/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/acp_tools/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs new file mode 100644 index 0000000000..e20a040e9d --- /dev/null +++ b/crates/acp_tools/src/acp_tools.rs @@ -0,0 +1,494 @@ +use std::{ + cell::RefCell, + collections::HashSet, + fmt::Display, + rc::{Rc, Weak}, + sync::Arc, +}; + +use agent_client_protocol as acp; +use collections::HashMap; +use gpui::{ + App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState, + StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*, +}; +use language::LanguageRegistry; +use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; +use project::Project; +use settings::Settings; +use theme::ThemeSettings; +use ui::prelude::*; +use util::ResultExt as _; +use workspace::{Item, Workspace}; + +actions!(dev, [OpenAcpLogs]); + +pub fn init(cx: &mut App) { + cx.observe_new( + |workspace: &mut Workspace, _window, _cx: &mut Context| { + workspace.register_action(|workspace, _: &OpenAcpLogs, window, cx| { + let acp_tools = + Box::new(cx.new(|cx| AcpTools::new(workspace.project().clone(), cx))); + workspace.add_item_to_active_pane(acp_tools, None, true, window, cx); + }); + }, + ) + .detach(); +} + +struct GlobalAcpConnectionRegistry(Entity); + +impl Global for GlobalAcpConnectionRegistry {} + +#[derive(Default)] +pub struct AcpConnectionRegistry { + active_connection: RefCell>, +} + +struct ActiveConnection { + server_name: SharedString, + connection: Weak, +} + +impl AcpConnectionRegistry { + pub fn default_global(cx: &mut App) -> Entity { + if 
cx.has_global::() { + cx.global::().0.clone() + } else { + let registry = cx.new(|_cx| AcpConnectionRegistry::default()); + cx.set_global(GlobalAcpConnectionRegistry(registry.clone())); + registry + } + } + + pub fn set_active_connection( + &self, + server_name: impl Into, + connection: &Rc, + cx: &mut Context, + ) { + self.active_connection.replace(Some(ActiveConnection { + server_name: server_name.into(), + connection: Rc::downgrade(connection), + })); + cx.notify(); + } +} + +struct AcpTools { + project: Entity, + focus_handle: FocusHandle, + expanded: HashSet, + watched_connection: Option, + connection_registry: Entity, + _subscription: Subscription, +} + +struct WatchedConnection { + server_name: SharedString, + messages: Vec, + list_state: ListState, + connection: Weak, + incoming_request_methods: HashMap>, + outgoing_request_methods: HashMap>, + _task: Task<()>, +} + +impl AcpTools { + fn new(project: Entity, cx: &mut Context) -> Self { + let connection_registry = AcpConnectionRegistry::default_global(cx); + + let subscription = cx.observe(&connection_registry, |this, _, cx| { + this.update_connection(cx); + cx.notify(); + }); + + let mut this = Self { + project, + focus_handle: cx.focus_handle(), + expanded: HashSet::default(), + watched_connection: None, + connection_registry, + _subscription: subscription, + }; + this.update_connection(cx); + this + } + + fn update_connection(&mut self, cx: &mut Context) { + let active_connection = self.connection_registry.read(cx).active_connection.borrow(); + let Some(active_connection) = active_connection.as_ref() else { + return; + }; + + if let Some(watched_connection) = self.watched_connection.as_ref() { + if Weak::ptr_eq( + &watched_connection.connection, + &active_connection.connection, + ) { + return; + } + } + + if let Some(connection) = active_connection.connection.upgrade() { + let mut receiver = connection.subscribe(); + let task = cx.spawn(async move |this, cx| { + while let Ok(message) = receiver.recv().await { + this.update(cx, |this, cx| { + this.push_stream_message(message, cx); + }) + .ok(); + } + }); + + self.watched_connection = Some(WatchedConnection { + server_name: active_connection.server_name.clone(), + messages: vec![], + list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)), + connection: active_connection.connection.clone(), + incoming_request_methods: HashMap::default(), + outgoing_request_methods: HashMap::default(), + _task: task, + }); + } + } + + fn push_stream_message(&mut self, stream_message: acp::StreamMessage, cx: &mut Context) { + let Some(connection) = self.watched_connection.as_mut() else { + return; + }; + let language_registry = self.project.read(cx).languages().clone(); + let index = connection.messages.len(); + + let (request_id, method, message_type, params) = match stream_message.message { + acp::StreamMessageContent::Request { id, method, params } => { + let method_map = match stream_message.direction { + acp::StreamMessageDirection::Incoming => { + &mut connection.incoming_request_methods + } + acp::StreamMessageDirection::Outgoing => { + &mut connection.outgoing_request_methods + } + }; + + method_map.insert(id, method.clone()); + (Some(id), method.into(), MessageType::Request, Ok(params)) + } + acp::StreamMessageContent::Response { id, result } => { + let method_map = match stream_message.direction { + acp::StreamMessageDirection::Incoming => { + &mut connection.outgoing_request_methods + } + acp::StreamMessageDirection::Outgoing => { + &mut connection.incoming_request_methods + } + }; 
+ + if let Some(method) = method_map.remove(&id) { + (Some(id), method.into(), MessageType::Response, result) + } else { + ( + Some(id), + "[unrecognized response]".into(), + MessageType::Response, + result, + ) + } + } + acp::StreamMessageContent::Notification { method, params } => { + (None, method.into(), MessageType::Notification, Ok(params)) + } + }; + + let message = WatchedConnectionMessage { + name: method, + message_type, + request_id, + direction: stream_message.direction, + collapsed_params_md: match params.as_ref() { + Ok(params) => params + .as_ref() + .map(|params| collapsed_params_md(params, &language_registry, cx)), + Err(err) => { + if let Ok(err) = &serde_json::to_value(err) { + Some(collapsed_params_md(&err, &language_registry, cx)) + } else { + None + } + } + }, + + expanded_params_md: None, + params, + }; + + connection.messages.push(message); + connection.list_state.splice(index..index, 1); + cx.notify(); + } + + fn render_message( + &mut self, + index: usize, + window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let Some(connection) = self.watched_connection.as_ref() else { + return Empty.into_any(); + }; + + let Some(message) = connection.messages.get(index) else { + return Empty.into_any(); + }; + + let base_size = TextSize::Editor.rems(cx); + + let theme_settings = ThemeSettings::get_global(cx); + let text_style = window.text_style(); + + let colors = cx.theme().colors(); + let expanded = self.expanded.contains(&index); + + v_flex() + .w_full() + .px_4() + .py_3() + .border_color(colors.border) + .border_b_1() + .gap_2() + .items_start() + .font_buffer(cx) + .text_size(base_size) + .id(index) + .group("message") + .hover(|this| this.bg(colors.element_background.opacity(0.5))) + .on_click(cx.listener(move |this, _, _, cx| { + if this.expanded.contains(&index) { + this.expanded.remove(&index); + } else { + this.expanded.insert(index); + let Some(connection) = &mut this.watched_connection else { + return; + }; + let Some(message) = connection.messages.get_mut(index) else { + return; + }; + message.expanded(this.project.read(cx).languages().clone(), cx); + connection.list_state.scroll_to_reveal_item(index); + } + cx.notify() + })) + .child( + h_flex() + .w_full() + .gap_2() + .items_center() + .flex_shrink_0() + .child(match message.direction { + acp::StreamMessageDirection::Incoming => { + ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error) + } + acp::StreamMessageDirection::Outgoing => { + ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success) + } + }) + .child( + Label::new(message.name.clone()) + .buffer_font(cx) + .color(Color::Muted), + ) + .child(div().flex_1()) + .child( + div() + .child(ui::Chip::new(message.message_type.to_string())) + .visible_on_hover("message"), + ) + .children( + message + .request_id + .map(|req_id| div().child(ui::Chip::new(req_id.to_string()))), + ), + ) + // I'm aware using markdown is a hack. Trying to get something working for the demo. + // Will clean up soon! 
+ .when_some( + if expanded { + message.expanded_params_md.clone() + } else { + message.collapsed_params_md.clone() + }, + |this, params| { + this.child( + div().pl_6().w_full().child( + MarkdownElement::new( + params, + MarkdownStyle { + base_text_style: text_style, + selection_background_color: colors.element_selection_background, + syntax: cx.theme().syntax().clone(), + code_block_overflow_x_scroll: true, + code_block: StyleRefinement { + text: Some(TextStyleRefinement { + font_family: Some( + theme_settings.buffer_font.family.clone(), + ), + font_size: Some((base_size * 0.8).into()), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + }, + ) + .code_block_renderer( + CodeBlockRenderer::Default { + copy_button: false, + copy_button_on_hover: expanded, + border: false, + }, + ), + ), + ) + }, + ) + .into_any() + } +} + +struct WatchedConnectionMessage { + name: SharedString, + request_id: Option, + direction: acp::StreamMessageDirection, + message_type: MessageType, + params: Result, acp::Error>, + collapsed_params_md: Option>, + expanded_params_md: Option>, +} + +impl WatchedConnectionMessage { + fn expanded(&mut self, language_registry: Arc, cx: &mut App) { + let params_md = match &self.params { + Ok(Some(params)) => Some(expanded_params_md(params, &language_registry, cx)), + Err(err) => { + if let Some(err) = &serde_json::to_value(err).log_err() { + Some(expanded_params_md(&err, &language_registry, cx)) + } else { + None + } + } + _ => None, + }; + self.expanded_params_md = params_md; + } +} + +fn collapsed_params_md( + params: &serde_json::Value, + language_registry: &Arc, + cx: &mut App, +) -> Entity { + let params_json = serde_json::to_string(params).unwrap_or_default(); + let mut spaced_out_json = String::with_capacity(params_json.len() + params_json.len() / 4); + + for ch in params_json.chars() { + match ch { + '{' => spaced_out_json.push_str("{ "), + '}' => spaced_out_json.push_str(" }"), + ':' => spaced_out_json.push_str(": "), + ',' => spaced_out_json.push_str(", "), + c => spaced_out_json.push(c), + } + } + + let params_md = format!("```json\n{}\n```", spaced_out_json); + cx.new(|cx| Markdown::new(params_md.into(), Some(language_registry.clone()), None, cx)) +} + +fn expanded_params_md( + params: &serde_json::Value, + language_registry: &Arc, + cx: &mut App, +) -> Entity { + let params_json = serde_json::to_string_pretty(params).unwrap_or_default(); + let params_md = format!("```json\n{}\n```", params_json); + cx.new(|cx| Markdown::new(params_md.into(), Some(language_registry.clone()), None, cx)) +} + +enum MessageType { + Request, + Response, + Notification, +} + +impl Display for MessageType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + MessageType::Request => write!(f, "Request"), + MessageType::Response => write!(f, "Response"), + MessageType::Notification => write!(f, "Notification"), + } + } +} + +enum AcpToolsEvent {} + +impl EventEmitter for AcpTools {} + +impl Item for AcpTools { + type Event = AcpToolsEvent; + + fn tab_content_text(&self, _detail: usize, _cx: &App) -> ui::SharedString { + format!( + "ACP: {}", + self.watched_connection + .as_ref() + .map_or("Disconnected", |connection| &connection.server_name) + ) + .into() + } + + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(ui::Icon::new(IconName::Thread)) + } +} + +impl Focusable for AcpTools { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for AcpTools { + fn 
render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + v_flex() + .track_focus(&self.focus_handle) + .size_full() + .bg(cx.theme().colors().editor_background) + .child(match self.watched_connection.as_ref() { + Some(connection) => { + if connection.messages.is_empty() { + h_flex() + .size_full() + .justify_center() + .items_center() + .child("No messages recorded yet") + .into_any() + } else { + list( + connection.list_state.clone(), + cx.processor(Self::render_message), + ) + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .flex_grow() + .into_any() + } + } + None => h_flex() + .size_full() + .justify_center() + .items_center() + .child("No active connection") + .into_any(), + }) + } +} diff --git a/crates/agent/src/context.rs b/crates/agent/src/context.rs index 9bb8fc0eae..a94a933d86 100644 --- a/crates/agent/src/context.rs +++ b/crates/agent/src/context.rs @@ -362,7 +362,7 @@ impl Display for DirectoryContext { let mut is_first = true; for descendant in &self.descendants { if !is_first { - write!(f, "\n")?; + writeln!(f)?; } else { is_first = false; } @@ -650,7 +650,7 @@ impl TextThreadContextHandle { impl Display for TextThreadContext { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { // TODO: escape title? - write!(f, "\n", self.title)?; + writeln!(f, "", self.title)?; write!(f, "{}", self.text.trim())?; write!(f, "\n") } @@ -716,7 +716,7 @@ impl RulesContextHandle { impl Display for RulesContext { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(title) = &self.title { - write!(f, "Rules title: {}\n", title)?; + writeln!(f, "Rules title: {}", title)?; } let code_block = MarkdownCodeBlock { tag: "", diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 7b70fde56a..899e360ab0 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -664,7 +664,7 @@ impl Thread { } pub fn get_or_init_configured_model(&mut self, cx: &App) -> Option { - if self.configured_model.is_none() { + if self.configured_model.is_none() || self.messages.is_empty() { self.configured_model = LanguageModelRegistry::read_global(cx).default_model(); } self.configured_model.clone() @@ -2097,7 +2097,7 @@ impl Thread { } pub fn summarize(&mut self, cx: &mut Context) { - let Some(model) = LanguageModelRegistry::read_global(cx).thread_summary_model() else { + let Some(model) = LanguageModelRegistry::read_global(cx).thread_summary_model(cx) else { println!("No thread summary model"); return; }; @@ -2416,7 +2416,7 @@ impl Thread { } let Some(ConfiguredModel { model, provider }) = - LanguageModelRegistry::read_global(cx).thread_summary_model() + LanguageModelRegistry::read_global(cx).thread_summary_model(cx) else { return; }; @@ -5410,13 +5410,10 @@ fn main() {{ }), cx, ); - registry.set_thread_summary_model( - Some(ConfiguredModel { - provider, - model: model.clone(), - }), - cx, - ); + registry.set_thread_summary_model(Some(ConfiguredModel { + provider, + model: model.clone(), + })); }) }); diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 45e551dbdf..cba2457566 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -893,8 +893,19 @@ impl ThreadsDatabase { let needs_migration_from_heed = mdb_path.exists(); - let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) { + let connection = if *ZED_STATELESS { Connection::open_memory(Some("THREAD_FALLBACK_DB")) + } else if cfg!(any(feature = "test-support", test)) { + // rust stores 
the name of the test on the current thread. + // We use this to automatically create a database that will + // be shared within the test (for the test_retrieve_old_thread) + // but not with concurrent tests. + let thread = std::thread::current(); + let test_name = thread.name(); + Connection::open_memory(Some(&format!( + "THREAD_FALLBACK_{}", + test_name.unwrap_or_default() + ))) } else { Connection::open_file(&sqlite_path.to_string_lossy()) }; diff --git a/crates/agent2/Cargo.toml b/crates/agent2/Cargo.toml index 2a5d879e9e..68246a96b0 100644 --- a/crates/agent2/Cargo.toml +++ b/crates/agent2/Cargo.toml @@ -10,6 +10,7 @@ path = "src/agent2.rs" [features] test-support = ["db/test-support"] +e2e = [] [lints] workspace = true @@ -60,6 +61,7 @@ sqlez.workspace = true task.workspace = true telemetry.workspace = true terminal.workspace = true +thiserror.workspace = true text.workspace = true ui.workspace = true util.workspace = true @@ -72,6 +74,7 @@ zstd.workspace = true [dev-dependencies] agent = { workspace = true, "features" = ["test-support"] } +agent_servers = { workspace = true, "features" = ["test-support"] } assistant_context = { workspace = true, "features" = ["test-support"] } ctor.workspace = true client = { workspace = true, "features" = ["test-support"] } diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs index 2f5f15399e..ecfaea4b49 100644 --- a/crates/agent2/src/agent.rs +++ b/crates/agent2/src/agent.rs @@ -2,7 +2,7 @@ use crate::{ ContextServerRegistry, Thread, ThreadEvent, ThreadsDatabase, ToolCallAuthorization, UserMessageContent, templates::Templates, }; -use crate::{HistoryStore, TokenUsageUpdated}; +use crate::{HistoryStore, TitleUpdated, TokenUsageUpdated}; use acp_thread::{AcpThread, AgentModelSelector}; use action_log::ActionLog; use agent_client_protocol as acp; @@ -180,7 +180,7 @@ impl NativeAgent { fs: Arc, cx: &mut AsyncApp, ) -> Result> { - log::info!("Creating new NativeAgent"); + log::debug!("Creating new NativeAgent"); let project_context = cx .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx))? 
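A note on the in-memory database fallback a few hunks above (in thread_store.rs, and repeated later in this diff for agent2's db.rs): the Rust test harness runs each test on a thread named after the test function, so deriving the connection name from std::thread::current().name() gives every test its own database while still sharing one connection within a single test. A minimal, self-contained sketch of that key derivation follows; the helper name fallback_db_name is illustrative and not part of the patch.

fn fallback_db_name() -> String {
    // Under `cargo test`, each test runs on a thread named after the test
    // function (e.g. "db::tests::test_retrieve_old_thread"), so this string
    // is unique per test but stable across calls within the same test.
    let thread = std::thread::current();
    format!("THREAD_FALLBACK_{}", thread.name().unwrap_or_default())
}

fn main() {
    // Outside the test harness the main thread is named "main",
    // so this prints "THREAD_FALLBACK_main".
    println!("{}", fallback_db_name());
}

One caveat the patch comment hints at: any test running on an unnamed thread falls back to the empty suffix and would share the same "THREAD_FALLBACK_" database.
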
@@ -228,7 +228,7 @@ impl NativeAgent { ) -> Entity { let connection = Rc::new(NativeAgentConnection(cx.entity())); let registry = LanguageModelRegistry::read_global(cx); - let summarization_model = registry.thread_summary_model().map(|c| c.model); + let summarization_model = registry.thread_summary_model(cx).map(|c| c.model); thread_handle.update(cx, |thread, cx| { thread.set_summarization_model(summarization_model, cx); @@ -240,19 +240,23 @@ impl NativeAgent { let title = thread.title(); let project = thread.project.clone(); let action_log = thread.action_log.clone(); - let acp_thread = cx.new(|_cx| { + let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); + let acp_thread = cx.new(|cx| { acp_thread::AcpThread::new( title, connection, project.clone(), action_log.clone(), session_id.clone(), + prompt_capabilities_rx, + cx, ) }); let subscriptions = vec![ cx.observe_release(&acp_thread, |this, acp_thread, _cx| { this.sessions.remove(acp_thread.session_id()); }), + cx.subscribe(&thread_handle, Self::handle_thread_title_updated), cx.subscribe(&thread_handle, Self::handle_thread_token_usage_updated), cx.observe(&thread_handle, move |this, thread, cx| { this.save_thread(thread, cx) @@ -441,6 +445,26 @@ impl NativeAgent { }) } + fn handle_thread_title_updated( + &mut self, + thread: Entity, + _: &TitleUpdated, + cx: &mut Context, + ) { + let session_id = thread.read(cx).id(); + let Some(session) = self.sessions.get(session_id) else { + return; + }; + let thread = thread.downgrade(); + let acp_thread = session.acp_thread.clone(); + cx.spawn(async move |_, cx| { + let title = thread.read_with(cx, |thread, _| thread.title())?; + let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; + task.await + }) + .detach_and_log_err(cx); + } + fn handle_thread_token_usage_updated( &mut self, thread: Entity, @@ -500,7 +524,7 @@ impl NativeAgent { let registry = LanguageModelRegistry::read_global(cx); let default_model = registry.default_model().map(|m| m.model); - let summarization_model = registry.thread_summary_model().map(|m| m.model); + let summarization_model = registry.thread_summary_model(cx).map(|m| m.model); for session in self.sessions.values_mut() { session.thread.update(cx, |thread, cx| { @@ -717,10 +741,6 @@ impl NativeAgentConnection { thread.update_tool_call(update, cx) })??; } - ThreadEvent::TitleUpdate(title) => { - acp_thread - .update(cx, |thread, cx| thread.update_title(title, cx))??; - } ThreadEvent::Retry(status) => { acp_thread.update(cx, |thread, cx| { thread.update_retry_status(status, cx) @@ -739,7 +759,7 @@ impl NativeAgentConnection { } } - log::info!("Response stream completed"); + log::debug!("Response stream completed"); anyhow::Ok(acp::PromptResponse { stop_reason: acp::StopReason::EndTurn, }) @@ -764,7 +784,7 @@ impl AgentModelSelector for NativeAgentConnection { model_id: acp_thread::AgentModelId, cx: &mut App, ) -> Task> { - log::info!("Setting model for session {}: {}", session_id, model_id); + log::debug!("Setting model for session {}: {}", session_id, model_id); let Some(thread) = self .0 .read(cx) @@ -835,12 +855,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection { cx: &mut App, ) -> Task>> { let agent = self.0.clone(); - log::info!("Creating new thread for project at: {:?}", cwd); + log::debug!("Creating new thread for project at: {:?}", cwd); cx.spawn(async move |cx| { log::debug!("Starting thread creation in async context"); - let action_log = cx.new(|_cx| ActionLog::new(project.clone()))?; // Create Thread let 
thread = agent.update( cx, @@ -856,20 +875,16 @@ impl acp_thread::AgentConnection for NativeAgentConnection { .models .model_from_id(&LanguageModels::model_id(&default_model.model)) }); - - let thread = cx.new(|cx| { + Ok(cx.new(|cx| { Thread::new( project.clone(), agent.project_context.clone(), agent.context_server_registry.clone(), - action_log.clone(), agent.templates.clone(), default_model, cx, ) - }); - - Ok(thread) + })) }, )??; agent.update(cx, |agent, cx| agent.register_session(thread, cx)) @@ -905,7 +920,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection { .into_iter() .map(Into::into) .collect::>(); - log::info!("Converted prompt to message: {} chars", content.len()); + log::debug!("Converted prompt to message: {} chars", content.len()); log::debug!("Message id: {:?}", id); log::debug!("Message content: {:?}", content); @@ -916,7 +931,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn resume( &self, session_id: &acp::SessionId, - _cx: &mut App, + _cx: &App, ) -> Option> { Some(Rc::new(NativeAgentSessionResume { connection: self.clone(), @@ -933,12 +948,12 @@ impl acp_thread::AgentConnection for NativeAgentConnection { }); } - fn session_editor( + fn truncate( &self, session_id: &agent_client_protocol::SessionId, - cx: &mut App, - ) -> Option> { - self.0.update(cx, |agent, _cx| { + cx: &App, + ) -> Option> { + self.0.read_with(cx, |agent, _cx| { agent.sessions.get(session_id).map(|session| { Rc::new(NativeAgentSessionEditor { thread: session.thread.clone(), @@ -948,6 +963,17 @@ impl acp_thread::AgentConnection for NativeAgentConnection { }) } + fn set_title( + &self, + session_id: &acp::SessionId, + _cx: &App, + ) -> Option> { + Some(Rc::new(NativeAgentSessionSetTitle { + connection: self.clone(), + session_id: session_id.clone(), + }) as _) + } + fn telemetry(&self) -> Option> { Some(Rc::new(self.clone()) as Rc) } @@ -983,8 +1009,8 @@ struct NativeAgentSessionEditor { acp_thread: WeakEntity, } -impl acp_thread::AgentSessionEditor for NativeAgentSessionEditor { - fn truncate(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task> { +impl acp_thread::AgentSessionTruncate for NativeAgentSessionEditor { + fn run(&self, message_id: acp_thread::UserMessageId, cx: &mut App) -> Task> { match self.thread.update(cx, |thread, cx| { thread.truncate(message_id.clone(), cx)?; Ok(thread.latest_token_usage()) @@ -1016,6 +1042,22 @@ impl acp_thread::AgentSessionResume for NativeAgentSessionResume { } } +struct NativeAgentSessionSetTitle { + connection: NativeAgentConnection, + session_id: acp::SessionId, +} + +impl acp_thread::AgentSessionSetTitle for NativeAgentSessionSetTitle { + fn run(&self, title: SharedString, cx: &mut App) -> Task> { + let Some(session) = self.connection.0.read(cx).sessions.get(&self.session_id) else { + return Task::ready(Err(anyhow!("session not found"))); + }; + let thread = session.thread.clone(); + thread.update(cx, |thread, cx| thread.set_title(title, cx)); + Task::ready(Ok(())) + } +} + #[cfg(test)] mod tests { use crate::HistoryEntryId; @@ -1261,18 +1303,12 @@ mod tests { let model = Arc::new(FakeLanguageModel::default()); let summary_model = Arc::new(FakeLanguageModel::default()); thread.update(cx, |thread, cx| { - thread.set_model(model, cx); - thread.set_summarization_model(Some(summary_model), cx); + thread.set_model(model.clone(), cx); + thread.set_summarization_model(Some(summary_model.clone()), cx); }); cx.run_until_parked(); assert_eq!(history_entries(&history_store, cx), vec![]); - let model = 
thread.read_with(cx, |thread, _| thread.model().unwrap().clone()); - let model = model.as_fake(); - let summary_model = thread.read_with(cx, |thread, _| { - thread.summarization_model().unwrap().clone() - }); - let summary_model = summary_model.as_fake(); let send = acp_thread.update(cx, |thread, cx| { thread.send( vec![ @@ -1321,6 +1357,8 @@ mod tests { ) }); + cx.run_until_parked(); + // Drop the ACP thread, which should cause the session to be dropped as well. cx.update(|_| { drop(thread); @@ -1363,10 +1401,9 @@ mod tests { history: &Entity, cx: &mut TestAppContext, ) -> Vec<(HistoryEntryId, String)> { - history.read_with(cx, |history, cx| { + history.read_with(cx, |history, _| { history - .entries(cx) - .iter() + .entries() .map(|e| (e.id(), e.title().to_string())) .collect::>() }) diff --git a/crates/agent2/src/db.rs b/crates/agent2/src/db.rs index 1b88955a24..e7d31c0c7a 100644 --- a/crates/agent2/src/db.rs +++ b/crates/agent2/src/db.rs @@ -266,8 +266,19 @@ impl ThreadsDatabase { } pub fn new(executor: BackgroundExecutor) -> Result { - let connection = if *ZED_STATELESS || cfg!(any(feature = "test-support", test)) { + let connection = if *ZED_STATELESS { Connection::open_memory(Some("THREAD_FALLBACK_DB")) + } else if cfg!(any(feature = "test-support", test)) { + // rust stores the name of the test on the current thread. + // We use this to automatically create a database that will + // be shared within the test (for the test_retrieve_old_thread) + // but not with concurrent tests. + let thread = std::thread::current(); + let test_name = thread.name(); + Connection::open_memory(Some(&format!( + "THREAD_FALLBACK_{}", + test_name.unwrap_or_default() + ))) } else { let threads_dir = paths::data_dir().join("threads"); std::fs::create_dir_all(&threads_dir)?; diff --git a/crates/agent2/src/history_store.rs b/crates/agent2/src/history_store.rs index 870c2607c4..c656456e01 100644 --- a/crates/agent2/src/history_store.rs +++ b/crates/agent2/src/history_store.rs @@ -10,6 +10,7 @@ use itertools::Itertools; use paths::contexts_dir; use serde::{Deserialize, Serialize}; use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration}; +use ui::ElementId; use util::ResultExt as _; const MAX_RECENTLY_OPENED_ENTRIES: usize = 6; @@ -68,6 +69,15 @@ pub enum HistoryEntryId { TextThread(Arc), } +impl Into for HistoryEntryId { + fn into(self) -> ElementId { + match self { + HistoryEntryId::AcpThread(session_id) => ElementId::Name(session_id.0.into()), + HistoryEntryId::TextThread(path) => ElementId::Path(path), + } + } +} + #[derive(Serialize, Deserialize, Debug)] enum SerializedRecentOpen { AcpThread(String), @@ -76,6 +86,7 @@ enum SerializedRecentOpen { pub struct HistoryStore { threads: Vec, + entries: Vec, context_store: Entity, recently_opened_entries: VecDeque, _subscriptions: Vec, @@ -87,7 +98,7 @@ impl HistoryStore { context_store: Entity, cx: &mut Context, ) -> Self { - let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())]; + let subscriptions = vec![cx.observe(&context_store, |this, _, cx| this.update_entries(cx))]; cx.spawn(async move |this, cx| { let entries = Self::load_recently_opened_entries(cx).await; @@ -106,6 +117,7 @@ impl HistoryStore { context_store, recently_opened_entries: VecDeque::default(), threads: Vec::default(), + entries: Vec::default(), _subscriptions: subscriptions, _save_recently_opened_entries_task: Task::ready(()), } @@ -171,20 +183,18 @@ impl HistoryStore { } } this.threads = threads; - cx.notify(); + this.update_entries(cx); }) }) 
.detach_and_log_err(cx); } - pub fn entries(&self, cx: &App) -> Vec { - let mut history_entries = Vec::new(); - + fn update_entries(&mut self, cx: &mut Context) { #[cfg(debug_assertions)] if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() { - return history_entries; + return; } - + let mut history_entries = Vec::new(); history_entries.extend(self.threads.iter().cloned().map(HistoryEntry::AcpThread)); history_entries.extend( self.context_store @@ -195,17 +205,12 @@ impl HistoryStore { ); history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at())); - history_entries + self.entries = history_entries; + cx.notify() } - pub fn is_empty(&self, cx: &App) -> bool { - self.threads.is_empty() - && self - .context_store - .read(cx) - .unordered_contexts() - .next() - .is_none() + pub fn is_empty(&self, _cx: &App) -> bool { + self.entries.is_empty() } pub fn recently_opened_entries(&self, cx: &App) -> Vec { @@ -345,4 +350,8 @@ impl HistoryStore { .retain(|old_entry| old_entry != entry); self.save_recently_opened_entries(cx); } + + pub fn entries(&self) -> impl Iterator { + self.entries.iter().cloned() + } } diff --git a/crates/agent2/src/native_agent_server.rs b/crates/agent2/src/native_agent_server.rs index 74d24efb13..9ff98ccd18 100644 --- a/crates/agent2/src/native_agent_server.rs +++ b/crates/agent2/src/native_agent_server.rs @@ -3,7 +3,7 @@ use std::{any::Any, path::Path, rc::Rc, sync::Arc}; use agent_servers::AgentServer; use anyhow::Result; use fs::Fs; -use gpui::{App, Entity, Task}; +use gpui::{App, Entity, SharedString, Task}; use project::Project; use prompt_store::PromptStore; @@ -22,16 +22,20 @@ impl NativeAgentServer { } impl AgentServer for NativeAgentServer { - fn name(&self) -> &'static str { - "Native Agent" + fn telemetry_id(&self) -> &'static str { + "zed" } - fn empty_state_headline(&self) -> &'static str { - "" + fn name(&self) -> SharedString { + "Zed Agent".into() } - fn empty_state_message(&self) -> &'static str { - "" + fn empty_state_headline(&self) -> SharedString { + self.name() + } + + fn empty_state_message(&self) -> SharedString { + "".into() } fn logo(&self) -> ui::IconName { @@ -44,7 +48,7 @@ impl AgentServer for NativeAgentServer { project: &Entity, cx: &mut App, ) -> Task>> { - log::info!( + log::debug!( "NativeAgentServer::connect called for path: {:?}", _root_dir ); @@ -63,7 +67,7 @@ impl AgentServer for NativeAgentServer { // Create the connection wrapper let connection = NativeAgentConnection(agent); - log::info!("NativeAgentServer connection established successfully"); + log::debug!("NativeAgentServer connection established successfully"); Ok(Rc::new(connection) as Rc) }) @@ -73,3 +77,52 @@ impl AgentServer for NativeAgentServer { self } } + +#[cfg(test)] +mod tests { + use super::*; + + use assistant_context::ContextStore; + use gpui::AppContext; + + agent_servers::e2e_tests::common_e2e_tests!( + async |fs, project, cx| { + let auth = cx.update(|cx| { + prompt_store::init(cx); + terminal::init(cx); + + let registry = language_model::LanguageModelRegistry::read_global(cx); + let auth = registry + .provider(&language_model::ANTHROPIC_PROVIDER_ID) + .unwrap() + .authenticate(cx); + + cx.spawn(async move |_| auth.await) + }); + + auth.await.unwrap(); + + cx.update(|cx| { + let registry = language_model::LanguageModelRegistry::global(cx); + + registry.update(cx, |registry, cx| { + registry.select_default_model( + Some(&language_model::SelectedModel { + provider: language_model::ANTHROPIC_PROVIDER_ID, + model: 
language_model::LanguageModelId("claude-sonnet-4-latest".into()), + }), + cx, + ); + }); + }); + + let history = cx.update(|cx| { + let context_store = cx.new(move |cx| ContextStore::fake(project.clone(), cx)); + cx.new(move |cx| HistoryStore::new(context_store, cx)) + }); + + NativeAgentServer::new(fs.clone(), history) + }, + allow_option_id = "allow" + ); +} diff --git a/crates/agent2/src/tests/mod.rs b/crates/agent2/src/tests/mod.rs index 478604b14a..78e5c88280 100644 --- a/crates/agent2/src/tests/mod.rs +++ b/crates/agent2/src/tests/mod.rs @@ -1,30 +1,40 @@ use super::*; use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelList, UserMessageId}; -use action_log::ActionLog; use agent_client_protocol::{self as acp}; use agent_settings::AgentProfileId; use anyhow::Result; use client::{Client, UserStore}; +use cloud_llm_client::CompletionIntent; +use collections::IndexMap; +use context_server::{ContextServer, ContextServerCommand, ContextServerId}; use fs::{FakeFs, Fs}; -use futures::{StreamExt, channel::mpsc::UnboundedReceiver}; +use futures::{ + StreamExt, + channel::{ + mpsc::{self, UnboundedReceiver}, + oneshot, + }, +}; use gpui::{ App, AppContext, Entity, Task, TestAppContext, UpdateGlobal, http_client::FakeHttpClient, }; use indoc::indoc; use language_model::{ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, - LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequestMessage, - LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, StopReason, - fake_provider::FakeLanguageModel, + LanguageModelProviderName, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolSchemaFormat, + LanguageModelToolUse, MessageContent, Role, StopReason, fake_provider::FakeLanguageModel, }; use pretty_assertions::assert_eq; -use project::Project; +use project::{ + Project, context_server_store::ContextServerStore, project_settings::ProjectSettings, +}; use prompt_store::ProjectContext; use reqwest_client::ReqwestClient; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use serde_json::json; -use settings::SettingsStore; +use settings::{Settings, SettingsStore}; use std::{path::Path, rc::Rc, sync::Arc, time::Duration}; use util::path; @@ -32,17 +42,22 @@ mod test_tools; use test_tools::*; #[gpui::test] -#[ignore = "can't run on CI yet"] async fn test_echo(cx: &mut TestAppContext) { - let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); let events = thread .update(cx, |thread, cx| { thread.send(UserMessageId::new(), ["Testing: Reply with 'Hello'"], cx) }) - .unwrap() - .collect() - .await; + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hello"); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + fake_model.end_last_completion_stream(); + + let events = events.collect().await; thread.update(cx, |thread, _cx| { assert_eq!( thread.last_message().unwrap().to_markdown(), @@ -57,9 +72,10 @@ async fn test_echo(cx: &mut TestAppContext) { } #[gpui::test] -#[ignore = "can't run on CI yet"] +#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows async fn test_thinking(cx: &mut TestAppContext) { - let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4Thinking).await; + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); let events = thread .update(cx, |thread, cx| { @@ -74,9 +90,18 @@ async fn test_thinking(cx: &mut TestAppContext) { cx, ) }) - .unwrap() - .collect() - .await; + .unwrap(); + cx.run_until_parked(); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking { + text: "Think".to_string(), + signature: None, + }); + fake_model.send_last_completion_stream_text_chunk("Hello"); + fake_model + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + fake_model.end_last_completion_stream(); + + let events = events.collect().await; thread.update(cx, |thread, _cx| { assert_eq!( thread.last_message().unwrap().to_markdown(), @@ -210,7 +235,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { let tool_use = LanguageModelToolUse { id: "tool_1".into(), - name: EchoTool.name().into(), + name: EchoTool::name().into(), raw_input: json!({"text": "test"}).to_string(), input: json!({"text": "test"}), is_input_complete: true, @@ -223,7 +248,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { let completion = fake_model.pending_completions().pop().unwrap(); let tool_result = LanguageModelToolResult { tool_use_id: "tool_1".into(), - tool_name: EchoTool.name().into(), + tool_name: EchoTool::name().into(), is_error: false, content: "test".into(), output: Some("test".into()), @@ -271,7 +296,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) { } #[gpui::test] -#[ignore = "can't run on CI yet"] +#[cfg_attr(not(feature = "e2e"), ignore)] async fn test_basic_tool_calls(cx: &mut TestAppContext) { let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; @@ -293,7 +318,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { // Test a tool calls that's likely to complete *after* streaming stops. let events = thread .update(cx, |thread, cx| { - thread.remove_tool(&AgentTool::name(&EchoTool)); + thread.remove_tool(&EchoTool::name()); thread.add_tool(DelayTool); thread.send( UserMessageId::new(), @@ -331,7 +356,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { } #[gpui::test] -#[ignore = "can't run on CI yet"] +#[cfg_attr(not(feature = "e2e"), ignore)] async fn test_streaming_tool_calls(cx: &mut TestAppContext) { let ThreadTest { thread, .. 
} = setup(cx, TestModel::Sonnet4).await; @@ -397,7 +422,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "tool_id_1".into(), - name: ToolRequiringPermission.name().into(), + name: ToolRequiringPermission::name().into(), raw_input: "{}".into(), input: json!({}), is_input_complete: true, @@ -406,7 +431,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "tool_id_2".into(), - name: ToolRequiringPermission.name().into(), + name: ToolRequiringPermission::name().into(), raw_input: "{}".into(), input: json!({}), is_input_complete: true, @@ -437,14 +462,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { vec![ language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(), - tool_name: ToolRequiringPermission.name().into(), + tool_name: ToolRequiringPermission::name().into(), is_error: false, content: "Allowed".into(), output: Some("Allowed".into()) }), language_model::MessageContent::ToolResult(LanguageModelToolResult { tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(), - tool_name: ToolRequiringPermission.name().into(), + tool_name: ToolRequiringPermission::name().into(), is_error: true, content: "Permission to run tool denied by user".into(), output: None @@ -456,7 +481,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "tool_id_3".into(), - name: ToolRequiringPermission.name().into(), + name: ToolRequiringPermission::name().into(), raw_input: "{}".into(), input: json!({}), is_input_complete: true, @@ -478,7 +503,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { vec![language_model::MessageContent::ToolResult( LanguageModelToolResult { tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(), - tool_name: ToolRequiringPermission.name().into(), + tool_name: ToolRequiringPermission::name().into(), is_error: false, content: "Allowed".into(), output: Some("Allowed".into()) @@ -490,7 +515,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "tool_id_4".into(), - name: ToolRequiringPermission.name().into(), + name: ToolRequiringPermission::name().into(), raw_input: "{}".into(), input: json!({}), is_input_complete: true, @@ -505,7 +530,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { vec![language_model::MessageContent::ToolResult( LanguageModelToolResult { tool_use_id: "tool_id_4".into(), - tool_name: ToolRequiringPermission.name().into(), + tool_name: ToolRequiringPermission::name().into(), is_error: false, content: "Allowed".into(), output: Some("Allowed".into()) @@ -557,7 +582,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { cx.run_until_parked(); let tool_use = LanguageModelToolUse { id: "tool_id_1".into(), - name: EchoTool.name().into(), + name: EchoTool::name().into(), raw_input: "{}".into(), input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), is_input_complete: true, @@ -570,7 +595,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { let completion = fake_model.pending_completions().pop().unwrap(); 
let tool_result = LanguageModelToolResult { tool_use_id: "tool_id_1".into(), - tool_name: EchoTool.name().into(), + tool_name: EchoTool::name().into(), is_error: false, content: "def".into(), output: Some("def".into()), @@ -650,15 +675,6 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) { "} ) }); - - // Ensure we error if calling resume when tool use limit was *not* reached. - let error = thread - .update(cx, |thread, cx| thread.resume(cx)) - .unwrap_err(); - assert_eq!( - error.to_string(), - "can only resume after tool use limit is reached" - ) } #[gpui::test] @@ -676,14 +692,14 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) { let tool_use = LanguageModelToolUse { id: "tool_id_1".into(), - name: EchoTool.name().into(), + name: EchoTool::name().into(), raw_input: "{}".into(), input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(), is_input_complete: true, }; let tool_result = LanguageModelToolResult { tool_use_id: "tool_id_1".into(), - tool_name: EchoTool.name().into(), + tool_name: EchoTool::name().into(), is_error: false, content: "def".into(), output: Some("def".into()), @@ -794,7 +810,7 @@ async fn next_tool_call_authorization( } #[gpui::test] -#[ignore = "can't run on CI yet"] +#[cfg_attr(not(feature = "e2e"), ignore)] async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; @@ -860,14 +876,14 @@ async fn test_profiles(cx: &mut TestAppContext) { "test-1": { "name": "Test Profile 1", "tools": { - EchoTool.name(): true, - DelayTool.name(): true, + EchoTool::name(): true, + DelayTool::name(): true, } }, "test-2": { "name": "Test Profile 2", "tools": { - InfiniteTool.name(): true, + InfiniteTool::name(): true, } } } @@ -896,7 +912,7 @@ async fn test_profiles(cx: &mut TestAppContext) { .iter() .map(|tool| tool.name.clone()) .collect(); - assert_eq!(tool_names, vec![DelayTool.name(), EchoTool.name()]); + assert_eq!(tool_names, vec![DelayTool::name(), EchoTool::name()]); fake_model.end_last_completion_stream(); // Switch to test-2 profile, and verify that it has only the infinite tool. @@ -915,11 +931,339 @@ async fn test_profiles(cx: &mut TestAppContext) { .iter() .map(|tool| tool.name.clone()) .collect(); - assert_eq!(tool_names, vec![InfiniteTool.name()]); + assert_eq!(tool_names, vec![InfiniteTool::name()]); } #[gpui::test] -#[ignore = "can't run on CI yet"] +async fn test_mcp_tools(cx: &mut TestAppContext) { + let ThreadTest { + model, + thread, + context_server_store, + fs, + .. + } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + // Override profiles and wait for settings to be loaded. 
+ fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "profiles": { + "test": { + "name": "Test Profile", + "enable_all_context_servers": true, + "tools": { + EchoTool::name(): true, + } + }, + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); + thread.update(cx, |thread, _| { + thread.set_profile(AgentProfileId("test".into())) + }); + + let mut mcp_tool_calls = setup_context_server( + "test_server", + vec![context_server::types::Tool { + name: "echo".into(), + description: None, + input_schema: serde_json::to_value( + EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), + ) + .unwrap(), + output_schema: None, + annotations: None, + }], + &context_server_store, + cx, + ); + + let events = thread.update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hey"], cx).unwrap() + }); + cx.run_until_parked(); + + // Simulate the model calling the MCP tool. + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!(tool_names_for_completion(&completion), vec!["echo"]); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_1".into(), + name: "echo".into(), + raw_input: json!({"text": "test"}).to_string(), + input: json!({"text": "test"}), + is_input_complete: true, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let (tool_call_params, tool_call_response) = mcp_tool_calls.next().await.unwrap(); + assert_eq!(tool_call_params.name, "echo"); + assert_eq!(tool_call_params.arguments, Some(json!({"text": "test"}))); + tool_call_response + .send(context_server::types::CallToolResponse { + content: vec![context_server::types::ToolResponseContent::Text { + text: "test".into(), + }], + is_error: None, + meta: None, + structured_content: None, + }) + .unwrap(); + cx.run_until_parked(); + + assert_eq!(tool_names_for_completion(&completion), vec!["echo"]); + fake_model.send_last_completion_stream_text_chunk("Done!"); + fake_model.end_last_completion_stream(); + events.collect::>().await; + + // Send again after adding the echo tool, ensuring the name collision is resolved. 
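The collision referred to here is between the native EchoTool (registered as "echo") and the MCP server's own "echo" tool; the assertions below expect the MCP copy to be exposed to the model under a server-qualified name ("test_server_echo"), while non-colliding MCP tools keep their plain names. A rough, illustrative sketch of that kind of disambiguation is shown next; it is not the actual implementation in this patch, and it does not attempt the long-name truncation that the second test below also exercises.

use std::collections::{BTreeMap, BTreeSet};

// Illustrative only: expose MCP tools under their plain name unless it
// collides with an already-registered tool, in which case prefix it with
// the server id (as the test below expects for "test_server_echo").
fn expose_tool_names(
    native: &[&str],
    servers: &BTreeMap<&str, Vec<&str>>,
) -> BTreeSet<String> {
    let mut exposed: BTreeSet<String> = native.iter().map(|n| n.to_string()).collect();
    for (server, tools) in servers {
        for tool in tools {
            let name = if exposed.contains(*tool) {
                format!("{server}_{tool}")
            } else {
                (*tool).to_string()
            };
            exposed.insert(name);
        }
    }
    exposed
}

fn main() {
    let mut servers = BTreeMap::new();
    servers.insert("test_server", vec!["echo"]);
    let names = expose_tool_names(&["echo"], &servers);
    assert!(names.contains("echo") && names.contains("test_server_echo"));
}

The real registration logic lives in the agent's tool collection; the test code that resumes below drives it end to end through the fake model and a stubbed context server.
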
+ let events = thread.update(cx, |thread, cx| { + thread.add_tool(EchoTool); + thread.send(UserMessageId::new(), ["Go"], cx).unwrap() + }); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + tool_names_for_completion(&completion), + vec!["echo", "test_server_echo"] + ); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_2".into(), + name: "test_server_echo".into(), + raw_input: json!({"text": "mcp"}).to_string(), + input: json!({"text": "mcp"}), + is_input_complete: true, + }, + )); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_3".into(), + name: "echo".into(), + raw_input: json!({"text": "native"}).to_string(), + input: json!({"text": "native"}), + is_input_complete: true, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let (tool_call_params, tool_call_response) = mcp_tool_calls.next().await.unwrap(); + assert_eq!(tool_call_params.name, "echo"); + assert_eq!(tool_call_params.arguments, Some(json!({"text": "mcp"}))); + tool_call_response + .send(context_server::types::CallToolResponse { + content: vec![context_server::types::ToolResponseContent::Text { text: "mcp".into() }], + is_error: None, + meta: None, + structured_content: None, + }) + .unwrap(); + cx.run_until_parked(); + + // Ensure the tool results were inserted with the correct names. + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages.last().unwrap().content, + vec![ + MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: "tool_3".into(), + tool_name: "echo".into(), + is_error: false, + content: "native".into(), + output: Some("native".into()), + },), + MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: "tool_2".into(), + tool_name: "test_server_echo".into(), + is_error: false, + content: "mcp".into(), + output: Some("mcp".into()), + },), + ] + ); + fake_model.end_last_completion_stream(); + events.collect::>().await; +} + +#[gpui::test] +async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { + let ThreadTest { + model, + thread, + context_server_store, + fs, + .. 
+ } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + // Set up a profile with all tools enabled + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "profiles": { + "test": { + "name": "Test Profile", + "enable_all_context_servers": true, + "tools": { + EchoTool::name(): true, + DelayTool::name(): true, + WordListTool::name(): true, + ToolRequiringPermission::name(): true, + InfiniteTool::name(): true, + } + }, + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); + + thread.update(cx, |thread, _| { + thread.set_profile(AgentProfileId("test".into())); + thread.add_tool(EchoTool); + thread.add_tool(DelayTool); + thread.add_tool(WordListTool); + thread.add_tool(ToolRequiringPermission); + thread.add_tool(InfiniteTool); + }); + + // Set up multiple context servers with some overlapping tool names + let _server1_calls = setup_context_server( + "xxx", + vec![ + context_server::types::Tool { + name: "echo".into(), // Conflicts with native EchoTool + description: None, + input_schema: serde_json::to_value( + EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), + ) + .unwrap(), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "unique_tool_1".into(), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + ], + &context_server_store, + cx, + ); + + let _server2_calls = setup_context_server( + "yyy", + vec![ + context_server::types::Tool { + name: "echo".into(), // Also conflicts with native EchoTool + description: None, + input_schema: serde_json::to_value( + EchoTool.input_schema(LanguageModelToolSchemaFormat::JsonSchema), + ) + .unwrap(), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "unique_tool_2".into(), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + ], + &context_server_store, + cx, + ); + let _server3_calls = setup_context_server( + "zzz", + vec![ + context_server::types::Tool { + name: "a".repeat(MAX_TOOL_NAME_LENGTH - 2), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "b".repeat(MAX_TOOL_NAME_LENGTH - 1), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + context_server::types::Tool { + name: "c".repeat(MAX_TOOL_NAME_LENGTH + 1), + description: None, + input_schema: json!({"type": "object", "properties": {}}), + output_schema: None, + annotations: None, + }, + ], + &context_server_store, + cx, + ); + + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Go"], cx) + }) + .unwrap(); + cx.run_until_parked(); + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + tool_names_for_completion(&completion), + vec![ + "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 
"cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc", + "delay", + "echo", + "infinite", + "tool_requiring_permission", + "unique_tool_1", + "unique_tool_2", + "word_list", + "xxx_echo", + "y_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "yyy_echo", + "z_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + ] + ); +} + +#[gpui::test] +#[cfg_attr(not(feature = "e2e"), ignore)] async fn test_cancellation(cx: &mut TestAppContext) { let ThreadTest { thread, .. } = setup(cx, TestModel::Sonnet4).await; @@ -975,7 +1319,7 @@ async fn test_cancellation(cx: &mut TestAppContext) { assert!( matches!( last_event, - Some(Ok(ThreadEvent::Stop(acp::StopReason::Canceled))) + Some(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled))) ), "unexpected event {last_event:?}" ); @@ -1004,6 +1348,7 @@ async fn test_cancellation(cx: &mut TestAppContext) { } #[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); @@ -1029,7 +1374,7 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { fake_model.end_last_completion_stream(); let events_1 = events_1.collect::>().await; - assert_eq!(stop_events(events_1), vec![acp::StopReason::Canceled]); + assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]); let events_2 = events_2.collect::>().await; assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); } @@ -1342,6 +1687,7 @@ async fn test_truncate_second_message(cx: &mut TestAppContext) { } #[gpui::test] +#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows async fn test_title_generation(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; let fake_model = model.as_fake(); @@ -1369,6 +1715,7 @@ async fn test_title_generation(cx: &mut TestAppContext) { summary_model.send_last_completion_stream_text_chunk("oodnight Moon"); summary_model.end_last_completion_stream(); send.collect::>().await; + cx.run_until_parked(); thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); // Send another message, ensuring no title is generated this time. @@ -1386,6 +1733,81 @@ async fn test_title_generation(cx: &mut TestAppContext) { thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); } +#[gpui::test] +async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) { + let ThreadTest { model, thread, .. 
} = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let _events = thread + .update(cx, |thread, cx| { + thread.add_tool(ToolRequiringPermission); + thread.add_tool(EchoTool); + thread.send(UserMessageId::new(), ["Hey!"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let permission_tool_use = LanguageModelToolUse { + id: "tool_id_1".into(), + name: ToolRequiringPermission::name().into(), + raw_input: "{}".into(), + input: json!({}), + is_input_complete: true, + }; + let echo_tool_use = LanguageModelToolUse { + id: "tool_id_2".into(), + name: EchoTool::name().into(), + raw_input: json!({"text": "test"}).to_string(), + input: json!({"text": "test"}), + is_input_complete: true, + }; + fake_model.send_last_completion_stream_text_chunk("Hi!"); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + permission_tool_use, + )); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + echo_tool_use.clone(), + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + // Ensure pending tools are skipped when building a request. + let request = thread + .read_with(cx, |thread, cx| { + thread.build_completion_request(CompletionIntent::EditFile, cx) + }) + .unwrap(); + assert_eq!( + request.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Hey!".into()], + cache: true + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![ + MessageContent::Text("Hi!".into()), + MessageContent::ToolUse(echo_tool_use.clone()) + ], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(LanguageModelToolResult { + tool_use_id: echo_tool_use.id.clone(), + tool_name: echo_tool_use.name, + is_error: false, + content: "test".into(), + output: Some("test".into()) + })], + cache: false + }, + ], + ); +} + #[gpui::test] async fn test_agent_connection(cx: &mut TestAppContext) { cx.update(settings::init); @@ -1400,11 +1822,11 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let clock = Arc::new(clock::FakeSystemClock::new()); let client = Client::new(clock, http_client, cx); let user_store = cx.new(|cx| UserStore::new(client.clone(), cx)); + Project::init_settings(cx); + agent_settings::init(cx); language_model::init(client.clone(), cx); language_models::init(user_store, client.clone(), cx); - Project::init_settings(cx); LanguageModelRegistry::test(cx); - agent_settings::init(cx); }); cx.executor().forbid_parking(); @@ -1537,7 +1959,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( LanguageModelToolUse { id: "1".into(), - name: ThinkingTool.name().into(), + name: ThinkingTool::name().into(), raw_input: input.to_string(), input, is_input_complete: false, @@ -1678,6 +2100,7 @@ async fn test_send_retry_on_error(cx: &mut TestAppContext) { .unwrap(); cx.run_until_parked(); + fake_model.send_last_completion_stream_text_chunk("Hey,"); fake_model.send_last_completion_stream_error(LanguageModelCompletionError::ServerOverloaded { provider: LanguageModelProviderName::new("Anthropic"), retry_after: Some(Duration::from_secs(3)), @@ -1687,8 +2110,9 @@ async fn test_send_retry_on_error(cx: &mut TestAppContext) { cx.executor().advance_clock(Duration::from_secs(3)); cx.run_until_parked(); - fake_model.send_last_completion_stream_text_chunk("Hey!"); + 
fake_model.send_last_completion_stream_text_chunk("there!"); fake_model.end_last_completion_stream(); + cx.run_until_parked(); let mut retry_events = Vec::new(); while let Some(Ok(event)) = events.next().await { @@ -1716,12 +2140,94 @@ async fn test_send_retry_on_error(cx: &mut TestAppContext) { ## Assistant - Hey! + Hey, + + [resume] + + ## Assistant + + there! "} ) }); } +#[gpui::test] +async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await; + let fake_model = model.as_fake(); + + let events = thread + .update(cx, |thread, cx| { + thread.set_completion_mode(agent_settings::CompletionMode::Burn, cx); + thread.add_tool(EchoTool); + thread.send(UserMessageId::new(), ["Call the echo tool!"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let tool_use_1 = LanguageModelToolUse { + id: "tool_1".into(), + name: EchoTool::name().into(), + raw_input: json!({"text": "test"}).to_string(), + input: json!({"text": "test"}), + is_input_complete: true, + }; + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + tool_use_1.clone(), + )); + fake_model.send_last_completion_stream_error(LanguageModelCompletionError::ServerOverloaded { + provider: LanguageModelProviderName::new("Anthropic"), + retry_after: Some(Duration::from_secs(3)), + }); + fake_model.end_last_completion_stream(); + + cx.executor().advance_clock(Duration::from_secs(3)); + let completion = fake_model.pending_completions().pop().unwrap(); + assert_eq!( + completion.messages[1..], + vec![ + LanguageModelRequestMessage { + role: Role::User, + content: vec!["Call the echo tool!".into()], + cache: false + }, + LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![language_model::MessageContent::ToolUse(tool_use_1.clone())], + cache: false + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![language_model::MessageContent::ToolResult( + LanguageModelToolResult { + tool_use_id: tool_use_1.id.clone(), + tool_name: tool_use_1.name.clone(), + is_error: false, + content: "test".into(), + output: Some("test".into()) + } + )], + cache: true + }, + ] + ); + + fake_model.send_last_completion_stream_text_chunk("Done"); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + events.collect::>().await; + thread.read_with(cx, |thread, _cx| { + assert_eq!( + thread.last_message(), + Some(Message::Agent(AgentMessage { + content: vec![AgentMessageContent::Text("Done".into())], + tool_results: IndexMap::default() + })) + ); + }) +} + #[gpui::test] async fn test_send_max_retries_exceeded(cx: &mut TestAppContext) { let ThreadTest { thread, model, .. 
} = setup(cx, TestModel::Fake).await;
@@ -1792,12 +2298,12 @@ struct ThreadTest {
     model: Arc<dyn LanguageModel>,
     thread: Entity<Thread>,
     project_context: Entity<ProjectContext>,
+    context_server_store: Entity<ContextServerStore>,
     fs: Arc<FakeFs>,
 }
 
 enum TestModel {
     Sonnet4,
-    Sonnet4Thinking,
     Fake,
 }
 
@@ -1805,7 +2311,6 @@ impl TestModel {
     fn id(&self) -> LanguageModelId {
         match self {
             TestModel::Sonnet4 => LanguageModelId("claude-sonnet-4-latest".into()),
-            TestModel::Sonnet4Thinking => LanguageModelId("claude-sonnet-4-thinking-latest".into()),
             TestModel::Fake => unreachable!(),
         }
     }
@@ -1827,11 +2332,12 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
                 "test-profile": {
                     "name": "Test Profile",
                     "tools": {
-                        EchoTool.name(): true,
-                        DelayTool.name(): true,
-                        WordListTool.name(): true,
-                        ToolRequiringPermission.name(): true,
-                        InfiniteTool.name(): true,
+                        EchoTool::name(): true,
+                        DelayTool::name(): true,
+                        WordListTool::name(): true,
+                        ToolRequiringPermission::name(): true,
+                        InfiniteTool::name(): true,
+                        ThinkingTool::name(): true,
                     }
                 }
             }
@@ -1888,15 +2394,14 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
         .await;
 
     let project_context = cx.new(|_cx| ProjectContext::default());
+    let context_server_store = project.read_with(cx, |project, _| project.context_server_store());
     let context_server_registry =
-        cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
-    let action_log = cx.new(|_| ActionLog::new(project.clone()));
+        cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx));
     let thread = cx.new(|cx| {
         Thread::new(
             project,
             project_context.clone(),
             context_server_registry,
-            action_log,
             templates,
             Some(model.clone()),
             cx,
@@ -1906,6 +2411,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
         model,
         thread,
         project_context,
+        context_server_store,
        fs,
    }
 }
@@ -1940,3 +2446,89 @@ fn watch_settings(fs: Arc<dyn Fs>, cx: &mut App) {
         })
         .detach();
 }
+
+fn tool_names_for_completion(completion: &LanguageModelRequest) -> Vec<String> {
+    completion
+        .tools
+        .iter()
+        .map(|tool| tool.name.clone())
+        .collect()
+}
+
+fn setup_context_server(
+    name: &'static str,
+    tools: Vec<context_server::types::Tool>,
+    context_server_store: &Entity<ContextServerStore>,
+    cx: &mut TestAppContext,
+) -> mpsc::UnboundedReceiver<(
+    context_server::types::CallToolParams,
+    oneshot::Sender<context_server::types::CallToolResponse>,
+)> {
+    cx.update(|cx| {
+        let mut settings = ProjectSettings::get_global(cx).clone();
+        settings.context_servers.insert(
+            name.into(),
+            project::project_settings::ContextServerSettings::Custom {
+                enabled: true,
+                command: ContextServerCommand {
+                    path: "somebinary".into(),
+                    args: Vec::new(),
+                    env: None,
+                },
+            },
+        );
+        ProjectSettings::override_global(settings, cx);
+    });
+
+    let (mcp_tool_calls_tx, mcp_tool_calls_rx) = mpsc::unbounded();
+    let fake_transport = context_server::test::create_fake_transport(name, cx.executor())
+        .on_request::<context_server::types::requests::Initialize, _>(move |_params| async move {
+            context_server::types::InitializeResponse {
+                protocol_version: context_server::types::ProtocolVersion(
+                    context_server::types::LATEST_PROTOCOL_VERSION.to_string(),
+                ),
+                server_info: context_server::types::Implementation {
+                    name: name.into(),
+                    version: "1.0.0".to_string(),
+                },
+                capabilities: context_server::types::ServerCapabilities {
+                    tools: Some(context_server::types::ToolsCapabilities {
+                        list_changed: Some(true),
+                    }),
+                    ..Default::default()
+                },
+                meta: None,
+            }
+        })
+        .on_request::<context_server::types::requests::ListTools, _>(move |_params| {
+            let tools = tools.clone();
+            async move {
+                context_server::types::ListToolsResponse {
+                    tools,
+                    next_cursor: None,
+                    meta: None,
+                }
+            }
+        })
+
.on_request::(move |params| { + let mcp_tool_calls_tx = mcp_tool_calls_tx.clone(); + async move { + let (response_tx, response_rx) = oneshot::channel(); + mcp_tool_calls_tx + .unbounded_send((params, response_tx)) + .unwrap(); + response_rx.await.unwrap() + } + }); + context_server_store.update(cx, |store, cx| { + store.start_server( + Arc::new(ContextServer::new( + ContextServerId(name.into()), + Arc::new(fake_transport), + )), + cx, + ); + }); + cx.run_until_parked(); + mcp_tool_calls_rx +} diff --git a/crates/agent2/src/tests/test_tools.rs b/crates/agent2/src/tests/test_tools.rs index cbff44cedf..27be7b6ac3 100644 --- a/crates/agent2/src/tests/test_tools.rs +++ b/crates/agent2/src/tests/test_tools.rs @@ -16,11 +16,11 @@ impl AgentTool for EchoTool { type Input = EchoToolInput; type Output = String; - fn name(&self) -> SharedString { - "echo".into() + fn name() -> &'static str { + "echo" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } @@ -51,8 +51,8 @@ impl AgentTool for DelayTool { type Input = DelayToolInput; type Output = String; - fn name(&self) -> SharedString { - "delay".into() + fn name() -> &'static str { + "delay" } fn initial_title(&self, input: Result) -> SharedString { @@ -63,7 +63,7 @@ impl AgentTool for DelayTool { } } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } @@ -92,11 +92,11 @@ impl AgentTool for ToolRequiringPermission { type Input = ToolRequiringPermissionInput; type Output = String; - fn name(&self) -> SharedString { - "tool_requiring_permission".into() + fn name() -> &'static str { + "tool_requiring_permission" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } @@ -127,11 +127,11 @@ impl AgentTool for InfiniteTool { type Input = InfiniteToolInput; type Output = String; - fn name(&self) -> SharedString { - "infinite".into() + fn name() -> &'static str { + "infinite" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } @@ -178,11 +178,11 @@ impl AgentTool for WordListTool { type Input = WordListInput; type Output = String; - fn name(&self) -> SharedString { - "word_list".into() + fn name() -> &'static str { + "word_list" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } diff --git a/crates/agent2/src/thread.rs b/crates/agent2/src/thread.rs index f407ee7de5..4acd72f275 100644 --- a/crates/agent2/src/thread.rs +++ b/crates/agent2/src/thread.rs @@ -9,15 +9,15 @@ use action_log::ActionLog; use agent::thread::{GitState, ProjectSnapshot, WorktreeSnapshot}; use agent_client_protocol as acp; use agent_settings::{ - AgentProfileId, AgentSettings, CompletionMode, SUMMARIZE_THREAD_DETAILED_PROMPT, - SUMMARIZE_THREAD_PROMPT, + AgentProfileId, AgentProfileSettings, AgentSettings, CompletionMode, + SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT, }; use anyhow::{Context as _, Result, anyhow}; use assistant_tool::adapt_schema_to_format; use chrono::{DateTime, Utc}; use client::{ModelRequestUsage, RequestUsage}; use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit}; -use collections::{HashMap, IndexMap}; +use collections::{HashMap, HashSet, IndexMap}; use fs::Fs; use futures::{ FutureExt, @@ -45,17 +45,19 @@ use schemars::{JsonSchema, Schema}; use serde::{Deserialize, Serialize}; use settings::{Settings, update_settings_file}; use smol::stream::StreamExt; +use std::fmt::Write; use std::{ collections::BTreeMap, + ops::RangeInclusive, 
path::Path, sync::Arc, time::{Duration, Instant}, }; -use std::{fmt::Write, ops::Range}; -use util::{ResultExt, markdown::MarkdownCodeBlock}; +use util::{ResultExt, debug_panic, markdown::MarkdownCodeBlock}; use uuid::Uuid; const TOOL_CANCELED_MESSAGE: &str = "Tool canceled by user"; +pub const MAX_TOOL_NAME_LENGTH: usize = 64; /// The ID of the user prompt that initiated a request. /// @@ -121,7 +123,7 @@ impl Message { match self { Message::User(message) => message.to_markdown(), Message::Agent(message) => message.to_markdown(), - Message::Resume => "[resumed after tool use limit was reached]".into(), + Message::Resume => "[resume]\n".into(), } } @@ -161,9 +163,9 @@ impl UserMessage { } UserMessageContent::Mention { uri, content } => { if !content.is_empty() { - let _ = write!(&mut markdown, "{}\n\n{}\n", uri.as_link(), content); + let _ = writeln!(&mut markdown, "{}\n\n{}", uri.as_link(), content); } else { - let _ = write!(&mut markdown, "{}\n", uri.as_link()); + let _ = writeln!(&mut markdown, "{}", uri.as_link()); } } } @@ -186,6 +188,7 @@ impl UserMessage { const OPEN_FILES_TAG: &str = ""; const OPEN_DIRECTORIES_TAG: &str = ""; const OPEN_SYMBOLS_TAG: &str = ""; + const OPEN_SELECTIONS_TAG: &str = ""; const OPEN_THREADS_TAG: &str = ""; const OPEN_FETCH_TAG: &str = ""; const OPEN_RULES_TAG: &str = @@ -194,6 +197,7 @@ impl UserMessage { let mut file_context = OPEN_FILES_TAG.to_string(); let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); let mut symbol_context = OPEN_SYMBOLS_TAG.to_string(); + let mut selection_context = OPEN_SELECTIONS_TAG.to_string(); let mut thread_context = OPEN_THREADS_TAG.to_string(); let mut fetch_context = OPEN_FETCH_TAG.to_string(); let mut rules_context = OPEN_RULES_TAG.to_string(); @@ -210,7 +214,7 @@ impl UserMessage { match uri { MentionUri::File { abs_path } => { write!( - &mut symbol_context, + &mut file_context, "\n{}", MarkdownCodeBlock { tag: &codeblock_tag(abs_path, None), @@ -219,17 +223,19 @@ impl UserMessage { ) .ok(); } + MentionUri::PastedImage => { + debug_panic!("pasted image URI should not be used in mention content") + } MentionUri::Directory { .. } => { write!(&mut directory_context, "\n{}\n", content).ok(); } MentionUri::Symbol { - path, line_range, .. - } - | MentionUri::Selection { - path, line_range, .. + abs_path: path, + line_range, + .. } => { write!( - &mut rules_context, + &mut symbol_context, "\n{}", MarkdownCodeBlock { tag: &codeblock_tag(path, Some(line_range)), @@ -238,6 +244,24 @@ impl UserMessage { ) .ok(); } + MentionUri::Selection { + abs_path: path, + line_range, + .. + } => { + write!( + &mut selection_context, + "\n{}", + MarkdownCodeBlock { + tag: &codeblock_tag( + path.as_deref().unwrap_or("Untitled".as_ref()), + Some(line_range) + ), + text: content + } + ) + .ok(); + } MentionUri::Thread { .. 
} => { write!(&mut thread_context, "\n{}\n", content).ok(); } @@ -290,6 +314,13 @@ impl UserMessage { .push(language_model::MessageContent::Text(symbol_context)); } + if selection_context.len() > OPEN_SELECTIONS_TAG.len() { + selection_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(selection_context)); + } + if thread_context.len() > OPEN_THREADS_TAG.len() { thread_context.push_str("\n"); message @@ -325,7 +356,7 @@ impl UserMessage { } } -fn codeblock_tag(full_path: &Path, line_range: Option<&Range>) -> String { +fn codeblock_tag(full_path: &Path, line_range: Option<&RangeInclusive>) -> String { let mut result = String::new(); if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) { @@ -335,10 +366,10 @@ fn codeblock_tag(full_path: &Path, line_range: Option<&Range>) -> String { let _ = write!(result, "{}", full_path.display()); if let Some(range) = line_range { - if range.start == range.end { - let _ = write!(result, ":{}", range.start + 1); + if range.start() == range.end() { + let _ = write!(result, ":{}", range.start() + 1); } else { - let _ = write!(result, ":{}-{}", range.start + 1, range.end + 1); + let _ = write!(result, ":{}-{}", range.start() + 1, range.end() + 1); } } @@ -417,24 +448,33 @@ impl AgentMessage { cache: false, }; for chunk in &self.content { - let chunk = match chunk { + match chunk { AgentMessageContent::Text(text) => { - language_model::MessageContent::Text(text.clone()) + assistant_message + .content + .push(language_model::MessageContent::Text(text.clone())); } AgentMessageContent::Thinking { text, signature } => { - language_model::MessageContent::Thinking { - text: text.clone(), - signature: signature.clone(), - } + assistant_message + .content + .push(language_model::MessageContent::Thinking { + text: text.clone(), + signature: signature.clone(), + }); } AgentMessageContent::RedactedThinking(value) => { - language_model::MessageContent::RedactedThinking(value.clone()) + assistant_message.content.push( + language_model::MessageContent::RedactedThinking(value.clone()), + ); } - AgentMessageContent::ToolUse(value) => { - language_model::MessageContent::ToolUse(value.clone()) + AgentMessageContent::ToolUse(tool_use) => { + if self.tool_results.contains_key(&tool_use.id) { + assistant_message + .content + .push(language_model::MessageContent::ToolUse(tool_use.clone())); + } } }; - assistant_message.content.push(chunk); } let mut user_message = LanguageModelRequestMessage { @@ -487,7 +527,6 @@ pub enum ThreadEvent { ToolCall(acp::ToolCall), ToolCallUpdate(acp_thread::ToolCallUpdate), ToolCallAuthorization(ToolCallAuthorization), - TitleUpdate(SharedString), Retry(acp_thread::RetryStatus), Stop(acp::StopReason), } @@ -499,11 +538,22 @@ pub struct ToolCallAuthorization { pub response: oneshot::Sender, } +#[derive(Debug, thiserror::Error)] +enum CompletionError { + #[error("max tokens")] + MaxTokens, + #[error("refusal")] + Refusal, + #[error(transparent)] + Other(#[from] anyhow::Error), +} + pub struct Thread { id: acp::SessionId, prompt_id: PromptId, updated_at: DateTime, title: Option, + pending_title_generation: Option>, summary: Option, messages: Vec, completion_mode: CompletionMode, @@ -525,26 +575,40 @@ pub struct Thread { templates: Arc, model: Option>, summarization_model: Option>, + prompt_capabilities_tx: watch::Sender, + pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, pub(crate) action_log: Entity, } impl Thread { + fn prompt_capabilities(model: Option<&dyn 
LanguageModel>) -> acp::PromptCapabilities { + let image = model.map_or(true, |model| model.supports_images()); + acp::PromptCapabilities { + image, + audio: false, + embedded_context: true, + } + } + pub fn new( project: Entity, project_context: Entity, context_server_registry: Entity, - action_log: Entity, templates: Arc, model: Option>, cx: &mut Context, ) -> Self { let profile_id = AgentSettings::get_global(cx).default_profile.clone(); + let action_log = cx.new(|_cx| ActionLog::new(project.clone())); + let (prompt_capabilities_tx, prompt_capabilities_rx) = + watch::channel(Self::prompt_capabilities(model.as_deref())); Self { id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()), prompt_id: PromptId::new(), updated_at: Utc::now(), title: None, + pending_title_generation: None, summary: None, messages: Vec::new(), completion_mode: AgentSettings::get_global(cx).preferred_completion_mode, @@ -566,6 +630,8 @@ impl Thread { templates, model, summarization_model: None, + prompt_capabilities_tx, + prompt_capabilities_rx, project, action_log, } @@ -616,7 +682,20 @@ impl Thread { stream: &ThreadEventStream, cx: &mut Context, ) { - let Some(tool) = self.tools.get(tool_use.name.as_ref()) else { + let tool = self.tools.get(tool_use.name.as_ref()).cloned().or_else(|| { + self.context_server_registry + .read(cx) + .servers() + .find_map(|(_, tools)| { + if let Some(tool) = tools.get(tool_use.name.as_ref()) { + Some(tool.clone()) + } else { + None + } + }) + }); + + let Some(tool) = tool else { stream .0 .unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall { @@ -686,6 +765,8 @@ impl Thread { .or_else(|| registry.default_model()) .map(|model| model.model) }); + let (prompt_capabilities_tx, prompt_capabilities_rx) = + watch::channel(Self::prompt_capabilities(model.as_deref())); Self { id, @@ -695,6 +776,7 @@ impl Thread { } else { Some(db_thread.title.clone()) }, + pending_title_generation: None, summary: db_thread.detailed_summary, messages: db_thread.messages, completion_mode: db_thread.completion_mode.unwrap_or_default(), @@ -714,6 +796,8 @@ impl Thread { project, action_log, updated_at: db_thread.updated_at, + prompt_capabilities_tx, + prompt_capabilities_rx, } } @@ -881,10 +965,12 @@ impl Thread { pub fn set_model(&mut self, model: Arc, cx: &mut Context) { let old_usage = self.latest_token_usage(); self.model = Some(model); + let new_caps = Self::prompt_capabilities(self.model.as_deref()); let new_usage = self.latest_token_usage(); if old_usage != new_usage { cx.emit(TokenUsageUpdated(new_usage)); } + self.prompt_capabilities_tx.send(new_caps).log_err(); cx.notify() } @@ -947,11 +1033,11 @@ impl Thread { )); self.add_tool(TerminalTool::new(self.project.clone(), cx)); self.add_tool(ThinkingTool); - self.add_tool(WebSearchTool); // TODO: Enable this only if it's a zed model. 
+ self.add_tool(WebSearchTool); } - pub fn add_tool(&mut self, tool: impl AgentTool) { - self.tools.insert(tool.name(), tool.erase()); + pub fn add_tool(&mut self, tool: T) { + self.tools.insert(T::name().into(), tool.erase()); } pub fn remove_tool(&mut self, name: &str) -> bool { @@ -1020,15 +1106,10 @@ impl Thread { &mut self, cx: &mut Context, ) -> Result>> { - anyhow::ensure!( - self.tool_use_limit_reached, - "can only resume after tool use limit is reached" - ); - self.messages.push(Message::Resume); cx.notify(); - log::info!("Total messages in thread: {}", self.messages.len()); + log::debug!("Total messages in thread: {}", self.messages.len()); self.run_turn(cx) } @@ -1046,7 +1127,7 @@ impl Thread { { let model = self.model().context("No language model configured")?; - log::info!("Thread::send called with model: {:?}", model.name()); + log::info!("Thread::send called with model: {}", model.name().0); self.advance_prompt_id(); let content = content.into_iter().map(Into::into).collect::>(); @@ -1056,7 +1137,7 @@ impl Thread { .push(Message::User(UserMessage { id, content })); cx.notify(); - log::info!("Total messages in thread: {}", self.messages.len()); + log::debug!("Total messages in thread: {}", self.messages.len()); self.run_turn(cx) } @@ -1067,6 +1148,10 @@ impl Thread { self.cancel(cx); let model = self.model.clone().context("No language model configured")?; + let profile = AgentSettings::get_global(cx) + .profiles + .get(&self.profile_id) + .context("Profile not found")?; let (events_tx, events_rx) = mpsc::unbounded::>(); let event_stream = ThreadEventStream(events_tx); let message_ix = self.messages.len().saturating_sub(1); @@ -1074,104 +1159,32 @@ impl Thread { self.summary = None; self.running_turn = Some(RunningTurn { event_stream: event_stream.clone(), + tools: self.enabled_tools(profile, &model, cx), _task: cx.spawn(async move |this, cx| { - log::info!("Starting agent turn execution"); - let mut update_title = None; - let turn_result: Result = async { - let mut completion_intent = CompletionIntent::UserPrompt; - loop { - log::debug!( - "Building completion request with intent: {:?}", - completion_intent - ); - let request = this.update(cx, |this, cx| { - this.build_completion_request(completion_intent, cx) - })??; + log::debug!("Starting agent turn execution"); - log::info!("Calling model.stream_completion"); - - let mut tool_use_limit_reached = false; - let mut refused = false; - let mut reached_max_tokens = false; - let mut tool_uses = Self::stream_completion_with_retries( - this.clone(), - model.clone(), - request, - &event_stream, - &mut tool_use_limit_reached, - &mut refused, - &mut reached_max_tokens, - cx, - ) - .await?; - - if refused { - return Ok(StopReason::Refusal); - } else if reached_max_tokens { - return Ok(StopReason::MaxTokens); - } - - let end_turn = tool_uses.is_empty(); - while let Some(tool_result) = tool_uses.next().await { - log::info!("Tool finished {:?}", tool_result); - - event_stream.update_tool_call_fields( - &tool_result.tool_use_id, - acp::ToolCallUpdateFields { - status: Some(if tool_result.is_error { - acp::ToolCallStatus::Failed - } else { - acp::ToolCallStatus::Completed - }), - raw_output: tool_result.output.clone(), - ..Default::default() - }, - ); - this.update(cx, |this, _cx| { - this.pending_message() - .tool_results - .insert(tool_result.tool_use_id.clone(), tool_result); - })?; - } - - this.update(cx, |this, cx| { - if this.title.is_none() && update_title.is_none() { - update_title = Some(this.update_title(&event_stream, cx)); - } 
- })?; - - if tool_use_limit_reached { - log::info!("Tool use limit reached, completing turn"); - this.update(cx, |this, _cx| this.tool_use_limit_reached = true)?; - return Err(language_model::ToolUseLimitReachedError.into()); - } else if end_turn { - log::info!("No tool uses found, completing turn"); - return Ok(StopReason::EndTurn); - } else { - this.update(cx, |this, cx| this.flush_pending_message(cx))?; - completion_intent = CompletionIntent::ToolResults; - } - } - } - .await; + let turn_result = Self::run_turn_internal(&this, model, &event_stream, cx).await; _ = this.update(cx, |this, cx| this.flush_pending_message(cx)); match turn_result { - Ok(reason) => { - log::info!("Turn execution completed: {:?}", reason); - - if let Some(update_title) = update_title { - update_title.await.context("update title failed").log_err(); - } - - event_stream.send_stop(reason); - if reason == StopReason::Refusal { - _ = this.update(cx, |this, _| this.messages.truncate(message_ix)); - } + Ok(()) => { + log::debug!("Turn execution completed"); + event_stream.send_stop(acp::StopReason::EndTurn); } Err(error) => { log::error!("Turn execution failed: {:?}", error); - event_stream.send_error(error); + match error.downcast::() { + Ok(CompletionError::Refusal) => { + event_stream.send_stop(acp::StopReason::Refusal); + _ = this.update(cx, |this, _| this.messages.truncate(message_ix)); + } + Ok(CompletionError::MaxTokens) => { + event_stream.send_stop(acp::StopReason::MaxTokens); + } + Ok(CompletionError::Other(error)) | Err(error) => { + event_stream.send_error(error); + } + } } } @@ -1181,20 +1194,18 @@ impl Thread { Ok(events_rx) } - async fn stream_completion_with_retries( - this: WeakEntity, + async fn run_turn_internal( + this: &WeakEntity, model: Arc, - request: LanguageModelRequest, event_stream: &ThreadEventStream, - tool_use_limit_reached: &mut bool, - refusal: &mut bool, - max_tokens_reached: &mut bool, cx: &mut AsyncApp, - ) -> Result>> { - log::debug!("Stream completion started successfully"); + ) -> Result<()> { + let mut attempt = 0; + let mut intent = CompletionIntent::UserPrompt; + loop { + let request = + this.update(cx, |this, cx| this.build_completion_request(intent, cx))??; - let mut attempt = None; - 'retry: loop { telemetry::event!( "Agent Thread Completion", thread_id = this.read_with(cx, |this, _| this.id.to_string())?, @@ -1204,138 +1215,133 @@ impl Thread { attempt ); - let mut events = model.stream_completion(request.clone(), cx).await?; - let mut tool_uses = FuturesUnordered::new(); + log::debug!("Calling model.stream_completion, attempt {}", attempt); + let mut events = model + .stream_completion(request, cx) + .await + .map_err(|error| anyhow!(error))?; + let mut tool_results = FuturesUnordered::new(); + let mut error = None; while let Some(event) = events.next().await { + log::trace!("Received completion event: {:?}", event); match event { - Ok(LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::ToolUseLimitReached, - )) => { - *tool_use_limit_reached = true; - } - Ok(LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - )) => { - this.update(cx, |this, cx| { - this.update_model_request_usage(amount, limit, cx) - })?; - } - Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => { - telemetry::event!( - "Agent Thread Completion Usage Updated", - thread_id = this.read_with(cx, |this, _| this.id.to_string())?, - prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, - model = model.telemetry_id(), 
- model_provider = model.provider_id().to_string(), - attempt, - input_tokens = usage.input_tokens, - output_tokens = usage.output_tokens, - cache_creation_input_tokens = usage.cache_creation_input_tokens, - cache_read_input_tokens = usage.cache_read_input_tokens, - ); - - this.update(cx, |this, cx| this.update_token_usage(usage, cx))?; - } - Ok(LanguageModelCompletionEvent::Stop(StopReason::Refusal)) => { - *refusal = true; - return Ok(FuturesUnordered::default()); - } - Ok(LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)) => { - *max_tokens_reached = true; - return Ok(FuturesUnordered::default()); - } - Ok(LanguageModelCompletionEvent::Stop( - StopReason::ToolUse | StopReason::EndTurn, - )) => break, Ok(event) => { - log::trace!("Received completion event: {:?}", event); - this.update(cx, |this, cx| { - tool_uses.extend(this.handle_streamed_completion_event( - event, - event_stream, - cx, - )); - })?; + tool_results.extend(this.update(cx, |this, cx| { + this.handle_completion_event(event, event_stream, cx) + })??); } - Err(error) => { - let completion_mode = - this.read_with(cx, |thread, _cx| thread.completion_mode())?; - if completion_mode == CompletionMode::Normal { - return Err(error.into()); - } - - let Some(strategy) = Self::retry_strategy_for(&error) else { - return Err(error.into()); - }; - - let max_attempts = match &strategy { - RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, - RetryStrategy::Fixed { max_attempts, .. } => *max_attempts, - }; - - let attempt = attempt.get_or_insert(0u8); - - *attempt += 1; - - let attempt = *attempt; - if attempt > max_attempts { - return Err(error.into()); - } - - let delay = match &strategy { - RetryStrategy::ExponentialBackoff { initial_delay, .. } => { - let delay_secs = - initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); - Duration::from_secs(delay_secs) - } - RetryStrategy::Fixed { delay, .. 
} => *delay, - }; - log::debug!("Retry attempt {attempt} with delay {delay:?}"); - - event_stream.send_retry(acp_thread::RetryStatus { - last_error: error.to_string().into(), - attempt: attempt as usize, - max_attempts: max_attempts as usize, - started_at: Instant::now(), - duration: delay, - }); - - cx.background_executor().timer(delay).await; - continue 'retry; + Err(err) => { + error = Some(err); + break; } } } - return Ok(tool_uses); + let end_turn = tool_results.is_empty(); + while let Some(tool_result) = tool_results.next().await { + log::debug!("Tool finished {:?}", tool_result); + + event_stream.update_tool_call_fields( + &tool_result.tool_use_id, + acp::ToolCallUpdateFields { + status: Some(if tool_result.is_error { + acp::ToolCallStatus::Failed + } else { + acp::ToolCallStatus::Completed + }), + raw_output: tool_result.output.clone(), + ..Default::default() + }, + ); + this.update(cx, |this, _cx| { + this.pending_message() + .tool_results + .insert(tool_result.tool_use_id.clone(), tool_result); + })?; + } + + this.update(cx, |this, cx| { + this.flush_pending_message(cx); + if this.title.is_none() && this.pending_title_generation.is_none() { + this.generate_title(cx); + } + })?; + + if let Some(error) = error { + attempt += 1; + let retry = + this.update(cx, |this, _| this.handle_completion_error(error, attempt))??; + let timer = cx.background_executor().timer(retry.duration); + event_stream.send_retry(retry); + timer.await; + this.update(cx, |this, _cx| { + if let Some(Message::Agent(message)) = this.messages.last() { + if message.tool_results.is_empty() { + intent = CompletionIntent::UserPrompt; + this.messages.push(Message::Resume); + } + } + })?; + } else if this.read_with(cx, |this, _| this.tool_use_limit_reached)? { + return Err(language_model::ToolUseLimitReachedError.into()); + } else if end_turn { + return Ok(()); + } else { + intent = CompletionIntent::ToolResults; + attempt = 0; + } } } - pub fn build_system_message(&self, cx: &App) -> LanguageModelRequestMessage { - log::debug!("Building system message"); - let prompt = SystemPromptTemplate { - project: self.project_context.read(cx), - available_tools: self.tools.keys().cloned().collect(), + fn handle_completion_error( + &mut self, + error: LanguageModelCompletionError, + attempt: u8, + ) -> Result { + if self.completion_mode == CompletionMode::Normal { + return Err(anyhow!(error)); } - .render(&self.templates) - .context("failed to build system prompt") - .expect("Invalid template"); - log::debug!("System message built"); - LanguageModelRequestMessage { - role: Role::System, - content: vec![prompt.into()], - cache: true, + + let Some(strategy) = Self::retry_strategy_for(&error) else { + return Err(anyhow!(error)); + }; + + let max_attempts = match &strategy { + RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts, + RetryStrategy::Fixed { max_attempts, .. } => *max_attempts, + }; + + if attempt > max_attempts { + return Err(anyhow!(error)); } + + let delay = match &strategy { + RetryStrategy::ExponentialBackoff { initial_delay, .. } => { + let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32); + Duration::from_secs(delay_secs) + } + RetryStrategy::Fixed { delay, .. 
} => *delay, + }; + log::debug!("Retry attempt {attempt} with delay {delay:?}"); + + Ok(acp_thread::RetryStatus { + last_error: error.to_string().into(), + attempt: attempt as usize, + max_attempts: max_attempts as usize, + started_at: Instant::now(), + duration: delay, + }) } /// A helper method that's called on every streamed completion event. - /// Returns an optional tool result task, which the main agentic loop in - /// send will send back to the model when it resolves. - fn handle_streamed_completion_event( + /// Returns an optional tool result task, which the main agentic loop will + /// send back to the model when it resolves. + fn handle_completion_event( &mut self, event: LanguageModelCompletionEvent, event_stream: &ThreadEventStream, cx: &mut Context, - ) -> Option> { + ) -> Result>> { log::trace!("Handling streamed completion event: {:?}", event); use LanguageModelCompletionEvent::*; @@ -1350,7 +1356,7 @@ impl Thread { } RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx), ToolUse(tool_use) => { - return self.handle_tool_use_event(tool_use, event_stream, cx); + return Ok(self.handle_tool_use_event(tool_use, event_stream, cx)); } ToolUseJsonParseError { id, @@ -1358,18 +1364,46 @@ impl Thread { raw_input, json_parse_error, } => { - return Some(Task::ready(self.handle_tool_use_json_parse_error_event( - id, - tool_name, - raw_input, - json_parse_error, + return Ok(Some(Task::ready( + self.handle_tool_use_json_parse_error_event( + id, + tool_name, + raw_input, + json_parse_error, + ), ))); } - StatusUpdate(_) => {} - UsageUpdate(_) | Stop(_) => unreachable!(), + UsageUpdate(usage) => { + telemetry::event!( + "Agent Thread Completion Usage Updated", + thread_id = self.id.to_string(), + prompt_id = self.prompt_id.to_string(), + model = self.model.as_ref().map(|m| m.telemetry_id()), + model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), + input_tokens = usage.input_tokens, + output_tokens = usage.output_tokens, + cache_creation_input_tokens = usage.cache_creation_input_tokens, + cache_read_input_tokens = usage.cache_read_input_tokens, + ); + self.update_token_usage(usage, cx); + } + StatusUpdate(CompletionRequestStatus::UsageUpdated { amount, limit }) => { + self.update_model_request_usage(amount, limit, cx); + } + StatusUpdate( + CompletionRequestStatus::Started + | CompletionRequestStatus::Queued { .. } + | CompletionRequestStatus::Failed { .. 
}, + ) => {} + StatusUpdate(CompletionRequestStatus::ToolUseLimitReached) => { + self.tool_use_limit_reached = true; + } + Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()), + Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()), + Stop(StopReason::ToolUse | StopReason::EndTurn) => {} } - None + Ok(None) } fn handle_text_event( @@ -1433,7 +1467,7 @@ impl Thread { ) -> Option> { cx.notify(); - let tool = self.tools.get(tool_use.name.as_ref()).cloned(); + let tool = self.tool(tool_use.name.as_ref()); let mut title = SharedString::from(&tool_use.name); let mut kind = acp::ToolKind::Other; if let Some(tool) = tool.as_ref() { @@ -1497,7 +1531,7 @@ impl Thread { }); let supports_images = self.model().is_some_and(|model| model.supports_images()); let tool_result = tool.run(tool_use.input, tool_event_stream, cx); - log::info!("Running tool {}", tool_use.name); + log::debug!("Running tool {}", tool_use.name); Some(cx.foreground_executor().spawn(async move { let tool_result = tool_result.await.and_then(|output| { if let LanguageModelToolResultContent::Image(_) = &output.llm_output @@ -1609,7 +1643,7 @@ impl Thread { summary.extend(lines.next()); } - log::info!("Setting summary: {}", summary); + log::debug!("Setting summary: {}", summary); let summary = SharedString::from(summary); this.update(cx, |this, cx| { @@ -1621,19 +1655,15 @@ impl Thread { }) } - fn update_title( - &mut self, - event_stream: &ThreadEventStream, - cx: &mut Context, - ) -> Task> { - log::info!( + fn generate_title(&mut self, cx: &mut Context) { + let Some(model) = self.summarization_model.clone() else { + return; + }; + + log::debug!( "Generating title with model: {:?}", self.summarization_model.as_ref().map(|model| model.name()) ); - let Some(model) = self.summarization_model.clone() else { - return Task::ready(Ok(())); - }; - let event_stream = event_stream.clone(); let mut request = LanguageModelRequest { intent: Some(CompletionIntent::ThreadSummarization), temperature: AgentSettings::temperature_for_model(&model, cx), @@ -1649,42 +1679,51 @@ impl Thread { content: vec![SUMMARIZE_THREAD_PROMPT.into()], cache: false, }); - cx.spawn(async move |this, cx| { + self.pending_title_generation = Some(cx.spawn(async move |this, cx| { let mut title = String::new(); - let mut messages = model.stream_completion(request, cx).await?; - while let Some(event) = messages.next().await { - let event = event?; - let text = match event { - LanguageModelCompletionEvent::Text(text) => text, - LanguageModelCompletionEvent::StatusUpdate( - CompletionRequestStatus::UsageUpdated { amount, limit }, - ) => { - this.update(cx, |thread, cx| { - thread.update_model_request_usage(amount, limit, cx); - })?; - continue; + + let generate = async { + let mut messages = model.stream_completion(request, cx).await?; + while let Some(event) = messages.next().await { + let event = event?; + let text = match event { + LanguageModelCompletionEvent::Text(text) => text, + LanguageModelCompletionEvent::StatusUpdate( + CompletionRequestStatus::UsageUpdated { amount, limit }, + ) => { + this.update(cx, |thread, cx| { + thread.update_model_request_usage(amount, limit, cx); + })?; + continue; + } + _ => continue, + }; + + let mut lines = text.lines(); + title.extend(lines.next()); + + // Stop if the LLM generated multiple lines. + if lines.next().is_some() { + break; } - _ => continue, - }; - - let mut lines = text.lines(); - title.extend(lines.next()); - - // Stop if the LLM generated multiple lines. 
- if lines.next().is_some() { - break; } + anyhow::Ok(()) + }; + + if generate.await.context("failed to generate title").is_ok() { + _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); } + _ = this.update(cx, |this, _| this.pending_title_generation = None); + })); + } - log::info!("Setting title: {}", title); - - this.update(cx, |this, cx| { - let title = SharedString::from(title); - event_stream.send_title_update(title.clone()); - this.title = Some(title); - cx.notify(); - }) - }) + pub fn set_title(&mut self, title: SharedString, cx: &mut Context) { + self.pending_title_generation = None; + if Some(&title) != self.title.as_ref() { + self.title = Some(title); + cx.emit(TitleUpdated); + cx.notify(); + } } fn last_user_message(&self) -> Option<&UserMessage> { @@ -1707,6 +1746,10 @@ impl Thread { return; }; + if message.content.is_empty() { + return; + } + for content in &message.content { let AgentMessageContent::ToolUse(tool_use) = content else { continue; @@ -1735,34 +1778,32 @@ impl Thread { pub(crate) fn build_completion_request( &self, completion_intent: CompletionIntent, - cx: &mut App, + cx: &App, ) -> Result { let model = self.model().context("No language model configured")?; + let tools = if let Some(turn) = self.running_turn.as_ref() { + turn.tools + .iter() + .filter_map(|(tool_name, tool)| { + log::trace!("Including tool: {}", tool_name); + Some(LanguageModelRequestTool { + name: tool_name.to_string(), + description: tool.description().to_string(), + input_schema: tool.input_schema(model.tool_input_format()).log_err()?, + }) + }) + .collect::>() + } else { + Vec::new() + }; log::debug!("Building completion request"); log::debug!("Completion intent: {:?}", completion_intent); log::debug!("Completion mode: {:?}", self.completion_mode); let messages = self.build_request_messages(cx); - log::info!("Request will include {} messages", messages.len()); - - let tools = if let Some(tools) = self.tools(cx).log_err() { - tools - .filter_map(|tool| { - let tool_name = tool.name().to_string(); - log::trace!("Including tool: {}", tool_name); - Some(LanguageModelRequestTool { - name: tool_name, - description: tool.description().to_string(), - input_schema: tool.input_schema(model.tool_input_format()).log_err()?, - }) - }) - .collect() - } else { - Vec::new() - }; - - log::info!("Request includes {} tools", tools.len()); + log::debug!("Request will include {} messages", messages.len()); + log::debug!("Request includes {} tools", tools.len()); let request = LanguageModelRequest { thread_id: Some(self.id.to_string()), @@ -1781,37 +1822,76 @@ impl Thread { Ok(request) } - fn tools<'a>(&'a self, cx: &'a App) -> Result>> { - let model = self.model().context("No language model configured")?; + fn enabled_tools( + &self, + profile: &AgentProfileSettings, + model: &Arc, + cx: &App, + ) -> BTreeMap> { + fn truncate(tool_name: &SharedString) -> SharedString { + if tool_name.len() > MAX_TOOL_NAME_LENGTH { + let mut truncated = tool_name.to_string(); + truncated.truncate(MAX_TOOL_NAME_LENGTH); + truncated.into() + } else { + tool_name.clone() + } + } - let profile = AgentSettings::get_global(cx) - .profiles - .get(&self.profile_id) - .context("profile not found")?; - let provider_id = model.provider_id(); - - Ok(self + let mut tools = self .tools .iter() - .filter(move |(_, tool)| tool.supported_provider(&provider_id)) .filter_map(|(tool_name, tool)| { - if profile.is_tool_enabled(tool_name) { - Some(tool) + if tool.supported_provider(&model.provider_id()) + && 
profile.is_tool_enabled(tool_name) + { + Some((truncate(tool_name), tool.clone())) } else { None } }) - .chain(self.context_server_registry.read(cx).servers().flat_map( - |(server_id, tools)| { - tools.iter().filter_map(|(tool_name, tool)| { - if profile.is_context_server_tool_enabled(&server_id.0, tool_name) { - Some(tool) - } else { - None - } - }) - }, - ))) + .collect::>(); + + let mut context_server_tools = Vec::new(); + let mut seen_tools = tools.keys().cloned().collect::>(); + let mut duplicate_tool_names = HashSet::default(); + for (server_id, server_tools) in self.context_server_registry.read(cx).servers() { + for (tool_name, tool) in server_tools { + if profile.is_context_server_tool_enabled(&server_id.0, &tool_name) { + let tool_name = truncate(tool_name); + if !seen_tools.insert(tool_name.clone()) { + duplicate_tool_names.insert(tool_name.clone()); + } + context_server_tools.push((server_id.clone(), tool_name, tool.clone())); + } + } + } + + // When there are duplicate tool names, disambiguate by prefixing them + // with the server ID. In the rare case there isn't enough space for the + // disambiguated tool name, keep only the last tool with this name. + for (server_id, tool_name, tool) in context_server_tools { + if duplicate_tool_names.contains(&tool_name) { + let available = MAX_TOOL_NAME_LENGTH.saturating_sub(tool_name.len()); + if available >= 2 { + let mut disambiguated = server_id.0.to_string(); + disambiguated.truncate(available - 1); + disambiguated.push('_'); + disambiguated.push_str(&tool_name); + tools.insert(disambiguated.into(), tool.clone()); + } else { + tools.insert(tool_name, tool.clone()); + } + } else { + tools.insert(tool_name, tool.clone()); + } + } + + tools + } + + fn tool(&self, name: &str) -> Option> { + self.running_turn.as_ref()?.tools.get(name).cloned() } fn build_request_messages(&self, cx: &App) -> Vec { @@ -1819,21 +1899,29 @@ impl Thread { "Building request messages from {} thread messages", self.messages.len() ); - let mut messages = vec![self.build_system_message(cx)]; + + let system_prompt = SystemPromptTemplate { + project: self.project_context.read(cx), + available_tools: self.tools.keys().cloned().collect(), + } + .render(&self.templates) + .context("failed to build system prompt") + .expect("Invalid template"); + let mut messages = vec![LanguageModelRequestMessage { + role: Role::System, + content: vec![system_prompt.into()], + cache: false, + }]; for message in &self.messages { messages.extend(message.to_request()); } - if let Some(message) = self.pending_message.as_ref() { - messages.extend(message.to_request()); + if let Some(last_message) = messages.last_mut() { + last_message.cache = true; } - if let Some(last_user_message) = messages - .iter_mut() - .rev() - .find(|message| message.role == Role::User) - { - last_user_message.cache = true; + if let Some(message) = self.pending_message.as_ref() { + messages.extend(message.to_request()); } messages @@ -1976,6 +2064,8 @@ struct RunningTurn { /// The current event stream for the running turn. Used to report a final /// cancellation event if we cancel the turn. event_stream: ThreadEventStream, + /// The tools that were enabled for this turn. 
+    tools: BTreeMap<SharedString, Arc<dyn AnyAgentTool>>,
 }
 
 impl RunningTurn {
@@ -1989,6 +2079,10 @@
 pub struct TokenUsageUpdated(pub Option<acp_thread::TokenUsage>);
 
 impl EventEmitter<TokenUsageUpdated> for Thread {}
 
+pub struct TitleUpdated;
+
+impl EventEmitter<TitleUpdated> for Thread {}
+
 pub trait AgentTool
 where
     Self: 'static + Sized,
@@ -1996,7 +2090,7 @@ where
     type Input: for<'de> Deserialize<'de> + Serialize + JsonSchema;
     type Output: for<'de> Deserialize<'de> + Serialize + Into<LanguageModelToolResultContent>;
 
-    fn name(&self) -> SharedString;
+    fn name() -> &'static str;
 
     fn description(&self) -> SharedString {
         let schema = schemars::schema_for!(Self::Input);
@@ -2008,7 +2102,7 @@
         )
     }
 
-    fn kind(&self) -> acp::ToolKind;
+    fn kind() -> acp::ToolKind;
 
     /// The initial tool title to display. Can be updated during the tool run.
     fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString;
@@ -2084,7 +2178,7 @@ where
     T: AgentTool,
 {
     fn name(&self) -> SharedString {
-        self.0.name()
+        T::name().into()
     }
 
     fn description(&self) -> SharedString {
@@ -2092,7 +2186,7 @@
     }
 
     fn kind(&self) -> agent_client_protocol::ToolKind {
-        self.0.kind()
+        T::kind()
     }
 
     fn initial_title(&self, input: serde_json::Value) -> SharedString {
@@ -2146,12 +2240,6 @@ where
 
 struct ThreadEventStream(mpsc::UnboundedSender<Result<ThreadEvent>>);
 
 impl ThreadEventStream {
-    fn send_title_update(&self, text: SharedString) {
-        self.0
-            .unbounded_send(Ok(ThreadEvent::TitleUpdate(text)))
-            .ok();
-    }
-
     fn send_user_message(&self, message: &UserMessage) {
         self.0
             .unbounded_send(Ok(ThreadEvent::UserMessage(message.clone())))
@@ -2225,30 +2313,13 @@ impl ThreadEventStream {
         self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok();
     }
 
-    fn send_stop(&self, reason: StopReason) {
-        match reason {
-            StopReason::EndTurn => {
-                self.0
-                    .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::EndTurn)))
-                    .ok();
-            }
-            StopReason::MaxTokens => {
-                self.0
-                    .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::MaxTokens)))
-                    .ok();
-            }
-            StopReason::Refusal => {
-                self.0
-                    .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Refusal)))
-                    .ok();
-            }
-            StopReason::ToolUse => {}
-        }
+    fn send_stop(&self, reason: acp::StopReason) {
+        self.0.unbounded_send(Ok(ThreadEvent::Stop(reason))).ok();
     }
 
     fn send_canceled(&self) {
         self.0
-            .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Canceled)))
+            .unbounded_send(Ok(ThreadEvent::Stop(acp::StopReason::Cancelled)))
             .ok();
     }
@@ -2388,6 +2459,30 @@ impl ToolCallEventStreamReceiver {
         }
     }
 
+    pub async fn expect_update_fields(&mut self) -> acp::ToolCallUpdateFields {
+        let event = self.0.next().await;
+        if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
+            update,
+        )))) = event
+        {
+            update.fields
+        } else {
+            panic!("Expected update fields but got: {:?}", event);
+        }
+    }
+
+    pub async fn expect_diff(&mut self) -> Entity<acp_thread::Diff> {
+        let event = self.0.next().await;
+        if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateDiff(
+            update,
+        )))) = event
+        {
+            update.diff
+        } else {
+            panic!("Expected diff but got: {:?}", event);
+        }
+    }
+
     pub async fn expect_terminal(&mut self) -> Entity<acp_thread::Terminal> {
         let event = self.0.next().await;
         if let Some(Ok(ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateTerminal(
diff --git a/crates/agent2/src/tools.rs b/crates/agent2/src/tools.rs
index d1f2b3b1c7..bcca7eecd1 100644
--- a/crates/agent2/src/tools.rs
+++ b/crates/agent2/src/tools.rs
@@ -16,6 +16,29 @@ mod terminal_tool;
 mod thinking_tool;
 mod web_search_tool;
 
+/// A list of all built in tool names, for use in deduplicating MCP tool names
+pub fn default_tool_names() -> impl Iterator<Item = &'static str> {
+    [
+
CopyPathTool::name(), + CreateDirectoryTool::name(), + DeletePathTool::name(), + DiagnosticsTool::name(), + EditFileTool::name(), + FetchTool::name(), + FindPathTool::name(), + GrepTool::name(), + ListDirectoryTool::name(), + MovePathTool::name(), + NowTool::name(), + OpenTool::name(), + ReadFileTool::name(), + TerminalTool::name(), + ThinkingTool::name(), + WebSearchTool::name(), + ] + .into_iter() +} + pub use context_server_registry::*; pub use copy_path_tool::*; pub use create_directory_tool::*; @@ -33,3 +56,5 @@ pub use read_file_tool::*; pub use terminal_tool::*; pub use thinking_tool::*; pub use web_search_tool::*; + +use crate::AgentTool; diff --git a/crates/agent2/src/tools/copy_path_tool.rs b/crates/agent2/src/tools/copy_path_tool.rs index f973b86990..819a6ff209 100644 --- a/crates/agent2/src/tools/copy_path_tool.rs +++ b/crates/agent2/src/tools/copy_path_tool.rs @@ -1,23 +1,18 @@ use crate::{AgentTool, ToolCallEventStream}; use agent_client_protocol::ToolKind; use anyhow::{Context as _, Result, anyhow}; -use gpui::{App, AppContext, Entity, SharedString, Task}; +use gpui::{App, AppContext, Entity, Task}; use project::Project; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::sync::Arc; use util::markdown::MarkdownInlineCode; -/// Copies a file or directory in the project, and returns confirmation that the -/// copy succeeded. -/// +/// Copies a file or directory in the project, and returns confirmation that the copy succeeded. /// Directory contents will be copied recursively (like `cp -r`). /// -/// This tool should be used when it's desirable to create a copy of a file or -/// directory without modifying the original. It's much more efficient than -/// doing this by separately reading and then writing the file or directory's -/// contents, so this tool should be preferred over that approach whenever -/// copying is the goal. +/// This tool should be used when it's desirable to create a copy of a file or directory without modifying the original. +/// It's much more efficient than doing this by separately reading and then writing the file or directory's contents, so this tool should be preferred over that approach whenever copying is the goal. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct CopyPathToolInput { /// The source path of the file or directory to copy. @@ -33,12 +28,10 @@ pub struct CopyPathToolInput { /// You can copy the first file by providing a source_path of "directory1/a/something.txt" /// pub source_path: String, - /// The destination path where the file or directory should be copied to. 
/// /// - /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt", - /// provide a destination_path of "directory2/b/copy.txt" + /// To copy "directory1/a/something.txt" to "directory2/b/copy.txt", provide a destination_path of "directory2/b/copy.txt" /// pub destination_path: String, } @@ -57,11 +50,11 @@ impl AgentTool for CopyPathTool { type Input = CopyPathToolInput; type Output = String; - fn name(&self) -> SharedString { - "copy_path".into() + fn name() -> &'static str { + "copy_path" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Move } diff --git a/crates/agent2/src/tools/create_directory_tool.rs b/crates/agent2/src/tools/create_directory_tool.rs index c173c5ae67..652363d5fa 100644 --- a/crates/agent2/src/tools/create_directory_tool.rs +++ b/crates/agent2/src/tools/create_directory_tool.rs @@ -9,12 +9,9 @@ use util::markdown::MarkdownInlineCode; use crate::{AgentTool, ToolCallEventStream}; -/// Creates a new directory at the specified path within the project. Returns -/// confirmation that the directory was created. +/// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created. /// -/// This tool creates a directory and all necessary parent directories (similar -/// to `mkdir -p`). It should be used whenever you need to create new -/// directories within the project. +/// This tool creates a directory and all necessary parent directories (similar to `mkdir -p`). It should be used whenever you need to create new directories within the project. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct CreateDirectoryToolInput { /// The path of the new directory. @@ -44,11 +41,11 @@ impl AgentTool for CreateDirectoryTool { type Input = CreateDirectoryToolInput; type Output = String; - fn name(&self) -> SharedString { - "create_directory".into() + fn name() -> &'static str { + "create_directory" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Read } diff --git a/crates/agent2/src/tools/delete_path_tool.rs b/crates/agent2/src/tools/delete_path_tool.rs index e013b3a3e7..0f9641127f 100644 --- a/crates/agent2/src/tools/delete_path_tool.rs +++ b/crates/agent2/src/tools/delete_path_tool.rs @@ -9,8 +9,7 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use std::sync::Arc; -/// Deletes the file or directory (and the directory's contents, recursively) at -/// the specified path in the project, and returns confirmation of the deletion. +/// Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct DeletePathToolInput { /// The path of the file or directory to delete. 
@@ -45,11 +44,11 @@ impl AgentTool for DeletePathTool { type Input = DeletePathToolInput; type Output = String; - fn name(&self) -> SharedString { - "delete_path".into() + fn name() -> &'static str { + "delete_path" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Delete } diff --git a/crates/agent2/src/tools/diagnostics_tool.rs b/crates/agent2/src/tools/diagnostics_tool.rs index 6ba8b7b377..558bb918ce 100644 --- a/crates/agent2/src/tools/diagnostics_tool.rs +++ b/crates/agent2/src/tools/diagnostics_tool.rs @@ -63,11 +63,11 @@ impl AgentTool for DiagnosticsTool { type Input = DiagnosticsToolInput; type Output = String; - fn name(&self) -> SharedString { - "diagnostics".into() + fn name() -> &'static str { + "diagnostics" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Read } diff --git a/crates/agent2/src/tools/edit_file_tool.rs b/crates/agent2/src/tools/edit_file_tool.rs index 24fedda4eb..f86bfd25f7 100644 --- a/crates/agent2/src/tools/edit_file_tool.rs +++ b/crates/agent2/src/tools/edit_file_tool.rs @@ -34,25 +34,21 @@ const DEFAULT_UI_TEXT: &str = "Editing file"; /// - Use the `list_directory` tool to verify the parent directory exists and is the correct location #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct EditFileToolInput { - /// A one-line, user-friendly markdown description of the edit. This will be - /// shown in the UI and also passed to another model to perform the edit. + /// A one-line, user-friendly markdown description of the edit. This will be shown in the UI and also passed to another model to perform the edit. /// - /// Be terse, but also descriptive in what you want to achieve with this - /// edit. Avoid generic instructions. + /// Be terse, but also descriptive in what you want to achieve with this edit. Avoid generic instructions. /// /// NEVER mention the file path in this description. /// /// Fix API endpoint URLs /// Update copyright year in `page_footer` /// - /// Make sure to include this field before all the others in the input object - /// so that we can display it immediately. + /// Make sure to include this field before all the others in the input object so that we can display it immediately. pub display_description: String, /// The full path of the file to create or modify in the project. /// - /// WARNING: When specifying which file path need changing, you MUST - /// start each path with one of the project's root directories. + /// WARNING: When specifying which file path need changing, you MUST start each path with one of the project's root directories. /// /// The following examples assume we have two root directories in the project: /// - /a/b/backend @@ -61,22 +57,19 @@ pub struct EditFileToolInput { /// /// `backend/src/main.rs` /// - /// Notice how the file path starts with `backend`. Without that, the path - /// would be ambiguous and the call would fail! + /// Notice how the file path starts with `backend`. Without that, the path would be ambiguous and the call would fail! /// /// /// /// `frontend/db.js` /// pub path: PathBuf, - /// The mode of operation on the file. Possible values: /// - 'edit': Make granular edits to an existing file. /// - 'create': Create a new file if it doesn't exist. /// - 'overwrite': Replace the entire contents of an existing file. /// - /// When a file already exists or you just created it, prefer editing - /// it as opposed to recreating it from scratch. 
+ /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. pub mode: EditFileMode, } @@ -193,11 +186,11 @@ impl AgentTool for EditFileTool { type Input = EditFileToolInput; type Output = EditFileToolOutput; - fn name(&self) -> SharedString { - "edit_file".into() + fn name() -> &'static str { + "edit_file" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Edit } @@ -280,6 +273,13 @@ impl AgentTool for EditFileTool { let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?; event_stream.update_diff(diff.clone()); + let _finalize_diff = util::defer({ + let diff = diff.downgrade(); + let mut cx = cx.clone(); + move || { + diff.update(&mut cx, |diff, cx| diff.finalize(cx)).ok(); + } + }); let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; let old_text = cx @@ -396,8 +396,6 @@ impl AgentTool for EditFileTool { }) .await; - diff.update(cx, |diff, cx| diff.finalize(cx)).ok(); - let input_path = input.path.display(); if unified_diff.is_empty() { anyhow::ensure!( @@ -524,7 +522,6 @@ fn resolve_path( mod tests { use super::*; use crate::{ContextServerRegistry, Templates}; - use action_log::ActionLog; use client::TelemetrySettings; use fs::Fs; use gpui::{TestAppContext, UpdateGlobal}; @@ -542,7 +539,6 @@ mod tests { fs.insert_tree("/root", json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -551,7 +547,6 @@ mod tests { project, cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log, Templates::new(), Some(model), cx, @@ -742,7 +737,6 @@ mod tests { } }); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -751,7 +745,6 @@ mod tests { project, cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -808,7 +801,9 @@ mod tests { "Code should be formatted when format_on_save is enabled" ); - let stale_buffer_count = action_log.read_with(cx, |log, cx| log.stale_buffers(cx).count()); + let stale_buffer_count = thread + .read_with(cx, |thread, _cx| thread.action_log.clone()) + .read_with(cx, |log, cx| log.stale_buffers(cx).count()); assert_eq!( stale_buffer_count, 0, @@ -886,14 +881,12 @@ mod tests { let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let model = Arc::new(FakeLanguageModel::default()); let thread = cx.new(|cx| { Thread::new( project, cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1015,14 +1008,12 @@ mod tests { let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = 
cx.new(|_| ActionLog::new(project.clone())); let model = Arc::new(FakeLanguageModel::default()); let thread = cx.new(|cx| { Thread::new( project, cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1153,14 +1144,12 @@ mod tests { let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let model = Arc::new(FakeLanguageModel::default()); let thread = cx.new(|cx| { Thread::new( project, cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1261,7 +1250,6 @@ mod tests { ) .await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -1270,7 +1258,6 @@ mod tests { project.clone(), cx.new(|_cx| ProjectContext::default()), context_server_registry.clone(), - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1343,7 +1330,6 @@ mod tests { .await; let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -1352,7 +1338,6 @@ mod tests { project.clone(), cx.new(|_cx| ProjectContext::default()), context_server_registry.clone(), - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1428,7 +1413,6 @@ mod tests { .await; let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -1437,7 +1421,6 @@ mod tests { project.clone(), cx.new(|_cx| ProjectContext::default()), context_server_registry.clone(), - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1510,7 +1493,6 @@ mod tests { let fs = project::FakeFs::new(cx.executor()); let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); let context_server_registry = cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let model = Arc::new(FakeLanguageModel::default()); @@ -1519,7 +1501,6 @@ mod tests { project.clone(), cx.new(|_cx| ProjectContext::default()), context_server_registry, - action_log.clone(), Templates::new(), Some(model.clone()), cx, @@ -1569,6 +1550,100 @@ mod tests { ); } + #[gpui::test] + async fn test_diff_finalization(cx: &mut TestAppContext) { + init_test(cx); + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({"main.rs": ""})).await; + + let 
project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await; + let languages = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry.clone(), + Templates::new(), + Some(model.clone()), + cx, + ) + }); + + // Ensure the diff is finalized after the edit completes. + { + let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone())); + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let edit = cx.update(|cx| { + tool.run( + EditFileToolInput { + display_description: "Edit file".into(), + path: path!("/main.rs").into(), + mode: EditFileMode::Edit, + }, + stream_tx, + cx, + ) + }); + stream_rx.expect_update_fields().await; + let diff = stream_rx.expect_diff().await; + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); + cx.run_until_parked(); + model.end_last_completion_stream(); + edit.await.unwrap(); + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + + // Ensure the diff is finalized if an error occurs while editing. + { + model.forbid_requests(); + let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone())); + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let edit = cx.update(|cx| { + tool.run( + EditFileToolInput { + display_description: "Edit file".into(), + path: path!("/main.rs").into(), + mode: EditFileMode::Edit, + }, + stream_tx, + cx, + ) + }); + stream_rx.expect_update_fields().await; + let diff = stream_rx.expect_diff().await; + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); + edit.await.unwrap_err(); + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + model.allow_requests(); + } + + // Ensure the diff is finalized if the tool call gets dropped. 
+ { + let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone())); + let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); + let edit = cx.update(|cx| { + tool.run( + EditFileToolInput { + display_description: "Edit file".into(), + path: path!("/main.rs").into(), + mode: EditFileMode::Edit, + }, + stream_tx, + cx, + ) + }); + stream_rx.expect_update_fields().await; + let diff = stream_rx.expect_diff().await; + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Pending(_)))); + drop(edit); + cx.run_until_parked(); + diff.read_with(cx, |diff, _| assert!(matches!(diff, Diff::Finalized(_)))); + } + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); diff --git a/crates/agent2/src/tools/fetch_tool.rs b/crates/agent2/src/tools/fetch_tool.rs index ae26c5fe19..dd97271a79 100644 --- a/crates/agent2/src/tools/fetch_tool.rs +++ b/crates/agent2/src/tools/fetch_tool.rs @@ -118,11 +118,11 @@ impl AgentTool for FetchTool { type Input = FetchToolInput; type Output = String; - fn name(&self) -> SharedString { - "fetch".into() + fn name() -> &'static str { + "fetch" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Fetch } @@ -136,12 +136,17 @@ impl AgentTool for FetchTool { fn run( self: Arc, input: Self::Input, - _event_stream: ToolCallEventStream, + event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { + let authorize = event_stream.authorize(input.url.clone(), cx); + let text = cx.background_spawn({ let http_client = self.http_client.clone(); - async move { Self::build_message(http_client, &input.url).await } + async move { + authorize.await?; + Self::build_message(http_client, &input.url).await + } }); cx.foreground_executor().spawn(async move { diff --git a/crates/agent2/src/tools/find_path_tool.rs b/crates/agent2/src/tools/find_path_tool.rs index deccf37ab7..384bd56e77 100644 --- a/crates/agent2/src/tools/find_path_tool.rs +++ b/crates/agent2/src/tools/find_path_tool.rs @@ -31,7 +31,6 @@ pub struct FindPathToolInput { /// You can get back the first two paths by providing a glob of "*thing*.txt" /// pub glob: String, - /// Optional starting position for paginated results (0-based). /// When not provided, starts from the beginning. #[serde(default)] @@ -86,11 +85,11 @@ impl AgentTool for FindPathTool { type Input = FindPathToolInput; type Output = FindPathToolOutput; - fn name(&self) -> SharedString { - "find_path".into() + fn name() -> &'static str { + "find_path" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Search } @@ -166,16 +165,17 @@ fn search_paths(glob: &str, project: Entity, cx: &mut App) -> Task SharedString { - "grep".into() + fn name() -> &'static str { + "grep" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Search } diff --git a/crates/agent2/src/tools/list_directory_tool.rs b/crates/agent2/src/tools/list_directory_tool.rs index 61f21d8f95..e6fa8d7431 100644 --- a/crates/agent2/src/tools/list_directory_tool.rs +++ b/crates/agent2/src/tools/list_directory_tool.rs @@ -10,14 +10,12 @@ use std::fmt::Write; use std::{path::Path, sync::Arc}; use util::markdown::MarkdownInlineCode; -/// Lists files and directories in a given path. Prefer the `grep` or -/// `find_path` tools when searching the codebase. +/// Lists files and directories in a given path. Prefer the `grep` or `find_path` tools when searching the codebase. 
#[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct ListDirectoryToolInput { /// The fully-qualified path of the directory to list in the project. /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. + /// This path should never be absolute, and the first component of the path should always be a root directory in a project. /// /// /// If the project has the following root directories: @@ -53,11 +51,11 @@ impl AgentTool for ListDirectoryTool { type Input = ListDirectoryToolInput; type Output = String; - fn name(&self) -> SharedString { - "list_directory".into() + fn name() -> &'static str { + "list_directory" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Read } diff --git a/crates/agent2/src/tools/move_path_tool.rs b/crates/agent2/src/tools/move_path_tool.rs index f8d5d0d176..d9fb60651b 100644 --- a/crates/agent2/src/tools/move_path_tool.rs +++ b/crates/agent2/src/tools/move_path_tool.rs @@ -8,14 +8,11 @@ use serde::{Deserialize, Serialize}; use std::{path::Path, sync::Arc}; use util::markdown::MarkdownInlineCode; -/// Moves or rename a file or directory in the project, and returns confirmation -/// that the move succeeded. +/// Moves or rename a file or directory in the project, and returns confirmation that the move succeeded. /// -/// If the source and destination directories are the same, but the filename is -/// different, this performs a rename. Otherwise, it performs a move. +/// If the source and destination directories are the same, but the filename is different, this performs a rename. Otherwise, it performs a move. /// -/// This tool should be used when it's desirable to move or rename a file or -/// directory without changing its contents at all. +/// This tool should be used when it's desirable to move or rename a file or directory without changing its contents at all. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct MovePathToolInput { /// The source path of the file or directory to move/rename. 
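Aside on the `edit_file_tool.rs` hunks above: the explicit `diff.update(cx, |diff, cx| diff.finalize(cx))` call after the edit is replaced by a `util::defer` drop guard created before the edit starts, and the new `test_diff_finalization` test exercises the three paths this covers (successful edit, failed edit, dropped tool call). The exact `util::defer` API is assumed here to be a conventional drop guard; this is only a minimal self-contained sketch of the idea, not the real implementation:

```rust
// Minimal drop-guard sketch (assumed behavior of util::defer, not its source):
// the closure runs when the guard goes out of scope, so cleanup happens whether
// the edit returns normally, bails with an error, or the whole future is dropped.
struct Defer<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Defer<F> {
    fn drop(&mut self) {
        if let Some(f) = self.0.take() {
            f();
        }
    }
}

fn defer<F: FnOnce()>(f: F) -> Defer<F> {
    Defer(Some(f))
}

fn run_edit(fail: bool) -> Result<(), &'static str> {
    let _finalize_diff = defer(|| println!("diff finalized"));
    if fail {
        return Err("completion stream failed"); // guard still runs on this path
    }
    Ok(())
}

fn main() {
    run_edit(false).unwrap(); // prints "diff finalized"
    run_edit(true).unwrap_err(); // also prints "diff finalized"
}
```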
@@ -55,11 +52,11 @@ impl AgentTool for MovePathTool { type Input = MovePathToolInput; type Output = String; - fn name(&self) -> SharedString { - "move_path".into() + fn name() -> &'static str { + "move_path" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Move } diff --git a/crates/agent2/src/tools/now_tool.rs b/crates/agent2/src/tools/now_tool.rs index a72ede26fe..9467e7db68 100644 --- a/crates/agent2/src/tools/now_tool.rs +++ b/crates/agent2/src/tools/now_tool.rs @@ -32,11 +32,11 @@ impl AgentTool for NowTool { type Input = NowToolInput; type Output = String; - fn name(&self) -> SharedString { - "now".into() + fn name() -> &'static str { + "now" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Other } diff --git a/crates/agent2/src/tools/open_tool.rs b/crates/agent2/src/tools/open_tool.rs index 36420560c1..df7b04c787 100644 --- a/crates/agent2/src/tools/open_tool.rs +++ b/crates/agent2/src/tools/open_tool.rs @@ -8,19 +8,15 @@ use serde::{Deserialize, Serialize}; use std::{path::PathBuf, sync::Arc}; use util::markdown::MarkdownEscaped; -/// This tool opens a file or URL with the default application associated with -/// it on the user's operating system: +/// This tool opens a file or URL with the default application associated with it on the user's operating system: /// /// - On macOS, it's equivalent to the `open` command /// - On Windows, it's equivalent to `start` /// - On Linux, it uses something like `xdg-open`, `gio open`, `gnome-open`, `kde-open`, `wslview` as appropriate /// -/// For example, it can open a web browser with a URL, open a PDF file with the -/// default PDF viewer, etc. +/// For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc. /// -/// You MUST ONLY use this tool when the user has explicitly requested opening -/// something. You MUST NEVER assume that the user would like for you to use -/// this tool. +/// You MUST ONLY use this tool when the user has explicitly requested opening something. You MUST NEVER assume that the user would like for you to use this tool. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct OpenToolInput { /// The path or URL to open with the default application. @@ -41,11 +37,11 @@ impl AgentTool for OpenTool { type Input = OpenToolInput; type Output = String; - fn name(&self) -> SharedString { - "open".into() + fn name() -> &'static str { + "open" } - fn kind(&self) -> ToolKind { + fn kind() -> ToolKind { ToolKind::Execute } diff --git a/crates/agent2/src/tools/read_file_tool.rs b/crates/agent2/src/tools/read_file_tool.rs index f37dff4f47..e771c26eca 100644 --- a/crates/agent2/src/tools/read_file_tool.rs +++ b/crates/agent2/src/tools/read_file_tool.rs @@ -10,7 +10,8 @@ use project::{AgentLocation, ImageItem, Project, WorktreeSettings, image_store}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::Settings; -use std::sync::Arc; +use std::{path::Path, sync::Arc}; +use util::markdown::MarkdownCodeBlock; use crate::{AgentTool, ToolCallEventStream}; @@ -21,8 +22,7 @@ use crate::{AgentTool, ToolCallEventStream}; pub struct ReadFileToolInput { /// The relative path of the file to read. /// - /// This path should never be absolute, and the first component - /// of the path should always be a root directory in a project. + /// This path should never be absolute, and the first component of the path should always be a root directory in a project. 
/// /// /// If the project has the following root directories: @@ -34,11 +34,9 @@ pub struct ReadFileToolInput { /// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`. /// pub path: String, - /// Optional line number to start reading on (1-based index) #[serde(default)] pub start_line: Option, - /// Optional line number to end reading on (1-based index, inclusive) #[serde(default)] pub end_line: Option, @@ -62,36 +60,21 @@ impl AgentTool for ReadFileTool { type Input = ReadFileToolInput; type Output = LanguageModelToolResultContent; - fn name(&self) -> SharedString { - "read_file".into() + fn name() -> &'static str { + "read_file" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Read } fn initial_title(&self, input: Result) -> SharedString { - if let Ok(input) = input { - let path = &input.path; - match (input.start_line, input.end_line) { - (Some(start), Some(end)) => { - format!( - "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))", - path, start, end, path, start, end - ) - } - (Some(start), None) => { - format!( - "[Read file `{}` (from line {})](@selection:{}:({}-{}))", - path, start, path, start, start - ) - } - _ => format!("[Read file `{}`](@file:{})", path, path), - } - .into() - } else { - "Read file".into() - } + input + .ok() + .as_ref() + .and_then(|input| Path::new(&input.path).file_name()) + .map(|file_name| file_name.to_string_lossy().to_string().into()) + .unwrap_or_default() } fn run( @@ -261,6 +244,19 @@ impl AgentTool for ReadFileTool { }]), ..Default::default() }); + if let Ok(LanguageModelToolResultContent::Text(text)) = &result { + let markdown = MarkdownCodeBlock { + tag: &input.path, + text, + } + .to_string(); + event_stream.update_fields(ToolCallUpdateFields { + content: Some(vec![acp::ToolCallContent::Content { + content: markdown.into(), + }]), + ..Default::default() + }) + } } })?; diff --git a/crates/agent2/src/tools/terminal_tool.rs b/crates/agent2/src/tools/terminal_tool.rs index 3d4faf2e03..f41b909d0b 100644 --- a/crates/agent2/src/tools/terminal_tool.rs +++ b/crates/agent2/src/tools/terminal_tool.rs @@ -63,11 +63,11 @@ impl AgentTool for TerminalTool { type Input = TerminalToolInput; type Output = String; - fn name(&self) -> SharedString { - "terminal".into() + fn name() -> &'static str { + "terminal" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Execute } diff --git a/crates/agent2/src/tools/thinking_tool.rs b/crates/agent2/src/tools/thinking_tool.rs index 43647bb468..61fb9eb0d6 100644 --- a/crates/agent2/src/tools/thinking_tool.rs +++ b/crates/agent2/src/tools/thinking_tool.rs @@ -11,8 +11,7 @@ use crate::{AgentTool, ToolCallEventStream}; /// Use this tool when you need to work through complex problems, develop strategies, or outline approaches before taking action. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct ThinkingToolInput { - /// Content to think about. This should be a description of what to think about or - /// a problem to solve. + /// Content to think about. This should be a description of what to think about or a problem to solve. 
content: String, } @@ -22,11 +21,11 @@ impl AgentTool for ThinkingTool { type Input = ThinkingToolInput; type Output = String; - fn name(&self) -> SharedString { - "thinking".into() + fn name() -> &'static str { + "thinking" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Think } diff --git a/crates/agent2/src/tools/web_search_tool.rs b/crates/agent2/src/tools/web_search_tool.rs index d71a128bfe..d7a34bec29 100644 --- a/crates/agent2/src/tools/web_search_tool.rs +++ b/crates/agent2/src/tools/web_search_tool.rs @@ -14,7 +14,7 @@ use ui::prelude::*; use web_search::WebSearchRegistry; /// Search the web for information using your query. -/// Use this when you need real-time information, facts, or data that might not be in your training. \ +/// Use this when you need real-time information, facts, or data that might not be in your training. /// Results will include snippets and links from relevant web pages. #[derive(Debug, Serialize, Deserialize, JsonSchema)] pub struct WebSearchToolInput { @@ -40,11 +40,11 @@ impl AgentTool for WebSearchTool { type Input = WebSearchToolInput; type Output = WebSearchToolOutput; - fn name(&self) -> SharedString { - "web_search".into() + fn name() -> &'static str { + "web_search" } - fn kind(&self) -> acp::ToolKind { + fn kind() -> acp::ToolKind { acp::ToolKind::Fetch } diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index b654486cb6..9f90f3a78a 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -6,7 +6,7 @@ publish.workspace = true license = "GPL-3.0-or-later" [features] -test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support"] +test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support", "dep:env_logger", "fs", "client/test-support", "dep:gpui_tokio", "reqwest_client/test-support"] e2e = [] [lints] @@ -17,16 +17,20 @@ path = "src/agent_servers.rs" doctest = false [dependencies] +acp_tools.workspace = true acp_thread.workspace = true action_log.workspace = true agent-client-protocol.workspace = true agent_settings.workspace = true -agentic-coding-protocol.workspace = true anyhow.workspace = true +client = { workspace = true, optional = true } collections.workspace = true context_server.workspace = true +env_logger = { workspace = true, optional = true } +fs = { workspace = true, optional = true } futures.workspace = true gpui.workspace = true +gpui_tokio = { workspace = true, optional = true } indoc.workspace = true itertools.workspace = true language.workspace = true @@ -36,6 +40,7 @@ log.workspace = true paths.workspace = true project.workspace = true rand.workspace = true +reqwest_client = { workspace = true, optional = true } schemars.workspace = true semver.workspace = true serde.workspace = true @@ -57,8 +62,12 @@ libc.workspace = true nix.workspace = true [dev-dependencies] +client = { workspace = true, features = ["test-support"] } env_logger.workspace = true +fs.workspace = true language.workspace = true indoc.workspace = true acp_thread = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +gpui_tokio.workspace = true +reqwest_client = { workspace = true, features = ["test-support"] } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 1cfb1fcabf..b4e897374a 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -1,34 +1,414 @@ -use std::{path::Path, rc::Rc}; - use 
crate::AgentServerCommand; use acp_thread::AgentConnection; -use anyhow::Result; -use gpui::AsyncApp; +use acp_tools::AcpConnectionRegistry; +use action_log::ActionLog; +use agent_client_protocol::{self as acp, Agent as _, ErrorCode}; +use anyhow::anyhow; +use collections::HashMap; +use futures::AsyncBufReadExt as _; +use futures::channel::oneshot; +use futures::io::BufReader; +use project::Project; +use serde::Deserialize; +use std::{any::Any, cell::RefCell}; +use std::{path::Path, rc::Rc}; use thiserror::Error; -mod v0; -mod v1; +use anyhow::{Context as _, Result}; +use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntity}; + +use acp_thread::{AcpThread, AuthRequired, LoadError}; #[derive(Debug, Error)] #[error("Unsupported version")] pub struct UnsupportedVersion; +pub struct AcpConnection { + server_name: SharedString, + connection: Rc, + sessions: Rc>>, + auth_methods: Vec, + prompt_capabilities: acp::PromptCapabilities, + _io_task: Task>, +} + +pub struct AcpSession { + thread: WeakEntity, + suppress_abort_err: bool, +} + pub async fn connect( - server_name: &'static str, + server_name: SharedString, command: AgentServerCommand, root_dir: &Path, cx: &mut AsyncApp, ) -> Result> { - let conn = v1::AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await; + let conn = AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await?; + Ok(Rc::new(conn) as _) +} - match conn { - Ok(conn) => Ok(Rc::new(conn) as _), - Err(err) if err.is::() => { - // Consider re-using initialize response and subprocess when adding another version here - let conn: Rc = - Rc::new(v0::AcpConnection::stdio(server_name, command, root_dir, cx).await?); - Ok(conn) +const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1; + +impl AcpConnection { + pub async fn stdio( + server_name: SharedString, + command: AgentServerCommand, + root_dir: &Path, + cx: &mut AsyncApp, + ) -> Result { + let mut child = util::command::new_smol_command(&command.path) + .args(command.args.iter().map(|arg| arg.as_str())) + .envs(command.env.iter().flatten()) + .current_dir(root_dir) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .kill_on_drop(true) + .spawn()?; + + let stdout = child.stdout.take().context("Failed to take stdout")?; + let stdin = child.stdin.take().context("Failed to take stdin")?; + let stderr = child.stderr.take().context("Failed to take stderr")?; + log::trace!("Spawned (pid: {})", child.id()); + + let sessions = Rc::new(RefCell::new(HashMap::default())); + + let client = ClientDelegate { + sessions: sessions.clone(), + cx: cx.clone(), + }; + let (connection, io_task) = acp::ClientSideConnection::new(client, stdin, stdout, { + let foreground_executor = cx.foreground_executor().clone(); + move |fut| { + foreground_executor.spawn(fut).detach(); + } + }); + + let io_task = cx.background_spawn(io_task); + + cx.background_spawn(async move { + let mut stderr = BufReader::new(stderr); + let mut line = String::new(); + while let Ok(n) = stderr.read_line(&mut line).await + && n > 0 + { + log::warn!("agent stderr: {}", &line); + line.clear(); + } + }) + .detach(); + + cx.spawn({ + let sessions = sessions.clone(); + async move |cx| { + let status = child.status().await?; + + for session in sessions.borrow().values() { + session + .thread + .update(cx, |thread, cx| { + thread.emit_load_error(LoadError::Exited { status }, cx) + }) + .ok(); + } + + anyhow::Ok(()) + } + }) + .detach(); + + let connection = 
Rc::new(connection); + + cx.update(|cx| { + AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| { + registry.set_active_connection(server_name.clone(), &connection, cx) + }); + })?; + + let response = connection + .initialize(acp::InitializeRequest { + protocol_version: acp::VERSION, + client_capabilities: acp::ClientCapabilities { + fs: acp::FileSystemCapability { + read_text_file: true, + write_text_file: true, + }, + }, + }) + .await?; + + if response.protocol_version < MINIMUM_SUPPORTED_VERSION { + return Err(UnsupportedVersion.into()); } - Err(err) => Err(err), + + Ok(Self { + auth_methods: response.auth_methods, + connection, + server_name, + sessions, + prompt_capabilities: response.agent_capabilities.prompt_capabilities, + _io_task: io_task, + }) + } +} + +impl AgentConnection for AcpConnection { + fn new_thread( + self: Rc, + project: Entity, + cwd: &Path, + cx: &mut App, + ) -> Task>> { + let conn = self.connection.clone(); + let sessions = self.sessions.clone(); + let cwd = cwd.to_path_buf(); + let context_server_store = project.read(cx).context_server_store().read(cx); + let mcp_servers = context_server_store + .configured_server_ids() + .iter() + .filter_map(|id| { + let configuration = context_server_store.configuration_for_server(id)?; + let command = configuration.command(); + Some(acp::McpServer { + name: id.0.to_string(), + command: command.path.clone(), + args: command.args.clone(), + env: if let Some(env) = command.env.as_ref() { + env.iter() + .map(|(name, value)| acp::EnvVariable { + name: name.clone(), + value: value.clone(), + }) + .collect() + } else { + vec![] + }, + }) + }) + .collect(); + + cx.spawn(async move |cx| { + let response = conn + .new_session(acp::NewSessionRequest { mcp_servers, cwd }) + .await + .map_err(|err| { + if err.code == acp::ErrorCode::AUTH_REQUIRED.code { + let mut error = AuthRequired::new(); + + if err.message != acp::ErrorCode::AUTH_REQUIRED.message { + error = error.with_description(err.message); + } + + anyhow!(error) + } else { + anyhow!(err) + } + })?; + + let session_id = response.session_id; + let action_log = cx.new(|_| ActionLog::new(project.clone()))?; + let thread = cx.new(|cx| { + AcpThread::new( + self.server_name.clone(), + self.clone(), + project, + action_log, + session_id.clone(), + // ACP doesn't currently support per-session prompt capabilities or changing capabilities dynamically. 
+ watch::Receiver::constant(self.prompt_capabilities), + cx, + ) + })?; + + let session = AcpSession { + thread: thread.downgrade(), + suppress_abort_err: false, + }; + sessions.borrow_mut().insert(session_id, session); + + Ok(thread) + }) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &self.auth_methods + } + + fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task> { + let conn = self.connection.clone(); + cx.foreground_executor().spawn(async move { + let result = conn + .authenticate(acp::AuthenticateRequest { + method_id: method_id.clone(), + }) + .await?; + + Ok(result) + }) + } + + fn prompt( + &self, + _id: Option, + params: acp::PromptRequest, + cx: &mut App, + ) -> Task> { + let conn = self.connection.clone(); + let sessions = self.sessions.clone(); + let session_id = params.session_id.clone(); + cx.foreground_executor().spawn(async move { + let result = conn.prompt(params).await; + + let mut suppress_abort_err = false; + + if let Some(session) = sessions.borrow_mut().get_mut(&session_id) { + suppress_abort_err = session.suppress_abort_err; + session.suppress_abort_err = false; + } + + match result { + Ok(response) => Ok(response), + Err(err) => { + if err.code != ErrorCode::INTERNAL_ERROR.code { + anyhow::bail!(err) + } + + let Some(data) = &err.data else { + anyhow::bail!(err) + }; + + // Temporary workaround until the following PR is generally available: + // https://github.com/google-gemini/gemini-cli/pull/6656 + + #[derive(Deserialize)] + #[serde(deny_unknown_fields)] + struct ErrorDetails { + details: Box, + } + + match serde_json::from_value(data.clone()) { + Ok(ErrorDetails { details }) => { + if suppress_abort_err + && (details.contains("This operation was aborted") + || details.contains("The user aborted a request")) + { + Ok(acp::PromptResponse { + stop_reason: acp::StopReason::Cancelled, + }) + } else { + Err(anyhow!(details)) + } + } + Err(_) => Err(anyhow!(err)), + } + } + } + }) + } + + fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { + if let Some(session) = self.sessions.borrow_mut().get_mut(session_id) { + session.suppress_abort_err = true; + } + let conn = self.connection.clone(); + let params = acp::CancelNotification { + session_id: session_id.clone(), + }; + cx.foreground_executor() + .spawn(async move { conn.cancel(params).await }) + .detach(); + } + + fn into_any(self: Rc) -> Rc { + self + } +} + +struct ClientDelegate { + sessions: Rc>>, + cx: AsyncApp, +} + +impl acp::Client for ClientDelegate { + async fn request_permission( + &self, + arguments: acp::RequestPermissionRequest, + ) -> Result { + let cx = &mut self.cx.clone(); + let rx = self + .sessions + .borrow() + .get(&arguments.session_id) + .context("Failed to get session")? + .thread + .update(cx, |thread, cx| { + thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx) + })?; + + let result = rx?.await; + + let outcome = match result { + Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, + Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled, + }; + + Ok(acp::RequestPermissionResponse { outcome }) + } + + async fn write_text_file( + &self, + arguments: acp::WriteTextFileRequest, + ) -> Result<(), acp::Error> { + let cx = &mut self.cx.clone(); + let task = self + .sessions + .borrow() + .get(&arguments.session_id) + .context("Failed to get session")? 
+ .thread + .update(cx, |thread, cx| { + thread.write_text_file(arguments.path, arguments.content, cx) + })?; + + task.await?; + + Ok(()) + } + + async fn read_text_file( + &self, + arguments: acp::ReadTextFileRequest, + ) -> Result { + let cx = &mut self.cx.clone(); + let task = self + .sessions + .borrow() + .get(&arguments.session_id) + .context("Failed to get session")? + .thread + .update(cx, |thread, cx| { + thread.read_text_file(arguments.path, arguments.line, arguments.limit, false, cx) + })?; + + let content = task.await?; + + Ok(acp::ReadTextFileResponse { content }) + } + + async fn session_notification( + &self, + notification: acp::SessionNotification, + ) -> Result<(), acp::Error> { + let cx = &mut self.cx.clone(); + let sessions = self.sessions.borrow(); + let session = sessions + .get(¬ification.session_id) + .context("Failed to get session")?; + + session.thread.update(cx, |thread, cx| { + thread.handle_session_update(notification.update, cx) + })??; + + Ok(()) } } diff --git a/crates/agent_servers/src/acp/v0.rs b/crates/agent_servers/src/acp/v0.rs deleted file mode 100644 index 30643dd005..0000000000 --- a/crates/agent_servers/src/acp/v0.rs +++ /dev/null @@ -1,516 +0,0 @@ -// Translates old acp agents into the new schema -use action_log::ActionLog; -use agent_client_protocol as acp; -use agentic_coding_protocol::{self as acp_old, AgentRequest as _}; -use anyhow::{Context as _, Result, anyhow}; -use futures::channel::oneshot; -use gpui::{AppContext as _, AsyncApp, Entity, Task, WeakEntity}; -use project::Project; -use std::{any::Any, cell::RefCell, path::Path, rc::Rc}; -use ui::App; -use util::ResultExt as _; - -use crate::AgentServerCommand; -use acp_thread::{AcpThread, AgentConnection, AuthRequired}; - -#[derive(Clone)] -struct OldAcpClientDelegate { - thread: Rc>>, - cx: AsyncApp, - next_tool_call_id: Rc>, - // sent_buffer_versions: HashMap, HashMap>, -} - -impl OldAcpClientDelegate { - fn new(thread: Rc>>, cx: AsyncApp) -> Self { - Self { - thread, - cx, - next_tool_call_id: Rc::new(RefCell::new(0)), - } - } -} - -impl acp_old::Client for OldAcpClientDelegate { - async fn stream_assistant_message_chunk( - &self, - params: acp_old::StreamAssistantMessageChunkParams, - ) -> Result<(), acp_old::Error> { - let cx = &mut self.cx.clone(); - - cx.update(|cx| { - self.thread - .borrow() - .update(cx, |thread, cx| match params.chunk { - acp_old::AssistantMessageChunk::Text { text } => { - thread.push_assistant_content_block(text.into(), false, cx) - } - acp_old::AssistantMessageChunk::Thought { thought } => { - thread.push_assistant_content_block(thought.into(), true, cx) - } - }) - .log_err(); - })?; - - Ok(()) - } - - async fn request_tool_call_confirmation( - &self, - request: acp_old::RequestToolCallConfirmationParams, - ) -> Result { - let cx = &mut self.cx.clone(); - - let old_acp_id = *self.next_tool_call_id.borrow() + 1; - self.next_tool_call_id.replace(old_acp_id); - - let tool_call = into_new_tool_call( - acp::ToolCallId(old_acp_id.to_string().into()), - request.tool_call, - ); - - let mut options = match request.confirmation { - acp_old::ToolCallConfirmation::Edit { .. } => vec![( - acp_old::ToolCallConfirmationOutcome::AlwaysAllow, - acp::PermissionOptionKind::AllowAlways, - "Always Allow Edits".to_string(), - )], - acp_old::ToolCallConfirmation::Execute { root_command, .. 
} => vec![( - acp_old::ToolCallConfirmationOutcome::AlwaysAllow, - acp::PermissionOptionKind::AllowAlways, - format!("Always Allow {}", root_command), - )], - acp_old::ToolCallConfirmation::Mcp { - server_name, - tool_name, - .. - } => vec![ - ( - acp_old::ToolCallConfirmationOutcome::AlwaysAllowMcpServer, - acp::PermissionOptionKind::AllowAlways, - format!("Always Allow {}", server_name), - ), - ( - acp_old::ToolCallConfirmationOutcome::AlwaysAllowTool, - acp::PermissionOptionKind::AllowAlways, - format!("Always Allow {}", tool_name), - ), - ], - acp_old::ToolCallConfirmation::Fetch { .. } => vec![( - acp_old::ToolCallConfirmationOutcome::AlwaysAllow, - acp::PermissionOptionKind::AllowAlways, - "Always Allow".to_string(), - )], - acp_old::ToolCallConfirmation::Other { .. } => vec![( - acp_old::ToolCallConfirmationOutcome::AlwaysAllow, - acp::PermissionOptionKind::AllowAlways, - "Always Allow".to_string(), - )], - }; - - options.extend([ - ( - acp_old::ToolCallConfirmationOutcome::Allow, - acp::PermissionOptionKind::AllowOnce, - "Allow".to_string(), - ), - ( - acp_old::ToolCallConfirmationOutcome::Reject, - acp::PermissionOptionKind::RejectOnce, - "Reject".to_string(), - ), - ]); - - let mut outcomes = Vec::with_capacity(options.len()); - let mut acp_options = Vec::with_capacity(options.len()); - - for (index, (outcome, kind, label)) in options.into_iter().enumerate() { - outcomes.push(outcome); - acp_options.push(acp::PermissionOption { - id: acp::PermissionOptionId(index.to_string().into()), - name: label, - kind, - }) - } - - let response = cx - .update(|cx| { - self.thread.borrow().update(cx, |thread, cx| { - thread.request_tool_call_authorization(tool_call.into(), acp_options, cx) - }) - })?? - .context("Failed to update thread")? - .await; - - let outcome = match response { - Ok(option_id) => outcomes[option_id.0.parse::().unwrap_or(0)], - Err(oneshot::Canceled) => acp_old::ToolCallConfirmationOutcome::Cancel, - }; - - Ok(acp_old::RequestToolCallConfirmationResponse { - id: acp_old::ToolCallId(old_acp_id), - outcome, - }) - } - - async fn push_tool_call( - &self, - request: acp_old::PushToolCallParams, - ) -> Result { - let cx = &mut self.cx.clone(); - - let old_acp_id = *self.next_tool_call_id.borrow() + 1; - self.next_tool_call_id.replace(old_acp_id); - - cx.update(|cx| { - self.thread.borrow().update(cx, |thread, cx| { - thread.upsert_tool_call( - into_new_tool_call(acp::ToolCallId(old_acp_id.to_string().into()), request), - cx, - ) - }) - })?? - .context("Failed to update thread")?; - - Ok(acp_old::PushToolCallResponse { - id: acp_old::ToolCallId(old_acp_id), - }) - } - - async fn update_tool_call( - &self, - request: acp_old::UpdateToolCallParams, - ) -> Result<(), acp_old::Error> { - let cx = &mut self.cx.clone(); - - cx.update(|cx| { - self.thread.borrow().update(cx, |thread, cx| { - thread.update_tool_call( - acp::ToolCallUpdate { - id: acp::ToolCallId(request.tool_call_id.0.to_string().into()), - fields: acp::ToolCallUpdateFields { - status: Some(into_new_tool_call_status(request.status)), - content: Some( - request - .content - .into_iter() - .map(into_new_tool_call_content) - .collect::>(), - ), - ..Default::default() - }, - }, - cx, - ) - }) - })? 
- .context("Failed to update thread")??; - - Ok(()) - } - - async fn update_plan(&self, request: acp_old::UpdatePlanParams) -> Result<(), acp_old::Error> { - let cx = &mut self.cx.clone(); - - cx.update(|cx| { - self.thread.borrow().update(cx, |thread, cx| { - thread.update_plan( - acp::Plan { - entries: request - .entries - .into_iter() - .map(into_new_plan_entry) - .collect(), - }, - cx, - ) - }) - })? - .context("Failed to update thread")?; - - Ok(()) - } - - async fn read_text_file( - &self, - acp_old::ReadTextFileParams { path, line, limit }: acp_old::ReadTextFileParams, - ) -> Result { - let content = self - .cx - .update(|cx| { - self.thread.borrow().update(cx, |thread, cx| { - thread.read_text_file(path, line, limit, false, cx) - }) - })? - .context("Failed to update thread")? - .await?; - Ok(acp_old::ReadTextFileResponse { content }) - } - - async fn write_text_file( - &self, - acp_old::WriteTextFileParams { path, content }: acp_old::WriteTextFileParams, - ) -> Result<(), acp_old::Error> { - self.cx - .update(|cx| { - self.thread - .borrow() - .update(cx, |thread, cx| thread.write_text_file(path, content, cx)) - })? - .context("Failed to update thread")? - .await?; - - Ok(()) - } -} - -fn into_new_tool_call(id: acp::ToolCallId, request: acp_old::PushToolCallParams) -> acp::ToolCall { - acp::ToolCall { - id, - title: request.label, - kind: acp_kind_from_old_icon(request.icon), - status: acp::ToolCallStatus::InProgress, - content: request - .content - .into_iter() - .map(into_new_tool_call_content) - .collect(), - locations: request - .locations - .into_iter() - .map(into_new_tool_call_location) - .collect(), - raw_input: None, - raw_output: None, - } -} - -fn acp_kind_from_old_icon(icon: acp_old::Icon) -> acp::ToolKind { - match icon { - acp_old::Icon::FileSearch => acp::ToolKind::Search, - acp_old::Icon::Folder => acp::ToolKind::Search, - acp_old::Icon::Globe => acp::ToolKind::Search, - acp_old::Icon::Hammer => acp::ToolKind::Other, - acp_old::Icon::LightBulb => acp::ToolKind::Think, - acp_old::Icon::Pencil => acp::ToolKind::Edit, - acp_old::Icon::Regex => acp::ToolKind::Search, - acp_old::Icon::Terminal => acp::ToolKind::Execute, - } -} - -fn into_new_tool_call_status(status: acp_old::ToolCallStatus) -> acp::ToolCallStatus { - match status { - acp_old::ToolCallStatus::Running => acp::ToolCallStatus::InProgress, - acp_old::ToolCallStatus::Finished => acp::ToolCallStatus::Completed, - acp_old::ToolCallStatus::Error => acp::ToolCallStatus::Failed, - } -} - -fn into_new_tool_call_content(content: acp_old::ToolCallContent) -> acp::ToolCallContent { - match content { - acp_old::ToolCallContent::Markdown { markdown } => markdown.into(), - acp_old::ToolCallContent::Diff { diff } => acp::ToolCallContent::Diff { - diff: into_new_diff(diff), - }, - } -} - -fn into_new_diff(diff: acp_old::Diff) -> acp::Diff { - acp::Diff { - path: diff.path, - old_text: diff.old_text, - new_text: diff.new_text, - } -} - -fn into_new_tool_call_location(location: acp_old::ToolCallLocation) -> acp::ToolCallLocation { - acp::ToolCallLocation { - path: location.path, - line: location.line, - } -} - -fn into_new_plan_entry(entry: acp_old::PlanEntry) -> acp::PlanEntry { - acp::PlanEntry { - content: entry.content, - priority: into_new_plan_priority(entry.priority), - status: into_new_plan_status(entry.status), - } -} - -fn into_new_plan_priority(priority: acp_old::PlanEntryPriority) -> acp::PlanEntryPriority { - match priority { - acp_old::PlanEntryPriority::Low => acp::PlanEntryPriority::Low, - 
acp_old::PlanEntryPriority::Medium => acp::PlanEntryPriority::Medium, - acp_old::PlanEntryPriority::High => acp::PlanEntryPriority::High, - } -} - -fn into_new_plan_status(status: acp_old::PlanEntryStatus) -> acp::PlanEntryStatus { - match status { - acp_old::PlanEntryStatus::Pending => acp::PlanEntryStatus::Pending, - acp_old::PlanEntryStatus::InProgress => acp::PlanEntryStatus::InProgress, - acp_old::PlanEntryStatus::Completed => acp::PlanEntryStatus::Completed, - } -} - -pub struct AcpConnection { - pub name: &'static str, - pub connection: acp_old::AgentConnection, - pub _child_status: Task>, - pub current_thread: Rc>>, -} - -impl AcpConnection { - pub fn stdio( - name: &'static str, - command: AgentServerCommand, - root_dir: &Path, - cx: &mut AsyncApp, - ) -> Task> { - let root_dir = root_dir.to_path_buf(); - - cx.spawn(async move |cx| { - let mut child = util::command::new_smol_command(&command.path) - .args(command.args.iter()) - .current_dir(root_dir) - .stdin(std::process::Stdio::piped()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::inherit()) - .kill_on_drop(true) - .spawn()?; - - let stdin = child.stdin.take().unwrap(); - let stdout = child.stdout.take().unwrap(); - log::trace!("Spawned (pid: {})", child.id()); - - let foreground_executor = cx.foreground_executor().clone(); - - let thread_rc = Rc::new(RefCell::new(WeakEntity::new_invalid())); - - let (connection, io_fut) = acp_old::AgentConnection::connect_to_agent( - OldAcpClientDelegate::new(thread_rc.clone(), cx.clone()), - stdin, - stdout, - move |fut| foreground_executor.spawn(fut).detach(), - ); - - let io_task = cx.background_spawn(async move { - io_fut.await.log_err(); - }); - - let child_status = cx.background_spawn(async move { - let result = match child.status().await { - Err(e) => Err(anyhow!(e)), - Ok(result) if result.success() => Ok(()), - Ok(result) => Err(anyhow!(result)), - }; - drop(io_task); - result - }); - - Ok(Self { - name, - connection, - _child_status: child_status, - current_thread: thread_rc, - }) - }) - } -} - -impl AgentConnection for AcpConnection { - fn new_thread( - self: Rc, - project: Entity, - _cwd: &Path, - cx: &mut App, - ) -> Task>> { - let task = self.connection.request_any( - acp_old::InitializeParams { - protocol_version: acp_old::ProtocolVersion::latest(), - } - .into_any(), - ); - let current_thread = self.current_thread.clone(); - cx.spawn(async move |cx| { - let result = task.await?; - let result = acp_old::InitializeParams::response_from_any(result)?; - - if !result.is_authenticated { - anyhow::bail!(AuthRequired::new()) - } - - cx.update(|cx| { - let thread = cx.new(|cx| { - let session_id = acp::SessionId("acp-old-no-id".into()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); - AcpThread::new(self.name, self.clone(), project, action_log, session_id) - }); - current_thread.replace(thread.downgrade()); - thread - }) - }) - } - - fn auth_methods(&self) -> &[acp::AuthMethod] { - &[] - } - - fn authenticate(&self, _method_id: acp::AuthMethodId, cx: &mut App) -> Task> { - let task = self - .connection - .request_any(acp_old::AuthenticateParams.into_any()); - cx.foreground_executor().spawn(async move { - task.await?; - Ok(()) - }) - } - - fn prompt( - &self, - _id: Option, - params: acp::PromptRequest, - cx: &mut App, - ) -> Task> { - let chunks = params - .prompt - .into_iter() - .filter_map(|block| match block { - acp::ContentBlock::Text(text) => { - Some(acp_old::UserMessageChunk::Text { text: text.text }) - } - 
acp::ContentBlock::ResourceLink(link) => Some(acp_old::UserMessageChunk::Path { - path: link.uri.into(), - }), - _ => None, - }) - .collect(); - - let task = self - .connection - .request_any(acp_old::SendUserMessageParams { chunks }.into_any()); - cx.foreground_executor().spawn(async move { - task.await?; - anyhow::Ok(acp::PromptResponse { - stop_reason: acp::StopReason::EndTurn, - }) - }) - } - - fn cancel(&self, _session_id: &acp::SessionId, cx: &mut App) { - let task = self - .connection - .request_any(acp_old::CancelSendMessageParams.into_any()); - cx.foreground_executor() - .spawn(async move { - task.await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx) - } - - fn into_any(self: Rc) -> Rc { - self - } -} diff --git a/crates/agent_servers/src/acp/v1.rs b/crates/agent_servers/src/acp/v1.rs deleted file mode 100644 index e0e92f29ba..0000000000 --- a/crates/agent_servers/src/acp/v1.rs +++ /dev/null @@ -1,311 +0,0 @@ -use action_log::ActionLog; -use agent_client_protocol::{self as acp, Agent as _}; -use anyhow::anyhow; -use collections::HashMap; -use futures::AsyncBufReadExt as _; -use futures::channel::oneshot; -use futures::io::BufReader; -use project::Project; -use std::path::Path; -use std::rc::Rc; -use std::{any::Any, cell::RefCell}; - -use anyhow::{Context as _, Result}; -use gpui::{App, AppContext as _, AsyncApp, Entity, Task, WeakEntity}; - -use crate::{AgentServerCommand, acp::UnsupportedVersion}; -use acp_thread::{AcpThread, AgentConnection, AuthRequired, LoadError}; - -pub struct AcpConnection { - server_name: &'static str, - connection: Rc, - sessions: Rc>>, - auth_methods: Vec, - _io_task: Task>, -} - -pub struct AcpSession { - thread: WeakEntity, -} - -const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1; - -impl AcpConnection { - pub async fn stdio( - server_name: &'static str, - command: AgentServerCommand, - root_dir: &Path, - cx: &mut AsyncApp, - ) -> Result { - let mut child = util::command::new_smol_command(&command.path) - .args(command.args.iter().map(|arg| arg.as_str())) - .envs(command.env.iter().flatten()) - .current_dir(root_dir) - .stdin(std::process::Stdio::piped()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::piped()) - .kill_on_drop(true) - .spawn()?; - - let stdout = child.stdout.take().context("Failed to take stdout")?; - let stdin = child.stdin.take().context("Failed to take stdin")?; - let stderr = child.stderr.take().context("Failed to take stderr")?; - log::trace!("Spawned (pid: {})", child.id()); - - let sessions = Rc::new(RefCell::new(HashMap::default())); - - let client = ClientDelegate { - sessions: sessions.clone(), - cx: cx.clone(), - }; - let (connection, io_task) = acp::ClientSideConnection::new(client, stdin, stdout, { - let foreground_executor = cx.foreground_executor().clone(); - move |fut| { - foreground_executor.spawn(fut).detach(); - } - }); - - let io_task = cx.background_spawn(io_task); - - cx.background_spawn(async move { - let mut stderr = BufReader::new(stderr); - let mut line = String::new(); - while let Ok(n) = stderr.read_line(&mut line).await - && n > 0 - { - log::warn!("agent stderr: {}", &line); - line.clear(); - } - }) - .detach(); - - cx.spawn({ - let sessions = sessions.clone(); - async move |cx| { - let status = child.status().await?; - - for session in sessions.borrow().values() { - session - .thread - .update(cx, |thread, cx| { - thread.emit_load_error(LoadError::Exited { status }, cx) - }) - .ok(); - } - - anyhow::Ok(()) - } - }) - .detach(); - - let response = connection - 
.initialize(acp::InitializeRequest { - protocol_version: acp::VERSION, - client_capabilities: acp::ClientCapabilities { - fs: acp::FileSystemCapability { - read_text_file: true, - write_text_file: true, - }, - }, - }) - .await?; - - if response.protocol_version < MINIMUM_SUPPORTED_VERSION { - return Err(UnsupportedVersion.into()); - } - - Ok(Self { - auth_methods: response.auth_methods, - connection: connection.into(), - server_name, - sessions, - _io_task: io_task, - }) - } -} - -impl AgentConnection for AcpConnection { - fn new_thread( - self: Rc, - project: Entity, - cwd: &Path, - cx: &mut App, - ) -> Task>> { - let conn = self.connection.clone(); - let sessions = self.sessions.clone(); - let cwd = cwd.to_path_buf(); - cx.spawn(async move |cx| { - let response = conn - .new_session(acp::NewSessionRequest { - mcp_servers: vec![], - cwd, - }) - .await - .map_err(|err| { - if err.code == acp::ErrorCode::AUTH_REQUIRED.code { - let mut error = AuthRequired::new(); - - if err.message != acp::ErrorCode::AUTH_REQUIRED.message { - error = error.with_description(err.message); - } - - anyhow!(error) - } else { - anyhow!(err) - } - })?; - - let session_id = response.session_id; - let action_log = cx.new(|_| ActionLog::new(project.clone()))?; - let thread = cx.new(|_cx| { - AcpThread::new( - self.server_name, - self.clone(), - project, - action_log, - session_id.clone(), - ) - })?; - - let session = AcpSession { - thread: thread.downgrade(), - }; - sessions.borrow_mut().insert(session_id, session); - - Ok(thread) - }) - } - - fn auth_methods(&self) -> &[acp::AuthMethod] { - &self.auth_methods - } - - fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task> { - let conn = self.connection.clone(); - cx.foreground_executor().spawn(async move { - let result = conn - .authenticate(acp::AuthenticateRequest { - method_id: method_id.clone(), - }) - .await?; - - Ok(result) - }) - } - - fn prompt( - &self, - _id: Option, - params: acp::PromptRequest, - cx: &mut App, - ) -> Task> { - let conn = self.connection.clone(); - cx.foreground_executor().spawn(async move { - let response = conn.prompt(params).await?; - Ok(response) - }) - } - - fn cancel(&self, session_id: &acp::SessionId, cx: &mut App) { - let conn = self.connection.clone(); - let params = acp::CancelNotification { - session_id: session_id.clone(), - }; - cx.foreground_executor() - .spawn(async move { conn.cancel(params).await }) - .detach(); - } - - fn into_any(self: Rc) -> Rc { - self - } -} - -struct ClientDelegate { - sessions: Rc>>, - cx: AsyncApp, -} - -impl acp::Client for ClientDelegate { - async fn request_permission( - &self, - arguments: acp::RequestPermissionRequest, - ) -> Result { - let cx = &mut self.cx.clone(); - let rx = self - .sessions - .borrow() - .get(&arguments.session_id) - .context("Failed to get session")? - .thread - .update(cx, |thread, cx| { - thread.request_tool_call_authorization(arguments.tool_call, arguments.options, cx) - })?; - - let result = rx?.await; - - let outcome = match result { - Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option }, - Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Canceled, - }; - - Ok(acp::RequestPermissionResponse { outcome }) - } - - async fn write_text_file( - &self, - arguments: acp::WriteTextFileRequest, - ) -> Result<(), acp::Error> { - let cx = &mut self.cx.clone(); - let task = self - .sessions - .borrow() - .get(&arguments.session_id) - .context("Failed to get session")? 
- .thread - .update(cx, |thread, cx| { - thread.write_text_file(arguments.path, arguments.content, cx) - })?; - - task.await?; - - Ok(()) - } - - async fn read_text_file( - &self, - arguments: acp::ReadTextFileRequest, - ) -> Result { - let cx = &mut self.cx.clone(); - let task = self - .sessions - .borrow() - .get(&arguments.session_id) - .context("Failed to get session")? - .thread - .update(cx, |thread, cx| { - thread.read_text_file(arguments.path, arguments.line, arguments.limit, false, cx) - })?; - - let content = task.await?; - - Ok(acp::ReadTextFileResponse { content }) - } - - async fn session_notification( - &self, - notification: acp::SessionNotification, - ) -> Result<(), acp::Error> { - let cx = &mut self.cx.clone(); - let sessions = self.sessions.borrow(); - let session = sessions - .get(¬ification.session_id) - .context("Failed to get session")?; - - session.thread.update(cx, |thread, cx| { - thread.handle_session_update(notification.update, cx) - })??; - - Ok(()) - } -} diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index cebf82cddb..7c7e124ca7 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -1,12 +1,14 @@ mod acp; mod claude; +mod custom; mod gemini; mod settings; -#[cfg(test)] -mod e2e_tests; +#[cfg(any(test, feature = "test-support"))] +pub mod e2e_tests; pub use claude::*; +pub use custom::*; pub use gemini::*; pub use settings::*; @@ -31,9 +33,10 @@ pub fn init(cx: &mut App) { pub trait AgentServer: Send { fn logo(&self) -> ui::IconName; - fn name(&self) -> &'static str; - fn empty_state_headline(&self) -> &'static str; - fn empty_state_message(&self) -> &'static str; + fn name(&self) -> SharedString; + fn empty_state_headline(&self) -> SharedString; + fn empty_state_message(&self) -> SharedString; + fn telemetry_id(&self) -> &'static str; fn connect( &self, @@ -95,7 +98,7 @@ pub struct AgentServerCommand { } impl AgentServerCommand { - pub(crate) async fn resolve( + pub async fn resolve( path_bin_name: &'static str, extra_args: &[&'static str], fallback_path: Option<&Path>, diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index 6b9732b468..250e564526 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -30,7 +30,7 @@ use futures::{ io::BufReader, select_biased, }; -use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; +use gpui::{App, AppContext, AsyncApp, Entity, SharedString, Task, WeakEntity}; use serde::{Deserialize, Serialize}; use util::{ResultExt, debug_panic}; @@ -43,16 +43,20 @@ use acp_thread::{AcpThread, AgentConnection, AuthRequired, LoadError, MentionUri pub struct ClaudeCode; impl AgentServer for ClaudeCode { - fn name(&self) -> &'static str { - "Claude Code" + fn telemetry_id(&self) -> &'static str { + "claude-code" } - fn empty_state_headline(&self) -> &'static str { + fn name(&self) -> SharedString { + "Claude Code".into() + } + + fn empty_state_headline(&self) -> SharedString { self.name() } - fn empty_state_message(&self) -> &'static str { - "How can I help you today?" 
+    fn empty_state_message(&self) -> SharedString {
+        "How can I help you today?".into()
     }
 
     fn logo(&self) -> ui::IconName {
@@ -249,13 +253,19 @@ impl AgentConnection for ClaudeAgentConnection {
             });
 
             let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
-            let thread = cx.new(|_cx| {
+            let thread = cx.new(|cx| {
                 AcpThread::new(
                     "Claude Code",
                     self.clone(),
                     project,
                     action_log,
                     session_id.clone(),
+                    watch::Receiver::constant(acp::PromptCapabilities {
+                        image: true,
+                        audio: false,
+                        embedded_context: true,
+                    }),
+                    cx,
                 )
             })?;
@@ -697,7 +707,7 @@ impl ClaudeAgentSession {
                 let stop_reason = match subtype {
                     ResultErrorType::Success => acp::StopReason::EndTurn,
                     ResultErrorType::ErrorMaxTurns => acp::StopReason::MaxTurnRequests,
-                    ResultErrorType::ErrorDuringExecution => acp::StopReason::Canceled,
+                    ResultErrorType::ErrorDuringExecution => acp::StopReason::Cancelled,
                 };
                 end_turn_tx
                     .send(Ok(acp::PromptResponse { stop_reason }))
@@ -1085,7 +1095,7 @@ pub(crate) mod tests {
     use gpui::TestAppContext;
     use serde_json::json;
 
-    crate::common_e2e_tests!(ClaudeCode, allow_option_id = "allow");
+    crate::common_e2e_tests!(async |_, _, _| ClaudeCode, allow_option_id = "allow");
 
     pub fn local_command() -> AgentServerCommand {
         AgentServerCommand {
diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs
new file mode 100644
index 0000000000..72823026d7
--- /dev/null
+++ b/crates/agent_servers/src/custom.rs
@@ -0,0 +1,63 @@
+use crate::{AgentServerCommand, AgentServerSettings};
+use acp_thread::AgentConnection;
+use anyhow::Result;
+use gpui::{App, Entity, SharedString, Task};
+use project::Project;
+use std::{path::Path, rc::Rc};
+use ui::IconName;
+
+/// A generic agent server implementation for custom user-defined agents
+pub struct CustomAgentServer {
+    name: SharedString,
+    command: AgentServerCommand,
+}
+
+impl CustomAgentServer {
+    pub fn new(name: SharedString, settings: &AgentServerSettings) -> Self {
+        Self {
+            name,
+            command: settings.command.clone(),
+        }
+    }
+}
+
+impl crate::AgentServer for CustomAgentServer {
+    fn telemetry_id(&self) -> &'static str {
+        "custom"
+    }
+
+    fn name(&self) -> SharedString {
+        self.name.clone()
+    }
+
+    fn logo(&self) -> IconName {
+        IconName::Terminal
+    }
+
+    fn empty_state_headline(&self) -> SharedString {
+        "No conversations yet".into()
+    }
+
+    fn empty_state_message(&self) -> SharedString {
+        format!("Start a conversation with {}", self.name).into()
+    }
+
+    fn connect(
+        &self,
+        root_dir: &Path,
+        _project: &Entity<Project>,
+        cx: &mut App,
+    ) -> Task<Result<Rc<dyn AgentConnection>>> {
+        let server_name = self.name();
+        let command = self.command.clone();
+        let root_dir = root_dir.to_path_buf();
+
+        cx.spawn(async move |mut cx| {
+            crate::acp::connect(server_name, command, &root_dir, &mut cx).await
+        })
+    }
+
+    fn into_any(self: Rc<Self>) -> Rc<dyn std::any::Any> {
+        self
+    }
+}
diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs
index 8b2703575d..42264b4b4f 100644
--- a/crates/agent_servers/src/e2e_tests.rs
+++ b/crates/agent_servers/src/e2e_tests.rs
@@ -1,24 +1,31 @@
+use crate::AgentServer;
+use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
+use agent_client_protocol as acp;
+use futures::{FutureExt, StreamExt, channel::mpsc, select};
+use gpui::{AppContext, Entity, TestAppContext};
+use indoc::indoc;
+use project::{FakeFs, Project};
 use std::{
     path::{Path, PathBuf},
     sync::Arc,
     time::Duration,
 };
-
-use crate::{AgentServer, AgentServerSettings, AllAgentServersSettings};
-use acp_thread::{AcpThread, AgentThreadEntry, ToolCall,
ToolCallStatus}; -use agent_client_protocol as acp; - -use futures::{FutureExt, StreamExt, channel::mpsc, select}; -use gpui::{Entity, TestAppContext}; -use indoc::indoc; -use project::{FakeFs, Project}; -use settings::{Settings, SettingsStore}; use util::path; -pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; - let project = Project::test(fs, [], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +pub async fn test_basic(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), [], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| thread.send_raw("Hello from Zed!", cx)) @@ -42,8 +49,12 @@ pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppCont }); } -pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let _fs = init_test(cx).await; +pub async fn test_path_mentions(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as _; let tempdir = tempfile::tempdir().unwrap(); std::fs::write( @@ -56,7 +67,13 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes ) .expect("failed to write file"); let project = Project::example([tempdir.path()], &mut cx.to_async()).await; - let thread = new_test_thread(server, project.clone(), tempdir.path(), cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + tempdir.path(), + cx, + ) + .await; thread .update(cx, |thread, cx| { thread.send( @@ -110,15 +127,25 @@ pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut Tes drop(tempdir); } -pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let _fs = init_test(cx).await; +pub async fn test_tool_call(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as _; let tempdir = tempfile::tempdir().unwrap(); let foo_path = tempdir.path().join("foo"); std::fs::write(&foo_path, "Lorem ipsum dolor").expect("failed to write file"); let project = Project::example([tempdir.path()], &mut cx.to_async()).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| { @@ -152,14 +179,23 @@ pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestApp drop(tempdir); } -pub async fn test_tool_call_with_permission( - server: impl AgentServer + 'static, +pub async fn test_tool_call_with_permission( + server: F, allow_option_id: acp::PermissionOptionId, cx: &mut TestAppContext, -) { - let fs = init_test(cx).await; - let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +) where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), 
[path!("/private/tmp").as_ref()], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; let full_turn = thread.update(cx, |thread, cx| { thread.send_raw( r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#, @@ -247,11 +283,21 @@ pub async fn test_tool_call_with_permission( }); } -pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; +pub async fn test_cancel(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; - let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; + let project = Project::test(fs.clone(), [path!("/private/tmp").as_ref()], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; let _ = thread.update(cx, |thread, cx| { thread.send_raw( r#"Run exactly `touch hello.txt && echo "Hello, world!" | tee hello.txt` in the terminal."#, @@ -316,10 +362,20 @@ pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppCon }); } -pub async fn test_thread_drop(server: impl AgentServer + 'static, cx: &mut TestAppContext) { - let fs = init_test(cx).await; - let project = Project::test(fs, [], cx).await; - let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await; +pub async fn test_thread_drop(server: F, cx: &mut TestAppContext) +where + T: AgentServer + 'static, + F: AsyncFn(&Arc, &Entity, &mut TestAppContext) -> T, +{ + let fs = init_test(cx).await as Arc; + let project = Project::test(fs.clone(), [], cx).await; + let thread = new_test_thread( + server(&fs, &project, cx).await, + project.clone(), + "/private/tmp", + cx, + ) + .await; thread .update(cx, |thread, cx| thread.send_raw("Hello from test!", cx)) @@ -386,27 +442,42 @@ macro_rules! 
common_e2e_tests { } }; } +pub use common_e2e_tests; // Helpers pub async fn init_test(cx: &mut TestAppContext) -> Arc { + #[cfg(test)] + use settings::Settings; + env_logger::try_init().ok(); cx.update(|cx| { - let settings_store = SettingsStore::test(cx); + let settings_store = settings::SettingsStore::test(cx); cx.set_global(settings_store); Project::init_settings(cx); language::init(cx); + gpui_tokio::init(cx); + let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap(); + cx.set_http_client(Arc::new(http_client)); + client::init_settings(cx); + let client = client::Client::production(cx); + let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx)); + language_model::init(client.clone(), cx); + language_models::init(user_store, client, cx); + agent_settings::init(cx); crate::settings::init(cx); + #[cfg(test)] crate::AllAgentServersSettings::override_global( - AllAgentServersSettings { - claude: Some(AgentServerSettings { + crate::AllAgentServersSettings { + claude: Some(crate::AgentServerSettings { command: crate::claude::tests::local_command(), }), - gemini: Some(AgentServerSettings { + gemini: Some(crate::AgentServerSettings { command: crate::gemini::tests::local_command(), }), + custom: collections::HashMap::default(), }, cx, ); diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index 813f8b1fe0..5d6a70fa64 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -4,10 +4,10 @@ use std::{any::Any, path::Path}; use crate::{AgentServer, AgentServerCommand}; use acp_thread::{AgentConnection, LoadError}; use anyhow::Result; -use gpui::{Entity, Task}; +use gpui::{App, Entity, SharedString, Task}; +use language_models::provider::google::GoogleLanguageModelProvider; use project::Project; use settings::SettingsStore; -use ui::App; use crate::AllAgentServersSettings; @@ -17,16 +17,20 @@ pub struct Gemini; const ACP_ARG: &str = "--experimental-acp"; impl AgentServer for Gemini { - fn name(&self) -> &'static str { - "Gemini" + fn telemetry_id(&self) -> &'static str { + "gemini-cli" } - fn empty_state_headline(&self) -> &'static str { - "Welcome to Gemini" + fn name(&self) -> SharedString { + "Gemini CLI".into() } - fn empty_state_message(&self) -> &'static str { - "Ask questions, edit files, run commands" + fn empty_state_headline(&self) -> SharedString { + self.name() + } + + fn empty_state_message(&self) -> SharedString { + "Ask questions, edit files, run commands".into() } fn logo(&self) -> ui::IconName { @@ -47,16 +51,20 @@ impl AgentServer for Gemini { settings.get::(None).gemini.clone() })?; - let Some(command) = + let Some(mut command) = AgentServerCommand::resolve("gemini", &[ACP_ARG], None, settings, &project, cx).await else { return Err(LoadError::NotInstalled { error_message: "Failed to find Gemini CLI binary".into(), install_message: "Install Gemini CLI".into(), - install_command: "npm install -g @google/gemini-cli@latest".into() + install_command: Self::install_command().into(), }.into()); }; + if let Some(api_key)= cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() { + command.env.get_or_insert_default().insert("GEMINI_API_KEY".to_owned(), api_key.key); + } + let result = crate::acp::connect(server_name, command.clone(), &root_dir, cx).await; if result.is_err() { let version_fut = util::command::new_smol_command(&command.path) @@ -84,7 +92,7 @@ impl AgentServer for Gemini { current_version ).into(), upgrade_message: "Upgrade Gemini CLI to latest".into(), - 
upgrade_command: "npm install -g @google/gemini-cli@latest".into(),
+                upgrade_command: Self::upgrade_command().into(),
             }.into())
         }
     }
@@ -97,13 +105,27 @@ impl AgentServer for Gemini {
     }
 }
 
+impl Gemini {
+    pub fn binary_name() -> &'static str {
+        "gemini"
+    }
+
+    pub fn install_command() -> &'static str {
+        "npm install -g @google/gemini-cli@preview"
+    }
+
+    pub fn upgrade_command() -> &'static str {
+        "npm install -g @google/gemini-cli@preview"
+    }
+}
+
 #[cfg(test)]
 pub(crate) mod tests {
     use super::*;
     use crate::AgentServerCommand;
     use std::path::Path;
 
-    crate::common_e2e_tests!(Gemini, allow_option_id = "proceed_once");
+    crate::common_e2e_tests!(async |_, _, _| Gemini, allow_option_id = "proceed_once");
 
     pub fn local_command() -> AgentServerCommand {
         let cli_path = Path::new(env!("CARGO_MANIFEST_DIR"))
diff --git a/crates/agent_servers/src/settings.rs b/crates/agent_servers/src/settings.rs
index 645674b5f1..96ac6e3cbe 100644
--- a/crates/agent_servers/src/settings.rs
+++ b/crates/agent_servers/src/settings.rs
@@ -1,6 +1,7 @@
 use crate::AgentServerCommand;
 use anyhow::Result;
-use gpui::App;
+use collections::HashMap;
+use gpui::{App, SharedString};
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use settings::{Settings, SettingsSources};
@@ -13,9 +14,13 @@ pub fn init(cx: &mut App) {
 pub struct AllAgentServersSettings {
     pub gemini: Option<AgentServerSettings>,
     pub claude: Option<AgentServerSettings>,
+
+    /// Custom agent servers configured by the user
+    #[serde(flatten)]
+    pub custom: HashMap<SharedString, AgentServerSettings>,
 }
 
-#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug)]
+#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug, PartialEq)]
 pub struct AgentServerSettings {
     #[serde(flatten)]
     pub command: AgentServerCommand,
@@ -29,13 +34,26 @@ impl settings::Settings for AllAgentServersSettings {
     fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
         let mut settings = AllAgentServersSettings::default();
 
-        for AllAgentServersSettings { gemini, claude } in sources.defaults_and_customizations() {
+        for AllAgentServersSettings {
+            gemini,
+            claude,
+            custom,
+        } in sources.defaults_and_customizations()
+        {
             if gemini.is_some() {
                 settings.gemini = gemini.clone();
             }
             if claude.is_some() {
                 settings.claude = claude.clone();
             }
+
+            // Merge custom agents
+            for (name, config) in custom {
+                // Skip built-in agent names to avoid conflicts
+                if name != "gemini" && name != "claude" {
+                    settings.custom.insert(name.clone(), config.clone());
+                }
+            }
         }
 
         Ok(settings)
diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml
index 43e3b25124..6b0979ee69 100644
--- a/crates/agent_ui/Cargo.toml
+++ b/crates/agent_ui/Cargo.toml
@@ -67,6 +67,7 @@ ordered-float.workspace = true
 parking_lot.workspace = true
 paths.workspace = true
 picker.workspace = true
+postage.workspace = true
 project.workspace = true
 prompt_store.workspace = true
 proto.workspace = true
diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs
index d90520d26a..5b40967069 100644
--- a/crates/agent_ui/src/acp/completion_provider.rs
+++ b/crates/agent_ui/src/acp/completion_provider.rs
@@ -1,8 +1,11 @@
+use std::cell::Cell;
 use std::ops::Range;
+use std::rc::Rc;
 use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
 
 use acp_thread::MentionUri;
+use agent_client_protocol as acp;
 use agent2::{HistoryEntry, HistoryStore};
 use anyhow::Result;
 use editor::{CompletionProvider, Editor, ExcerptId};
@@ -63,6 +66,7 @@ pub struct ContextPickerCompletionProvider {
     workspace: WeakEntity<Workspace>,
     history_store: Entity<HistoryStore>,
     prompt_store: Option<Entity<PromptStore>>,
+
prompt_capabilities: Rc>, } impl ContextPickerCompletionProvider { @@ -71,12 +75,14 @@ impl ContextPickerCompletionProvider { workspace: WeakEntity, history_store: Entity, prompt_store: Option>, + prompt_capabilities: Rc>, ) -> Self { Self { message_editor, workspace, history_store, prompt_store, + prompt_capabilities, } } @@ -102,62 +108,7 @@ impl ContextPickerCompletionProvider { confirm: Some(Arc::new(|_, _, _| true)), }), ContextPickerEntry::Action(action) => { - let (new_text, on_action) = match action { - ContextPickerAction::AddSelections => { - const PLACEHOLDER: &str = "selection "; - let selections = selection_ranges(workspace, cx) - .into_iter() - .enumerate() - .map(|(ix, (buffer, range))| { - ( - buffer, - range, - (PLACEHOLDER.len() * ix)..(PLACEHOLDER.len() * (ix + 1) - 1), - ) - }) - .collect::>(); - - let new_text: String = PLACEHOLDER.repeat(selections.len()); - - let callback = Arc::new({ - let source_range = source_range.clone(); - move |_, window: &mut Window, cx: &mut App| { - let selections = selections.clone(); - let message_editor = message_editor.clone(); - let source_range = source_range.clone(); - window.defer(cx, move |window, cx| { - message_editor - .update(cx, |message_editor, cx| { - message_editor.confirm_mention_for_selection( - source_range, - selections, - window, - cx, - ) - }) - .ok(); - }); - false - } - }); - - (new_text, callback) - } - }; - - Some(Completion { - replace_range: source_range, - new_text, - label: CodeLabel::plain(action.label().to_string(), None), - icon_path: Some(action.icon().path().into()), - documentation: None, - source: project::CompletionSource::Custom, - insert_text_mode: None, - // This ensures that when a user accepts this completion, the - // completion menu will still be shown after "@category " is - // inserted - confirm: Some(on_action), - }) + Self::completion_for_action(action, source_range, message_editor, workspace, cx) } } } @@ -296,9 +247,9 @@ impl ContextPickerCompletionProvider { let abs_path = project.read(cx).absolute_path(&symbol.path, cx)?; let uri = MentionUri::Symbol { - path: abs_path, + abs_path, name: symbol.name.clone(), - line_range: symbol.range.start.0.row..symbol.range.end.0.row, + line_range: symbol.range.start.0.row..=symbol.range.end.0.row, }; let new_text = format!("{} ", uri.as_link()); let new_text_len = new_text.len(); @@ -353,6 +304,71 @@ impl ContextPickerCompletionProvider { }) } + pub(crate) fn completion_for_action( + action: ContextPickerAction, + source_range: Range, + message_editor: WeakEntity, + workspace: &Entity, + cx: &mut App, + ) -> Option { + let (new_text, on_action) = match action { + ContextPickerAction::AddSelections => { + const PLACEHOLDER: &str = "selection "; + let selections = selection_ranges(workspace, cx) + .into_iter() + .enumerate() + .map(|(ix, (buffer, range))| { + ( + buffer, + range, + (PLACEHOLDER.len() * ix)..(PLACEHOLDER.len() * (ix + 1) - 1), + ) + }) + .collect::>(); + + let new_text: String = PLACEHOLDER.repeat(selections.len()); + + let callback = Arc::new({ + let source_range = source_range.clone(); + move |_, window: &mut Window, cx: &mut App| { + let selections = selections.clone(); + let message_editor = message_editor.clone(); + let source_range = source_range.clone(); + window.defer(cx, move |window, cx| { + message_editor + .update(cx, |message_editor, cx| { + message_editor.confirm_mention_for_selection( + source_range, + selections, + window, + cx, + ) + }) + .ok(); + }); + false + } + }); + + (new_text, callback) + } + }; + + 
Some(Completion { + replace_range: source_range, + new_text, + label: CodeLabel::plain(action.label().to_string(), None), + icon_path: Some(action.icon().path().into()), + documentation: None, + source: project::CompletionSource::Custom, + insert_text_mode: None, + // This ensures that when a user accepts this completion, the + // completion menu will still be shown after "@category " is + // inserted + confirm: Some(on_action), + }) + } + fn search( &self, mode: Option, @@ -544,17 +560,19 @@ impl ContextPickerCompletionProvider { }), ); - const RECENT_COUNT: usize = 2; - let threads = self - .history_store - .read(cx) - .recently_opened_entries(cx) - .into_iter() - .filter(|thread| !mentions.contains(&thread.mention_uri())) - .take(RECENT_COUNT) - .collect::>(); + if self.prompt_capabilities.get().embedded_context { + const RECENT_COUNT: usize = 2; + let threads = self + .history_store + .read(cx) + .recently_opened_entries(cx) + .into_iter() + .filter(|thread| !mentions.contains(&thread.mention_uri())) + .take(RECENT_COUNT) + .collect::>(); - recent.extend(threads.into_iter().map(Match::RecentThread)); + recent.extend(threads.into_iter().map(Match::RecentThread)); + } recent } @@ -564,11 +582,17 @@ impl ContextPickerCompletionProvider { workspace: &Entity, cx: &mut App, ) -> Vec { - let mut entries = vec![ - ContextPickerEntry::Mode(ContextPickerMode::File), - ContextPickerEntry::Mode(ContextPickerMode::Symbol), - ContextPickerEntry::Mode(ContextPickerMode::Thread), - ]; + let embedded_context = self.prompt_capabilities.get().embedded_context; + let mut entries = if embedded_context { + vec![ + ContextPickerEntry::Mode(ContextPickerMode::File), + ContextPickerEntry::Mode(ContextPickerMode::Symbol), + ContextPickerEntry::Mode(ContextPickerMode::Thread), + ] + } else { + // File is always available, but we don't need a mode entry + vec![] + }; let has_selection = workspace .read(cx) @@ -583,11 +607,13 @@ impl ContextPickerCompletionProvider { )); } - if self.prompt_store.is_some() { - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules)); - } + if embedded_context { + if self.prompt_store.is_some() { + entries.push(ContextPickerEntry::Mode(ContextPickerMode::Rules)); + } - entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch)); + entries.push(ContextPickerEntry::Mode(ContextPickerMode::Fetch)); + } entries } @@ -625,7 +651,11 @@ impl CompletionProvider for ContextPickerCompletionProvider { let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); let line = lines.next()?; - MentionCompletion::try_parse(line, offset_to_line) + MentionCompletion::try_parse( + self.prompt_capabilities.get().embedded_context, + line, + offset_to_line, + ) }); let Some(state) = state else { return Task::ready(Ok(Vec::new())); @@ -745,12 +775,16 @@ impl CompletionProvider for ContextPickerCompletionProvider { let offset_to_line = buffer.point_to_offset(line_start); let mut lines = buffer.text_for_range(line_start..position).lines(); if let Some(line) = lines.next() { - MentionCompletion::try_parse(line, offset_to_line) - .map(|completion| { - completion.source_range.start <= offset_to_line + position.column as usize - && completion.source_range.end >= offset_to_line + position.column as usize - }) - .unwrap_or(false) + MentionCompletion::try_parse( + self.prompt_capabilities.get().embedded_context, + line, + offset_to_line, + ) + .map(|completion| { + completion.source_range.start <= offset_to_line + position.column as usize 
+ && completion.source_range.end >= offset_to_line + position.column as usize + }) + .unwrap_or(false) } else { false } @@ -771,7 +805,7 @@ pub(crate) fn search_threads( history_store: &Entity, cx: &mut App, ) -> Task> { - let threads = history_store.read(cx).entries(cx); + let threads = history_store.read(cx).entries().collect(); if query.is_empty() { return Task::ready(threads); } @@ -841,7 +875,7 @@ struct MentionCompletion { } impl MentionCompletion { - fn try_parse(line: &str, offset_to_line: usize) -> Option { + fn try_parse(allow_non_file_mentions: bool, line: &str, offset_to_line: usize) -> Option { let last_mention_start = line.rfind('@')?; if last_mention_start >= line.len() { return Some(Self::default()); @@ -865,7 +899,9 @@ impl MentionCompletion { if let Some(mode_text) = parts.next() { end += mode_text.len(); - if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() { + if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() + && (allow_non_file_mentions || matches!(parsed_mode, ContextPickerMode::File)) + { mode = Some(parsed_mode); } else { argument = Some(mode_text.to_string()); @@ -898,10 +934,10 @@ mod tests { #[test] fn test_mention_completion_parse() { - assert_eq!(MentionCompletion::try_parse("Lorem Ipsum", 0), None); + assert_eq!(MentionCompletion::try_parse(true, "Lorem Ipsum", 0), None); assert_eq!( - MentionCompletion::try_parse("Lorem @", 0), + MentionCompletion::try_parse(true, "Lorem @", 0), Some(MentionCompletion { source_range: 6..7, mode: None, @@ -910,7 +946,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @file", 0), + MentionCompletion::try_parse(true, "Lorem @file", 0), Some(MentionCompletion { source_range: 6..11, mode: Some(ContextPickerMode::File), @@ -919,7 +955,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @file ", 0), + MentionCompletion::try_parse(true, "Lorem @file ", 0), Some(MentionCompletion { source_range: 6..12, mode: Some(ContextPickerMode::File), @@ -928,7 +964,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs", 0), + MentionCompletion::try_parse(true, "Lorem @file main.rs", 0), Some(MentionCompletion { source_range: 6..19, mode: Some(ContextPickerMode::File), @@ -937,7 +973,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs ", 0), + MentionCompletion::try_parse(true, "Lorem @file main.rs ", 0), Some(MentionCompletion { source_range: 6..19, mode: Some(ContextPickerMode::File), @@ -946,7 +982,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @file main.rs Ipsum", 0), + MentionCompletion::try_parse(true, "Lorem @file main.rs Ipsum", 0), Some(MentionCompletion { source_range: 6..19, mode: Some(ContextPickerMode::File), @@ -955,7 +991,7 @@ mod tests { ); assert_eq!( - MentionCompletion::try_parse("Lorem @main", 0), + MentionCompletion::try_parse(true, "Lorem @main", 0), Some(MentionCompletion { source_range: 6..11, mode: None, @@ -963,6 +999,28 @@ mod tests { }) ); - assert_eq!(MentionCompletion::try_parse("test@", 0), None); + assert_eq!(MentionCompletion::try_parse(true, "test@", 0), None); + + // Allowed non-file mentions + + assert_eq!( + MentionCompletion::try_parse(true, "Lorem @symbol main", 0), + Some(MentionCompletion { + source_range: 6..18, + mode: Some(ContextPickerMode::Symbol), + argument: Some("main".to_string()), + }) + ); + + // Disallowed non-file mentions + + assert_eq!( + MentionCompletion::try_parse(false, "Lorem @symbol main", 0), + Some(MentionCompletion { + 
source_range: 6..18, + mode: None, + argument: Some("main".to_string()), + }) + ); } } diff --git a/crates/agent_ui/src/acp/entry_view_state.rs b/crates/agent_ui/src/acp/entry_view_state.rs index 67acbb8b5b..0e4080d689 100644 --- a/crates/agent_ui/src/acp/entry_view_state.rs +++ b/crates/agent_ui/src/acp/entry_view_state.rs @@ -1,6 +1,7 @@ -use std::ops::Range; +use std::{cell::Cell, ops::Range, rc::Rc}; use acp_thread::{AcpThread, AgentThreadEntry}; +use agent_client_protocol::{PromptCapabilities, ToolCallId}; use agent2::HistoryStore; use collections::HashMap; use editor::{Editor, EditorMode, MinimapVisibility}; @@ -26,6 +27,7 @@ pub struct EntryViewState { prompt_store: Option>, entries: Vec, prevent_slash_commands: bool, + prompt_capabilities: Rc>, } impl EntryViewState { @@ -34,6 +36,7 @@ impl EntryViewState { project: Entity, history_store: Entity, prompt_store: Option>, + prompt_capabilities: Rc>, prevent_slash_commands: bool, ) -> Self { Self { @@ -43,6 +46,7 @@ impl EntryViewState { prompt_store, entries: Vec::new(), prevent_slash_commands, + prompt_capabilities, } } @@ -80,6 +84,7 @@ impl EntryViewState { self.project.clone(), self.history_store.clone(), self.prompt_store.clone(), + self.prompt_capabilities.clone(), "Edit message - @ to include context", self.prevent_slash_commands, editor::EditorMode::AutoHeight { @@ -106,6 +111,7 @@ impl EntryViewState { } } AgentThreadEntry::ToolCall(tool_call) => { + let id = tool_call.id.clone(); let terminals = tool_call.terminals().cloned().collect::>(); let diffs = tool_call.diffs().cloned().collect::>(); @@ -121,21 +127,31 @@ impl EntryViewState { for terminal in terminals { views.entry(terminal.entity_id()).or_insert_with(|| { - create_terminal( + let element = create_terminal( self.workspace.clone(), self.project.clone(), terminal.clone(), window, cx, ) - .into_any() + .into_any(); + cx.emit(EntryViewEvent { + entry_index: index, + view_event: ViewEvent::NewTerminal(id.clone()), + }); + element }); } for diff in diffs { - views - .entry(diff.entity_id()) - .or_insert_with(|| create_editor_diff(diff.clone(), window, cx).into_any()); + views.entry(diff.entity_id()).or_insert_with(|| { + let element = create_editor_diff(diff.clone(), window, cx).into_any(); + cx.emit(EntryViewEvent { + entry_index: index, + view_event: ViewEvent::NewDiff(id.clone()), + }); + element + }); } } AgentThreadEntry::AssistantMessage(_) => { @@ -187,6 +203,8 @@ pub struct EntryViewEvent { } pub enum ViewEvent { + NewDiff(ToolCallId), + NewTerminal(ToolCallId), MessageEditorEvent(Entity, MessageEditorEvent), } @@ -389,6 +407,7 @@ mod tests { project.clone(), history_store, None, + Default::default(), false, ) }); diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index a50e33dc31..12ae893c31 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -1,41 +1,42 @@ use crate::{ acp::completion_provider::ContextPickerCompletionProvider, - context_picker::fetch_context_picker::fetch_url_content, + context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content}, }; use acp_thread::{MentionUri, selection_name}; use agent_client_protocol as acp; use agent_servers::AgentServer; use agent2::HistoryStore; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Result, anyhow}; use assistant_slash_commands::codeblock_fence_for_path; use collections::{HashMap, HashSet}; use editor::{ Addon, Anchor, AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, 
EditorElement, - EditorEvent, EditorMode, EditorStyle, ExcerptId, FoldPlaceholder, MultiBuffer, + EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, MultiBuffer, SemanticsProvider, ToOffset, actions::Paste, display_map::{Crease, CreaseId, FoldId}, }; use futures::{ - FutureExt as _, TryFutureExt as _, - future::{Shared, join_all, try_join_all}, + FutureExt as _, + future::{Shared, join_all}, }; use gpui::{ - AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable, - HighlightStyle, Image, ImageFormat, Img, KeyContext, Subscription, Task, TextStyle, - UnderlineStyle, WeakEntity, + Animation, AnimationExt as _, AppContext, ClipboardEntry, Context, Entity, EntityId, + EventEmitter, FocusHandle, Focusable, HighlightStyle, Image, ImageFormat, Img, KeyContext, + Subscription, Task, TextStyle, UnderlineStyle, WeakEntity, pulsating_between, }; use language::{Buffer, Language}; use language_model::LanguageModelImage; -use project::{Project, ProjectPath, Worktree}; -use prompt_store::PromptStore; +use postage::stream::Stream as _; +use project::{CompletionIntent, Project, ProjectItem, ProjectPath, Worktree}; +use prompt_store::{PromptId, PromptStore}; use rope::Point; use settings::Settings; use std::{ cell::Cell, ffi::OsStr, - fmt::{Display, Write}, - ops::Range, + fmt::Write, + ops::{Range, RangeInclusive}, path::{Path, PathBuf}, rc::Rc, sync::Arc, @@ -44,13 +45,12 @@ use std::{ use text::{OffsetRangeExt, ToOffset as _}; use theme::ThemeSettings; use ui::{ - ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Icon, IconName, - IconSize, InteractiveElement, IntoElement, Label, LabelCommon, LabelSize, ParentElement, - Render, SelectableButton, SharedString, Styled, TextSize, TintColor, Toggleable, Window, div, - h_flex, px, + ActiveTheme, AnyElement, App, ButtonCommon, ButtonLike, ButtonStyle, Color, Element as _, + FluentBuilder as _, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, + LabelCommon, LabelSize, ParentElement, Render, SelectableButton, SharedString, Styled, + TextSize, TintColor, Toggleable, Window, div, h_flex, px, }; -use url::Url; -use util::ResultExt; +use util::{ResultExt, debug_panic}; use workspace::{Workspace, notifications::NotifyResultExt as _}; use zed_actions::agent::Chat; @@ -64,6 +64,7 @@ pub struct MessageEditor { history_store: Entity, prompt_store: Option>, prevent_slash_commands: bool, + prompt_capabilities: Rc>, _subscriptions: Vec, _parse_slash_command_task: Task<()>, } @@ -73,6 +74,7 @@ pub enum MessageEditorEvent { Send, Cancel, Focus, + LostFocus, } impl EventEmitter for MessageEditor {} @@ -83,6 +85,7 @@ impl MessageEditor { project: Entity, history_store: Entity, prompt_store: Option>, + prompt_capabilities: Rc>, placeholder: impl Into>, prevent_slash_commands: bool, mode: EditorMode, @@ -101,6 +104,7 @@ impl MessageEditor { workspace.clone(), history_store.clone(), prompt_store.clone(), + prompt_capabilities.clone(), ); let semantics_provider = Rc::new(SlashCommandSemanticsProvider { range: Cell::new(None), @@ -128,17 +132,21 @@ impl MessageEditor { editor }); - cx.on_focus(&editor.focus_handle(cx), window, |_, _, cx| { + cx.on_focus_in(&editor.focus_handle(cx), window, |_, _, cx| { cx.emit(MessageEditorEvent::Focus) }) .detach(); + cx.on_focus_out(&editor.focus_handle(cx), window, |_, _, _, cx| { + cx.emit(MessageEditorEvent::LostFocus) + }) + .detach(); let mut subscriptions = Vec::new(); - if prevent_slash_commands { - 
subscriptions.push(cx.subscribe_in(&editor, window, { - let semantics_provider = semantics_provider.clone(); - move |this, editor, event, window, cx| { - if let EditorEvent::Edited { .. } = event { + subscriptions.push(cx.subscribe_in(&editor, window, { + let semantics_provider = semantics_provider.clone(); + move |this, editor, event, window, cx| { + if let EditorEvent::Edited { .. } = event { + if prevent_slash_commands { this.highlight_slash_command( semantics_provider.clone(), editor.clone(), @@ -146,9 +154,12 @@ impl MessageEditor { cx, ); } + let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx)); + this.mention_set.remove_invalid(snapshot); + cx.notify(); } - })); - } + } + })); Self { editor, @@ -158,6 +169,7 @@ impl MessageEditor { history_store, prompt_store, prevent_slash_commands, + prompt_capabilities, _subscriptions: subscriptions, _parse_slash_command_task: Task::ready(()), } @@ -209,9 +221,9 @@ impl MessageEditor { pub fn mentions(&self) -> HashSet { self.mention_set - .uri_by_crease_id + .mentions .values() - .cloned() + .map(|(uri, _)| uri.clone()) .collect() } @@ -230,106 +242,178 @@ impl MessageEditor { let Some((excerpt_id, _, _)) = snapshot.buffer_snapshot.as_singleton() else { return Task::ready(()); }; - let Some(anchor) = snapshot + let Some(start_anchor) = snapshot .buffer_snapshot .anchor_in_excerpt(*excerpt_id, start) else { return Task::ready(()); }; + let end_anchor = snapshot + .buffer_snapshot + .anchor_before(start_anchor.to_offset(&snapshot.buffer_snapshot) + content_len + 1); - if let MentionUri::File { abs_path, .. } = &mention_uri { - let extension = abs_path - .extension() - .and_then(OsStr::to_str) - .unwrap_or_default(); - - if Img::extensions().contains(&extension) && !extension.contains("svg") { - let project = self.project.clone(); - let Some(project_path) = project - .read(cx) - .project_path_for_absolute_path(abs_path, cx) - else { - return Task::ready(()); - }; - let image = cx - .spawn(async move |_, cx| { - let image = project - .update(cx, |project, cx| project.open_image(project_path, cx)) - .map_err(|e| e.to_string())? 
- .await - .map_err(|e| e.to_string())?; - image - .read_with(cx, |image, _cx| image.image.clone()) - .map_err(|e| e.to_string()) - }) - .shared(); - let Some(crease_id) = insert_crease_for_image( - *excerpt_id, - start, - content_len, - Some(abs_path.as_path().into()), - image.clone(), - self.editor.clone(), - window, - cx, - ) else { - return Task::ready(()); - }; - return self.confirm_mention_for_image( - crease_id, - anchor, - Some(abs_path.clone()), - image, - window, - cx, - ); - } - } - - let Some(crease_id) = crate::context_picker::insert_crease_for_mention( - *excerpt_id, - start, - content_len, - crease_text, - mention_uri.icon_path(cx), - self.editor.clone(), - window, - cx, - ) else { + let crease = if let MentionUri::File { abs_path } = &mention_uri + && let Some(extension) = abs_path.extension() + && let Some(extension) = extension.to_str() + && Img::extensions().contains(&extension) + && !extension.contains("svg") + { + let Some(project_path) = self + .project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + log::error!("project path not found"); + return Task::ready(()); + }; + let image = self + .project + .update(cx, |project, cx| project.open_image(project_path, cx)); + let image = cx + .spawn(async move |_, cx| { + let image = image.await.map_err(|e| e.to_string())?; + let image = image + .update(cx, |image, _| image.image.clone()) + .map_err(|e| e.to_string())?; + Ok(image) + }) + .shared(); + insert_crease_for_mention( + *excerpt_id, + start, + content_len, + mention_uri.name().into(), + IconName::Image.path().into(), + Some(image), + self.editor.clone(), + window, + cx, + ) + } else { + insert_crease_for_mention( + *excerpt_id, + start, + content_len, + crease_text, + mention_uri.icon_path(cx), + None, + self.editor.clone(), + window, + cx, + ) + }; + let Some((crease_id, tx)) = crease else { return Task::ready(()); }; - match mention_uri { - MentionUri::Fetch { url } => { - self.confirm_mention_for_fetch(crease_id, anchor, url, window, cx) + let task = match mention_uri.clone() { + MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx), + MentionUri::Directory { abs_path } => self.confirm_mention_for_directory(abs_path, cx), + MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx), + MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx), + MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx), + MentionUri::Symbol { + abs_path, + line_range, + .. + } => self.confirm_mention_for_symbol(abs_path, line_range, cx), + MentionUri::Rule { id, .. } => self.confirm_mention_for_rule(id, cx), + MentionUri::PastedImage => { + debug_panic!("pasted image URI should not be included in completions"); + Task::ready(Err(anyhow!( + "pasted imaged URI should not be included in completions" + ))) } - MentionUri::Directory { abs_path } => { - self.confirm_mention_for_directory(crease_id, anchor, abs_path, window, cx) + MentionUri::Selection { .. 
} => { + // Handled elsewhere + debug_panic!("unexpected selection URI"); + Task::ready(Err(anyhow!("unexpected selection URI"))) } - MentionUri::Thread { id, name } => { - self.confirm_mention_for_thread(crease_id, anchor, id, name, window, cx) + }; + let task = cx + .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) + .shared(); + self.mention_set + .mentions + .insert(crease_id, (mention_uri, task.clone())); + + // Notify the user if we failed to load the mentioned context + cx.spawn_in(window, async move |this, cx| { + let result = task.await.notify_async_err(cx); + drop(tx); + if result.is_none() { + this.update(cx, |this, cx| { + this.editor.update(cx, |editor, cx| { + // Remove mention + editor.edit([(start_anchor..end_anchor, "")], cx); + }); + this.mention_set.mentions.remove(&crease_id); + }) + .ok(); } - MentionUri::TextThread { path, name } => { - self.confirm_mention_for_text_thread(crease_id, anchor, path, name, window, cx) - } - MentionUri::File { .. } - | MentionUri::Symbol { .. } - | MentionUri::Rule { .. } - | MentionUri::Selection { .. } => { - self.mention_set.insert_uri(crease_id, mention_uri.clone()); - Task::ready(()) + }) + } + + fn confirm_mention_for_file( + &mut self, + abs_path: PathBuf, + cx: &mut Context, + ) -> Task> { + let Some(project_path) = self + .project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + return Task::ready(Err(anyhow!("project path not found"))); + }; + let extension = abs_path + .extension() + .and_then(OsStr::to_str) + .unwrap_or_default(); + + if Img::extensions().contains(&extension) && !extension.contains("svg") { + if !self.prompt_capabilities.get().image { + return Task::ready(Err(anyhow!("This model does not support images yet"))); } + let task = self + .project + .update(cx, |project, cx| project.open_image(project_path, cx)); + return cx.spawn(async move |_, cx| { + let image = task.await?; + let image = image.update(cx, |image, _| image.image.clone())?; + let format = image.format; + let image = cx + .update(|cx| LanguageModelImage::from_image(image, cx))? 
+ .await; + if let Some(image) = image { + Ok(Mention::Image(MentionImage { + data: image.source, + format, + })) + } else { + Err(anyhow!("Failed to convert image")) + } + }); } + + let buffer = self + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)); + cx.spawn(async move |_, cx| { + let buffer = buffer.await?; + let mention = buffer.update(cx, |buffer, cx| Mention::Text { + content: buffer.text(), + tracked_buffers: vec![cx.entity()], + })?; + anyhow::Ok(mention) + }) } fn confirm_mention_for_directory( &mut self, - crease_id: CreaseId, - anchor: Anchor, abs_path: PathBuf, - window: &mut Window, cx: &mut Context, - ) -> Task<()> { + ) -> Task> { fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<(Arc, PathBuf)> { let mut files = Vec::new(); @@ -344,24 +428,21 @@ impl MessageEditor { files } - let uri = MentionUri::Directory { - abs_path: abs_path.clone(), - }; let Some(project_path) = self .project .read(cx) .project_path_for_absolute_path(&abs_path, cx) else { - return Task::ready(()); + return Task::ready(Err(anyhow!("project path not found"))); }; let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else { - return Task::ready(()); + return Task::ready(Err(anyhow!("project entry not found"))); }; let Some(worktree) = self.project.read(cx).worktree_for_entry(entry.id, cx) else { - return Task::ready(()); + return Task::ready(Err(anyhow!("worktree not found"))); }; let project = self.project.clone(); - let task = cx.spawn(async move |_, cx| { + cx.spawn(async move |_, cx| { let directory_path = entry.path.clone(); let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?; @@ -391,106 +472,103 @@ impl MessageEditor { let rope = buffer .read_with(cx, |buffer, _cx| buffer.as_rope().clone()) .log_err()?; - Some(rope) + Some((rope, buffer)) }); cx.background_spawn(async move { - let rope = rope_task.await?; - Some((rel_path, full_path, rope.to_string())) + let (rope, buffer) = rope_task.await?; + Some((rel_path, full_path, rope.to_string(), buffer)) }) })) })?; let contents = cx .background_spawn(async move { - let contents = descendants_future.await.into_iter().flatten(); - contents.collect() + let (contents, tracked_buffers) = descendants_future + .await + .into_iter() + .flatten() + .map(|(rel_path, full_path, rope, buffer)| { + ((rel_path, full_path, rope), buffer) + }) + .unzip(); + Mention::Text { + content: render_directory_contents(contents), + tracked_buffers, + } }) .await; anyhow::Ok(contents) - }); - let task = cx - .spawn(async move |_, _| { - task.await - .map(|contents| DirectoryContents(contents).to_string()) - .map_err(|e| e.to_string()) - }) - .shared(); - - self.mention_set - .directories - .insert(abs_path.clone(), task.clone()); - - let editor = self.editor.clone(); - cx.spawn_in(window, async move |this, cx| { - if task.await.notify_async_err(cx).is_some() { - this.update(cx, |this, _| { - this.mention_set.insert_uri(crease_id, uri); - }) - .ok(); - } else { - editor - .update(cx, |editor, cx| { - editor.display_map.update(cx, |display_map, cx| { - display_map.unfold_intersecting(vec![anchor..anchor], true, cx); - }); - editor.remove_creases([crease_id], cx); - }) - .ok(); - this.update(cx, |this, _cx| { - this.mention_set.directories.remove(&abs_path); - }) - .ok(); - } }) } fn confirm_mention_for_fetch( &mut self, - crease_id: CreaseId, - anchor: Anchor, url: url::Url, - window: &mut Window, cx: &mut Context, - ) -> Task<()> { - let Some(http_client) = self + ) -> Task> { + let http_client = 
match self .workspace - .update(cx, |workspace, _cx| workspace.client().http_client()) - .ok() - else { - return Task::ready(()); + .update(cx, |workspace, _| workspace.client().http_client()) + { + Ok(http_client) => http_client, + Err(e) => return Task::ready(Err(e)), }; - - let url_string = url.to_string(); - let fetch = cx - .background_executor() - .spawn(async move { - fetch_url_content(http_client, url_string) - .map_err(|e| e.to_string()) - .await + cx.background_executor().spawn(async move { + let content = fetch_url_content(http_client, url.to_string()).await?; + Ok(Mention::Text { + content, + tracked_buffers: Vec::new(), }) - .shared(); - self.mention_set - .add_fetch_result(url.clone(), fetch.clone()); + }) + } - cx.spawn_in(window, async move |this, cx| { - let fetch = fetch.await.notify_async_err(cx); - this.update(cx, |this, cx| { - if fetch.is_some() { - this.mention_set - .insert_uri(crease_id, MentionUri::Fetch { url }); - } else { - // Remove crease if we failed to fetch - this.editor.update(cx, |editor, cx| { - editor.display_map.update(cx, |display_map, cx| { - display_map.unfold_intersecting(vec![anchor..anchor], true, cx); - }); - editor.remove_creases([crease_id], cx); - }); - this.mention_set.fetch_results.remove(&url); + fn confirm_mention_for_symbol( + &mut self, + abs_path: PathBuf, + line_range: RangeInclusive, + cx: &mut Context, + ) -> Task> { + let Some(project_path) = self + .project + .read(cx) + .project_path_for_absolute_path(&abs_path, cx) + else { + return Task::ready(Err(anyhow!("project path not found"))); + }; + let buffer = self + .project + .update(cx, |project, cx| project.open_buffer(project_path, cx)); + cx.spawn(async move |_, cx| { + let buffer = buffer.await?; + let mention = buffer.update(cx, |buffer, cx| { + let start = Point::new(*line_range.start(), 0).min(buffer.max_point()); + let end = Point::new(*line_range.end() + 1, 0).min(buffer.max_point()); + let content = buffer.text_for_range(start..end).collect(); + Mention::Text { + content, + tracked_buffers: vec![cx.entity()], } + })?; + anyhow::Ok(mention) + }) + } + + fn confirm_mention_for_rule( + &mut self, + id: PromptId, + cx: &mut Context, + ) -> Task> { + let Some(prompt_store) = self.prompt_store.clone() else { + return Task::ready(Err(anyhow!("missing prompt store"))); + }; + let prompt = prompt_store.read(cx).load(id, cx); + cx.spawn(async move |_, _| { + let prompt = prompt.await?; + Ok(Mention::Text { + content: prompt, + tracked_buffers: Vec::new(), }) - .ok(); }) } @@ -515,21 +593,24 @@ impl MessageEditor { let range = snapshot.anchor_after(offset + range_to_fold.start) ..snapshot.anchor_after(offset + range_to_fold.end); - let path = buffer + let abs_path = buffer .read(cx) - .file() - .map_or(PathBuf::from("untitled"), |file| file.path().to_path_buf()); + .project_path(cx) + .and_then(|project_path| self.project.read(cx).absolute_path(&project_path, cx)); let snapshot = buffer.read(cx).snapshot(); + let text = snapshot + .text_for_range(selection_range.clone()) + .collect::(); let point_range = selection_range.to_point(&snapshot); - let line_range = point_range.start.row..point_range.end.row; + let line_range = point_range.start.row..=point_range.end.row; let uri = MentionUri::Selection { - path: path.clone(), + abs_path: abs_path.clone(), line_range: line_range.clone(), }; let crease = crate::context_picker::crease_for_mention( - selection_name(&path, &line_range).into(), + selection_name(abs_path.as_deref(), &line_range).into(), uri.icon_path(cx), range, 
self.editor.downgrade(), @@ -541,137 +622,75 @@ impl MessageEditor { crease_ids.first().copied().unwrap() }); - self.mention_set - .insert_uri(crease_id, MentionUri::Selection { path, line_range }); + self.mention_set.mentions.insert( + crease_id, + ( + uri, + Task::ready(Ok(Mention::Text { + content: text, + tracked_buffers: vec![buffer], + })) + .shared(), + ), + ); } } fn confirm_mention_for_thread( &mut self, - crease_id: CreaseId, - anchor: Anchor, id: acp::SessionId, - name: String, - window: &mut Window, cx: &mut Context, - ) -> Task<()> { - let uri = MentionUri::Thread { - id: id.clone(), - name, - }; + ) -> Task> { let server = Rc::new(agent2::NativeAgentServer::new( self.project.read(cx).fs().clone(), self.history_store.clone(), )); let connection = server.connect(Path::new(""), &self.project, cx); - let load_summary = cx.spawn({ - let id = id.clone(); - async move |_, cx| { - let agent = connection.await?; - let agent = agent.downcast::().unwrap(); - let summary = agent - .0 - .update(cx, |agent, cx| agent.thread_summary(id, cx))? - .await?; - anyhow::Ok(summary) - } - }); - let task = cx - .spawn(async move |_, _| load_summary.await.map_err(|e| format!("{e}"))) - .shared(); - - self.mention_set.insert_thread(id.clone(), task.clone()); - - let editor = self.editor.clone(); - cx.spawn_in(window, async move |this, cx| { - if task.await.notify_async_err(cx).is_some() { - this.update(cx, |this, _| { - this.mention_set.insert_uri(crease_id, uri); - }) - .ok(); - } else { - editor - .update(cx, |editor, cx| { - editor.display_map.update(cx, |display_map, cx| { - display_map.unfold_intersecting(vec![anchor..anchor], true, cx); - }); - editor.remove_creases([crease_id], cx); - }) - .ok(); - this.update(cx, |this, _| { - this.mention_set.thread_summaries.remove(&id); - }) - .ok(); - } + cx.spawn(async move |_, cx| { + let agent = connection.await?; + let agent = agent.downcast::().unwrap(); + let summary = agent + .0 + .update(cx, |agent, cx| agent.thread_summary(id, cx))? 
+ .await?; + anyhow::Ok(Mention::Text { + content: summary.to_string(), + tracked_buffers: Vec::new(), + }) }) } fn confirm_mention_for_text_thread( &mut self, - crease_id: CreaseId, - anchor: Anchor, path: PathBuf, - name: String, - window: &mut Window, cx: &mut Context, - ) -> Task<()> { - let uri = MentionUri::TextThread { - path: path.clone(), - name, - }; + ) -> Task> { let context = self.history_store.update(cx, |text_thread_store, cx| { text_thread_store.load_text_thread(path.as_path().into(), cx) }); - let task = cx - .spawn(async move |_, cx| { - let context = context.await.map_err(|e| e.to_string())?; - let xml = context - .update(cx, |context, cx| context.to_xml(cx)) - .map_err(|e| e.to_string())?; - Ok(xml) + cx.spawn(async move |_, cx| { + let context = context.await?; + let xml = context.update(cx, |context, cx| context.to_xml(cx))?; + Ok(Mention::Text { + content: xml, + tracked_buffers: Vec::new(), }) - .shared(); - - self.mention_set - .insert_text_thread(path.clone(), task.clone()); - - let editor = self.editor.clone(); - cx.spawn_in(window, async move |this, cx| { - if task.await.notify_async_err(cx).is_some() { - this.update(cx, |this, _| { - this.mention_set.insert_uri(crease_id, uri); - }) - .ok(); - } else { - editor - .update(cx, |editor, cx| { - editor.display_map.update(cx, |display_map, cx| { - display_map.unfold_intersecting(vec![anchor..anchor], true, cx); - }); - editor.remove_creases([crease_id], cx); - }) - .ok(); - this.update(cx, |this, _| { - this.mention_set.text_thread_summaries.remove(&path); - }) - .ok(); - } }) } pub fn contents( &self, - window: &mut Window, cx: &mut Context, - ) -> Task>> { - let contents = - self.mention_set - .contents(&self.project, self.prompt_store.as_ref(), window, cx); + ) -> Task, Vec>)>> { + let contents = self + .mention_set + .contents(&self.prompt_capabilities.get(), cx); let editor = self.editor.clone(); let prevent_slash_commands = self.prevent_slash_commands; cx.spawn(async move |_, cx| { let contents = contents.await?; + let mut all_tracked_buffers = Vec::new(); editor.update(cx, |editor, cx| { let mut ix = 0; @@ -680,12 +699,7 @@ impl MessageEditor { editor.display_map.update(cx, |map, cx| { let snapshot = map.snapshot(cx); for (crease_id, crease) in snapshot.crease_snapshot.creases() { - // Skip creases that have been edited out of the message buffer. - if !crease.range().start.is_valid(&snapshot.buffer_snapshot) { - continue; - } - - let Some(mention) = contents.get(&crease_id) else { + let Some((uri, mention)) = contents.get(&crease_id) else { continue; }; @@ -702,7 +716,11 @@ impl MessageEditor { chunks.push(chunk); } let chunk = match mention { - Mention::Text { uri, content } => { + Mention::Text { + content, + tracked_buffers, + } => { + all_tracked_buffers.extend(tracked_buffers.iter().cloned()); acp::ContentBlock::Resource(acp::EmbeddedResource { annotations: None, resource: acp::EmbeddedResourceResource::TextResourceContents( @@ -715,14 +733,33 @@ impl MessageEditor { }) } Mention::Image(mention_image) => { + let uri = match uri { + MentionUri::File { .. 
} => Some(uri.to_uri().to_string()), + MentionUri::PastedImage => None, + other => { + debug_panic!( + "unexpected mention uri for image: {:?}", + other + ); + None + } + }; acp::ContentBlock::Image(acp::ImageContent { annotations: None, data: mention_image.data.to_string(), mime_type: mention_image.format.mime_type().into(), - uri: mention_image - .abs_path - .as_ref() - .map(|path| format!("file://{}", path.display())), + uri, + }) + } + Mention::UriOnly => { + acp::ContentBlock::ResourceLink(acp::ResourceLink { + name: uri.name(), + uri: uri.to_uri().to_string(), + annotations: None, + description: None, + mime_type: None, + size: None, + title: None, }) } }; @@ -745,7 +782,7 @@ impl MessageEditor { } }); - chunks + (chunks, all_tracked_buffers) }) }) } @@ -753,7 +790,13 @@ impl MessageEditor { pub fn clear(&mut self, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.clear(window, cx); - editor.remove_creases(self.mention_set.drain(), cx) + editor.remove_creases( + self.mention_set + .mentions + .drain() + .map(|(crease_id, _)| crease_id), + cx, + ) }); } @@ -769,6 +812,10 @@ impl MessageEditor { } fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context) { + if !self.prompt_capabilities.get().image { + return; + } + let images = cx .read_from_clipboard() .map(|item| { @@ -789,7 +836,7 @@ impl MessageEditor { } cx.stop_propagation(); - let replacement_text = "image"; + let replacement_text = MentionUri::PastedImage.as_link().to_string(); for image in images { let (excerpt_id, text_anchor, multibuffer_anchor) = self.editor.update(cx, |message_editor, cx| { @@ -812,24 +859,64 @@ impl MessageEditor { }); let content_len = replacement_text.len(); - let Some(anchor) = multibuffer_anchor else { - return; + let Some(start_anchor) = multibuffer_anchor else { + continue; }; - let task = Task::ready(Ok(Arc::new(image))).shared(); - let Some(crease_id) = insert_crease_for_image( + let end_anchor = self.editor.update(cx, |editor, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len) + }); + let image = Arc::new(image); + let Some((crease_id, tx)) = insert_crease_for_mention( excerpt_id, text_anchor, content_len, - None.clone(), - task.clone(), + MentionUri::PastedImage.name().into(), + IconName::Image.path().into(), + Some(Task::ready(Ok(image.clone())).shared()), self.editor.clone(), window, cx, ) else { - return; + continue; }; - self.confirm_mention_for_image(crease_id, anchor, None, task, window, cx) - .detach(); + let task = cx + .spawn_in(window, { + async move |_, cx| { + let format = image.format; + let image = cx + .update(|_, cx| LanguageModelImage::from_image(image, cx)) + .map_err(|e| e.to_string())? 
+ .await; + drop(tx); + if let Some(image) = image { + Ok(Mention::Image(MentionImage { + data: image.source, + format, + })) + } else { + Err("Failed to convert image".into()) + } + } + }) + .shared(); + + self.mention_set + .mentions + .insert(crease_id, (MentionUri::PastedImage, task.clone())); + + cx.spawn_in(window, async move |this, cx| { + if task.await.notify_async_err(cx).is_none() { + this.update(cx, |this, cx| { + this.editor.update(cx, |editor, cx| { + editor.edit([(start_anchor..end_anchor, "")], cx); + }); + this.mention_set.mentions.remove(&crease_id); + }) + .ok(); + } + }) + .detach(); } } @@ -892,6 +979,38 @@ impl MessageEditor { .detach(); } + pub fn insert_selections(&mut self, window: &mut Window, cx: &mut Context) { + let buffer = self.editor.read(cx).buffer().clone(); + let Some(buffer) = buffer.read(cx).as_singleton() else { + return; + }; + let anchor = buffer.update(cx, |buffer, _cx| buffer.anchor_before(buffer.len())); + let Some(workspace) = self.workspace.upgrade() else { + return; + }; + let Some(completion) = ContextPickerCompletionProvider::completion_for_action( + ContextPickerAction::AddSelections, + anchor..anchor, + cx.weak_entity(), + &workspace, + cx, + ) else { + return; + }; + self.editor.update(cx, |message_editor, cx| { + message_editor.edit( + [( + multi_buffer::Anchor::max()..multi_buffer::Anchor::max(), + completion.new_text, + )], + cx, + ); + }); + if let Some(confirm) = completion.confirm { + confirm(CompletionIntent::Complete, window, cx); + } + } + pub fn set_read_only(&mut self, read_only: bool, cx: &mut Context) { self.editor.update(cx, |message_editor, cx| { message_editor.set_read_only(read_only); @@ -899,67 +1018,6 @@ impl MessageEditor { }) } - fn confirm_mention_for_image( - &mut self, - crease_id: CreaseId, - anchor: Anchor, - abs_path: Option, - image: Shared, String>>>, - window: &mut Window, - cx: &mut Context, - ) -> Task<()> { - let editor = self.editor.clone(); - let task = cx - .spawn_in(window, { - let abs_path = abs_path.clone(); - async move |_, cx| { - let image = image.await?; - let format = image.format; - let image = cx - .update(|_, cx| LanguageModelImage::from_image(image, cx)) - .map_err(|e| e.to_string())? - .await; - if let Some(image) = image { - Ok(MentionImage { - abs_path, - data: image.source, - format, - }) - } else { - Err("Failed to convert image".into()) - } - } - }) - .shared(); - - self.mention_set.insert_image(crease_id, task.clone()); - - cx.spawn_in(window, async move |this, cx| { - if task.await.notify_async_err(cx).is_some() { - if let Some(abs_path) = abs_path.clone() { - this.update(cx, |this, _cx| { - this.mention_set - .insert_uri(crease_id, MentionUri::File { abs_path }); - }) - .ok(); - } - } else { - editor - .update(cx, |editor, cx| { - editor.display_map.update(cx, |display_map, cx| { - display_map.unfold_intersecting(vec![anchor..anchor], true, cx); - }); - editor.remove_creases([crease_id], cx); - }) - .ok(); - this.update(cx, |this, _cx| { - this.mention_set.images.remove(&crease_id); - }) - .ok(); - } - }) - } - pub fn set_mode(&mut self, mode: EditorMode, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.set_mode(mode); @@ -977,7 +1035,6 @@ impl MessageEditor { let mut text = String::new(); let mut mentions = Vec::new(); - let mut images = Vec::new(); for chunk in message { match chunk { @@ -988,22 +1045,60 @@ impl MessageEditor { resource: acp::EmbeddedResourceResource::TextResourceContents(resource), .. 
}) => { + let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() else { + continue; + }; + let start = text.len(); + write!(&mut text, "{}", mention_uri.as_link()).ok(); + let end = text.len(); + mentions.push(( + start..end, + mention_uri, + Mention::Text { + content: resource.text, + tracked_buffers: Vec::new(), + }, + )); + } + acp::ContentBlock::ResourceLink(resource) => { if let Some(mention_uri) = MentionUri::parse(&resource.uri).log_err() { let start = text.len(); write!(&mut text, "{}", mention_uri.as_link()).ok(); let end = text.len(); - mentions.push((start..end, mention_uri, resource.text)); + mentions.push((start..end, mention_uri, Mention::UriOnly)); } } - acp::ContentBlock::Image(content) => { + acp::ContentBlock::Image(acp::ImageContent { + uri, + data, + mime_type, + annotations: _, + }) => { + let mention_uri = if let Some(uri) = uri { + MentionUri::parse(&uri) + } else { + Ok(MentionUri::PastedImage) + }; + let Some(mention_uri) = mention_uri.log_err() else { + continue; + }; + let Some(format) = ImageFormat::from_mime_type(&mime_type) else { + log::error!("failed to parse MIME type for image: {mime_type:?}"); + continue; + }; let start = text.len(); - text.push_str("image"); + write!(&mut text, "{}", mention_uri.as_link()).ok(); let end = text.len(); - images.push((start..end, content)); + mentions.push(( + start..end, + mention_uri, + Mention::Image(MentionImage { + data: data.into(), + format, + }), + )); } - acp::ContentBlock::Audio(_) - | acp::ContentBlock::Resource(_) - | acp::ContentBlock::ResourceLink(_) => {} + acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => {} } } @@ -1012,88 +1107,27 @@ impl MessageEditor { editor.buffer().read(cx).snapshot(cx) }); - for (range, mention_uri, text) in mentions { + for (range, mention_uri, mention) in mentions { let anchor = snapshot.anchor_before(range.start); - let crease_id = crate::context_picker::insert_crease_for_mention( + let Some((crease_id, tx)) = insert_crease_for_mention( anchor.excerpt_id, anchor.text_anchor, range.end - range.start, mention_uri.name().into(), mention_uri.icon_path(cx), + None, self.editor.clone(), window, cx, - ); - - if let Some(crease_id) = crease_id { - self.mention_set.insert_uri(crease_id, mention_uri.clone()); - } - - match mention_uri { - MentionUri::Thread { id, .. } => { - self.mention_set - .insert_thread(id, Task::ready(Ok(text.into())).shared()); - } - MentionUri::TextThread { path, .. } => { - self.mention_set - .insert_text_thread(path, Task::ready(Ok(text)).shared()); - } - MentionUri::Fetch { url } => { - self.mention_set - .add_fetch_result(url, Task::ready(Ok(text)).shared()); - } - MentionUri::Directory { abs_path } => { - let task = Task::ready(Ok(text)).shared(); - self.mention_set.directories.insert(abs_path, task); - } - MentionUri::File { .. } - | MentionUri::Symbol { .. } - | MentionUri::Rule { .. } - | MentionUri::Selection { .. 
} => {} - } - } - for (range, content) in images { - let Some(format) = ImageFormat::from_mime_type(&content.mime_type) else { + ) else { continue; }; - let anchor = snapshot.anchor_before(range.start); - let abs_path = content - .uri - .as_ref() - .and_then(|uri| uri.strip_prefix("file://").map(|s| Path::new(s).into())); + drop(tx); - let name = content - .uri - .as_ref() - .and_then(|uri| { - uri.strip_prefix("file://") - .and_then(|path| Path::new(path).file_name()) - }) - .map(|name| name.to_string_lossy().to_string()) - .unwrap_or("Image".to_owned()); - let crease_id = crate::context_picker::insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, - range.end - range.start, - name.into(), - IconName::Image.path().into(), - self.editor.clone(), - window, - cx, + self.mention_set.mentions.insert( + crease_id, + (mention_uri.clone(), Task::ready(Ok(mention)).shared()), ); - let data: SharedString = content.data.to_string().into(); - - if let Some(crease_id) = crease_id { - self.mention_set.insert_image( - crease_id, - Task::ready(Ok(MentionImage { - abs_path, - data, - format, - })) - .shared(), - ); - } } cx.notify(); } @@ -1140,29 +1174,25 @@ impl MessageEditor { }) } + pub fn text(&self, cx: &App) -> String { + self.editor.read(cx).text(cx) + } + #[cfg(test)] pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { editor.set_text(text, window, cx); }); } - - #[cfg(test)] - pub fn text(&self, cx: &App) -> String { - self.editor.read(cx).text(cx) - } } -struct DirectoryContents(Arc<[(Arc, PathBuf, String)]>); - -impl Display for DirectoryContents { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - for (_relative_path, full_path, content) in self.0.iter() { - let fence = codeblock_fence_for_path(Some(full_path), None); - write!(f, "\n{fence}\n{content}\n```")?; - } - Ok(()) +fn render_directory_contents(entries: Vec<(Arc, PathBuf, String)>) -> String { + let mut output = String::new(); + for (_relative_path, full_path, content) in entries { + let fence = codeblock_fence_for_path(Some(&full_path), None); + write!(output, "\n{fence}\n{content}\n```").unwrap(); } + output } impl Focusable for MessageEditor { @@ -1210,23 +1240,21 @@ impl Render for MessageEditor { } } -pub(crate) fn insert_crease_for_image( +pub(crate) fn insert_crease_for_mention( excerpt_id: ExcerptId, anchor: text::Anchor, content_len: usize, - abs_path: Option>, - image: Shared, String>>>, + crease_label: SharedString, + crease_icon: SharedString, + // abs_path: Option>, + image: Option, String>>>>, editor: Entity, window: &mut Window, cx: &mut App, -) -> Option { - let crease_label = abs_path - .as_ref() - .and_then(|path| path.file_name()) - .map(|name| name.to_string_lossy().to_string().into()) - .unwrap_or(SharedString::from("Image")); +) -> Option<(CreaseId, postage::barrier::Sender)> { + let (tx, rx) = postage::barrier::channel(); - editor.update(cx, |editor, cx| { + let crease_id = editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); let start = snapshot.anchor_in_excerpt(excerpt_id, anchor)?; @@ -1235,7 +1263,15 @@ pub(crate) fn insert_crease_for_image( let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len); let placeholder = FoldPlaceholder { - render: render_image_fold_icon_button(crease_label, image, cx.weak_entity()), + render: render_fold_icon_button( + crease_label, + crease_icon, + start..end, + rx, + image, + cx.weak_entity(), + cx, + ), merge_adjacent: 
false, ..Default::default() }; @@ -1252,63 +1288,112 @@ pub(crate) fn insert_crease_for_image( editor.fold_creases(vec![crease], false, window, cx); Some(ids[0]) - }) + })?; + + Some((crease_id, tx)) } -fn render_image_fold_icon_button( +fn render_fold_icon_button( label: SharedString, - image_task: Shared, String>>>, + icon: SharedString, + range: Range, + mut loading_finished: postage::barrier::Receiver, + image_task: Option, String>>>>, editor: WeakEntity, + cx: &mut App, ) -> Arc, &mut App) -> AnyElement> { - Arc::new({ - move |fold_id, fold_range, cx| { - let is_in_text_selection = editor - .update(cx, |editor, cx| editor.is_range_selected(&fold_range, cx)) - .unwrap_or_default(); - - ButtonLike::new(fold_id) - .style(ButtonStyle::Filled) - .selected_style(ButtonStyle::Tinted(TintColor::Accent)) - .toggle_state(is_in_text_selection) - .child( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Image) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(label.clone()) - .size(LabelSize::Small) - .buffer_font(cx) - .single_line(), - ), - ) - .hoverable_tooltip({ - let image_task = image_task.clone(); - move |_, cx| { - let image = image_task.peek().cloned().transpose().ok().flatten(); - let image_task = image_task.clone(); - cx.new::(|cx| ImageHover { - image, - _task: cx.spawn(async move |this, cx| { - if let Ok(image) = image_task.clone().await { - this.update(cx, |this, cx| { - if this.image.replace(image).is_none() { - cx.notify(); - } - }) - .ok(); - } - }), - }) - .into() - } - }) - .into_any_element() + let loading = cx.new(|cx| { + let loading = cx.spawn(async move |this, cx| { + loading_finished.recv().await; + this.update(cx, |this: &mut LoadingContext, cx| { + this.loading = None; + cx.notify(); + }) + .ok(); + }); + LoadingContext { + id: cx.entity_id(), + label, + icon, + range, + editor, + loading: Some(loading), + image: image_task.clone(), } - }) + }); + Arc::new(move |_fold_id, _fold_range, _cx| loading.clone().into_any_element()) +} + +struct LoadingContext { + id: EntityId, + label: SharedString, + icon: SharedString, + range: Range, + editor: WeakEntity, + loading: Option>, + image: Option, String>>>>, +} + +impl Render for LoadingContext { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_in_text_selection = self + .editor + .update(cx, |editor, cx| editor.is_range_selected(&self.range, cx)) + .unwrap_or_default(); + ButtonLike::new(("loading-context", self.id)) + .style(ButtonStyle::Filled) + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .toggle_state(is_in_text_selection) + .when_some(self.image.clone(), |el, image_task| { + el.hoverable_tooltip(move |_, cx| { + let image = image_task.peek().cloned().transpose().ok().flatten(); + let image_task = image_task.clone(); + cx.new::(|cx| ImageHover { + image, + _task: cx.spawn(async move |this, cx| { + if let Ok(image) = image_task.clone().await { + this.update(cx, |this, cx| { + if this.image.replace(image).is_none() { + cx.notify(); + } + }) + .ok(); + } + }), + }) + .into() + }) + }) + .child( + h_flex() + .gap_1() + .child( + Icon::from_path(self.icon.clone()) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(self.label.clone()) + .size(LabelSize::Small) + .buffer_font(cx) + .single_line(), + ) + .map(|el| { + if self.loading.is_some() { + el.with_animation( + "loading-context-crease", + Animation::new(Duration::from_secs(2)) + .repeat() + .with_easing(pulsating_between(0.4, 0.8)), + |label, delta| 
label.opacity(delta), + ) + .into_any() + } else { + el.into_any() + } + }), + ) + } } struct ImageHover { @@ -1326,240 +1411,63 @@ impl Render for ImageHover { } } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug, Clone, Eq, PartialEq)] pub enum Mention { - Text { uri: MentionUri, content: String }, + Text { + content: String, + tracked_buffers: Vec>, + }, Image(MentionImage), + UriOnly, } #[derive(Clone, Debug, Eq, PartialEq)] pub struct MentionImage { - pub abs_path: Option, pub data: SharedString, pub format: ImageFormat, } #[derive(Default)] pub struct MentionSet { - uri_by_crease_id: HashMap, - fetch_results: HashMap>>>, - images: HashMap>>>, - thread_summaries: HashMap>>>, - text_thread_summaries: HashMap>>>, - directories: HashMap>>>, + mentions: HashMap>>)>, } impl MentionSet { - pub fn insert_uri(&mut self, crease_id: CreaseId, uri: MentionUri) { - self.uri_by_crease_id.insert(crease_id, uri); - } - - pub fn add_fetch_result(&mut self, url: Url, content: Shared>>) { - self.fetch_results.insert(url, content); - } - - pub fn insert_image( - &mut self, - crease_id: CreaseId, - task: Shared>>, - ) { - self.images.insert(crease_id, task); - } - - fn insert_thread( - &mut self, - id: acp::SessionId, - task: Shared>>, - ) { - self.thread_summaries.insert(id, task); - } - - fn insert_text_thread(&mut self, path: PathBuf, task: Shared>>) { - self.text_thread_summaries.insert(path, task); - } - - pub fn drain(&mut self) -> impl Iterator { - self.fetch_results.clear(); - self.thread_summaries.clear(); - self.text_thread_summaries.clear(); - self.uri_by_crease_id - .drain() - .map(|(id, _)| id) - .chain(self.images.drain().map(|(id, _)| id)) - } - - pub fn contents( + fn contents( &self, - project: &Entity, - prompt_store: Option<&Entity>, - _window: &mut Window, + prompt_capabilities: &acp::PromptCapabilities, cx: &mut App, - ) -> Task>> { - let mut processed_image_creases = HashSet::default(); + ) -> Task>> { + if !prompt_capabilities.embedded_context { + let mentions = self + .mentions + .iter() + .map(|(crease_id, (uri, _))| (*crease_id, (uri.clone(), Mention::UriOnly))) + .collect(); - let mut contents = self - .uri_by_crease_id - .iter() - .map(|(&crease_id, uri)| { - match uri { - MentionUri::File { abs_path, .. } => { - let uri = uri.clone(); - let abs_path = abs_path.to_path_buf(); - - if let Some(task) = self.images.get(&crease_id).cloned() { - processed_image_creases.insert(crease_id); - return cx.spawn(async move |_| { - let image = task.await.map_err(|e| anyhow!("{e}"))?; - anyhow::Ok((crease_id, Mention::Image(image))) - }); - } - - let buffer_task = project.update(cx, |project, cx| { - let path = project - .find_project_path(abs_path, cx) - .context("Failed to find project path")?; - anyhow::Ok(project.open_buffer(path, cx)) - }); - cx.spawn(async move |cx| { - let buffer = buffer_task?.await?; - let content = buffer.read_with(cx, |buffer, _cx| buffer.text())?; - - anyhow::Ok((crease_id, Mention::Text { uri, content })) - }) - } - MentionUri::Directory { abs_path } => { - let Some(content) = self.directories.get(abs_path).cloned() else { - return Task::ready(Err(anyhow!("missing directory load task"))); - }; - let uri = uri.clone(); - cx.spawn(async move |_| { - Ok(( - crease_id, - Mention::Text { - uri, - content: content.await.map_err(|e| anyhow::anyhow!("{e}"))?, - }, - )) - }) - } - MentionUri::Symbol { - path, line_range, .. - } - | MentionUri::Selection { - path, line_range, .. 
- } => { - let uri = uri.clone(); - let path_buf = path.clone(); - let line_range = line_range.clone(); - - let buffer_task = project.update(cx, |project, cx| { - let path = project - .find_project_path(&path_buf, cx) - .context("Failed to find project path")?; - anyhow::Ok(project.open_buffer(path, cx)) - }); - - cx.spawn(async move |cx| { - let buffer = buffer_task?.await?; - let content = buffer.read_with(cx, |buffer, _cx| { - buffer - .text_for_range( - Point::new(line_range.start, 0) - ..Point::new( - line_range.end, - buffer.line_len(line_range.end), - ), - ) - .collect() - })?; - - anyhow::Ok((crease_id, Mention::Text { uri, content })) - }) - } - MentionUri::Thread { id, .. } => { - let Some(content) = self.thread_summaries.get(id).cloned() else { - return Task::ready(Err(anyhow!("missing thread summary"))); - }; - let uri = uri.clone(); - cx.spawn(async move |_| { - Ok(( - crease_id, - Mention::Text { - uri, - content: content - .await - .map_err(|e| anyhow::anyhow!("{e}"))? - .to_string(), - }, - )) - }) - } - MentionUri::TextThread { path, .. } => { - let Some(content) = self.text_thread_summaries.get(path).cloned() else { - return Task::ready(Err(anyhow!("missing text thread summary"))); - }; - let uri = uri.clone(); - cx.spawn(async move |_| { - Ok(( - crease_id, - Mention::Text { - uri, - content: content.await.map_err(|e| anyhow::anyhow!("{e}"))?, - }, - )) - }) - } - MentionUri::Rule { id: prompt_id, .. } => { - let Some(prompt_store) = prompt_store else { - return Task::ready(Err(anyhow!("missing prompt store"))); - }; - let text_task = prompt_store.read(cx).load(*prompt_id, cx); - let uri = uri.clone(); - cx.spawn(async move |_| { - // TODO: report load errors instead of just logging - let text = text_task.await?; - anyhow::Ok((crease_id, Mention::Text { uri, content: text })) - }) - } - MentionUri::Fetch { url } => { - let Some(content) = self.fetch_results.get(url).cloned() else { - return Task::ready(Err(anyhow!("missing fetch result"))); - }; - let uri = uri.clone(); - cx.spawn(async move |_| { - Ok(( - crease_id, - Mention::Text { - uri, - content: content.await.map_err(|e| anyhow::anyhow!("{e}"))?, - }, - )) - }) - } - } - }) - .collect::>(); - - // Handle images that didn't have a mention URI (because they were added by the paste handler). 
- contents.extend(self.images.iter().filter_map(|(crease_id, image)| { - if processed_image_creases.contains(crease_id) { - return None; - } - let crease_id = *crease_id; - let image = image.clone(); - Some(cx.spawn(async move |_| { - Ok(( - crease_id, - Mention::Image(image.await.map_err(|e| anyhow::anyhow!("{e}"))?), - )) - })) - })); + return Task::ready(Ok(mentions)); + } + let mentions = self.mentions.clone(); cx.spawn(async move |_cx| { - let contents = try_join_all(contents).await?.into_iter().collect(); - anyhow::Ok(contents) + let mut contents = HashMap::default(); + for (crease_id, (mention_uri, task)) in mentions { + contents.insert( + crease_id, + (mention_uri, task.await.map_err(|e| anyhow!("{e}"))?), + ); + } + Ok(contents) }) } + + fn remove_invalid(&mut self, snapshot: EditorSnapshot) { + for (crease_id, crease) in snapshot.crease_snapshot.creases() { + if !crease.range().start.is_valid(&snapshot.buffer_snapshot) { + self.mentions.remove(&crease_id); + } + } + } } struct SlashCommandSemanticsProvider { @@ -1572,7 +1480,7 @@ impl SemanticsProvider for SlashCommandSemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>> { + ) -> Option>>> { let snapshot = buffer.read(cx).snapshot(); let offset = position.to_offset(&snapshot); let (start, end) = self.range.get()?; @@ -1580,14 +1488,14 @@ impl SemanticsProvider for SlashCommandSemanticsProvider { return None; } let range = snapshot.anchor_after(start)..snapshot.anchor_after(end); - Some(Task::ready(vec![project::Hover { + Some(Task::ready(Some(vec![project::Hover { contents: vec![project::HoverBlock { text: "Slash commands are not supported".into(), kind: project::HoverBlockKind::PlainText, }], range: Some(range), language: None, - }])) + }]))) } fn inline_values( @@ -1637,7 +1545,7 @@ impl SemanticsProvider for SlashCommandSemanticsProvider { _position: text::Anchor, _kind: editor::GotoDefinitionKind, _cx: &mut App, - ) -> Option>>> { + ) -> Option>>>> { None } @@ -1701,8 +1609,9 @@ impl Addon for MessageEditorAddon { #[cfg(test)] mod tests { - use std::{ops::Range, path::Path, sync::Arc}; + use std::{cell::Cell, ops::Range, path::Path, rc::Rc, sync::Arc}; + use acp_thread::MentionUri; use agent_client_protocol as acp; use agent2::HistoryStore; use assistant_context::ContextStore; @@ -1746,6 +1655,7 @@ mod tests { project.clone(), history_store.clone(), None, + Default::default(), "Test", false, EditorMode::AutoHeight { @@ -1815,10 +1725,8 @@ mod tests { editor.backspace(&Default::default(), window, cx); }); - let content = message_editor - .update_in(cx, |message_editor, window, cx| { - message_editor.contents(window, cx) - }) + let (content, _) = message_editor + .update(cx, |message_editor, cx| message_editor.contents(cx)) .await .unwrap(); @@ -1885,7 +1793,8 @@ mod tests { "six.txt": "6", "seven.txt": "7", "eight.txt": "8", - } + }, + "x.png": "", }), ) .await; @@ -1936,6 +1845,7 @@ mod tests { let context_store = cx.new(|cx| ContextStore::fake(project.clone(), cx)); let history_store = cx.new(|cx| HistoryStore::new(context_store, cx)); + let prompt_capabilities = Rc::new(Cell::new(acp::PromptCapabilities::default())); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -1945,6 +1855,7 @@ mod tests { project.clone(), history_store.clone(), None, + prompt_capabilities.clone(), "Test", false, EditorMode::AutoHeight { @@ -1970,6 +1881,31 @@ mod tests { (message_editor, editor) }); + cx.simulate_input("Lorem @"); + + 
editor.update_in(&mut cx, |editor, window, cx| { + assert_eq!(editor.text(cx), "Lorem @"); + assert!(editor.has_visible_completions_menu()); + + // Only files since we have default capabilities + assert_eq!( + current_completion_labels(editor), + &[ + "eight.txt dir/b/", + "seven.txt dir/b/", + "six.txt dir/b/", + "five.txt dir/b/", + ] + ); + editor.set_text("", window, cx); + }); + + prompt_capabilities.set(acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }); + cx.simulate_input("Lorem "); editor.update(&mut cx, |editor, cx| { @@ -2035,24 +1971,48 @@ mod tests { assert_eq!(fold_ranges(editor, cx).len(), 1); }); + let all_prompt_capabilities = acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }; + let contents = message_editor - .update_in(&mut cx, |message_editor, window, cx| { + .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(&project, None, window, cx) + .contents(&all_prompt_capabilities, cx) }) .await .unwrap() .into_values() .collect::>(); - pretty_assertions::assert_eq!( - contents, - [Mention::Text { - content: "1".into(), - uri: url_one.parse().unwrap() - }] - ); + { + let [(uri, Mention::Text { content, .. })] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "1"); + pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + } + + let contents = message_editor + .update(&mut cx, |message_editor, cx| { + message_editor + .mention_set() + .contents(&acp::PromptCapabilities::default(), cx) + }) + .await + .unwrap() + .into_values() + .collect::>(); + + { + let [(uri, Mention::UriOnly)] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(uri, &url_one.parse::().unwrap()); + } cx.simulate_input(" "); @@ -2088,25 +2048,25 @@ mod tests { cx.run_until_parked(); let contents = message_editor - .update_in(&mut cx, |message_editor, window, cx| { + .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(&project, None, window, cx) + .contents(&all_prompt_capabilities, cx) }) .await .unwrap() .into_values() .collect::>(); - assert_eq!(contents.len(), 2); let url_eight = uri!("file:///dir/b/eight.txt"); - pretty_assertions::assert_eq!( - contents[1], - Mention::Text { - content: "8".to_string(), - uri: url_eight.parse().unwrap(), - } - ); + + { + let [_, (uri, Mention::Text { content, .. })] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "8"); + pretty_assertions::assert_eq!(uri, &url_eight.parse::().unwrap()); + } editor.update(&mut cx, |editor, cx| { assert_eq!( @@ -2198,33 +2158,111 @@ mod tests { }); let contents = message_editor - .update_in(&mut cx, |message_editor, window, cx| { + .update(&mut cx, |message_editor, cx| { message_editor .mention_set() - .contents(&project, None, window, cx) + .contents(&all_prompt_capabilities, cx) }) .await .unwrap() .into_values() .collect::>(); - assert_eq!(contents.len(), 3); - pretty_assertions::assert_eq!( - contents[2], - Mention::Text { - content: "1".into(), - uri: format!("{url_one}?symbol=MySymbol#L1:1").parse().unwrap(), - } - ); + { + let [_, _, (uri, Mention::Text { content, .. 
})] = contents.as_slice() else { + panic!("Unexpected mentions"); + }; + pretty_assertions::assert_eq!(content, "1"); + pretty_assertions::assert_eq!( + uri, + &format!("{url_one}?symbol=MySymbol#L1:1") + .parse::() + .unwrap() + ); + } cx.run_until_parked(); editor.read_with(&cx, |editor, cx| { - assert_eq!( - editor.text(cx), - format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ") - ); - }); + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ") + ); + }); + + // Try to mention an "image" file that will fail to load + cx.simulate_input("@file x.png"); + + editor.update(&mut cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png") + ); + assert!(editor.has_visible_completions_menu()); + assert_eq!(current_completion_labels(editor), &["x.png dir/"]); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + // Getting the message contents fails + message_editor + .update(&mut cx, |message_editor, cx| { + message_editor + .mention_set() + .contents(&all_prompt_capabilities, cx) + }) + .await + .expect_err("Should fail to load x.png"); + + cx.run_until_parked(); + + // Mention was removed + editor.read_with(&cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ") + ); + }); + + // Once more + cx.simulate_input("@file x.png"); + + editor.update(&mut cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png") + ); + assert!(editor.has_visible_completions_menu()); + assert_eq!(current_completion_labels(editor), &["x.png dir/"]); + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx); + }); + + // This time don't immediately get the contents, just let the confirmed completion settle + cx.run_until_parked(); + + // Mention was removed + editor.read_with(&cx, |editor, cx| { + assert_eq!( + editor.text(cx), + format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ") + ); + }); + + // Now getting the contents succeeds, because the invalid mention was removed + let contents = message_editor + .update(&mut cx, |message_editor, cx| { + message_editor + .mention_set() + .contents(&all_prompt_capabilities, cx) + }) + .await + .unwrap(); + assert_eq!(contents.len(), 3); } fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec> { diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index 8a05801139..a49dae25b3 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -1,19 +1,20 @@ -use crate::RemoveSelectedThread; +use crate::acp::AcpThreadView; +use crate::{AgentPanel, RemoveSelectedThread}; use agent2::{HistoryEntry, HistoryStore}; use chrono::{Datelike as _, Local, NaiveDate, TimeDelta}; use editor::{Editor, EditorEvent}; -use fuzzy::{StringMatch, StringMatchCandidate}; +use fuzzy::StringMatchCandidate; use gpui::{ - App, Empty, Entity, EventEmitter, 
FocusHandle, Focusable, ScrollStrategy, Stateful, Task, - UniformListScrollHandle, Window, uniform_list, + App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Stateful, Task, + UniformListScrollHandle, WeakEntity, Window, uniform_list, }; -use std::{fmt::Display, ops::Range, sync::Arc}; +use std::{fmt::Display, ops::Range}; +use text::Bias; use time::{OffsetDateTime, UtcOffset}; use ui::{ HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Scrollbar, ScrollbarState, Tooltip, prelude::*, }; -use util::ResultExt; pub struct AcpThreadHistory { pub(crate) history_store: Entity, @@ -21,38 +22,38 @@ pub struct AcpThreadHistory { selected_index: usize, hovered_index: Option, search_editor: Entity, - all_entries: Arc>, - // When the search is empty, we display date separators between history entries - // This vector contains an enum of either a separator or an actual entry - separated_items: Vec, - // Maps entry indexes to list item indexes - separated_item_indexes: Vec, - _separated_items_task: Option>, - search_state: SearchState, + search_query: SharedString, + + visible_items: Vec, + scrollbar_visibility: bool, scrollbar_state: ScrollbarState, local_timezone: UtcOffset, - _subscriptions: Vec, -} -enum SearchState { - Empty, - Searching { - query: SharedString, - _task: Task<()>, - }, - Searched { - query: SharedString, - matches: Vec, - }, + _update_task: Task<()>, + _subscriptions: Vec, } enum ListItemType { BucketSeparator(TimeBucket), Entry { - index: usize, + entry: HistoryEntry, format: EntryTimeFormat, }, + SearchResult { + entry: HistoryEntry, + positions: Vec, + }, +} + +impl ListItemType { + fn history_entry(&self) -> Option<&HistoryEntry> { + match self { + ListItemType::Entry { entry, .. } => Some(entry), + ListItemType::SearchResult { entry, .. 
} => Some(entry), + _ => None, + } + } } pub enum ThreadHistoryEvent { @@ -77,12 +78,15 @@ impl AcpThreadHistory { cx.subscribe(&search_editor, |this, search_editor, event, cx| { if let EditorEvent::BufferEdited = event { let query = search_editor.read(cx).text(cx); - this.search(query.into(), cx); + if this.search_query != query { + this.search_query = query.into(); + this.update_visible_items(false, cx); + } } }); let history_store_subscription = cx.observe(&history_store, |this, _, cx| { - this.update_all_entries(cx); + this.update_visible_items(true, cx); }); let scroll_handle = UniformListScrollHandle::default(); @@ -93,10 +97,7 @@ impl AcpThreadHistory { scroll_handle, selected_index: 0, hovered_index: None, - search_state: SearchState::Empty, - all_entries: Default::default(), - separated_items: Default::default(), - separated_item_indexes: Default::default(), + visible_items: Default::default(), search_editor, scrollbar_visibility: true, scrollbar_state, @@ -104,29 +105,61 @@ impl AcpThreadHistory { chrono::Local::now().offset().local_minus_utc(), ) .unwrap(), + search_query: SharedString::default(), _subscriptions: vec![search_editor_subscription, history_store_subscription], - _separated_items_task: None, + _update_task: Task::ready(()), }; - this.update_all_entries(cx); + this.update_visible_items(false, cx); this } - fn update_all_entries(&mut self, cx: &mut Context) { - let new_entries: Arc> = self + fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { + let entries = self .history_store - .update(cx, |store, cx| store.entries(cx)) - .into(); + .update(cx, |store, _| store.entries().collect()); + let new_list_items = if self.search_query.is_empty() { + self.add_list_separators(entries, cx) + } else { + self.filter_search_results(entries, cx) + }; + let selected_history_entry = if preserve_selected_item { + self.selected_history_entry().cloned() + } else { + None + }; - self._separated_items_task.take(); + self._update_task = cx.spawn(async move |this, cx| { + let new_visible_items = new_list_items.await; + this.update(cx, |this, cx| { + let new_selected_index = if let Some(history_entry) = selected_history_entry { + let history_entry_id = history_entry.id(); + new_visible_items + .iter() + .position(|visible_entry| { + visible_entry + .history_entry() + .is_some_and(|entry| entry.id() == history_entry_id) + }) + .unwrap_or(0) + } else { + 0 + }; - let mut items = Vec::with_capacity(new_entries.len() + 1); - let mut indexes = Vec::with_capacity(new_entries.len() + 1); + this.visible_items = new_visible_items; + this.set_selected_index(new_selected_index, Bias::Right, cx); + cx.notify(); + }) + .ok(); + }); + } - let bg_task = cx.background_spawn(async move { + fn add_list_separators(&self, entries: Vec, cx: &App) -> Task> { + cx.background_spawn(async move { + let mut items = Vec::with_capacity(entries.len() + 1); let mut bucket = None; let today = Local::now().naive_local().date(); - for (index, entry) in new_entries.iter().enumerate() { + for entry in entries.into_iter() { let entry_date = entry .updated_at() .with_timezone(&Local) @@ -139,75 +172,33 @@ impl AcpThreadHistory { items.push(ListItemType::BucketSeparator(entry_bucket)); } - indexes.push(items.len() as u32); items.push(ListItemType::Entry { - index, + entry, format: entry_bucket.into(), }); } - (new_entries, items, indexes) - }); - - let task = cx.spawn(async move |this, cx| { - let (new_entries, items, indexes) = bg_task.await; - this.update(cx, |this, cx| { - let 
previously_selected_entry = - this.all_entries.get(this.selected_index).map(|e| e.id()); - - this.all_entries = new_entries; - this.separated_items = items; - this.separated_item_indexes = indexes; - - match &this.search_state { - SearchState::Empty => { - if this.selected_index >= this.all_entries.len() { - this.set_selected_entry_index( - this.all_entries.len().saturating_sub(1), - cx, - ); - } else if let Some(prev_id) = previously_selected_entry - && let Some(new_ix) = this - .all_entries - .iter() - .position(|probe| probe.id() == prev_id) - { - this.set_selected_entry_index(new_ix, cx); - } - } - SearchState::Searching { query, .. } | SearchState::Searched { query, .. } => { - this.search(query.clone(), cx); - } - } - - cx.notify(); - }) - .log_err(); - }); - self._separated_items_task = Some(task); + items + }) } - fn search(&mut self, query: SharedString, cx: &mut Context) { - if query.is_empty() { - self.search_state = SearchState::Empty; - cx.notify(); - return; - } - - let all_entries = self.all_entries.clone(); - - let fuzzy_search_task = cx.background_spawn({ - let query = query.clone(); + fn filter_search_results( + &self, + entries: Vec, + cx: &App, + ) -> Task> { + let query = self.search_query.clone(); + cx.background_spawn({ let executor = cx.background_executor().clone(); async move { - let mut candidates = Vec::with_capacity(all_entries.len()); + let mut candidates = Vec::with_capacity(entries.len()); - for (idx, entry) in all_entries.iter().enumerate() { + for (idx, entry) in entries.iter().enumerate() { candidates.push(StringMatchCandidate::new(idx, entry.title())); } const MAX_MATCHES: usize = 100; - fuzzy::match_strings( + let matches = fuzzy::match_strings( &candidates, &query, false, @@ -216,74 +207,61 @@ impl AcpThreadHistory { &Default::default(), executor, ) - .await + .await; + + matches + .into_iter() + .map(|search_match| ListItemType::SearchResult { + entry: entries[search_match.candidate_id].clone(), + positions: search_match.positions, + }) + .collect() } - }); - - let task = cx.spawn({ - let query = query.clone(); - async move |this, cx| { - let matches = fuzzy_search_task.await; - - this.update(cx, |this, cx| { - let SearchState::Searching { - query: current_query, - _task, - } = &this.search_state - else { - return; - }; - - if &query == current_query { - this.search_state = SearchState::Searched { - query: query.clone(), - matches, - }; - - this.set_selected_entry_index(0, cx); - cx.notify(); - }; - }) - .log_err(); - } - }); - - self.search_state = SearchState::Searching { query, _task: task }; - cx.notify(); - } - - fn matched_count(&self) -> usize { - match &self.search_state { - SearchState::Empty => self.all_entries.len(), - SearchState::Searching { .. } => 0, - SearchState::Searched { matches, .. } => matches.len(), - } - } - - fn list_item_count(&self) -> usize { - match &self.search_state { - SearchState::Empty => self.separated_items.len(), - SearchState::Searching { .. } => 0, - SearchState::Searched { matches, .. } => matches.len(), - } + }) } fn search_produced_no_matches(&self) -> bool { - match &self.search_state { - SearchState::Empty => false, - SearchState::Searching { .. } => false, - SearchState::Searched { matches, .. } => matches.is_empty(), - } + self.visible_items.is_empty() && !self.search_query.is_empty() } - fn get_match(&self, ix: usize) -> Option<&HistoryEntry> { - match &self.search_state { - SearchState::Empty => self.all_entries.get(ix), - SearchState::Searching { .. } => None, - SearchState::Searched { matches, .. 
} => matches - .get(ix) - .and_then(|m| self.all_entries.get(m.candidate_id)), + fn selected_history_entry(&self) -> Option<&HistoryEntry> { + self.get_history_entry(self.selected_index) + } + + fn get_history_entry(&self, visible_items_ix: usize) -> Option<&HistoryEntry> { + self.visible_items.get(visible_items_ix)?.history_entry() + } + + fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context) { + if self.visible_items.len() == 0 { + self.selected_index = 0; + return; } + while matches!( + self.visible_items.get(index), + None | Some(ListItemType::BucketSeparator(..)) + ) { + index = match bias { + Bias::Left => { + if index == 0 { + self.visible_items.len() - 1 + } else { + index - 1 + } + } + Bias::Right => { + if index >= self.visible_items.len() - 1 { + 0 + } else { + index + 1 + } + } + }; + } + self.selected_index = index; + self.scroll_handle + .scroll_to_item(index, ScrollStrategy::Top); + cx.notify() } pub fn select_previous( @@ -292,13 +270,10 @@ impl AcpThreadHistory { _window: &mut Window, cx: &mut Context, ) { - let count = self.matched_count(); - if count > 0 { - if self.selected_index == 0 { - self.set_selected_entry_index(count - 1, cx); - } else { - self.set_selected_entry_index(self.selected_index - 1, cx); - } + if self.selected_index == 0 { + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); + } else { + self.set_selected_index(self.selected_index - 1, Bias::Left, cx); } } @@ -308,13 +283,10 @@ impl AcpThreadHistory { _window: &mut Window, cx: &mut Context, ) { - let count = self.matched_count(); - if count > 0 { - if self.selected_index == count - 1 { - self.set_selected_entry_index(0, cx); - } else { - self.set_selected_entry_index(self.selected_index + 1, cx); - } + if self.selected_index == self.visible_items.len() - 1 { + self.set_selected_index(0, Bias::Right, cx); + } else { + self.set_selected_index(self.selected_index + 1, Bias::Right, cx); } } @@ -324,35 +296,47 @@ impl AcpThreadHistory { _window: &mut Window, cx: &mut Context, ) { - let count = self.matched_count(); - if count > 0 { - self.set_selected_entry_index(0, cx); - } + self.set_selected_index(0, Bias::Right, cx); } fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { - let count = self.matched_count(); - if count > 0 { - self.set_selected_entry_index(count - 1, cx); - } + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); } - fn set_selected_entry_index(&mut self, entry_index: usize, cx: &mut Context) { - self.selected_index = entry_index; + fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { + self.confirm_entry(self.selected_index, cx); + } - let scroll_ix = match self.search_state { - SearchState::Empty | SearchState::Searching { .. } => self - .separated_item_indexes - .get(entry_index) - .map(|ix| *ix as usize) - .unwrap_or(entry_index + 1), - SearchState::Searched { .. 
} => entry_index, + fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(ix) else { + return; + }; + cx.emit(ThreadHistoryEvent::Open(entry.clone())); + } + + fn remove_selected_thread( + &mut self, + _: &RemoveSelectedThread, + _window: &mut Window, + cx: &mut Context, + ) { + self.remove_thread(self.selected_index, cx) + } + + fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(visible_item_ix) else { + return; }; - self.scroll_handle - .scroll_to_item(scroll_ix, ScrollStrategy::Top); - - cx.notify(); + let task = match entry { + HistoryEntry::AcpThread(thread) => self + .history_store + .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)), + HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| { + this.delete_text_thread(context.path.clone(), cx) + }), + }; + task.detach_and_log_err(cx); } fn render_scrollbar(&self, cx: &mut Context) -> Option> { @@ -392,91 +376,33 @@ impl AcpThreadHistory { ) } - fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { - self.confirm_entry(self.selected_index, cx); - } - - fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { - let Some(entry) = self.get_match(ix) else { - return; - }; - cx.emit(ThreadHistoryEvent::Open(entry.clone())); - } - - fn remove_selected_thread( - &mut self, - _: &RemoveSelectedThread, - _window: &mut Window, - cx: &mut Context, - ) { - self.remove_thread(self.selected_index, cx) - } - - fn remove_thread(&mut self, ix: usize, cx: &mut Context) { - let Some(entry) = self.get_match(ix) else { - return; - }; - - let task = match entry { - HistoryEntry::AcpThread(thread) => self - .history_store - .update(cx, |this, cx| this.delete_thread(thread.id.clone(), cx)), - HistoryEntry::TextThread(context) => self.history_store.update(cx, |this, cx| { - this.delete_text_thread(context.path.clone(), cx) - }), - }; - task.detach_and_log_err(cx); - } - - fn list_items( + fn render_list_items( &mut self, range: Range, _window: &mut Window, cx: &mut Context, ) -> Vec { - match &self.search_state { - SearchState::Empty => self - .separated_items - .get(range) - .iter() - .flat_map(|items| { - items - .iter() - .map(|item| self.render_list_item(item, vec![], cx)) - }) - .collect(), - SearchState::Searched { matches, .. } => matches[range] - .iter() - .filter_map(|m| { - let entry = self.all_entries.get(m.candidate_id)?; - Some(self.render_history_entry( - entry, - EntryTimeFormat::DateAndTime, - m.candidate_id, - m.positions.clone(), - cx, - )) - }) - .collect(), - SearchState::Searching { .. 
} => { - vec![] - } - } + self.visible_items + .get(range.clone()) + .into_iter() + .flatten() + .enumerate() + .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx)) + .collect() } - fn render_list_item( - &self, - item: &ListItemType, - highlight_positions: Vec, - cx: &Context, - ) -> AnyElement { + fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context) -> AnyElement { match item { - ListItemType::Entry { index, format } => match self.all_entries.get(*index) { - Some(entry) => self - .render_history_entry(entry, *format, *index, highlight_positions, cx) - .into_any(), - None => Empty.into_any_element(), - }, + ListItemType::Entry { entry, format } => self + .render_history_entry(entry, *format, ix, Vec::default(), cx) + .into_any(), + ListItemType::SearchResult { entry, positions } => self.render_history_entry( + entry, + EntryTimeFormat::DateAndTime, + ix, + positions.clone(), + cx, + ), ListItemType::BucketSeparator(bucket) => div() .px(DynamicSpacing::Base06.rems(cx)) .pt_2() @@ -494,12 +420,12 @@ impl AcpThreadHistory { &self, entry: &HistoryEntry, format: EntryTimeFormat, - list_entry_ix: usize, + ix: usize, highlight_positions: Vec, cx: &Context, ) -> AnyElement { - let selected = list_entry_ix == self.selected_index; - let hovered = Some(list_entry_ix) == self.hovered_index; + let selected = ix == self.selected_index; + let hovered = Some(ix) == self.hovered_index; let timestamp = entry.updated_at().timestamp(); let thread_timestamp = format.format_timestamp(timestamp, self.local_timezone); @@ -507,7 +433,7 @@ impl AcpThreadHistory { .w_full() .pb_1() .child( - ListItem::new(list_entry_ix) + ListItem::new(ix) .rounded() .toggle_state(selected) .spacing(ListItemSpacing::Sparse) @@ -529,14 +455,14 @@ impl AcpThreadHistory { ) .on_hover(cx.listener(move |this, is_hovered, _window, cx| { if *is_hovered { - this.hovered_index = Some(list_entry_ix); - } else if this.hovered_index == Some(list_entry_ix) { + this.hovered_index = Some(ix); + } else if this.hovered_index == Some(ix) { this.hovered_index = None; } cx.notify(); })) - .end_slot::(if hovered || selected { + .end_slot::(if hovered { Some( IconButton::new("delete", IconName::Trash) .shape(IconButtonShape::Square) @@ -545,16 +471,14 @@ impl AcpThreadHistory { .tooltip(move |window, cx| { Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) }) - .on_click(cx.listener(move |this, _, _, cx| { - this.remove_thread(list_entry_ix, cx) - })), + .on_click( + cx.listener(move |this, _, _, cx| this.remove_thread(ix, cx)), + ), ) } else { None }) - .on_click( - cx.listener(move |this, _, _, cx| this.confirm_entry(list_entry_ix, cx)), - ), + .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))), ) .into_any_element() } @@ -577,7 +501,7 @@ impl Render for AcpThreadHistory { .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::remove_selected_thread)) - .when(!self.all_entries.is_empty(), |parent| { + .when(!self.history_store.read(cx).is_empty(cx), |parent| { parent.child( h_flex() .h(px(41.)) // Match the toolbar perfectly @@ -603,7 +527,7 @@ impl Render for AcpThreadHistory { .overflow_hidden() .flex_grow(); - if self.all_entries.is_empty() { + if self.history_store.read(cx).is_empty(cx) { view.justify_center() .child( h_flex().w_full().justify_center().child( @@ -622,9 +546,9 @@ impl Render for AcpThreadHistory { .child( uniform_list( "thread-history", - self.list_item_count(), + self.visible_items.len(), 
cx.processor(|this, range: Range, window, cx| { - this.list_items(range, window, cx) + this.render_list_items(range, window, cx) }), ) .p_1() @@ -639,6 +563,141 @@ impl Render for AcpThreadHistory { } } +#[derive(IntoElement)] +pub struct AcpHistoryEntryElement { + entry: HistoryEntry, + thread_view: WeakEntity, + selected: bool, + hovered: bool, + on_hover: Box, +} + +impl AcpHistoryEntryElement { + pub fn new(entry: HistoryEntry, thread_view: WeakEntity) -> Self { + Self { + entry, + thread_view, + selected: false, + hovered: false, + on_hover: Box::new(|_, _, _| {}), + } + } + + pub fn hovered(mut self, hovered: bool) -> Self { + self.hovered = hovered; + self + } + + pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self { + self.on_hover = Box::new(on_hover); + self + } +} + +impl RenderOnce for AcpHistoryEntryElement { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let id = self.entry.id(); + let title = self.entry.title(); + let timestamp = self.entry.updated_at(); + + let formatted_time = { + let now = chrono::Utc::now(); + let duration = now.signed_duration_since(timestamp); + + if duration.num_days() > 0 { + format!("{}d", duration.num_days()) + } else if duration.num_hours() > 0 { + format!("{}h ago", duration.num_hours()) + } else if duration.num_minutes() > 0 { + format!("{}m ago", duration.num_minutes()) + } else { + "Just now".to_string() + } + }; + + ListItem::new(id) + .rounded() + .toggle_state(self.selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child(Label::new(title).size(LabelSize::Small).truncate()) + .child( + Label::new(formatted_time) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .on_hover(self.on_hover) + .end_slot::(if self.hovered || self.selected { + Some( + IconButton::new("delete", IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx) + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let entry = self.entry.clone(); + + move |_event, _window, cx| { + if let Some(thread_view) = thread_view.upgrade() { + thread_view.update(cx, |thread_view, cx| { + thread_view.delete_history_entry(entry.clone(), cx); + }); + } + } + }), + ) + } else { + None + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let entry = self.entry; + + move |_event, window, cx| { + if let Some(workspace) = thread_view + .upgrade() + .and_then(|view| view.read(cx).workspace().upgrade()) + { + match &entry { + HistoryEntry::AcpThread(thread_metadata) => { + if let Some(panel) = workspace.read(cx).panel::(cx) { + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + thread_metadata.clone(), + window, + cx, + ); + }); + } + } + HistoryEntry::TextThread(context) => { + if let Some(panel) = workspace.read(cx).panel::(cx) { + panel.update(cx, |panel, cx| { + panel + .open_saved_prompt_editor( + context.path.clone(), + window, + cx, + ) + .detach_and_log_err(cx); + }); + } + } + } + } + } + }) + } +} + #[derive(Clone, Copy)] pub enum EntryTimeFormat { DateAndTime, diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 4ce55cce56..837ce6f90a 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -5,17 +5,17 @@ use acp_thread::{ }; use acp_thread::{AgentConnection, Plan}; 
use action_log::ActionLog; -use agent_client_protocol::{self as acp}; +use agent_client_protocol::{self as acp, PromptCapabilities}; use agent_servers::{AgentServer, ClaudeCode}; use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting}; -use agent2::{DbThreadMetadata, HistoryEntryId, HistoryStore}; +use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore}; use anyhow::bail; use audio::{Audio, Sound}; use buffer_diff::BufferDiff; use client::zed_urls; use collections::{HashMap, HashSet}; use editor::scroll::Autoscroll; -use editor::{Editor, EditorMode, MultiBuffer, PathKey, SelectionEffects}; +use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects}; use file_icons::FileIcons; use fs::Fs; use gpui::{ @@ -34,6 +34,8 @@ use project::{Project, ProjectEntryId}; use prompt_store::{PromptId, PromptStore}; use rope::Point; use settings::{Settings as _, SettingsStore}; +use std::cell::Cell; +use std::path::Path; use std::sync::Arc; use std::time::Instant; use std::{collections::BTreeMap, rc::Rc, time::Duration}; @@ -41,7 +43,7 @@ use text::Anchor; use theme::ThemeSettings; use ui::{ Callout, Disclosure, Divider, DividerColor, ElevationIndex, KeyBinding, PopoverMenuHandle, - Scrollbar, ScrollbarState, Tooltip, prelude::*, + Scrollbar, ScrollbarState, SpinnerLabel, Tooltip, prelude::*, }; use util::{ResultExt, size::format_file_size, time::duration_alt_display}; use workspace::{CollaboratorId, Workspace}; @@ -54,11 +56,14 @@ use crate::acp::entry_view_state::{EntryViewEvent, ViewEvent}; use crate::acp::message_editor::{MessageEditor, MessageEditorEvent}; use crate::agent_diff::AgentDiff; use crate::profile_selector::{ProfileProvider, ProfileSelector}; + use crate::ui::preview::UsageCallout; -use crate::ui::{AgentNotification, AgentNotificationEvent, BurnModeTooltip}; +use crate::ui::{ + AgentNotification, AgentNotificationEvent, BurnModeTooltip, UnavailableEditingTooltip, +}; use crate::{ AgentDiffPane, AgentPanel, ContinueThread, ContinueWithBurnMode, ExpandMessageEditor, Follow, - KeepAll, OpenAgentDiff, RejectAll, ToggleBurnMode, ToggleProfileSelector, + KeepAll, OpenAgentDiff, OpenHistory, RejectAll, ToggleBurnMode, ToggleProfileSelector, }; const RESPONSE_PADDING_X: Pixels = px(19.); @@ -75,11 +80,12 @@ enum ThreadError { PaymentRequired, ModelRequestLimitReached(cloud_llm_client::Plan), ToolUseLimitReached, + AuthenticationRequired(SharedString), Other(SharedString), } impl ThreadError { - fn from_err(error: anyhow::Error) -> Self { + fn from_err(error: anyhow::Error, agent: &Rc) -> Self { if error.is::() { Self::PaymentRequired } else if error.is::() { @@ -89,7 +95,17 @@ impl ThreadError { { Self::ModelRequestLimitReached(error.plan) } else { - Self::Other(error.to_string().into()) + let string = error.to_string(); + // TODO: we should have Gemini return better errors here. 
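+            // The substrings below appear to be the credential failures the Gemini CLI
+            // currently surfaces (see the TODO above); note that `&&` binds tighter than
+            // `||`, so only the first `contains` check is gated on the agent downcast.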
+ if agent.clone().downcast::().is_some() + && string.contains("Could not load the default credentials") + || string.contains("API key not valid") + || string.contains("Request had invalid authentication credentials") + { + Self::AuthenticationRequired(string.into()) + } else { + Self::Other(error.to_string().into()) + } } } } @@ -240,8 +256,10 @@ pub struct AcpThreadView { project: Entity, thread_state: ThreadState, history_store: Entity, + hovered_recent_history_item: Option, entry_view_state: Entity, message_editor: Entity, + focus_handle: FocusHandle, model_selector: Option>, profile_selector: Option>, notifications: Vec>, @@ -257,8 +275,10 @@ pub struct AcpThreadView { edits_expanded: bool, plan_expanded: bool, editor_expanded: bool, - terminal_expanded: bool, + should_be_following: bool, editing_message: Option, + prompt_capabilities: Rc>, + is_loading_contents: bool, _cancel_task: Option>, _subscriptions: [Subscription; 3], } @@ -269,13 +289,15 @@ enum ThreadState { }, Ready { thread: Entity, - _subscription: [Subscription; 2], + title_editor: Option>, + _subscriptions: Vec, }, LoadError(LoadError), Unauthenticated { connection: Rc, description: Option>, configuration_view: Option, + pending_auth_method: Option, _subscription: Option, }, } @@ -292,14 +314,23 @@ impl AcpThreadView { window: &mut Window, cx: &mut Context, ) -> Self { + let prompt_capabilities = Rc::new(Cell::new(acp::PromptCapabilities::default())); let prevent_slash_commands = agent.clone().downcast::().is_some(); + + let placeholder = if agent.name() == "Zed Agent" { + format!("Message the {} — @ to include context", agent.name()) + } else { + format!("Message {} — @ to include context", agent.name()) + }; + let message_editor = cx.new(|cx| { let mut editor = MessageEditor::new( workspace.clone(), project.clone(), history_store.clone(), prompt_store.clone(), - "Message the agent — @ to include context", + prompt_capabilities.clone(), + placeholder, prevent_slash_commands, editor::EditorMode::AutoHeight { min_lines: MIN_EDITOR_LINES, @@ -322,6 +353,7 @@ impl AcpThreadView { project.clone(), history_store.clone(), prompt_store.clone(), + prompt_capabilities.clone(), prevent_slash_commands, ) }); @@ -355,10 +387,14 @@ impl AcpThreadView { edits_expanded: false, plan_expanded: false, editor_expanded: false, - terminal_expanded: true, + should_be_following: false, history_store, + hovered_recent_history_item: None, + prompt_capabilities, + is_loading_contents: false, _subscriptions: subscriptions, _cancel_task: None, + focus_handle: cx.focus_handle(), } } @@ -382,8 +418,12 @@ impl AcpThreadView { let connection = match connect_task.await { Ok(connection) => connection, Err(err) => { - this.update(cx, |this, cx| { - this.handle_load_error(err, cx); + this.update_in(cx, |this, window, cx| { + if err.downcast_ref::().is_some() { + this.handle_load_error(err, window, cx); + } else { + this.handle_thread_error(err, cx); + } cx.notify(); }) .log_err(); @@ -432,12 +472,10 @@ impl AcpThreadView { this.update_in(cx, |this, window, cx| { match result { Ok(thread) => { - let thread_subscription = - cx.subscribe_in(&thread, window, Self::handle_thread_event); - let action_log = thread.read(cx).action_log().clone(); - let action_log_subscription = - cx.observe(&action_log, |_, _, cx| cx.notify()); + + this.prompt_capabilities + .set(thread.read(cx).prompt_capabilities()); let count = thread.read(cx).entries().len(); this.list_state.splice(0..0, count); @@ -476,10 +514,33 @@ impl AcpThreadView { }) }); + let mut subscriptions = vec![ 
+ cx.subscribe_in(&thread, window, Self::handle_thread_event), + cx.observe(&action_log, |_, _, cx| cx.notify()), + ]; + + let title_editor = + if thread.update(cx, |thread, cx| thread.can_set_title(cx)) { + let editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_text(thread.read(cx).title(), window, cx); + editor + }); + subscriptions.push(cx.subscribe_in( + &editor, + window, + Self::handle_title_editor_event, + )); + Some(editor) + } else { + None + }; this.thread_state = ThreadState::Ready { thread, - _subscription: [thread_subscription, action_log_subscription], + title_editor, + _subscriptions: subscriptions, }; + this.message_editor.focus_handle(cx).focus(window); this.profile_selector = this.as_native_thread(cx).map(|thread| { cx.new(|cx| { @@ -495,7 +556,7 @@ impl AcpThreadView { cx.notify(); } Err(err) => { - this.handle_load_error(err, cx); + this.handle_load_error(err, window, cx); } }; }) @@ -542,7 +603,7 @@ impl AcpThreadView { let view = registry.read(cx).provider(&provider_id).map(|provider| { provider.configuration_view( - language_model::ConfigurationViewTargetAgent::Other(agent_name), + language_model::ConfigurationViewTargetAgent::Other(agent_name.clone()), window, cx, ) @@ -555,6 +616,7 @@ impl AcpThreadView { this.update(cx, |this, cx| { this.thread_state = ThreadState::Unauthenticated { + pending_auth_method: None, connection, configuration_view, description: err @@ -563,20 +625,35 @@ impl AcpThreadView { .map(|desc| cx.new(|cx| Markdown::new(desc.into(), None, None, cx))), _subscription: subscription, }; + if this.message_editor.focus_handle(cx).is_focused(window) { + this.focus_handle.focus(window) + } cx.notify(); }) .ok(); } - fn handle_load_error(&mut self, err: anyhow::Error, cx: &mut Context) { + fn handle_load_error( + &mut self, + err: anyhow::Error, + window: &mut Window, + cx: &mut Context, + ) { if let Some(load_err) = err.downcast_ref::() { self.thread_state = ThreadState::LoadError(load_err.clone()); } else { self.thread_state = ThreadState::LoadError(LoadError::Other(err.to_string().into())) } + if self.message_editor.focus_handle(cx).is_focused(window) { + self.focus_handle.focus(window) + } cx.notify(); } + pub fn workspace(&self) -> &WeakEntity { + &self.workspace + } + pub fn thread(&self) -> Option<&Entity> { match &self.thread_state { ThreadState::Ready { thread, .. } => Some(thread), @@ -586,12 +663,19 @@ impl AcpThreadView { } } - pub fn title(&self, cx: &App) -> SharedString { + pub fn title(&self) -> SharedString { match &self.thread_state { - ThreadState::Ready { thread, .. } => thread.read(cx).title(), + ThreadState::Ready { .. } | ThreadState::Unauthenticated { .. } => "New Thread".into(), ThreadState::Loading { .. } => "Loading…".into(), ThreadState::LoadError(_) => "Failed to load".into(), - ThreadState::Unauthenticated { .. } => "Authentication Required".into(), + } + } + + pub fn title_editor(&self) -> Option> { + if let ThreadState::Ready { title_editor, .. 
} = &self.thread_state { + title_editor.clone() + } else { + None } } @@ -639,6 +723,35 @@ impl AcpThreadView { cx.notify(); } + pub fn handle_title_editor_event( + &mut self, + title_editor: &Entity, + event: &EditorEvent, + window: &mut Window, + cx: &mut Context, + ) { + let Some(thread) = self.thread() else { return }; + + match event { + EditorEvent::BufferEdited => { + let new_title = title_editor.read(cx).text(cx); + thread.update(cx, |thread, cx| { + thread + .set_title(new_title.into(), cx) + .detach_and_log_err(cx); + }) + } + EditorEvent::Blurred => { + if title_editor.read(cx).text(cx).is_empty() { + title_editor.update(cx, |editor, cx| { + editor.set_text("New Thread", window, cx); + }); + } + } + _ => {} + } + } + pub fn handle_message_editor_event( &mut self, _: &Entity, @@ -652,6 +765,7 @@ impl AcpThreadView { MessageEditorEvent::Focus => { self.cancel_editing(&Default::default(), window, cx); } + MessageEditorEvent::LostFocus => {} } } @@ -663,9 +777,37 @@ impl AcpThreadView { cx: &mut Context, ) { match &event.view_event { + ViewEvent::NewDiff(tool_call_id) => { + if AgentSettings::get_global(cx).expand_edit_card { + self.expanded_tool_calls.insert(tool_call_id.clone()); + } + } + ViewEvent::NewTerminal(tool_call_id) => { + if AgentSettings::get_global(cx).expand_terminal_card { + self.expanded_tool_calls.insert(tool_call_id.clone()); + } + } ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Focus) => { - self.editing_message = Some(event.entry_index); - cx.notify(); + if let Some(thread) = self.thread() + && let Some(AgentThreadEntry::UserMessage(user_message)) = + thread.read(cx).entries().get(event.entry_index) + && user_message.id.is_some() + { + self.editing_message = Some(event.entry_index); + cx.notify(); + } + } + ViewEvent::MessageEditorEvent(editor, MessageEditorEvent::LostFocus) => { + if let Some(thread) = self.thread() + && let Some(AgentThreadEntry::UserMessage(user_message)) = + thread.read(cx).entries().get(event.entry_index) + && user_message.id.is_some() + { + if editor.read(cx).text(cx).as_str() == user_message.content.to_markdown(cx) { + self.editing_message = None; + cx.notify(); + } + } } ViewEvent::MessageEditorEvent(editor, MessageEditorEvent::Send) => { self.regenerate(event.entry_index, editor, window, cx); @@ -681,6 +823,9 @@ impl AcpThreadView { let Some(thread) = self.thread() else { return; }; + if !thread.read(cx).can_resume(cx) { + return; + } let task = thread.update(cx, |thread, cx| thread.resume(cx)); cx.spawn(async move |this, cx| { @@ -697,6 +842,11 @@ impl AcpThreadView { fn send(&mut self, window: &mut Window, cx: &mut Context) { let Some(thread) = self.thread() else { return }; + + if self.is_loading_contents { + return; + } + self.history_store.update(cx, |history, cx| { history.push_recently_opened_entry( HistoryEntryId::AcpThread(thread.read(cx).session_id().clone()), @@ -711,7 +861,7 @@ impl AcpThreadView { let contents = self .message_editor - .update(cx, |message_editor, cx| message_editor.contents(window, cx)); + .update(cx, |message_editor, cx| message_editor.contents(cx)); self.send_impl(contents, window, cx) } @@ -724,7 +874,7 @@ impl AcpThreadView { let contents = self .message_editor - .update(cx, |message_editor, cx| message_editor.contents(window, cx)); + .update(cx, |message_editor, cx| message_editor.contents(cx)); cx.spawn_in(window, async move |this, cx| { cancelled.await; @@ -739,10 +889,12 @@ impl AcpThreadView { fn send_impl( &mut self, - contents: Task>>, + contents: Task, Vec>)>>, window: &mut Window, 
cx: &mut Context, ) { + let agent_telemetry_id = self.agent.telemetry_id(); + self.thread_error.take(); self.editing_message.take(); self.thread_feedback.clear(); @@ -750,8 +902,24 @@ impl AcpThreadView { let Some(thread) = self.thread().cloned() else { return; }; + if self.should_be_following { + self.workspace + .update(cx, |workspace, cx| { + workspace.follow(CollaboratorId::Agent, window, cx); + }) + .ok(); + } + + self.is_loading_contents = true; + let guard = cx.new(|_| ()); + cx.observe_release(&guard, |this, _guard, cx| { + this.is_loading_contents = false; + cx.notify(); + }) + .detach(); + let task = cx.spawn_in(window, async move |this, cx| { - let contents = contents.await?; + let (contents, tracked_buffers) = contents.await?; if contents.is_empty() { return Ok(()); @@ -764,7 +932,18 @@ impl AcpThreadView { message_editor.clear(window, cx); }); })?; - let send = thread.update(cx, |thread, cx| thread.send(contents, cx))?; + let send = thread.update(cx, |thread, cx| { + thread.action_log().update(cx, |action_log, cx| { + for buffer in tracked_buffers { + action_log.buffer_read(buffer, cx) + } + }); + drop(guard); + + telemetry::event!("Agent Message Sent", agent = agent_telemetry_id); + + thread.send(contents, cx) + })?; send.await }); @@ -774,6 +953,16 @@ impl AcpThreadView { this.handle_thread_error(err, cx); }) .ok(); + } else { + this.update(cx, |this, cx| { + this.should_be_following = this + .workspace + .update(cx, |workspace, _| { + workspace.is_being_followed(CollaboratorId::Agent) + }) + .unwrap_or_default(); + }) + .ok(); } }) .detach(); @@ -817,20 +1006,24 @@ impl AcpThreadView { let Some(thread) = self.thread().cloned() else { return; }; + if self.is_loading_contents { + return; + } - let Some(rewind) = thread.update(cx, |thread, cx| { - let user_message_id = thread.entries().get(entry_ix)?.user_message()?.id.clone()?; - Some(thread.rewind(user_message_id, cx)) + let Some(user_message_id) = thread.update(cx, |thread, _| { + thread.entries().get(entry_ix)?.user_message()?.id.clone() }) else { return; }; - let contents = - message_editor.update(cx, |message_editor, cx| message_editor.contents(window, cx)); + let contents = message_editor.update(cx, |message_editor, cx| message_editor.contents(cx)); - let task = cx.foreground_executor().spawn(async move { - rewind.await?; - contents.await + let task = cx.spawn(async move |_, cx| { + let contents = contents.await?; + thread + .update(cx, |thread, cx| thread.rewind(user_message_id, cx))? 
+ .await?; + Ok(contents) }); self.send_impl(task, window, cx); } @@ -895,7 +1088,7 @@ impl AcpThreadView { } fn handle_thread_error(&mut self, error: anyhow::Error, cx: &mut Context) { - self.thread_error = Some(ThreadError::from_err(error)); + self.thread_error = Some(ThreadError::from_err(error, &self.agent)); cx.notify(); } @@ -962,8 +1155,25 @@ impl AcpThreadView { AcpThreadEvent::LoadError(error) => { self.thread_retry_status.take(); self.thread_state = ThreadState::LoadError(error.clone()); + if self.message_editor.focus_handle(cx).is_focused(window) { + self.focus_handle.focus(window) + } } - AcpThreadEvent::TitleUpdated | AcpThreadEvent::TokenUsageUpdated => {} + AcpThreadEvent::TitleUpdated => { + let title = thread.read(cx).title(); + if let Some(title_editor) = self.title_editor() { + title_editor.update(cx, |editor, cx| { + if editor.text(cx) != title { + editor.set_text(title, window, cx); + } + }); + } + } + AcpThreadEvent::PromptCapabilitiesUpdated => { + self.prompt_capabilities + .set(thread.read(cx).prompt_capabilities()); + } + AcpThreadEvent::TokenUsageUpdated => {} } cx.notify(); } @@ -974,36 +1184,112 @@ impl AcpThreadView { window: &mut Window, cx: &mut Context, ) { - let ThreadState::Unauthenticated { ref connection, .. } = self.thread_state else { + let ThreadState::Unauthenticated { + connection, + pending_auth_method, + configuration_view, + .. + } = &mut self.thread_state + else { return; }; - self.thread_error.take(); - let authenticate = connection.authenticate(method, cx); - self.auth_task = Some(cx.spawn_in(window, { - let project = self.project.clone(); - let agent = self.agent.clone(); - async move |this, cx| { - let result = authenticate.await; - - this.update_in(cx, |this, window, cx| { - if let Err(err) = result { - this.handle_thread_error(err, cx); - } else { - this.thread_state = Self::initial_state( - agent, - None, - this.workspace.clone(), - project.clone(), - window, - cx, - ) - } - this.auth_task.take() - }) - .ok(); + if method.0.as_ref() == "gemini-api-key" { + let registry = LanguageModelRegistry::global(cx); + let provider = registry + .read(cx) + .provider(&language_model::GOOGLE_PROVIDER_ID) + .unwrap(); + if !provider.is_authenticated(cx) { + let this = cx.weak_entity(); + let agent = self.agent.clone(); + let connection = connection.clone(); + window.defer(cx, |window, cx| { + Self::handle_auth_required( + this, + AuthRequired { + description: Some("GEMINI_API_KEY must be set".to_owned()), + provider_id: Some(language_model::GOOGLE_PROVIDER_ID), + }, + agent, + connection, + window, + cx, + ); + }); + return; } - })); + } else if method.0.as_ref() == "vertex-ai" + && std::env::var("GOOGLE_API_KEY").is_err() + && (std::env::var("GOOGLE_CLOUD_PROJECT").is_err() + || (std::env::var("GOOGLE_CLOUD_PROJECT").is_err())) + { + let this = cx.weak_entity(); + let agent = self.agent.clone(); + let connection = connection.clone(); + + window.defer(cx, |window, cx| { + Self::handle_auth_required( + this, + AuthRequired { + description: Some( + "GOOGLE_API_KEY must be set in the environment to use Vertex AI authentication for Gemini CLI. Please export it and restart Zed." 
+ .to_owned(), + ), + provider_id: None, + }, + agent, + connection, + window, + cx, + ) + }); + return; + } + + self.thread_error.take(); + configuration_view.take(); + pending_auth_method.replace(method.clone()); + let authenticate = connection.authenticate(method, cx); + cx.notify(); + self.auth_task = + Some(cx.spawn_in(window, { + let project = self.project.clone(); + let agent = self.agent.clone(); + async move |this, cx| { + let result = authenticate.await; + + match &result { + Ok(_) => telemetry::event!( + "Authenticate Agent Succeeded", + agent = agent.telemetry_id() + ), + Err(_) => { + telemetry::event!( + "Authenticate Agent Failed", + agent = agent.telemetry_id(), + ) + } + } + + this.update_in(cx, |this, window, cx| { + if let Err(err) = result { + this.handle_thread_error(err, cx); + } else { + this.thread_state = Self::initial_state( + agent, + None, + this.workspace.clone(), + project.clone(), + window, + cx, + ) + } + this.auth_task.take() + }) + .ok(); + } + })); } fn authorize_tool_call( @@ -1011,6 +1297,7 @@ impl AcpThreadView { tool_call_id: acp::ToolCallId, option_id: acp::PermissionOptionId, option_kind: acp::PermissionOptionKind, + window: &mut Window, cx: &mut Context, ) { let Some(thread) = self.thread() else { @@ -1019,6 +1306,13 @@ impl AcpThreadView { thread.update(cx, |thread, cx| { thread.authorize_tool_call(tool_call_id, option_id, option_kind, cx); }); + if self.should_be_following { + self.workspace + .update(cx, |workspace, cx| { + workspace.follow(CollaboratorId::Agent, window, cx); + }) + .ok(); + } cx.notify(); } @@ -1062,9 +1356,24 @@ impl AcpThreadView { None }; + let has_checkpoint_button = message + .checkpoint + .as_ref() + .is_some_and(|checkpoint| checkpoint.show); + + let agent_name = self.agent.name(); + v_flex() .id(("user_message", entry_ix)) - .pt_2() + .map(|this| { + if entry_ix == 0 && !has_checkpoint_button && rules_item.is_none() { + this.pt_4() + } else if rules_item.is_some() { + this.pt_3() + } else { + this.pt_2() + } + }) .pb_4() .px_2() .gap_1p5() @@ -1073,6 +1382,7 @@ impl AcpThreadView { .children(message.id.clone().and_then(|message_id| { message.checkpoint.as_ref()?.show.then(|| { h_flex() + .px_3() .gap_2() .child(Divider::horizontal()) .child( @@ -1097,63 +1407,93 @@ impl AcpThreadView { div() .py_3() .px_2() - .rounded_lg() + .rounded_md() .shadow_md() .bg(cx.theme().colors().editor_background) .border_1() .when(editing && !editor_focus, |this| this.border_dashed()) .border_color(cx.theme().colors().border) .map(|this|{ - if editor_focus { + if editing && editor_focus { this.border_color(focus_border) - } else { + } else if message.id.is_some() { this.hover(|s| s.border_color(focus_border.opacity(0.8))) + } else { + this } }) .text_xs() .child(editor.clone().into_any_element()), ) - .when(editor_focus, |this| - this.child( - h_flex() - .absolute() - .top_neg_3p5() - .right_3() - .gap_1() - .rounded_sm() - .border_1() - .border_color(cx.theme().colors().border) - .bg(cx.theme().colors().editor_background) - .overflow_hidden() - .child( - IconButton::new("cancel", IconName::Close) - .icon_color(Color::Error) - .icon_size(IconSize::XSmall) - .on_click(cx.listener(Self::cancel_editing)) - ) - .child( - IconButton::new("regenerate", IconName::Return) - .icon_color(Color::Muted) - .icon_size(IconSize::XSmall) - .tooltip(Tooltip::text( - "Editing will restart the thread from this point." 
- )) - .on_click(cx.listener({ - let editor = editor.clone(); - move |this, _, window, cx| { - this.regenerate( - entry_ix, &editor, window, cx, - ); - } - })), - ) - ) - ), + .when(editor_focus, |this| { + let base_container = h_flex() + .absolute() + .top_neg_3p5() + .right_3() + .gap_1() + .rounded_sm() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().editor_background) + .overflow_hidden(); + + if message.id.is_some() { + this.child( + base_container + .child( + IconButton::new("cancel", IconName::Close) + .disabled(self.is_loading_contents) + .icon_color(Color::Error) + .icon_size(IconSize::XSmall) + .on_click(cx.listener(Self::cancel_editing)) + ) + .child( + if self.is_loading_contents { + div() + .id("loading-edited-message-content") + .tooltip(Tooltip::text("Loading Added Context…")) + .child(loading_contents_spinner(IconSize::XSmall)) + .into_any_element() + } else { + IconButton::new("regenerate", IconName::Return) + .icon_color(Color::Muted) + .icon_size(IconSize::XSmall) + .tooltip(Tooltip::text( + "Editing will restart the thread from this point." + )) + .on_click(cx.listener({ + let editor = editor.clone(); + move |this, _, window, cx| { + this.regenerate( + entry_ix, &editor, window, cx, + ); + } + })).into_any_element() + } + ) + ) + } else { + this.child( + base_container + .border_dashed() + .child( + IconButton::new("editing_unavailable", IconName::PencilUnavailable) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .style(ButtonStyle::Transparent) + .tooltip(move |_window, cx| { + cx.new(|_| UnavailableEditingTooltip::new(agent_name.clone())) + .into() + }) + ) + ) + } + }), ) .into_any() } AgentThreadEntry::AssistantMessage(AssistantMessage { chunks }) => { - let style = default_markdown_style(false, window, cx); + let style = default_markdown_style(false, false, window, cx); let message_body = v_flex() .w_full() .gap_2p5() @@ -1193,7 +1533,7 @@ impl AcpThreadView { AgentThreadEntry::ToolCall(tool_call) => { let has_terminals = tool_call.terminals().next().is_some(); - div().w_full().py_1p5().px_5().map(|this| { + div().w_full().py_1().px_5().map(|this| { if has_terminals { this.children(tool_call.terminals().map(|terminal| { self.render_terminal_tool_call( @@ -1212,17 +1552,14 @@ impl AcpThreadView { return primary; }; - let is_generating = matches!(thread.read(cx).status(), ThreadStatus::Generating); - let primary = if entry_ix == total_entries - 1 && !is_generating { + let primary = if entry_ix == total_entries - 1 { v_flex() .w_full() .child(primary) - .child(self.render_thread_controls(cx)) + .child(self.render_thread_controls(&thread, cx)) .when_some( self.thread_feedback.comments_editor.clone(), - |this, editor| { - this.child(Self::render_feedback_feedback_editor(editor, window, cx)) - }, + |this, editor| this.child(Self::render_feedback_feedback_editor(editor, cx)), ) .into_any_element() } else { @@ -1288,8 +1625,6 @@ impl AcpThreadView { .relative() .w_full() .gap_1p5() - .opacity(0.8) - .hover(|style| style.opacity(1.)) .child( h_flex() .size_4() @@ -1330,6 +1665,7 @@ impl AcpThreadView { .child( div() .text_size(self.tool_name_font_size()) + .text_color(cx.theme().colors().text_muted) .child("Thinking"), ) .on_click(cx.listener({ @@ -1353,75 +1689,15 @@ impl AcpThreadView { .border_l_1() .border_color(self.tool_card_border_color(cx)) .text_ui_sm(cx) - .child( - self.render_markdown(chunk, default_markdown_style(false, window, cx)), - ), + .child(self.render_markdown( + chunk, + default_markdown_style(false, 
false, window, cx), + )), ) }) .into_any_element() } - fn render_tool_call_icon( - &self, - group_name: SharedString, - entry_ix: usize, - is_collapsible: bool, - is_open: bool, - tool_call: &ToolCall, - cx: &Context, - ) -> Div { - let tool_icon = Icon::new(match tool_call.kind { - acp::ToolKind::Read => IconName::ToolRead, - acp::ToolKind::Edit => IconName::ToolPencil, - acp::ToolKind::Delete => IconName::ToolDeleteFile, - acp::ToolKind::Move => IconName::ArrowRightLeft, - acp::ToolKind::Search => IconName::ToolSearch, - acp::ToolKind::Execute => IconName::ToolTerminal, - acp::ToolKind::Think => IconName::ToolThink, - acp::ToolKind::Fetch => IconName::ToolWeb, - acp::ToolKind::Other => IconName::ToolHammer, - }) - .size(IconSize::Small) - .color(Color::Muted); - - let base_container = h_flex().size_4().justify_center(); - - if is_collapsible { - base_container - .child( - div() - .group_hover(&group_name, |s| s.invisible().w_0()) - .child(tool_icon), - ) - .child( - h_flex() - .absolute() - .inset_0() - .invisible() - .justify_center() - .group_hover(&group_name, |s| s.visible()) - .child( - Disclosure::new(("expand", entry_ix), is_open) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronRight) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move |this: &mut Self, _, _, cx: &mut Context| { - if is_open { - this.expanded_tool_calls.remove(&id); - } else { - this.expanded_tool_calls.insert(id.clone()); - } - cx.notify(); - } - })), - ), - ) - } else { - base_container.child(tool_icon) - } - } - fn render_tool_call( &self, entry_ix: usize, @@ -1432,29 +1708,37 @@ impl AcpThreadView { let header_id = SharedString::from(format!("outer-tool-call-header-{}", entry_ix)); let card_header_id = SharedString::from("inner-tool-call-header"); - let status_icon = match &tool_call.status { - ToolCallStatus::Pending - | ToolCallStatus::WaitingForConfirmation { .. 
} - | ToolCallStatus::Completed => None, - ToolCallStatus::InProgress => Some( - Icon::new(IconName::ArrowCircle) - .color(Color::Accent) - .size(IconSize::Small) - .with_animation( - "running", - Animation::new(Duration::from_secs(2)).repeat(), - |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), - ) - .into_any(), - ), - ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed => Some( - Icon::new(IconName::Close) - .color(Color::Error) - .size(IconSize::Small) - .into_any_element(), - ), + let tool_icon = + if tool_call.kind == acp::ToolKind::Edit && tool_call.locations.len() == 1 { + FileIcons::get_icon(&tool_call.locations[0].path, cx) + .map(Icon::from_path) + .unwrap_or(Icon::new(IconName::ToolPencil)) + } else { + Icon::new(match tool_call.kind { + acp::ToolKind::Read => IconName::ToolSearch, + acp::ToolKind::Edit => IconName::ToolPencil, + acp::ToolKind::Delete => IconName::ToolDeleteFile, + acp::ToolKind::Move => IconName::ArrowRightLeft, + acp::ToolKind::Search => IconName::ToolSearch, + acp::ToolKind::Execute => IconName::ToolTerminal, + acp::ToolKind::Think => IconName::ToolThink, + acp::ToolKind::Fetch => IconName::ToolWeb, + acp::ToolKind::Other => IconName::ToolHammer, + }) + } + .size(IconSize::Small) + .color(Color::Muted); + + let failed_or_canceled = match &tool_call.status { + ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed => true, + _ => false, }; + let failed_tool_call = matches!( + tool_call.status, + ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed + ); + let needs_confirmation = matches!( tool_call.status, ToolCallStatus::WaitingForConfirmation { .. } @@ -1463,10 +1747,9 @@ impl AcpThreadView { matches!(tool_call.kind, acp::ToolKind::Edit) || tool_call.diffs().next().is_some(); let use_card_layout = needs_confirmation || is_edit; - let is_collapsible = !tool_call.content.is_empty() && !use_card_layout; + let is_collapsible = !tool_call.content.is_empty() && !needs_confirmation; - let is_open = - needs_confirmation || is_edit || self.expanded_tool_calls.contains(&tool_call.id); + let is_open = needs_confirmation || self.expanded_tool_calls.contains(&tool_call.id); let gradient_overlay = |color: Hsla| { div() @@ -1509,7 +1792,9 @@ impl AcpThreadView { .into_any() } ToolCallStatus::Pending | ToolCallStatus::InProgress - if is_edit && tool_call.content.is_empty() => + if is_edit + && tool_call.content.is_empty() + && self.as_native_connection(cx).is_some() => { self.render_diff_loading(cx).into_any() } @@ -1534,7 +1819,7 @@ impl AcpThreadView { v_flex() .when(use_card_layout, |this| { - this.rounded_lg() + this.rounded_md() .border_1() .border_color(self.tool_card_border_color(cx)) .bg(cx.theme().colors().editor_background) @@ -1543,39 +1828,29 @@ impl AcpThreadView { .child( h_flex() .id(header_id) + .group(&card_header_id) + .relative() .w_full() + .max_w_full() .gap_1() - .justify_between() - .map(|this| { - if use_card_layout { - this.pl_2() - .pr_1p5() - .py_1() - .rounded_t_md() - .when(is_open, |this| { - this.border_b_1() - .border_color(self.tool_card_border_color(cx)) - }) - .bg(self.tool_card_header_bg(cx)) - } else { - this.opacity(0.8).hover(|style| style.opacity(1.)) - } + .when(use_card_layout, |this| { + this.pl_1p5() + .pr_1() + .py_0p5() + .rounded_t_md() + .when(is_open && !failed_tool_call, |this| { + this.border_b_1() + .border_color(self.tool_card_border_color(cx)) + }) + .bg(self.tool_card_header_bg(cx)) }) .child( h_flex() - 
.group(&card_header_id) .relative() .w_full() - .min_h_6() + .h(window.line_height() - px(2.)) .text_size(self.tool_name_font_size()) - .child(self.render_tool_call_icon( - card_header_id, - entry_ix, - is_collapsible, - is_open, - tool_call, - cx, - )) + .child(tool_icon) .child(if tool_call.locations.len() == 1 { let name = tool_call.locations[0] .path @@ -1591,54 +1866,68 @@ impl AcpThreadView { .px_1p5() .rounded_sm() .overflow_x_scroll() - .opacity(0.8) .hover(|label| { - label.opacity(1.).bg(cx - .theme() - .colors() - .element_hover - .opacity(0.5)) + label.bg(cx.theme().colors().element_hover.opacity(0.5)) + }) + .map(|this| { + if use_card_layout { + this.text_color(cx.theme().colors().text) + } else { + this.text_color(cx.theme().colors().text_muted) + } }) .child(name) .tooltip(Tooltip::text("Jump to File")) + .cursor(gpui::CursorStyle::PointingHand) .on_click(cx.listener(move |this, _, window, cx| { this.open_tool_call_location(entry_ix, 0, window, cx); })) .into_any_element() } else { h_flex() - .id("non-card-label-container") - .w_full() .relative() + .w_full() + .max_w_full() .ml_1p5() .overflow_hidden() - .child( - h_flex() - .id("non-card-label") - .pr_8() - .w_full() - .overflow_x_scroll() - .child(self.render_markdown( - tool_call.label.clone(), - default_markdown_style(false, window, cx), - )), - ) + .child(h_flex().pr_8().child(self.render_markdown( + tool_call.label.clone(), + default_markdown_style(false, true, window, cx), + ))) .child(gradient_overlay(gradient_color)) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move |this: &mut Self, _, _, cx: &mut Context| { - if is_open { - this.expanded_tool_calls.remove(&id); - } else { - this.expanded_tool_calls.insert(id.clone()); - } - cx.notify(); - } - })) .into_any() }), ) - .children(status_icon), + .child( + h_flex() + .gap_px() + .when(is_collapsible, |this| { + this.child( + Disclosure::new(("expand", entry_ix), is_open) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .visible_on_hover(&card_header_id) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this: &mut Self, _, _, cx: &mut Context| { + if is_open { + this.expanded_tool_calls.remove(&id); + } else { + this.expanded_tool_calls.insert(id.clone()); + } + cx.notify(); + } + })), + ) + }) + .when(failed_or_canceled, |this| { + this.child( + Icon::new(IconName::Close) + .color(Color::Error) + .size(IconSize::Small), + ) + }), + ), ) .children(tool_output_display) } @@ -1686,15 +1975,12 @@ impl AcpThreadView { .border_color(self.tool_card_border_color(cx)) .text_sm() .text_color(cx.theme().colors().text_muted) - .child(self.render_markdown(markdown, default_markdown_style(false, window, cx))) + .child(self.render_markdown(markdown, default_markdown_style(false, false, window, cx))) .child( - Button::new(button_id, "Collapse Output") + IconButton::new(button_id, IconName::ChevronUp) .full_width() .style(ButtonStyle::Outlined) - .label_size(LabelSize::Small) - .icon(IconName::ChevronUp) .icon_color(Color::Muted) - .icon_position(IconPosition::Start) .on_click(cx.listener({ move |this: &mut Self, _, _, cx: &mut Context| { this.expanded_tool_calls.remove(&tool_call_id); @@ -1711,9 +1997,27 @@ impl AcpThreadView { cx: &Context, ) -> AnyElement { let uri: SharedString = resource_link.uri.clone().into(); + let is_file = resource_link.uri.strip_prefix("file://"); - let label: SharedString = if let Some(path) = resource_link.uri.strip_prefix("file://") { - path.to_string().into() + let label: SharedString = if let 
Some(abs_path) = is_file { + if let Some(project_path) = self + .project + .read(cx) + .project_path_for_absolute_path(&Path::new(abs_path), cx) + && let Some(worktree) = self + .project + .read(cx) + .worktree_for_id(project_path.worktree_id, cx) + { + worktree + .read(cx) + .full_path(&project_path.path) + .to_string_lossy() + .to_string() + .into() + } else { + abs_path.to_string().into() + } } else { uri.clone() }; @@ -1730,10 +2034,12 @@ impl AcpThreadView { Button::new(button_id, label) .label_size(LabelSize::Small) .color(Color::Muted) - .icon(IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) .truncate(true) + .when(is_file.is_none(), |this| { + this.icon(IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + }) .on_click(cx.listener({ let workspace = self.workspace.clone(); move |_, _, window, cx: &mut Context| { @@ -1792,11 +2098,12 @@ impl AcpThreadView { let tool_call_id = tool_call_id.clone(); let option_id = option.id.clone(); let option_kind = option.kind; - move |this, _, _, cx| { + move |this, _, window, cx| { this.authorize_tool_call( tool_call_id.clone(), option_id.clone(), option_kind, + window, cx, ); } @@ -1865,7 +2172,7 @@ impl AcpThreadView { && diff.read(cx).has_revealed_range(cx) { editor.into_any_element() - } else if tool_progress { + } else if tool_progress && self.as_native_connection(cx).is_some() { self.render_diff_loading(cx) } else { Empty.into_any() @@ -1918,6 +2225,8 @@ impl AcpThreadView { .map(|path| format!("{}", path.display())) .unwrap_or_else(|| "current directory".to_string()); + let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); + let header = h_flex() .id(SharedString::from(format!( "terminal-tool-header-{}", @@ -2011,7 +2320,7 @@ impl AcpThreadView { .to_string() } else { format!( - "Output is {} long—to avoid unexpected token usage, \ + "Output is {} long, and to avoid unexpected token usage, \ only 16 KB was sent back to the model.", format_file_size(output.original_content_len as u64, true), ) @@ -2051,28 +2360,34 @@ impl AcpThreadView { "terminal-tool-disclosure-{}", terminal.entity_id() )), - self.terminal_expanded, + is_expanded, ) .opened_icon(IconName::ChevronUp) .closed_icon(IconName::ChevronDown) - .on_click(cx.listener(move |this, _event, _window, _cx| { - this.terminal_expanded = !this.terminal_expanded; - })), - ); + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this, _event, _window, _cx| { + if is_expanded { + this.expanded_tool_calls.remove(&id); + } else { + this.expanded_tool_calls.insert(id.clone()); + } + }})), + ); let terminal_view = self .entry_view_state .read(cx) .entry(entry_ix) .and_then(|entry| entry.terminal(terminal)); - let show_output = self.terminal_expanded && terminal_view.is_some(); + let show_output = is_expanded && terminal_view.is_some(); v_flex() .mb_2() .border_1() .when(tool_failed || command_failed, |card| card.border_dashed()) .border_color(border_color) - .rounded_lg() + .rounded_md() .overflow_hidden() .child( v_flex() @@ -2113,33 +2428,6 @@ impl AcpThreadView { .into_any() } - fn render_agent_logo(&self) -> AnyElement { - Icon::new(self.agent.logo()) - .color(Color::Muted) - .size(IconSize::XLarge) - .into_any_element() - } - - fn render_error_agent_logo(&self) -> AnyElement { - let logo = Icon::new(self.agent.logo()) - .color(Color::Muted) - .size(IconSize::XLarge) - .into_any_element(); - - h_flex() - .relative() - .justify_center() - .child(div().opacity(0.3).child(logo)) - .child( - h_flex() - 
.absolute() - .right_1() - .bottom_0() - .child(Icon::new(IconName::XCircleFilled).color(Color::Error)), - ) - .into_any_element() - } - fn render_rules_item(&self, cx: &Context) -> Option { let project_context = self .as_native_thread(cx)? @@ -2187,39 +2475,30 @@ impl AcpThreadView { return None; } + let has_both = user_rules_text.is_some() && rules_file_text.is_some(); + Some( - v_flex() + h_flex() .px_2p5() - .gap_1() + .child( + Icon::new(IconName::Attach) + .size(IconSize::XSmall) + .color(Color::Disabled), + ) .when_some(user_rules_text, |parent, user_rules_text| { parent.child( h_flex() - .group("user-rules") .id("user-rules") - .w_full() - .child( - Icon::new(IconName::Reader) - .size(IconSize::XSmall) - .color(Color::Disabled), - ) + .ml_1() + .mr_1p5() .child( Label::new(user_rules_text) .size(LabelSize::XSmall) .color(Color::Muted) - .truncate() - .buffer_font(cx) - .ml_1p5() - .mr_0p5(), - ) - .child( - IconButton::new("open-prompt-library", IconName::ArrowUpRight) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Ignored) - .visible_on_hover("user-rules") - // TODO: Figure out a way to pass focus handle here so we can display the `OpenRulesLibrary` keybinding - .tooltip(Tooltip::text("View User Rules")), + .truncate(), ) + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .tooltip(Tooltip::text("View User Rules")) .on_click(move |_event, window, cx| { window.dispatch_action( Box::new(OpenRulesLibrary { @@ -2230,33 +2509,25 @@ impl AcpThreadView { }), ) }) + .when(has_both, |this| { + this.child( + Label::new("•") + .size(LabelSize::XSmall) + .color(Color::Disabled), + ) + }) .when_some(rules_file_text, |parent, rules_file_text| { parent.child( h_flex() - .group("project-rules") .id("project-rules") - .w_full() - .child( - Icon::new(IconName::Reader) - .size(IconSize::XSmall) - .color(Color::Disabled), - ) + .ml_1p5() .child( Label::new(rules_file_text) .size(LabelSize::XSmall) - .color(Color::Muted) - .buffer_font(cx) - .ml_1p5() - .mr_0p5(), - ) - .child( - IconButton::new("open-rule", IconName::ArrowUpRight) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Ignored) - .visible_on_hover("project-rules") - .tooltip(Tooltip::text("View Project Rules")), + .color(Color::Muted), ) + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .tooltip(Tooltip::text("View Project Rules")) .on_click(cx.listener(Self::handle_open_rules)), ) }) @@ -2264,51 +2535,104 @@ impl AcpThreadView { ) } - fn render_empty_state(&self, cx: &App) -> AnyElement { - let loading = matches!(&self.thread_state, ThreadState::Loading { .. 
}); + fn render_empty_state_section_header( + &self, + label: impl Into, + action_slot: Option, + cx: &mut Context, + ) -> impl IntoElement { + div().pl_1().pr_1p5().child( + h_flex() + .mt_2() + .pl_1p5() + .pb_1() + .w_full() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child( + Label::new(label.into()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .children(action_slot), + ) + } + + fn render_recent_history(&self, window: &mut Window, cx: &mut Context) -> AnyElement { + let render_history = self + .agent + .clone() + .downcast::() + .is_some() + && self + .history_store + .update(cx, |history_store, cx| !history_store.is_empty(cx)); v_flex() .size_full() - .items_center() - .justify_center() - .child(if loading { - h_flex() - .justify_center() - .child(self.render_agent_logo()) - .with_animation( - "pulsating_icon", - Animation::new(Duration::from_secs(2)) - .repeat() - .with_easing(pulsating_between(0.4, 1.0)), - |icon, delta| icon.opacity(delta), - ) - .into_any() - } else { - self.render_agent_logo().into_any_element() + .when(render_history, |this| { + let recent_history: Vec<_> = self.history_store.update(cx, |history_store, _| { + history_store.entries().take(3).collect() + }); + this.justify_end().child( + v_flex() + .child( + self.render_empty_state_section_header( + "Recent", + Some( + Button::new("view-history", "View All") + .style(ButtonStyle::Subtle) + .label_size(LabelSize::Small) + .key_binding( + KeyBinding::for_action_in( + &OpenHistory, + &self.focus_handle(cx), + window, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(move |_event, window, cx| { + window.dispatch_action(OpenHistory.boxed_clone(), cx); + }) + .into_any_element(), + ), + cx, + ), + ) + .child( + v_flex().p_1().pr_1p5().gap_1().children( + recent_history + .into_iter() + .enumerate() + .map(|(index, entry)| { + // TODO: Add keyboard navigation. 
+ let is_hovered = + self.hovered_recent_history_item == Some(index); + crate::acp::thread_history::AcpHistoryEntryElement::new( + entry, + cx.entity().downgrade(), + ) + .hovered(is_hovered) + .on_hover(cx.listener( + move |this, is_hovered, _window, cx| { + if *is_hovered { + this.hovered_recent_history_item = Some(index); + } else if this.hovered_recent_history_item + == Some(index) + { + this.hovered_recent_history_item = None; + } + cx.notify(); + }, + )) + .into_any_element() + }), + ), + ), + ) }) - .child(h_flex().mt_4().mb_1().justify_center().child(if loading { - div() - .child(LoadingLabel::new("").size(LabelSize::Large)) - .into_any_element() - } else { - Headline::new(self.agent.empty_state_headline()) - .size(HeadlineSize::Medium) - .into_any_element() - })) - .child( - div() - .max_w_1_2() - .text_sm() - .text_center() - .map(|this| { - if loading { - this.invisible() - } else { - this.text_color(cx.theme().colors().text_muted) - } - }) - .child(self.agent.empty_state_message()), - ) .into_any() } @@ -2317,132 +2641,149 @@ impl AcpThreadView { connection: &Rc, description: Option<&Entity>, configuration_view: Option<&AnyView>, + pending_auth_method: Option<&acp::AuthMethodId>, window: &mut Window, cx: &Context, ) -> Div { - v_flex() - .p_2() - .gap_2() - .flex_1() - .items_center() - .justify_center() - .child( - v_flex() - .items_center() - .justify_center() - .child(self.render_error_agent_logo()) - .child(h_flex().mt_4().mb_1().justify_center().child( - Headline::new(self.agent.empty_state_headline()).size(HeadlineSize::Medium), - )) - .into_any(), - ) - .children(description.map(|desc| { - div().text_ui(cx).text_center().child( - self.render_markdown(desc.clone(), default_markdown_style(false, window, cx)), + let show_description = + configuration_view.is_none() && description.is_none() && pending_auth_method.is_none(); + + v_flex().flex_1().size_full().justify_end().child( + v_flex() + .p_2() + .pr_3() + .w_full() + .gap_1() + .border_t_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().status().warning.opacity(0.04)) + .child( + h_flex() + .gap_1p5() + .child( + Icon::new(IconName::Warning) + .color(Color::Warning) + .size(IconSize::Small), + ) + .child(Label::new("Authentication Required").size(LabelSize::Small)), ) - })) - .children( - configuration_view - .cloned() - .map(|view| div().px_4().w_full().max_w_128().child(view)), - ) - .child(h_flex().mt_1p5().justify_center().children( - connection.auth_methods().iter().map(|method| { - Button::new(SharedString::from(method.id.0.clone()), method.name.clone()) - .on_click({ - let method_id = method.id.clone(); - cx.listener(move |this, _, window, cx| { - this.authenticate(method_id.clone(), window, cx) + .children(description.map(|desc| { + div().text_ui(cx).child(self.render_markdown( + desc.clone(), + default_markdown_style(false, false, window, cx), + )) + })) + .children( + configuration_view + .cloned() + .map(|view| div().w_full().child(view)), + ) + .when( + show_description, + |el| { + el.child( + Label::new(format!( + "You are not currently authenticated with {}. 
Please choose one of the following options:", + self.agent.name() + )) + .size(LabelSize::Small) + .color(Color::Muted) + .mb_1() + .ml_5(), + ) + }, + ) + .when_some(pending_auth_method, |el, _| { + el.child( + h_flex() + .py_4() + .w_full() + .justify_center() + .gap_1() + .child( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_animation( + "arrow-circle", + Animation::new(Duration::from_secs(2)).repeat(), + |icon, delta| { + icon.transform(Transformation::rotate(percentage( + delta, + ))) + }, + ) + .into_any_element(), + ) + .child(Label::new("Authenticating…").size(LabelSize::Small)), + ) + }) + .when(!connection.auth_methods().is_empty(), |this| { + this.child( + h_flex() + .justify_end() + .flex_wrap() + .gap_1() + .when(!show_description, |this| { + this.border_t_1() + .mt_1() + .pt_2() + .border_color(cx.theme().colors().border.opacity(0.8)) }) - }) - }), - )) + .children( + connection + .auth_methods() + .iter() + .enumerate() + .rev() + .map(|(ix, method)| { + Button::new( + SharedString::from(method.id.0.clone()), + method.name.clone(), + ) + .when(ix == 0, |el| { + el.style(ButtonStyle::Tinted(ui::TintColor::Warning)) + }) + .label_size(LabelSize::Small) + .on_click({ + let method_id = method.id.clone(); + cx.listener(move |this, _, window, cx| { + telemetry::event!( + "Authenticate Agent Started", + agent = this.agent.telemetry_id(), + method = method_id + ); + + this.authenticate(method_id.clone(), window, cx) + }) + }) + }), + ), + ) + }) + + ) } fn render_load_error(&self, e: &LoadError, cx: &Context) -> AnyElement { - let mut container = v_flex() - .items_center() - .justify_center() - .child(self.render_error_agent_logo()) - .child( - v_flex() - .mt_4() - .mb_2() - .gap_0p5() - .text_center() - .items_center() - .child(Headline::new("Failed to launch").size(HeadlineSize::Medium)) - .child( - Label::new(e.to_string()) - .size(LabelSize::Small) - .color(Color::Muted), - ), - ); - - if let LoadError::Unsupported { - upgrade_message, - upgrade_command, - .. - } = &e - { - let upgrade_message = upgrade_message.clone(); - let upgrade_command = upgrade_command.clone(); - container = container.child( - Button::new("upgrade", upgrade_message) - .tooltip(Tooltip::text(upgrade_command.clone())) - .on_click(cx.listener(move |this, _, window, cx| { - let task = this - .workspace - .update(cx, |workspace, cx| { - let project = workspace.project().read(cx); - let cwd = project.first_project_directory(cx); - let shell = project.terminal_settings(&cwd, cx).shell.clone(); - let spawn_in_terminal = task::SpawnInTerminal { - id: task::TaskId("upgrade".to_string()), - full_label: upgrade_command.clone(), - label: upgrade_command.clone(), - command: Some(upgrade_command.clone()), - args: Vec::new(), - command_label: upgrade_command.clone(), - cwd, - env: Default::default(), - use_new_terminal: true, - allow_concurrent_runs: true, - reveal: Default::default(), - reveal_target: Default::default(), - hide: Default::default(), - shell, - show_summary: true, - show_command: true, - show_rerun: false, - }; - workspace.spawn_in_terminal(spawn_in_terminal, window, cx) - }) - .ok(); - let Some(task) = task else { return }; - cx.spawn_in(window, async move |this, cx| { - if let Some(Ok(_)) = task.await { - this.update_in(cx, |this, window, cx| { - this.reset(window, cx); - }) - .ok(); - } - }) - .detach() - })), - ); - } else if let LoadError::NotInstalled { - install_message, - install_command, - .. 
- } = e - { - let install_message = install_message.clone(); - let install_command = install_command.clone(); - container = container.child( - Button::new("install", install_message) + let (message, action_slot) = match e { + LoadError::NotInstalled { + error_message, + install_message, + install_command, + } => { + let install_command = install_command.clone(); + let button = Button::new("install", install_message) .tooltip(Tooltip::text(install_command.clone())) + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .icon(IconName::Download) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) .on_click(cx.listener(move |this, _, window, cx| { + telemetry::event!("Agent Install CLI", agent = this.agent.telemetry_id()); + let task = this .workspace .update(cx, |workspace, cx| { @@ -2450,7 +2791,7 @@ impl AcpThreadView { let cwd = project.first_project_directory(cx); let shell = project.terminal_settings(&cwd, cx).shell.clone(); let spawn_in_terminal = task::SpawnInTerminal { - id: task::TaskId("install".to_string()), + id: task::TaskId(install_command.clone()), full_label: install_command.clone(), label: install_command.clone(), command: Some(install_command.clone()), @@ -2481,11 +2822,83 @@ impl AcpThreadView { } }) .detach() - })), - ); - } + })); - container.into_any() + (error_message.clone(), Some(button.into_any_element())) + } + LoadError::Unsupported { + error_message, + upgrade_message, + upgrade_command, + } => { + let upgrade_command = upgrade_command.clone(); + let button = Button::new("upgrade", upgrade_message) + .tooltip(Tooltip::text(upgrade_command.clone())) + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .icon(IconName::Download) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .icon_position(IconPosition::Start) + .on_click(cx.listener(move |this, _, window, cx| { + telemetry::event!("Agent Upgrade CLI", agent = this.agent.telemetry_id()); + + let task = this + .workspace + .update(cx, |workspace, cx| { + let project = workspace.project().read(cx); + let cwd = project.first_project_directory(cx); + let shell = project.terminal_settings(&cwd, cx).shell.clone(); + let spawn_in_terminal = task::SpawnInTerminal { + id: task::TaskId(upgrade_command.to_string()), + full_label: upgrade_command.clone(), + label: upgrade_command.clone(), + command: Some(upgrade_command.clone()), + args: Vec::new(), + command_label: upgrade_command.clone(), + cwd, + env: Default::default(), + use_new_terminal: true, + allow_concurrent_runs: true, + reveal: Default::default(), + reveal_target: Default::default(), + hide: Default::default(), + shell, + show_summary: true, + show_command: true, + show_rerun: false, + }; + workspace.spawn_in_terminal(spawn_in_terminal, window, cx) + }) + .ok(); + let Some(task) = task else { return }; + cx.spawn_in(window, async move |this, cx| { + if let Some(Ok(_)) = task.await { + this.update_in(cx, |this, window, cx| { + this.reset(window, cx); + }) + .ok(); + } + }) + .detach() + })); + + (error_message.clone(), Some(button.into_any_element())) + } + LoadError::Exited { .. 
} => ("Server exited with status {status}".into(), None), + LoadError::Other(msg) => ( + msg.into(), + Some(self.create_copy_button(msg.to_string()).into_any_element()), + ), + }; + + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircleFilled) + .title("Failed to Launch") + .description(message) + .actions_slot(div().children(action_slot)) + .into_any_element() } fn render_activity_bar( @@ -2698,13 +3111,13 @@ impl AcpThreadView { h_flex() .p_1() .justify_between() + .flex_wrap() .when(expanded, |this| { this.border_b_1().border_color(cx.theme().colors().border) }) .child( h_flex() .id("edits-container") - .w_full() .gap_1() .child(Disclosure::new("edits-disclosure", expanded)) .map(|this| { @@ -2976,6 +3389,19 @@ impl AcpThreadView { (IconName::Maximize, "Expand Message Editor") }; + let backdrop = div() + .size_full() + .absolute() + .inset_0() + .bg(cx.theme().colors().panel_background) + .opacity(0.8) + .block_mouse_except_scroll(); + + let enable_editor = match self.thread_state { + ThreadState::Loading { .. } | ThreadState::Ready { .. } => true, + ThreadState::Unauthenticated { .. } | ThreadState::LoadError(..) => false, + }; + v_flex() .on_action(cx.listener(Self::expand_message_editor)) .on_action(cx.listener(|this, _: &ToggleProfileSelector, window, cx| { @@ -3051,6 +3477,7 @@ impl AcpThreadView { .child(self.render_send_button(cx)), ), ) + .when(!enable_editor, |this| this.child(backdrop)) .into_any() } @@ -3098,7 +3525,7 @@ impl AcpThreadView { "used-tokens-label", Animation::new(Duration::from_secs(2)) .repeat() - .with_easing(pulsating_between(0.6, 1.)), + .with_easing(pulsating_between(0.3, 0.8)), |label, delta| label.alpha(delta), ) .into_any() @@ -3197,7 +3624,14 @@ impl AcpThreadView { .thread() .is_some_and(|thread| thread.read(cx).status() != ThreadStatus::Idle); - if is_generating && is_editor_empty { + if self.is_loading_contents { + div() + .id("loading-message-content") + .px_1() + .tooltip(Tooltip::text("Loading Added Context…")) + .child(loading_contents_spinner(IconSize::default())) + .into_any_element() + } else if is_generating && is_editor_empty { IconButton::new("stop-generation", IconName::Stop) .icon_color(Color::Error) .style(ButtonStyle::Tinted(ui::TintColor::Error)) @@ -3232,13 +3666,53 @@ impl AcpThreadView { } } + fn is_following(&self, cx: &App) -> bool { + match self.thread().map(|thread| thread.read(cx).status()) { + Some(ThreadStatus::Generating) => self + .workspace + .read_with(cx, |workspace, _| { + workspace.is_being_followed(CollaboratorId::Agent) + }) + .unwrap_or(false), + _ => self.should_be_following, + } + } + + fn toggle_following(&mut self, window: &mut Window, cx: &mut Context) { + let following = self.is_following(cx); + + self.should_be_following = !following; + if self.thread().map(|thread| thread.read(cx).status()) == Some(ThreadStatus::Generating) { + self.workspace + .update(cx, |workspace, cx| { + if following { + workspace.unfollow(CollaboratorId::Agent, window, cx); + } else { + workspace.follow(CollaboratorId::Agent, window, cx); + } + }) + .ok(); + } + + telemetry::event!("Follow Agent Selected", following = !following); + } + fn render_follow_toggle(&self, cx: &mut Context) -> impl IntoElement { - let following = self - .workspace - .read_with(cx, |workspace, _| { - workspace.is_being_followed(CollaboratorId::Agent) - }) - .unwrap_or(false); + let following = self.is_following(cx); + + let tooltip_label = if following { + if self.agent.name() == "Zed Agent" { + format!("Stop Following the {}", 
self.agent.name()) + } else { + format!("Stop Following {}", self.agent.name()) + } + } else { + if self.agent.name() == "Zed Agent" { + format!("Follow the {}", self.agent.name()) + } else { + format!("Follow {}", self.agent.name()) + } + }; IconButton::new("follow-agent", IconName::Crosshair) .icon_size(IconSize::Small) @@ -3247,10 +3721,10 @@ impl AcpThreadView { .selected_icon_color(Some(Color::Custom(cx.theme().players().agent().cursor))) .tooltip(move |window, cx| { if following { - Tooltip::for_action("Stop Following Agent", &Follow, window, cx) + Tooltip::for_action(tooltip_label.clone(), &Follow, window, cx) } else { Tooltip::with_meta( - "Follow Agent", + tooltip_label.clone(), Some(&Follow), "Track the agent's location as it reads and edits files.", window, @@ -3259,15 +3733,7 @@ impl AcpThreadView { } }) .on_click(cx.listener(move |this, _, window, cx| { - this.workspace - .update(cx, |workspace, cx| { - if following { - workspace.unfollow(CollaboratorId::Agent, window, cx); - } else { - workspace.follow(CollaboratorId::Agent, window, cx); - } - }) - .ok(); + this.toggle_following(window, cx); })) } @@ -3303,6 +3769,7 @@ impl AcpThreadView { .open_path(path, None, true, window, cx) .detach_and_log_err(cx); } + MentionUri::PastedImage => {} MentionUri::Directory { abs_path } => { let project = workspace.project(); let Some(entry) = project.update(cx, |project, cx| { @@ -3317,9 +3784,14 @@ impl AcpThreadView { }); } MentionUri::Symbol { - path, line_range, .. + abs_path: path, + line_range, + .. } - | MentionUri::Selection { path, line_range } => { + | MentionUri::Selection { + abs_path: Some(path), + line_range, + } => { let project = workspace.project(); let Some((path, _)) = project.update(cx, |project, cx| { let path = project.find_project_path(path, cx)?; @@ -3335,8 +3807,8 @@ impl AcpThreadView { let Some(editor) = item.await?.downcast::() else { return Ok(()); }; - let range = - Point::new(line_range.start, 0)..Point::new(line_range.start, 0); + let range = Point::new(*line_range.start(), 0) + ..Point::new(*line_range.start(), 0); editor .update_in(cx, |editor, window, cx| { editor.change_selections( @@ -3351,6 +3823,7 @@ impl AcpThreadView { }) .detach_and_log_err(cx); } + MentionUri::Selection { abs_path: None, .. } => {} MentionUri::Thread { id, name } => { if let Some(panel) = workspace.panel::(cx) { panel.update(cx, |panel, cx| { @@ -3553,7 +4026,8 @@ impl AcpThreadView { return; } - let title = self.title(cx); + // TODO: Change this once we have title summarization for external agents. 
+ let title = self.agent.name(); match AgentSettings::get_global(cx).notify_when_agent_waiting { NotifyWhenAgentWaiting::PrimaryScreen => { @@ -3671,7 +4145,20 @@ impl AcpThreadView { } } - fn render_thread_controls(&self, cx: &Context) -> impl IntoElement { + fn render_thread_controls( + &self, + thread: &Entity, + cx: &Context, + ) -> impl IntoElement { + let is_generating = matches!(thread.read(cx).status(), ThreadStatus::Generating); + if is_generating { + return h_flex().id("thread-controls-container").ml_1().child( + div() + .py_2() + .px(rems_from_px(22.)) + .child(SpinnerLabel::new().size(LabelSize::Small)), + ); + } let open_as_markdown = IconButton::new("open-as-markdown", IconName::FileMarkdown) .shape(ui::IconButtonShape::Square) .icon_size(IconSize::Small) @@ -3698,14 +4185,20 @@ impl AcpThreadView { .group("thread-controls-container") .w_full() .mr_1() + .pt_1() .pb_2() .px(RESPONSE_PADDING_X) + .gap_px() .opacity(0.4) .hover(|style| style.opacity(1.)) .flex_wrap() .justify_end(); - if AgentSettings::get_global(cx).enable_feedback { + if AgentSettings::get_global(cx).enable_feedback + && self + .thread() + .is_some_and(|thread| thread.read(cx).connection().telemetry().is_some()) + { let feedback = self.thread_feedback.feedback; container = container.child( div().visible_on_hover("thread-controls-container").child( @@ -3762,13 +4255,8 @@ impl AcpThreadView { container.child(open_as_markdown).child(scroll_to_top) } - fn render_feedback_feedback_editor( - editor: Entity, - window: &mut Window, - cx: &Context, - ) -> Div { - let focus_handle = editor.focus_handle(cx); - v_flex() + fn render_feedback_feedback_editor(editor: Entity, cx: &Context) -> Div { + h_flex() .key_context("AgentFeedbackMessageEditor") .on_action(cx.listener(move |this, _: &menu::Cancel, _, cx| { this.thread_feedback.dismiss_comments(); @@ -3777,43 +4265,31 @@ impl AcpThreadView { .on_action(cx.listener(move |this, _: &menu::Confirm, _window, cx| { this.submit_feedback_message(cx); })) - .mb_2() - .mx_4() .p_2() + .mb_2() + .mx_5() + .gap_1() .rounded_md() .border_1() .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background) - .child(editor) + .child(div().w_full().child(editor)) .child( h_flex() - .gap_1() - .justify_end() .child( - Button::new("dismiss-feedback-message", "Cancel") - .label_size(LabelSize::Small) - .key_binding( - KeyBinding::for_action_in(&menu::Cancel, &focus_handle, window, cx) - .map(|kb| kb.size(rems_from_px(10.))), - ) + IconButton::new("dismiss-feedback-message", IconName::Close) + .icon_color(Color::Error) + .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) .on_click(cx.listener(move |this, _, _window, cx| { this.thread_feedback.dismiss_comments(); cx.notify(); })), ) .child( - Button::new("submit-feedback-message", "Share Feedback") - .style(ButtonStyle::Tinted(ui::TintColor::Accent)) - .label_size(LabelSize::Small) - .key_binding( - KeyBinding::for_action_in( - &menu::Confirm, - &focus_handle, - window, - cx, - ) - .map(|kb| kb.size(rems_from_px(10.))), - ) + IconButton::new("submit-feedback-message", IconName::Return) + .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) .on_click(cx.listener(move |this, _, _window, cx| { this.submit_feedback_message(cx); })), @@ -3987,6 +4463,12 @@ impl AcpThreadView { }) } + pub(crate) fn insert_selections(&self, window: &mut Window, cx: &mut Context) { + self.message_editor.update(cx, |message_editor, cx| { + message_editor.insert_selections(window, cx); + }) + } + fn 
render_thread_retry_status_callout( &self, _window: &mut Window, @@ -4032,6 +4514,9 @@ impl AcpThreadView { fn render_thread_error(&self, window: &mut Window, cx: &mut Context) -> Option
{ let content = match self.thread_error.as_ref()? { ThreadError::Other(error) => self.render_any_thread_error(error.clone(), cx), + ThreadError::AuthenticationRequired(error) => { + self.render_authentication_required_error(error.clone(), cx) + } ThreadError::PaymentRequired => self.render_payment_required_error(cx), ThreadError::ModelRequestLimitReached(plan) => { self.render_model_request_limit_reached_error(*plan, cx) @@ -4045,11 +4530,53 @@ impl AcpThreadView { } fn render_any_thread_error(&self, error: SharedString, cx: &mut Context<'_, Self>) -> Callout { + let can_resume = self + .thread() + .map_or(false, |thread| thread.read(cx).can_resume(cx)); + + let can_enable_burn_mode = self.as_native_thread(cx).map_or(false, |thread| { + let thread = thread.read(cx); + let supports_burn_mode = thread + .model() + .map_or(false, |model| model.supports_burn_mode()); + supports_burn_mode && thread.completion_mode() == CompletionMode::Normal + }); + Callout::new() .severity(Severity::Error) .title("Error") + .icon(IconName::XCircle) .description(error.clone()) - .actions_slot(self.create_copy_button(error.to_string())) + .actions_slot( + h_flex() + .gap_0p5() + .when(can_resume && can_enable_burn_mode, |this| { + this.child( + Button::new("enable-burn-mode-and-retry", "Enable Burn Mode and Retry") + .icon(IconName::ZedBurnMode) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.toggle_burn_mode(&ToggleBurnMode, window, cx); + this.resume_chat(cx); + })), + ) + }) + .when(can_resume, |this| { + this.child( + Button::new("retry", "Retry") + .icon(IconName::RotateCw) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, _window, cx| { + this.resume_chat(cx); + })), + ) + }) + .child(self.create_copy_button(error.to_string())), + ) .dismiss_action(self.dismiss_error_button(cx)) } @@ -4059,6 +4586,7 @@ impl AcpThreadView { Callout::new() .severity(Severity::Error) + .icon(IconName::XCircle) .title("Free Usage Exceeded") .description(ERROR_MESSAGE) .actions_slot( @@ -4070,6 +4598,25 @@ impl AcpThreadView { .dismiss_action(self.dismiss_error_button(cx)) } + fn render_authentication_required_error( + &self, + error: SharedString, + cx: &mut Context, + ) -> Callout { + Callout::new() + .severity(Severity::Error) + .title("Authentication Required") + .icon(IconName::XCircle) + .description(error.clone()) + .actions_slot( + h_flex() + .gap_0p5() + .child(self.authenticate_button(cx)) + .child(self.create_copy_button(error)), + ) + .dismiss_action(self.dismiss_error_button(cx)) + } + fn render_model_request_limit_reached_error( &self, plan: cloud_llm_client::Plan, @@ -4085,6 +4632,7 @@ impl AcpThreadView { Callout::new() .severity(Severity::Error) .title("Model Prompt Limit Reached") + .icon(IconName::XCircle) .description(error_message) .actions_slot( h_flex() @@ -4191,6 +4739,49 @@ impl AcpThreadView { })) } + fn authenticate_button(&self, cx: &mut Context) -> impl IntoElement { + Button::new("authenticate", "Authenticate") + .label_size(LabelSize::Small) + .style(ButtonStyle::Filled) + .on_click(cx.listener({ + move |this, _, window, cx| { + let agent = this.agent.clone(); + let ThreadState::Ready { thread, .. 
} = &this.thread_state else { + return; + }; + + let connection = thread.read(cx).connection().clone(); + let err = AuthRequired { + description: None, + provider_id: None, + }; + this.clear_thread_error(cx); + let this = cx.weak_entity(); + window.defer(cx, |window, cx| { + Self::handle_auth_required(this, err, agent, connection, window, cx); + }) + } + })) + } + + pub(crate) fn reauthenticate(&mut self, window: &mut Window, cx: &mut Context) { + let agent = self.agent.clone(); + let ThreadState::Ready { thread, .. } = &self.thread_state else { + return; + }; + + let connection = thread.read(cx).connection().clone(); + let err = AuthRequired { + description: None, + provider_id: None, + }; + self.clear_thread_error(cx); + let this = cx.weak_entity(); + window.defer(cx, |window, cx| { + Self::handle_auth_required(this, err, agent, connection, window, cx); + }) + } + fn upgrade_button(&self, cx: &mut Context) -> impl IntoElement { Button::new("upgrade", "Upgrade") .label_size(LabelSize::Small) @@ -4214,11 +4805,42 @@ impl AcpThreadView { ); cx.notify(); } + + pub fn delete_history_entry(&mut self, entry: HistoryEntry, cx: &mut Context) { + let task = match entry { + HistoryEntry::AcpThread(thread) => self.history_store.update(cx, |history, cx| { + history.delete_thread(thread.id.clone(), cx) + }), + HistoryEntry::TextThread(context) => self.history_store.update(cx, |history, cx| { + history.delete_text_thread(context.path.clone(), cx) + }), + }; + task.detach_and_log_err(cx); + } +} + +fn loading_contents_spinner(size: IconSize) -> AnyElement { + Icon::new(IconName::LoadCircle) + .size(size) + .color(Color::Accent) + .with_animation( + "load_context_circle", + Animation::new(Duration::from_secs(3)).repeat(), + |icon, delta| icon.transform(Transformation::rotate(percentage(delta))), + ) + .into_any_element() } impl Focusable for AcpThreadView { fn focus_handle(&self, cx: &App) -> FocusHandle { - self.message_editor.focus_handle(cx) + match self.thread_state { + ThreadState::Loading { .. } | ThreadState::Ready { .. } => { + self.message_editor.focus_handle(cx) + } + ThreadState::LoadError(_) | ThreadState::Unauthenticated { .. } => { + self.focus_handle.clone() + } + } } } @@ -4234,66 +4856,56 @@ impl Render for AcpThreadView { .on_action(cx.listener(Self::toggle_burn_mode)) .on_action(cx.listener(Self::keep_all)) .on_action(cx.listener(Self::reject_all)) + .track_focus(&self.focus_handle) .bg(cx.theme().colors().panel_background) .child(match &self.thread_state { ThreadState::Unauthenticated { connection, description, configuration_view, + pending_auth_method, .. } => self.render_auth_required_state( connection, description.as_ref(), configuration_view.as_ref(), + pending_auth_method.as_ref(), window, cx, ), - ThreadState::Loading { .. } => v_flex().flex_1().child(self.render_empty_state(cx)), - ThreadState::LoadError(e) => v_flex() - .p_2() + ThreadState::Loading { .. } => v_flex() .flex_1() + .child(self.render_recent_history(window, cx)), + ThreadState::LoadError(e) => v_flex() + .flex_1() + .size_full() .items_center() - .justify_center() + .justify_end() .child(self.render_load_error(e, cx)), - ThreadState::Ready { thread, .. 
} => { - let thread_clone = thread.clone(); - - v_flex().flex_1().map(|this| { - if has_messages { - this.child( - list( - self.list_state.clone(), - cx.processor(|this, index: usize, window, cx| { - let Some((entry, len)) = this.thread().and_then(|thread| { - let entries = &thread.read(cx).entries(); - Some((entries.get(index)?, entries.len())) - }) else { - return Empty.into_any(); - }; - this.render_entry(index, len, entry, window, cx) - }), - ) - .with_sizing_behavior(gpui::ListSizingBehavior::Auto) - .flex_grow() - .into_any(), + ThreadState::Ready { .. } => v_flex().flex_1().map(|this| { + if has_messages { + this.child( + list( + self.list_state.clone(), + cx.processor(|this, index: usize, window, cx| { + let Some((entry, len)) = this.thread().and_then(|thread| { + let entries = &thread.read(cx).entries(); + Some((entries.get(index)?, entries.len())) + }) else { + return Empty.into_any(); + }; + this.render_entry(index, len, entry, window, cx) + }), ) - .child(self.render_vertical_scrollbar(cx)) - .children( - match thread_clone.read(cx).status() { - ThreadStatus::Idle - | ThreadStatus::WaitingForToolConfirmation => None, - ThreadStatus::Generating => div() - .px_5() - .py_2() - .child(LoadingLabel::new("").size(LabelSize::Small)) - .into(), - }, - ) - } else { - this.child(self.render_empty_state(cx)) - } - }) - } + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .flex_grow() + .into_any(), + ) + .child(self.render_vertical_scrollbar(cx)) + } else { + this.child(self.render_recent_history(window, cx)) + } + }), }) // The activity bar is intentionally rendered outside of the ThreadState::Ready match // above so that the scrollbar doesn't render behind it. The current setup allows @@ -4318,7 +4930,12 @@ impl Render for AcpThreadView { } } -fn default_markdown_style(buffer_font: bool, window: &Window, cx: &App) -> MarkdownStyle { +fn default_markdown_style( + buffer_font: bool, + muted_text: bool, + window: &Window, + cx: &App, +) -> MarkdownStyle { let theme_settings = ThemeSettings::get_global(cx); let colors = cx.theme().colors(); @@ -4339,20 +4956,26 @@ fn default_markdown_style(buffer_font: bool, window: &Window, cx: &App) -> Markd TextSize::Default.rems(cx) }; + let text_color = if muted_text { + colors.text_muted + } else { + colors.text + }; + text_style.refine(&TextStyleRefinement { font_family: Some(font_family), font_fallbacks: theme_settings.ui_font.fallbacks.clone(), font_features: Some(theme_settings.ui_font.features.clone()), font_size: Some(font_size.into()), line_height: Some(line_height.into()), - color: Some(cx.theme().colors().text), + color: Some(text_color), ..Default::default() }); MarkdownStyle { base_text_style: text_style.clone(), syntax: cx.theme().syntax().clone(), - selection_background_color: cx.theme().colors().element_selection_background, + selection_background_color: colors.element_selection_background, code_block_overflow_x_scroll: true, table_overflow_x_scroll: true, heading_level_styles: Some(HeadingLevelStyles { @@ -4438,7 +5061,7 @@ fn plan_label_markdown_style( window: &Window, cx: &App, ) -> MarkdownStyle { - let default_md_style = default_markdown_style(false, window, cx); + let default_md_style = default_markdown_style(false, false, window, cx); MarkdownStyle { base_text_style: TextStyle { @@ -4458,7 +5081,7 @@ fn plan_label_markdown_style( } fn terminal_command_markdown_style(window: &Window, cx: &App) -> MarkdownStyle { - let default_md_style = default_markdown_style(true, window, cx); + let default_md_style = 
default_markdown_style(true, false, window, cx); MarkdownStyle { base_text_style: TextStyle { @@ -4699,20 +5322,24 @@ pub(crate) mod tests { where C: 'static + AgentConnection + Send + Clone, { + fn telemetry_id(&self) -> &'static str { + "test" + } + fn logo(&self) -> ui::IconName { ui::IconName::Ai } - fn name(&self) -> &'static str { - "Test" + fn name(&self) -> SharedString { + "Test".into() } - fn empty_state_headline(&self) -> &'static str { - "Test" + fn empty_state_headline(&self) -> SharedString { + "Test".into() } - fn empty_state_message(&self) -> &'static str { - "Test" + fn empty_state_message(&self) -> SharedString { + "Test".into() } fn connect( @@ -4747,6 +5374,12 @@ pub(crate) mod tests { project, action_log, SessionId("test".into()), + watch::Receiver::constant(acp::PromptCapabilities { + image: true, + audio: true, + embedded_context: true, + }), + cx, ) }))) } diff --git a/crates/agent_ui/src/active_thread.rs b/crates/agent_ui/src/active_thread.rs index 2cad913295..e0cecad6e2 100644 --- a/crates/agent_ui/src/active_thread.rs +++ b/crates/agent_ui/src/active_thread.rs @@ -1595,11 +1595,6 @@ impl ActiveThread { return; }; - if model.provider.must_accept_terms(cx) { - cx.notify(); - return; - } - let edited_text = state.editor.read(cx).text(cx); let creases = state.editor.update(cx, extract_message_creases); diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 6da84758ee..aa9b2ca94f 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -5,17 +5,21 @@ mod tool_picker; use std::{sync::Arc, time::Duration}; +use agent_servers::{AgentServerCommand, AgentServerSettings, AllAgentServersSettings, Gemini}; use agent_settings::AgentSettings; +use anyhow::Result; use assistant_tool::{ToolSource, ToolWorkingSet}; use cloud_llm_client::Plan; use collections::HashMap; use context_server::ContextServerId; +use editor::{Editor, SelectionEffects, scroll::Autoscroll}; use extension::ExtensionManifest; use extension_host::ExtensionStore; use fs::Fs; use gpui::{ - Action, Animation, AnimationExt as _, AnyView, App, Corner, Entity, EventEmitter, FocusHandle, - Focusable, ScrollHandle, Subscription, Task, Transformation, WeakEntity, percentage, + Action, Animation, AnimationExt as _, AnyView, App, AsyncWindowContext, Corner, Entity, + EventEmitter, FocusHandle, Focusable, Hsla, ScrollHandle, Subscription, Task, Transformation, + WeakEntity, percentage, }; use language::LanguageRegistry; use language_model::{ @@ -23,23 +27,24 @@ use language_model::{ }; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ + Project, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, project_settings::{ContextServerSettings, ProjectSettings}, }; -use settings::{Settings, update_settings_file}; +use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu, Scrollbar, ScrollbarState, Switch, SwitchColor, SwitchField, Tooltip, prelude::*, }; use util::ResultExt as _; -use workspace::Workspace; +use workspace::{Workspace, create_and_open_local_file}; use zed_actions::ExtensionCategoryFilter; pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use manage_profiles_modal::ManageProfilesModal; use crate::{ - AddContextServer, + AddContextServer, ExternalAgent, NewExternalAgentThread, 
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider}, }; @@ -47,6 +52,7 @@ pub struct AgentConfiguration { fs: Arc, language_registry: Arc, workspace: WeakEntity, + project: WeakEntity, focus_handle: FocusHandle, configuration_views_by_provider: HashMap, context_server_store: Entity, @@ -56,6 +62,8 @@ pub struct AgentConfiguration { _registry_subscription: Subscription, scroll_handle: ScrollHandle, scrollbar_state: ScrollbarState, + gemini_is_installed: bool, + _check_for_gemini: Task<()>, } impl AgentConfiguration { @@ -65,6 +73,7 @@ impl AgentConfiguration { tools: Entity, language_registry: Arc, workspace: WeakEntity, + project: WeakEntity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -89,33 +98,34 @@ impl AgentConfiguration { cx.subscribe(&context_server_store, |_, _, _, cx| cx.notify()) .detach(); + cx.observe_global_in::(window, |this, _, cx| { + this.check_for_gemini(cx); + cx.notify(); + }) + .detach(); let scroll_handle = ScrollHandle::new(); let scrollbar_state = ScrollbarState::new(scroll_handle.clone()); - let mut expanded_provider_configurations = HashMap::default(); - if LanguageModelRegistry::read_global(cx) - .provider(&ZED_CLOUD_PROVIDER_ID) - .is_some_and(|cloud_provider| cloud_provider.must_accept_terms(cx)) - { - expanded_provider_configurations.insert(ZED_CLOUD_PROVIDER_ID, true); - } - let mut this = Self { fs, language_registry, workspace, + project, focus_handle, configuration_views_by_provider: HashMap::default(), context_server_store, expanded_context_server_tools: HashMap::default(), - expanded_provider_configurations, + expanded_provider_configurations: HashMap::default(), tools, _registry_subscription: registry_subscription, scroll_handle, scrollbar_state, + gemini_is_installed: false, + _check_for_gemini: Task::ready(()), }; this.build_provider_configuration_views(window, cx); + this.check_for_gemini(cx); this } @@ -145,6 +155,34 @@ impl AgentConfiguration { self.configuration_views_by_provider .insert(provider.id(), configuration_view); } + + fn check_for_gemini(&mut self, cx: &mut Context) { + let project = self.project.clone(); + let settings = AllAgentServersSettings::get_global(cx).clone(); + self._check_for_gemini = cx.spawn({ + async move |this, cx| { + let Some(project) = project.upgrade() else { + return; + }; + let gemini_is_installed = AgentServerCommand::resolve( + Gemini::binary_name(), + &[], + // TODO expose fallback path from the Gemini/CC types so we don't have to hardcode it again here + None, + settings.gemini, + &project, + cx, + ) + .await + .is_some(); + this.update(cx, |this, cx| { + this.gemini_is_installed = gemini_is_installed; + cx.notify(); + }) + .ok(); + } + }); + } } impl Focusable for AgentConfiguration { @@ -192,7 +230,7 @@ impl AgentConfiguration { let is_signed_in = self .workspace .read_with(cx, |workspace, _| { - workspace.client().status().borrow().is_connected() + !workspace.client().status().borrow().is_signed_out() }) .unwrap_or(false); @@ -219,7 +257,6 @@ impl AgentConfiguration { .child( h_flex() .id(provider_id_string.clone()) - .cursor_pointer() .px_2() .py_0p5() .w_full() @@ -239,10 +276,7 @@ impl AgentConfiguration { h_flex() .w_full() .gap_1() - .child( - Label::new(provider_name.clone()) - .size(LabelSize::Large), - ) + .child(Label::new(provider_name.clone())) .map(|this| { if is_zed_provider && is_signed_in { this.child( @@ -287,7 +321,7 @@ impl AgentConfiguration { "Start New Thread", ) .icon_position(IconPosition::Start) - .icon(IconName::Plus) + 
.icon(IconName::Thread) .icon_size(IconSize::Small) .icon_color(Color::Muted) .label_size(LabelSize::Small) @@ -386,7 +420,7 @@ impl AgentConfiguration { ), ) .child( - Label::new("Add at least one provider to use AI-powered features.") + Label::new("Add at least one provider to use AI-powered features with Zed's native agent.") .color(Color::Muted), ), ), @@ -527,6 +561,14 @@ impl AgentConfiguration { } } + fn card_item_bg_color(&self, cx: &mut Context) -> Hsla { + cx.theme().colors().background.opacity(0.25) + } + + fn card_item_border_color(&self, cx: &mut Context) -> Hsla { + cx.theme().colors().border.opacity(0.6) + } + fn render_context_servers_section( &mut self, window: &mut Window, @@ -544,7 +586,12 @@ impl AgentConfiguration { v_flex() .gap_0p5() .child(Headline::new("Model Context Protocol (MCP) Servers")) - .child(Label::new("Connect to context servers through the Model Context Protocol, either using Zed extensions or directly.").color(Color::Muted)), + .child( + Label::new( + "All context servers connected through the Model Context Protocol.", + ) + .color(Color::Muted), + ), ) .children( context_server_ids.into_iter().map(|context_server_id| { @@ -554,7 +601,7 @@ impl AgentConfiguration { .child( h_flex() .justify_between() - .gap_2() + .gap_1p5() .child( h_flex().w_full().child( Button::new("add-context-server", "Add Custom Server") @@ -645,8 +692,6 @@ impl AgentConfiguration { .map_or([].as_slice(), |tools| tools.as_slice()); let tool_count = tools.len(); - let border_color = cx.theme().colors().border.opacity(0.6); - let (source_icon, source_tooltip) = if is_from_extension { ( IconName::ZedMcpExtension, @@ -789,8 +834,8 @@ impl AgentConfiguration { .id(item_id.clone()) .border_1() .rounded_md() - .border_color(border_color) - .bg(cx.theme().colors().background.opacity(0.2)) + .border_color(self.card_item_border_color(cx)) + .bg(self.card_item_bg_color(cx)) .overflow_hidden() .child( h_flex() @@ -798,7 +843,11 @@ impl AgentConfiguration { .justify_between() .when( error.is_some() || are_tools_expanded && tool_count >= 1, - |element| element.border_b_1().border_color(border_color), + |element| { + element + .border_b_1() + .border_color(self.card_item_border_color(cx)) + }, ) .child( h_flex() @@ -980,6 +1029,195 @@ impl AgentConfiguration { )) }) } + + fn render_agent_servers_section(&mut self, cx: &mut Context) -> impl IntoElement { + let settings = AllAgentServersSettings::get_global(cx).clone(); + let user_defined_agents = settings + .custom + .iter() + .map(|(name, settings)| { + self.render_agent_server( + IconName::Ai, + name.clone(), + ExternalAgent::Custom { + name: name.clone(), + settings: settings.clone(), + }, + None, + cx, + ) + .into_any_element() + }) + .collect::>(); + + v_flex() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + v_flex() + .p(DynamicSpacing::Base16.rems(cx)) + .pr(DynamicSpacing::Base20.rems(cx)) + .gap_2() + .child( + v_flex() + .gap_0p5() + .child( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child(Headline::new("External Agents")) + .child( + Button::new("add-agent", "Add Agent") + .icon_position(IconPosition::Start) + .icon(IconName::Plus) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .label_size(LabelSize::Small) + .on_click( + move |_, window, cx| { + if let Some(workspace) = window.root().flatten() { + let workspace = workspace.downgrade(); + window + .spawn(cx, async |cx| { + open_new_agent_servers_entry_in_settings_editor( + workspace, + cx, + ).await + }) + .detach_and_log_err(cx); + 
} + } + ), + ) + ) + .child( + Label::new( + "Use the full power of Zed's UI with your favorite agent, connected via the Agent Client Protocol.", + ) + .color(Color::Muted), + ), + ) + .child(self.render_agent_server( + IconName::AiGemini, + "Gemini CLI", + ExternalAgent::Gemini, + (!self.gemini_is_installed).then_some(Gemini::install_command().into()), + cx, + )) + // TODO add CC + .children(user_defined_agents), + ) + } + + fn render_agent_server( + &self, + icon: IconName, + name: impl Into, + agent: ExternalAgent, + install_command: Option, + cx: &mut Context, + ) -> impl IntoElement { + let name = name.into(); + h_flex() + .p_1() + .pl_2() + .gap_1p5() + .justify_between() + .border_1() + .rounded_md() + .border_color(self.card_item_border_color(cx)) + .bg(self.card_item_bg_color(cx)) + .overflow_hidden() + .child( + h_flex() + .gap_1p5() + .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) + .child(Label::new(name.clone())), + ) + .map(|this| { + if let Some(install_command) = install_command { + this.child( + Button::new( + SharedString::from(format!("install_external_agent-{name}")), + "Install Agent", + ) + .label_size(LabelSize::Small) + .icon(IconName::Plus) + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(Tooltip::text(install_command.clone())) + .on_click(cx.listener( + move |this, _, window, cx| { + let Some(project) = this.project.upgrade() else { + return; + }; + let Some(workspace) = this.workspace.upgrade() else { + return; + }; + let cwd = project.read(cx).first_project_directory(cx); + let shell = + project.read(cx).terminal_settings(&cwd, cx).shell.clone(); + let spawn_in_terminal = task::SpawnInTerminal { + id: task::TaskId(install_command.to_string()), + full_label: install_command.to_string(), + label: install_command.to_string(), + command: Some(install_command.to_string()), + args: Vec::new(), + command_label: install_command.to_string(), + cwd, + env: Default::default(), + use_new_terminal: true, + allow_concurrent_runs: true, + reveal: Default::default(), + reveal_target: Default::default(), + hide: Default::default(), + shell, + show_summary: true, + show_command: true, + show_rerun: false, + }; + let task = workspace.update(cx, |workspace, cx| { + workspace.spawn_in_terminal(spawn_in_terminal, window, cx) + }); + cx.spawn(async move |this, cx| { + task.await; + this.update(cx, |this, cx| { + this.check_for_gemini(cx); + }) + .ok(); + }) + .detach(); + }, + )), + ) + } else { + this.child( + h_flex().gap_1().child( + Button::new( + SharedString::from(format!("start_acp_thread-{name}")), + "Start New Thread", + ) + .label_size(LabelSize::Small) + .icon(IconName::Thread) + .icon_position(IconPosition::Start) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .on_click(move |_, window, cx| { + window.dispatch_action( + NewExternalAgentThread { + agent: Some(agent.clone()), + } + .boxed_clone(), + cx, + ); + }), + ), + ) + } + }) + } } impl Render for AgentConfiguration { @@ -999,6 +1237,7 @@ impl Render for AgentConfiguration { .size_full() .overflow_y_scroll() .child(self.render_general_settings_section(cx)) + .child(self.render_agent_servers_section(cx)) .child(self.render_context_servers_section(window, cx)) .child(self.render_provider_configuration_section(cx)), ) @@ -1117,3 +1356,68 @@ fn show_unable_to_uninstall_extension_with_context_server( workspace.toggle_status_toast(status_toast, cx); } + +async fn open_new_agent_servers_entry_in_settings_editor( + workspace: 
WeakEntity, + cx: &mut AsyncWindowContext, +) -> Result<()> { + let settings_editor = workspace + .update_in(cx, |_, window, cx| { + create_and_open_local_file(paths::settings_file(), window, cx, || { + settings::initial_user_settings_content().as_ref().into() + }) + })? + .await? + .downcast::() + .unwrap(); + + settings_editor + .downgrade() + .update_in(cx, |item, window, cx| { + let text = item.buffer().read(cx).snapshot(cx).text(); + + let settings = cx.global::(); + + let edits = settings.edits_for_update::(&text, |file| { + let unique_server_name = (0..u8::MAX) + .map(|i| { + if i == 0 { + "your_agent".into() + } else { + format!("your_agent_{}", i).into() + } + }) + .find(|name| !file.custom.contains_key(name)); + if let Some(server_name) = unique_server_name { + file.custom.insert( + server_name, + AgentServerSettings { + command: AgentServerCommand { + path: "path_to_executable".into(), + args: vec![], + env: Some(HashMap::default()), + }, + }, + ); + } + }); + + if !edits.is_empty() { + let ranges = edits + .iter() + .map(|(range, _)| range.clone()) + .collect::>(); + + item.edit(edits, cx); + + item.change_selections( + SelectionEffects::scroll(Autoscroll::newest()), + window, + cx, + |selections| { + selections.select_ranges(ranges); + }, + ); + } + }) +} diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index e07424987c..1e1ff95178 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -1529,6 +1529,7 @@ impl AgentDiff { | AcpThreadEvent::TokenUsageUpdated | AcpThreadEvent::EntriesRemoved(_) | AcpThreadEvent::ToolAuthorizationRequired + | AcpThreadEvent::PromptCapabilitiesUpdated | AcpThreadEvent::Retry(_) => {} } } diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index e2c4acb1ce..269aec3365 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -5,9 +5,12 @@ use std::sync::Arc; use std::time::Duration; use acp_thread::AcpThread; +use agent_servers::AgentServerSettings; use agent2::{DbThreadMetadata, HistoryEntry}; use db::kvp::{Dismissable, KEY_VALUE_STORE}; use serde::{Deserialize, Serialize}; +use zed_actions::OpenBrowser; +use zed_actions::agent::ReauthenticateAgent; use crate::acp::{AcpThreadHistory, ThreadHistoryEvent}; use crate::agent_diff::AgentDiffThread; @@ -54,9 +57,7 @@ use gpui::{ Pixels, Subscription, Task, UpdateGlobal, WeakEntity, prelude::*, pulsating_between, }; use language::LanguageRegistry; -use language_model::{ - ConfigurationError, ConfiguredModel, LanguageModelProviderTosView, LanguageModelRegistry, -}; +use language_model::{ConfigurationError, ConfiguredModel, LanguageModelRegistry}; use project::{DisableAiSettings, Project, ProjectPath, Worktree}; use prompt_store::{PromptBuilder, PromptStore, UserPromptId}; use rules_library::{RulesLibrary, open_rules_library}; @@ -130,7 +131,7 @@ pub fn init(cx: &mut App) { if let Some(panel) = workspace.panel::(cx) { workspace.focus_panel::(window, cx); panel.update(cx, |panel, cx| { - panel.external_thread(action.agent, None, None, window, cx) + panel.external_thread(action.agent.clone(), None, None, window, cx) }); } }) @@ -241,7 +242,8 @@ enum WhichFontSize { None, } -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +// TODO unify this with ExternalAgent +#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] pub enum AgentType { #[default] Zed, @@ -249,23 +251,29 @@ pub enum AgentType { Gemini, ClaudeCode, NativeAgent, + Custom 
{ + name: SharedString, + settings: AgentServerSettings, + }, } impl AgentType { - fn label(self) -> impl Into { + fn label(&self) -> SharedString { match self { - Self::Zed | Self::TextThread => "Zed Agent", - Self::NativeAgent => "Agent 2", - Self::Gemini => "Gemini CLI", - Self::ClaudeCode => "Claude Code", + Self::Zed | Self::TextThread => "Zed Agent".into(), + Self::NativeAgent => "Agent 2".into(), + Self::Gemini => "Gemini CLI".into(), + Self::ClaudeCode => "Claude Code".into(), + Self::Custom { name, .. } => name.into(), } } - fn icon(self) -> Option { + fn icon(&self) -> Option { match self { Self::Zed | Self::NativeAgent | Self::TextThread => None, Self::Gemini => Some(IconName::AiGemini), Self::ClaudeCode => Some(IconName::AiClaude), + Self::Custom { .. } => Some(IconName::Terminal), } } } @@ -519,7 +527,7 @@ pub struct AgentPanel { impl AgentPanel { fn serialize(&mut self, cx: &mut Context) { let width = self.width; - let selected_agent = self.selected_agent; + let selected_agent = self.selected_agent.clone(); self.pending_serialization = Some(cx.background_spawn(async move { KEY_VALUE_STORE .write_kvp( @@ -609,7 +617,7 @@ impl AgentPanel { panel.update(cx, |panel, cx| { panel.width = serialized_panel.width.map(|w| w.round()); if let Some(selected_agent) = serialized_panel.selected_agent { - panel.selected_agent = selected_agent; + panel.selected_agent = selected_agent.clone(); panel.new_agent_thread(selected_agent, window, cx); } cx.notify(); @@ -903,6 +911,16 @@ impl AgentPanel { } } + fn active_thread_view(&self) -> Option<&Entity> { + match &self.active_view { + ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view), + ActiveView::Thread { .. } + | ActiveView::TextThread { .. } + | ActiveView::History + | ActiveView::Configuration => None, + } + } + fn new_thread(&mut self, action: &NewThread, window: &mut Window, cx: &mut Context) { if cx.has_flag::() { return self.new_agent_thread(AgentType::NativeAgent, window, cx); @@ -1009,6 +1027,8 @@ impl AgentPanel { } fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context) { + telemetry::event!("Agent Thread Started", agent = "zed-text"); + let context = self .context_store .update(cx, |context_store, cx| context_store.create(cx)); @@ -1069,14 +1089,17 @@ impl AgentPanel { cx.spawn_in(window, async move |this, cx| { let ext_agent = match agent_choice { Some(agent) => { - cx.background_spawn(async move { - if let Some(serialized) = - serde_json::to_string(&LastUsedExternalAgent { agent }).log_err() - { - KEY_VALUE_STORE - .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized) - .await - .log_err(); + cx.background_spawn({ + let agent = agent.clone(); + async move { + if let Some(serialized) = + serde_json::to_string(&LastUsedExternalAgent { agent }).log_err() + { + KEY_VALUE_STORE + .write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized) + .await + .log_err(); + } } }) .detach(); @@ -1098,11 +1121,15 @@ impl AgentPanel { } }; + telemetry::event!("Agent Thread Started", agent = ext_agent.name()); + let server = ext_agent.server(fs, history); this.update_in(cx, |this, window, cx| { match ext_agent { - crate::ExternalAgent::Gemini | crate::ExternalAgent::NativeAgent => { + crate::ExternalAgent::Gemini + | crate::ExternalAgent::NativeAgent + | crate::ExternalAgent::Custom { .. 
} => { if !cx.has_flag::() { return; } @@ -1453,6 +1480,7 @@ impl AgentPanel { tools, self.language_registry.clone(), self.workspace.clone(), + self.project.downgrade(), window, cx, ) @@ -1831,14 +1859,14 @@ impl AgentPanel { cx: &mut Context, ) { if self.selected_agent != agent { - self.selected_agent = agent; + self.selected_agent = agent.clone(); self.serialize(cx); } self.new_agent_thread(agent, window, cx); } pub fn selected_agent(&self) -> AgentType { - self.selected_agent + self.selected_agent.clone() } pub fn new_agent_thread( @@ -1877,6 +1905,13 @@ impl AgentPanel { window, cx, ), + AgentType::Custom { name, settings } => self.external_thread( + Some(crate::ExternalAgent::Custom { name, settings }), + None, + None, + window, + cx, + ), } } @@ -2031,9 +2066,11 @@ impl AgentPanel { match state { ThreadSummary::Pending => Label::new(ThreadSummary::DEFAULT) .truncate() + .color(Color::Muted) .into_any_element(), ThreadSummary::Generating => Label::new(LOADING_SUMMARY_PLACEHOLDER) .truncate() + .color(Color::Muted) .into_any_element(), ThreadSummary::Ready(_) => div() .w_full() @@ -2065,9 +2102,33 @@ impl AgentPanel { } } ActiveView::ExternalAgentThread { thread_view } => { - Label::new(thread_view.read(cx).title(cx)) - .truncate() - .into_any_element() + if let Some(title_editor) = thread_view.read(cx).title_editor() { + div() + .w_full() + .on_action({ + let thread_view = thread_view.downgrade(); + move |_: &menu::Confirm, window, cx| { + if let Some(thread_view) = thread_view.upgrade() { + thread_view.focus_handle(cx).focus(window); + } + } + }) + .on_action({ + let thread_view = thread_view.downgrade(); + move |_: &editor::actions::Cancel, window, cx| { + if let Some(thread_view) = thread_view.upgrade() { + thread_view.focus_handle(cx).focus(window); + } + } + }) + .child(title_editor) + .into_any_element() + } else { + Label::new(thread_view.read(cx).title()) + .color(Color::Muted) + .truncate() + .into_any_element() + } } ActiveView::TextThread { title_editor, @@ -2078,6 +2139,7 @@ impl AgentPanel { match summary { ContextSummary::Pending => Label::new(ContextSummary::DEFAULT) + .color(Color::Muted) .truncate() .into_any_element(), ContextSummary::Content(summary) => { @@ -2089,6 +2151,7 @@ impl AgentPanel { } else { Label::new(LOADING_SUMMARY_PLACEHOLDER) .truncate() + .color(Color::Muted) .into_any_element() } } @@ -2149,6 +2212,8 @@ impl AgentPanel { "Enable Full Screen" }; + let selected_agent = self.selected_agent.clone(); + PopoverMenu::new("agent-options-menu") .trigger_with_tooltip( IconButton::new("agent-options-menu", IconName::Ellipsis) @@ -2228,6 +2293,11 @@ impl AgentPanel { .action("Settings", Box::new(OpenSettings)) .separator() .action(full_screen_label, Box::new(ToggleZoom)); + + if selected_agent == AgentType::Gemini { + menu = menu.action("Reauthenticate", Box::new(ReauthenticateAgent)) + } + menu })) } @@ -2262,6 +2332,8 @@ impl AgentPanel { .menu({ let menu = self.assistant_navigation_menu.clone(); move |window, cx| { + telemetry::event!("View Thread History Clicked"); + if let Some(menu) = menu.as_ref() { menu.update(cx, |_, cx| { cx.defer_in(window, |menu, window, cx| { @@ -2440,6 +2512,8 @@ impl AgentPanel { let workspace = self.workspace.clone(); move |window, cx| { + telemetry::event!("New Thread Clicked"); + let active_thread = active_thread.clone(); Some(ContextMenu::build(window, cx, |mut menu, _window, cx| { menu = menu @@ -2574,13 +2648,64 @@ impl AgentPanel { } }), ) + }) + .when(cx.has_flag::(), |mut menu| { + // Add custom agents from settings + 
let settings = + agent_servers::AllAgentServersSettings::get_global(cx); + for (agent_name, agent_settings) in &settings.custom { + menu = menu.item( + ContextMenuEntry::new(format!("New {} Thread", agent_name)) + .icon(IconName::Terminal) + .icon_color(Color::Muted) + .handler({ + let workspace = workspace.clone(); + let agent_name = agent_name.clone(); + let agent_settings = agent_settings.clone(); + move |window, cx| { + if let Some(workspace) = workspace.upgrade() { + workspace.update(cx, |workspace, cx| { + if let Some(panel) = + workspace.panel::(cx) + { + panel.update(cx, |panel, cx| { + panel.set_selected_agent( + AgentType::Custom { + name: agent_name + .clone(), + settings: + agent_settings + .clone(), + }, + window, + cx, + ); + }); + } + }); + } + } + }), + ); + } + + menu + }) + .when(cx.has_flag::(), |menu| { + menu.separator().link( + "Add Your Own Agent", + OpenBrowser { + url: "https://agentclientprotocol.com/".into(), + } + .boxed_clone(), + ) }); menu })) } }); - let selected_agent_label = self.selected_agent.label().into(); + let selected_agent_label = self.selected_agent.label(); let selected_agent = div() .id("selected_agent_icon") .when_some(self.selected_agent.icon(), |this, icon| { @@ -3165,17 +3290,6 @@ impl AgentPanel { ConfigurationError::ModelNotFound | ConfigurationError::ProviderNotAuthenticated(_) | ConfigurationError::NoProvider => callout.into_any_element(), - ConfigurationError::ProviderPendingTermsAcceptance(provider) => { - Banner::new() - .severity(Severity::Warning) - .child(h_flex().w_full().children( - provider.render_accept_terms( - LanguageModelProviderTosView::ThreadEmptyState, - cx, - ), - )) - .into_any_element() - } } } @@ -3665,6 +3779,11 @@ impl Render for AgentPanel { } })) .on_action(cx.listener(Self::toggle_burn_mode)) + .on_action(cx.listener(|this, _: &ReauthenticateAgent, window, cx| { + if let Some(thread_view) = this.active_thread_view() { + thread_view.update(cx, |thread_view, cx| thread_view.reauthenticate(window, cx)) + } + })) .child(self.render_toolbar(window, cx)) .children(self.render_onboarding(window, cx)) .map(|parent| match &self.active_view { @@ -3882,7 +4001,11 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate { // Wait to create a new context until the workspace is no longer // being updated. 
cx.defer_in(window, move |panel, window, cx| { - if let Some(message_editor) = panel.active_message_editor() { + if let Some(thread_view) = panel.active_thread_view() { + thread_view.update(cx, |thread_view, cx| { + thread_view.insert_selections(window, cx); + }); + } else if let Some(message_editor) = panel.active_message_editor() { message_editor.update(cx, |message_editor, cx| { message_editor.context_store().update(cx, |store, cx| { let buffer = buffer.read(cx); diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 7b6557245f..110c432df3 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -28,13 +28,14 @@ use std::rc::Rc; use std::sync::Arc; use agent::{Thread, ThreadId}; +use agent_servers::AgentServerSettings; use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection}; use assistant_slash_command::SlashCommandRegistry; use client::Client; use command_palette_hooks::CommandPaletteFilter; use feature_flags::FeatureFlagAppExt as _; use fs::Fs; -use gpui::{Action, App, Entity, actions}; +use gpui::{Action, App, Entity, SharedString, actions}; use language::LanguageRegistry; use language_model::{ ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, @@ -128,6 +129,12 @@ actions!( ] ); +#[derive(Clone, Copy, Debug, PartialEq, Eq, Action)] +#[action(namespace = agent)] +#[action(deprecated_aliases = ["assistant::QuoteSelection"])] +/// Quotes the current selection in the agent panel's message editor. +pub struct QuoteSelection; + /// Creates a new conversation thread, optionally based on an existing thread. #[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = agent)] @@ -153,25 +160,43 @@ pub struct NewNativeAgentThreadFromSummary { from_session_id: agent_client_protocol::SessionId, } -#[derive(Default, Debug, Clone, Copy, PartialEq, Serialize, Deserialize, JsonSchema)] +// TODO unify this with AgentType +#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] enum ExternalAgent { #[default] Gemini, ClaudeCode, NativeAgent, + Custom { + name: SharedString, + settings: AgentServerSettings, + }, } impl ExternalAgent { + fn name(&self) -> &'static str { + match self { + Self::NativeAgent => "zed", + Self::Gemini => "gemini-cli", + Self::ClaudeCode => "claude-code", + Self::Custom { .. 
} => "custom", + } + } + pub fn server( &self, fs: Arc, history: Entity, ) -> Rc { match self { - ExternalAgent::Gemini => Rc::new(agent_servers::Gemini), - ExternalAgent::ClaudeCode => Rc::new(agent_servers::ClaudeCode), - ExternalAgent::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)), + Self::Gemini => Rc::new(agent_servers::Gemini), + Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode), + Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)), + Self::Custom { name, settings } => Rc::new(agent_servers::CustomAgentServer::new( + name.clone(), + settings, + )), } } } diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index 3633e533da..aceca79dbf 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ b/crates/agent_ui/src/language_model_selector.rs @@ -6,8 +6,7 @@ use feature_flags::ZedProFeatureFlag; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{Action, AnyElement, App, BackgroundExecutor, DismissEvent, Subscription, Task}; use language_model::{ - AuthenticateError, ConfiguredModel, LanguageModel, LanguageModelProviderId, - LanguageModelRegistry, + ConfiguredModel, LanguageModel, LanguageModelProviderId, LanguageModelRegistry, }; use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; @@ -77,7 +76,6 @@ pub struct LanguageModelPickerDelegate { all_models: Arc, filtered_entries: Vec, selected_index: usize, - _authenticate_all_providers_task: Task<()>, _subscriptions: Vec, } @@ -98,7 +96,6 @@ impl LanguageModelPickerDelegate { selected_index: Self::get_active_model_index(&entries, get_active_model(cx)), filtered_entries: entries, get_active_model: Arc::new(get_active_model), - _authenticate_all_providers_task: Self::authenticate_all_providers(cx), _subscriptions: vec![cx.subscribe_in( &LanguageModelRegistry::global(cx), window, @@ -142,56 +139,6 @@ impl LanguageModelPickerDelegate { .unwrap_or(0) } - /// Authenticates all providers in the [`LanguageModelRegistry`]. - /// - /// We do this so that we can populate the language selector with all of the - /// models from the configured providers. - fn authenticate_all_providers(cx: &mut App) -> Task<()> { - let authenticate_all_providers = LanguageModelRegistry::global(cx) - .read(cx) - .providers() - .iter() - .map(|provider| (provider.id(), provider.name(), provider.authenticate(cx))) - .collect::>(); - - cx.spawn(async move |_cx| { - for (provider_id, provider_name, authenticate_task) in authenticate_all_providers { - if let Err(err) = authenticate_task.await { - if matches!(err, AuthenticateError::CredentialsNotFound) { - // Since we're authenticating these providers in the - // background for the purposes of populating the - // language selector, we don't care about providers - // where the credentials are not found. - } else { - // Some providers have noisy failure states that we - // don't want to spam the logs with every time the - // language model selector is initialized. - // - // Ideally these should have more clear failure modes - // that we know are safe to ignore here, like what we do - // with `CredentialsNotFound` above. - match provider_id.0.as_ref() { - "lmstudio" | "ollama" => { - // LM Studio and Ollama both make fetch requests to the local APIs to determine if they are "authenticated". - // - // These fail noisily, so we don't log them. - } - "copilot_chat" => { - // Copilot Chat returns an error if Copilot is not enabled, so we don't log those errors. 
- } - _ => { - log::error!( - "Failed to authenticate provider: {}: {err}", - provider_name.0 - ); - } - } - } - } - } - }) - } - pub fn active_model(&self, cx: &App) -> Option { (self.get_active_model)(cx) } diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index bed10e90a7..45e7529ec2 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -378,18 +378,13 @@ impl MessageEditor { } fn send_to_model(&mut self, window: &mut Window, cx: &mut Context) { - let Some(ConfiguredModel { model, provider }) = self + let Some(ConfiguredModel { model, .. }) = self .thread .update(cx, |thread, cx| thread.get_or_init_configured_model(cx)) else { return; }; - if provider.must_accept_terms(cx) { - cx.notify(); - return; - } - let (user_message, user_message_creases) = self.editor.update(cx, |editor, cx| { let creases = extract_message_creases(editor, cx); let text = editor.text(cx); diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index a928f7af54..e9e7eba4b6 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -1,4 +1,5 @@ use crate::{ + QuoteSelection, language_model_selector::{LanguageModelSelector, language_model_selector}, ui::BurnModeTooltip, }; @@ -89,8 +90,6 @@ actions!( CycleMessageRole, /// Inserts the selected text into the active editor. InsertIntoEditor, - /// Quotes the current selection in the assistant conversation. - QuoteSelection, /// Splits the conversation at the current cursor position. Split, ] @@ -191,7 +190,6 @@ pub struct TextThreadEditor { invoked_slash_command_creases: HashMap, _subscriptions: Vec, last_error: Option, - show_accept_terms: bool, pub(crate) slash_menu_handle: PopoverMenuHandle>, // dragged_file_worktrees is used to keep references to worktrees that were added @@ -290,7 +288,6 @@ impl TextThreadEditor { invoked_slash_command_creases: HashMap::default(), _subscriptions, last_error: None, - show_accept_terms: false, slash_menu_handle: Default::default(), dragged_file_worktrees: Vec::new(), language_model_selector: cx.new(|cx| { @@ -364,24 +361,12 @@ impl TextThreadEditor { if self.sending_disabled(cx) { return; } + telemetry::event!("Agent Message Sent", agent = "zed-text"); self.send_to_model(window, cx); } fn send_to_model(&mut self, window: &mut Window, cx: &mut Context) { - let provider = LanguageModelRegistry::read_global(cx) - .default_model() - .map(|default| default.provider); - if provider - .as_ref() - .is_some_and(|provider| provider.must_accept_terms(cx)) - { - self.show_accept_terms = true; - cx.notify(); - return; - } - self.last_error = None; - if let Some(user_message) = self.context.update(cx, |context, cx| context.assist(cx)) { let new_selection = { let cursor = user_message @@ -1931,7 +1916,6 @@ impl TextThreadEditor { ConfigurationError::NoProvider | ConfigurationError::ModelNotFound | ConfigurationError::ProviderNotAuthenticated(_) => true, - ConfigurationError::ProviderPendingTermsAcceptance(_) => self.show_accept_terms, } } diff --git a/crates/agent_ui/src/ui.rs b/crates/agent_ui/src/ui.rs index beeaf0c43b..ada973cddf 100644 --- a/crates/agent_ui/src/ui.rs +++ b/crates/agent_ui/src/ui.rs @@ -2,13 +2,13 @@ mod agent_notification; mod burn_mode_tooltip; mod context_pill; mod end_trial_upsell; -// mod new_thread_button; mod onboarding_modal; pub mod preview; +mod unavailable_editing_tooltip; pub use agent_notification::*; pub use burn_mode_tooltip::*; pub use 
context_pill::*; pub use end_trial_upsell::*; -// pub use new_thread_button::*; pub use onboarding_modal::*; +pub use unavailable_editing_tooltip::*; diff --git a/crates/agent_ui/src/ui/new_thread_button.rs b/crates/agent_ui/src/ui/new_thread_button.rs deleted file mode 100644 index 347d6adcaf..0000000000 --- a/crates/agent_ui/src/ui/new_thread_button.rs +++ /dev/null @@ -1,75 +0,0 @@ -use gpui::{ClickEvent, ElementId, IntoElement, ParentElement, Styled}; -use ui::prelude::*; - -#[derive(IntoElement)] -pub struct NewThreadButton { - id: ElementId, - label: SharedString, - icon: IconName, - keybinding: Option, - on_click: Option>, -} - -impl NewThreadButton { - fn new(id: impl Into, label: impl Into, icon: IconName) -> Self { - Self { - id: id.into(), - label: label.into(), - icon, - keybinding: None, - on_click: None, - } - } - - fn keybinding(mut self, keybinding: Option) -> Self { - self.keybinding = keybinding; - self - } - - fn on_click(mut self, handler: F) -> Self - where - F: Fn(&mut Window, &mut App) + 'static, - { - self.on_click = Some(Box::new( - move |_: &ClickEvent, window: &mut Window, cx: &mut App| handler(window, cx), - )); - self - } -} - -impl RenderOnce for NewThreadButton { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - h_flex() - .id(self.id) - .w_full() - .py_1p5() - .px_2() - .gap_1() - .justify_between() - .rounded_md() - .border_1() - .border_color(cx.theme().colors().border.opacity(0.4)) - .bg(cx.theme().colors().element_active.opacity(0.2)) - .hover(|style| { - style - .bg(cx.theme().colors().element_hover) - .border_color(cx.theme().colors().border) - }) - .child( - h_flex() - .gap_1p5() - .child( - Icon::new(self.icon) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child(Label::new(self.label).size(LabelSize::Small)), - ) - .when_some(self.keybinding, |this, keybinding| { - this.child(keybinding.size(rems_from_px(10.))) - }) - .when_some(self.on_click, |this, on_click| { - this.on_click(move |event, window, cx| on_click(event, window, cx)) - }) - } -} diff --git a/crates/agent_ui/src/ui/preview/usage_callouts.rs b/crates/agent_ui/src/ui/preview/usage_callouts.rs index 29b12ea627..d4d037b976 100644 --- a/crates/agent_ui/src/ui/preview/usage_callouts.rs +++ b/crates/agent_ui/src/ui/preview/usage_callouts.rs @@ -86,23 +86,18 @@ impl RenderOnce for UsageCallout { (IconName::Warning, Severity::Warning) }; - div() - .border_t_1() - .border_color(cx.theme().colors().border) - .child( - Callout::new() - .icon(icon) - .severity(severity) - .icon(icon) - .title(title) - .description(message) - .actions_slot( - Button::new("upgrade", button_text) - .label_size(LabelSize::Small) - .on_click(move |_, _, cx| { - cx.open_url(&url); - }), - ), + Callout::new() + .icon(icon) + .severity(severity) + .icon(icon) + .title(title) + .description(message) + .actions_slot( + Button::new("upgrade", button_text) + .label_size(LabelSize::Small) + .on_click(move |_, _, cx| { + cx.open_url(&url); + }), ) .into_any_element() } diff --git a/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs b/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs new file mode 100644 index 0000000000..78d4c64e0a --- /dev/null +++ b/crates/agent_ui/src/ui/unavailable_editing_tooltip.rs @@ -0,0 +1,29 @@ +use gpui::{Context, IntoElement, Render, Window}; +use ui::{prelude::*, tooltip_container}; + +pub struct UnavailableEditingTooltip { + agent_name: SharedString, +} + +impl UnavailableEditingTooltip { + pub fn new(agent_name: SharedString) -> Self { + Self { 
agent_name } + } +} + +impl Render for UnavailableEditingTooltip { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + tooltip_container(window, cx, |this, _, _| { + this.child(Label::new("Unavailable Editing")).child( + div().max_w_64().child( + Label::new(format!( + "Editing previous messages is not available for {} yet.", + self.agent_name + )) + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + }) + } +} diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index 717abebfd1..6d8ac64725 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -19,7 +19,7 @@ use std::sync::Arc; use client::{Client, UserStore, zed_urls}; use gpui::{AnyElement, Entity, IntoElement, ParentElement}; -use ui::{Divider, RegisterComponent, TintColor, Tooltip, prelude::*}; +use ui::{Divider, RegisterComponent, Tooltip, prelude::*}; #[derive(PartialEq)] pub enum SignInStatus { @@ -43,12 +43,10 @@ impl From for SignInStatus { #[derive(RegisterComponent, IntoElement)] pub struct ZedAiOnboarding { pub sign_in_status: SignInStatus, - pub has_accepted_terms_of_service: bool, pub plan: Option, pub account_too_young: bool, pub continue_with_zed_ai: Arc, pub sign_in: Arc, - pub accept_terms_of_service: Arc, pub dismiss_onboarding: Option>, } @@ -64,17 +62,9 @@ impl ZedAiOnboarding { Self { sign_in_status: status.into(), - has_accepted_terms_of_service: store.has_accepted_terms_of_service(), plan: store.plan(), account_too_young: store.account_too_young(), continue_with_zed_ai, - accept_terms_of_service: Arc::new({ - let store = user_store.clone(); - move |_window, cx| { - let task = store.update(cx, |store, cx| store.accept_terms_of_service(cx)); - task.detach_and_log_err(cx); - } - }), sign_in: Arc::new(move |_window, cx| { cx.spawn({ let client = client.clone(); @@ -94,42 +84,6 @@ impl ZedAiOnboarding { self } - fn render_accept_terms_of_service(&self) -> AnyElement { - v_flex() - .gap_1() - .w_full() - .child(Headline::new("Accept Terms of Service")) - .child( - Label::new("We don’t sell your data, track you across the web, or compromise your privacy.") - .color(Color::Muted) - .mb_2(), - ) - .child( - Button::new("terms_of_service", "Review Terms of Service") - .full_width() - .style(ButtonStyle::Outlined) - .icon(IconName::ArrowUpRight) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .on_click(move |_, _window, cx| { - telemetry::event!("Review Terms of Service Clicked"); - cx.open_url(&zed_urls::terms_of_service(cx)) - }), - ) - .child( - Button::new("accept_terms", "Accept") - .full_width() - .style(ButtonStyle::Tinted(TintColor::Accent)) - .on_click({ - let callback = self.accept_terms_of_service.clone(); - move |_, window, cx| { - telemetry::event!("Terms of Service Accepted"); - (callback)(window, cx)} - }), - ) - .into_any_element() - } - fn render_sign_in_disclaimer(&self, _cx: &mut App) -> AnyElement { let signing_in = matches!(self.sign_in_status, SignInStatus::SigningIn); let plan_definitions = PlanDefinitions; @@ -359,14 +313,10 @@ impl ZedAiOnboarding { impl RenderOnce for ZedAiOnboarding { fn render(self, _window: &mut ui::Window, cx: &mut App) -> impl IntoElement { if matches!(self.sign_in_status, SignInStatus::SignedIn) { - if self.has_accepted_terms_of_service { - match self.plan { - None | Some(Plan::ZedFree) => self.render_free_plan_state(cx), - Some(Plan::ZedProTrial) => self.render_trial_state(cx), - Some(Plan::ZedPro) => 
self.render_pro_plan_state(cx), - } - } else { - self.render_accept_terms_of_service() + match self.plan { + None | Some(Plan::ZedFree) => self.render_free_plan_state(cx), + Some(Plan::ZedProTrial) => self.render_trial_state(cx), + Some(Plan::ZedPro) => self.render_pro_plan_state(cx), } } else { self.render_sign_in_disclaimer(cx) @@ -390,18 +340,15 @@ impl Component for ZedAiOnboarding { fn preview(_window: &mut Window, _cx: &mut App) -> Option { fn onboarding( sign_in_status: SignInStatus, - has_accepted_terms_of_service: bool, plan: Option, account_too_young: bool, ) -> AnyElement { ZedAiOnboarding { sign_in_status, - has_accepted_terms_of_service, plan, account_too_young, continue_with_zed_ai: Arc::new(|_, _| {}), sign_in: Arc::new(|_, _| {}), - accept_terms_of_service: Arc::new(|_, _| {}), dismiss_onboarding: None, } .into_any_element() @@ -415,27 +362,23 @@ impl Component for ZedAiOnboarding { .children(vec![ single_example( "Not Signed-in", - onboarding(SignInStatus::SignedOut, false, None, false), - ), - single_example( - "Not Accepted ToS", - onboarding(SignInStatus::SignedIn, false, None, false), + onboarding(SignInStatus::SignedOut, None, false), ), single_example( "Young Account", - onboarding(SignInStatus::SignedIn, true, None, true), + onboarding(SignInStatus::SignedIn, None, true), ), single_example( "Free Plan", - onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedFree), false), + onboarding(SignInStatus::SignedIn, Some(Plan::ZedFree), false), ), single_example( "Pro Trial", - onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedProTrial), false), + onboarding(SignInStatus::SignedIn, Some(Plan::ZedProTrial), false), ), single_example( "Pro Plan", - onboarding(SignInStatus::SignedIn, true, Some(Plan::ZedPro), false), + onboarding(SignInStatus::SignedIn, Some(Plan::ZedPro), false), ), ]) .into_any_element(), diff --git a/crates/ai_onboarding/src/ai_upsell_card.rs b/crates/ai_onboarding/src/ai_upsell_card.rs index e9639ca075..106dcb0aef 100644 --- a/crates/ai_onboarding/src/ai_upsell_card.rs +++ b/crates/ai_onboarding/src/ai_upsell_card.rs @@ -12,11 +12,11 @@ use crate::{SignInStatus, YoungAccountBanner, plan_definitions::PlanDefinitions} #[derive(IntoElement, RegisterComponent)] pub struct AiUpsellCard { - pub sign_in_status: SignInStatus, - pub sign_in: Arc, - pub account_too_young: bool, - pub user_plan: Option, - pub tab_index: Option, + sign_in_status: SignInStatus, + sign_in: Arc, + account_too_young: bool, + user_plan: Option, + tab_index: Option, } impl AiUpsellCard { @@ -43,6 +43,11 @@ impl AiUpsellCard { tab_index: None, } } + + pub fn tab_index(mut self, tab_index: Option) -> Self { + self.tab_index = tab_index; + self + } } impl RenderOnce for AiUpsellCard { diff --git a/crates/assistant_tools/src/fetch_tool.rs b/crates/assistant_tools/src/fetch_tool.rs index 79e205f205..cc22c9fc09 100644 --- a/crates/assistant_tools/src/fetch_tool.rs +++ b/crates/assistant_tools/src/fetch_tool.rs @@ -118,7 +118,7 @@ impl Tool for FetchTool { } fn needs_confirmation(&self, _: &serde_json::Value, _: &Entity, _: &App) -> bool { - false + true } fn may_perform_edits(&self) -> bool { diff --git a/crates/assistant_tools/src/find_path_tool.rs b/crates/assistant_tools/src/find_path_tool.rs index ac2c7a32ab..d1451132ae 100644 --- a/crates/assistant_tools/src/find_path_tool.rs +++ b/crates/assistant_tools/src/find_path_tool.rs @@ -435,8 +435,8 @@ mod test { assert_eq!( matches, &[ - PathBuf::from("root/apple/banana/carrot"), - PathBuf::from("root/apple/bandana/carbonara") + 
PathBuf::from(path!("root/apple/banana/carrot")), + PathBuf::from(path!("root/apple/bandana/carbonara")) ] ); @@ -447,8 +447,8 @@ mod test { assert_eq!( matches, &[ - PathBuf::from("root/apple/banana/carrot"), - PathBuf::from("root/apple/bandana/carbonara") + PathBuf::from(path!("root/apple/banana/carrot")), + PathBuf::from(path!("root/apple/bandana/carbonara")) ] ); } diff --git a/crates/assistant_tools/src/read_file_tool.rs b/crates/assistant_tools/src/read_file_tool.rs index 766ee3b161..a6e984fca6 100644 --- a/crates/assistant_tools/src/read_file_tool.rs +++ b/crates/assistant_tools/src/read_file_tool.rs @@ -68,7 +68,7 @@ impl Tool for ReadFileTool { } fn icon(&self) -> IconName { - IconName::ToolRead + IconName::ToolSearch } fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result { diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index 5146396b92..ae7eb52fd3 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -15,9 +15,10 @@ doctest = false [dependencies] anyhow.workspace = true collections.workspace = true -derive_more.workspace = true gpui.workspace = true -parking_lot.workspace = true +settings.workspace = true +schemars.workspace = true +serde.workspace = true rodio = { workspace = true, features = [ "wav", "playback", "tracing" ] } util.workspace = true workspace-hack.workspace = true diff --git a/crates/audio/src/assets.rs b/crates/audio/src/assets.rs deleted file mode 100644 index fd5c935d87..0000000000 --- a/crates/audio/src/assets.rs +++ /dev/null @@ -1,54 +0,0 @@ -use std::{io::Cursor, sync::Arc}; - -use anyhow::{Context as _, Result}; -use collections::HashMap; -use gpui::{App, AssetSource, Global}; -use rodio::{Decoder, Source, source::Buffered}; - -type Sound = Buffered>>>; - -pub struct SoundRegistry { - cache: Arc>>, - assets: Box, -} - -struct GlobalSoundRegistry(Arc); - -impl Global for GlobalSoundRegistry {} - -impl SoundRegistry { - pub fn new(source: impl AssetSource) -> Arc { - Arc::new(Self { - cache: Default::default(), - assets: Box::new(source), - }) - } - - pub fn global(cx: &App) -> Arc { - cx.global::().0.clone() - } - - pub(crate) fn set_global(source: impl AssetSource, cx: &mut App) { - cx.set_global(GlobalSoundRegistry(SoundRegistry::new(source))); - } - - pub fn get(&self, name: &str) -> Result + use<>> { - if let Some(wav) = self.cache.lock().get(name) { - return Ok(wav.clone()); - } - - let path = format!("sounds/{}.wav", name); - let bytes = self - .assets - .load(&path)? - .map(anyhow::Ok) - .with_context(|| format!("No asset available for path {path}"))?? 
- .into_owned(); - let cursor = Cursor::new(bytes); - let source = Decoder::new(cursor)?.buffered(); - - self.cache.lock().insert(name.to_string(), source.clone()); - - Ok(source) - } -} diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs index 44baa16aa2..b4f2c24fef 100644 --- a/crates/audio/src/audio.rs +++ b/crates/audio/src/audio.rs @@ -1,16 +1,19 @@ -use assets::SoundRegistry; -use derive_more::{Deref, DerefMut}; -use gpui::{App, AssetSource, BorrowAppContext, Global}; -use rodio::{OutputStream, OutputStreamBuilder}; +use anyhow::{Context as _, Result, anyhow}; +use collections::HashMap; +use gpui::{App, BorrowAppContext, Global}; +use rodio::{Decoder, OutputStream, OutputStreamBuilder, Source, source::Buffered}; +use settings::Settings; +use std::io::Cursor; use util::ResultExt; -mod assets; +mod audio_settings; +pub use audio_settings::AudioSettings; -pub fn init(source: impl AssetSource, cx: &mut App) { - SoundRegistry::set_global(source, cx); - cx.set_global(GlobalAudio(Audio::new())); +pub fn init(cx: &mut App) { + AudioSettings::register(cx); } +#[derive(Copy, Clone, Eq, Hash, PartialEq)] pub enum Sound { Joined, Leave, @@ -38,18 +41,12 @@ impl Sound { #[derive(Default)] pub struct Audio { output_handle: Option, + source_cache: HashMap>>>>, } -#[derive(Deref, DerefMut)] -struct GlobalAudio(Audio); - -impl Global for GlobalAudio {} +impl Global for Audio {} impl Audio { - pub fn new() -> Self { - Self::default() - } - fn ensure_output_exists(&mut self) -> Option<&OutputStream> { if self.output_handle.is_none() { self.output_handle = OutputStreamBuilder::open_default_stream().log_err(); @@ -58,26 +55,51 @@ impl Audio { self.output_handle.as_ref() } - pub fn play_sound(sound: Sound, cx: &mut App) { - if !cx.has_global::() { - return; - } + pub fn play_source( + source: impl rodio::Source + Send + 'static, + cx: &mut App, + ) -> anyhow::Result<()> { + cx.update_default_global(|this: &mut Self, _cx| { + let output_handle = this + .ensure_output_exists() + .ok_or_else(|| anyhow!("Could not open audio output"))?; + output_handle.mixer().add(source); + Ok(()) + }) + } - cx.update_global::(|this, cx| { + pub fn play_sound(sound: Sound, cx: &mut App) { + cx.update_default_global(|this: &mut Self, cx| { + let source = this.sound_source(sound, cx).log_err()?; let output_handle = this.ensure_output_exists()?; - let source = SoundRegistry::global(cx).get(sound.file()).log_err()?; output_handle.mixer().add(source); Some(()) }); } pub fn end_call(cx: &mut App) { - if !cx.has_global::() { - return; - } - - cx.update_global::(|this, _| { + cx.update_default_global(|this: &mut Self, _cx| { this.output_handle.take(); }); } + + fn sound_source(&mut self, sound: Sound, cx: &App) -> Result> { + if let Some(wav) = self.source_cache.get(&sound) { + return Ok(wav.clone()); + } + + let path = format!("sounds/{}.wav", sound.file()); + let bytes = cx + .asset_source() + .load(&path)? + .map(anyhow::Ok) + .with_context(|| format!("No asset available for path {path}"))?? 
+ .into_owned(); + let cursor = Cursor::new(bytes); + let source = Decoder::new(cursor)?.buffered(); + + self.source_cache.insert(sound, source.clone()); + + Ok(source) + } } diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs new file mode 100644 index 0000000000..807179881c --- /dev/null +++ b/crates/audio/src/audio_settings.rs @@ -0,0 +1,33 @@ +use anyhow::Result; +use gpui::App; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsSources}; + +#[derive(Deserialize, Debug)] +pub struct AudioSettings { + /// Opt into the new audio system. + #[serde(rename = "experimental.rodio_audio", default)] + pub rodio_audio: bool, // default is false +} + +/// Configuration of audio in Zed. +#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)] +#[serde(default)] +pub struct AudioSettingsContent { + /// Whether to use the experimental audio system + #[serde(rename = "experimental.rodio_audio", default)] + pub rodio_audio: bool, +} + +impl Settings for AudioSettings { + const KEY: Option<&'static str> = Some("audio"); + + type FileContent = AudioSettingsContent; + + fn load(sources: SettingsSources, _cx: &mut App) -> Result { + sources.json_merge() + } + + fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {} +} diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index bef0c5cfc3..b20dad4ebb 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -162,6 +162,22 @@ impl BufferDiffSnapshot { } } + fn unchanged( + buffer: &text::BufferSnapshot, + base_text: language::BufferSnapshot, + ) -> BufferDiffSnapshot { + debug_assert_eq!(buffer.text(), base_text.text()); + BufferDiffSnapshot { + inner: BufferDiffInner { + base_text, + hunks: SumTree::new(buffer), + pending_hunks: SumTree::new(buffer), + base_text_exists: false, + }, + secondary_diff: None, + } + } + fn new_with_base_text( buffer: text::BufferSnapshot, base_text: Option>, @@ -213,7 +229,10 @@ impl BufferDiffSnapshot { cx: &App, ) -> impl Future + use<> { let base_text_exists = base_text.is_some(); - let base_text_pair = base_text.map(|text| (text, base_text_snapshot.as_rope().clone())); + let base_text_pair = base_text.map(|text| { + debug_assert_eq!(&*text, &base_text_snapshot.text()); + (text, base_text_snapshot.as_rope().clone()) + }); cx.background_executor() .spawn_labeled(*CALCULATE_DIFF_TASK, async move { Self { @@ -873,6 +892,18 @@ impl BufferDiff { } } + pub fn new_unchanged( + buffer: &text::BufferSnapshot, + base_text: language::BufferSnapshot, + ) -> Self { + debug_assert_eq!(buffer.text(), base_text.text()); + BufferDiff { + buffer_id: buffer.remote_id(), + inner: BufferDiffSnapshot::unchanged(buffer, base_text).inner, + secondary_diff: None, + } + } + #[cfg(any(test, feature = "test-support"))] pub fn new_with_base_text( base_text: &str, @@ -2024,8 +2055,8 @@ mod tests { fn gen_working_copy(rng: &mut StdRng, head: &str) -> String { let mut old_lines = { let mut old_lines = Vec::new(); - let mut old_lines_iter = head.lines(); - while let Some(line) = old_lines_iter.next() { + let old_lines_iter = head.lines(); + for line in old_lines_iter { assert!(!line.ends_with("\n")); old_lines.push(line.to_owned()); } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index ed3f114943..2bbe7dd1b5 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -66,6 +66,8 @@ pub static 
IMPERSONATE_LOGIN: LazyLock> = LazyLock::new(|| { .and_then(|s| if s.is_empty() { None } else { Some(s) }) }); +pub static USE_WEB_LOGIN: LazyLock = LazyLock::new(|| std::env::var("ZED_WEB_LOGIN").is_ok()); + pub static ADMIN_API_TOKEN: LazyLock> = LazyLock::new(|| { std::env::var("ZED_ADMIN_API_TOKEN") .ok() @@ -1290,19 +1292,21 @@ impl Client { "http" => Http, _ => Err(anyhow!("invalid rpc url: {}", rpc_url))?, }; - let rpc_host = rpc_url - .host_str() - .zip(rpc_url.port_or_known_default()) - .context("missing host in rpc url")?; - let stream = { - let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap(); - let _guard = handle.enter(); - match proxy { - Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?, - None => Box::new(TcpStream::connect(rpc_host).await?), + let stream = gpui_tokio::Tokio::spawn_result(cx, { + let rpc_url = rpc_url.clone(); + async move { + let rpc_host = rpc_url + .host_str() + .zip(rpc_url.port_or_known_default()) + .context("missing host in rpc url")?; + Ok(match proxy { + Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?, + None => Box::new(TcpStream::connect(rpc_host).await?), + }) } - }; + })? + .await?; log::info!("connected to rpc endpoint {}", rpc_url); @@ -1390,11 +1394,13 @@ impl Client { if let Some((login, token)) = IMPERSONATE_LOGIN.as_ref().zip(ADMIN_API_TOKEN.as_ref()) { - eprintln!("authenticate as admin {login}, {token}"); + if !*USE_WEB_LOGIN { + eprintln!("authenticate as admin {login}, {token}"); - return this - .authenticate_as_admin(http, login.clone(), token.clone()) - .await; + return this + .authenticate_as_admin(http, login.clone(), token.clone()) + .await; + } } // Start an HTTP server to receive the redirect from Zed's sign-in page. diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index f3142a0af6..a5c1532c75 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -76,7 +76,7 @@ static ZED_CLIENT_CHECKSUM_SEED: LazyLock>> = LazyLock::new(|| { pub static MINIDUMP_ENDPOINT: LazyLock> = LazyLock::new(|| { option_env!("ZED_MINIDUMP_ENDPOINT") - .map(|s| s.to_owned()) + .map(str::to_string) .or_else(|| env::var("ZED_MINIDUMP_ENDPOINT").ok()) }); diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 20f99e3944..d23eb37519 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -1,5 +1,5 @@ use super::{Client, Status, TypedEnvelope, proto}; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use chrono::{DateTime, Utc}; use cloud_api_client::websocket_protocol::MessageToClient; use cloud_api_client::{GetAuthenticatedUserResponse, PlanInfo}; @@ -46,11 +46,6 @@ impl ProjectId { } } -#[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, -)] -pub struct DevServerProjectId(pub u64); - #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct ParticipantIndex(pub u32); @@ -116,7 +111,6 @@ pub struct UserStore { edit_prediction_usage: Option, plan_info: Option, current_user: watch::Receiver>>, - accepted_tos_at: Option>, contacts: Vec>, incoming_contact_requests: Vec>, outgoing_contact_requests: Vec>, @@ -194,7 +188,6 @@ impl UserStore { plan_info: None, model_request_usage: None, edit_prediction_usage: None, - accepted_tos_at: None, contacts: Default::default(), incoming_contact_requests: Default::default(), participant_indices: Default::default(), @@ -271,7 +264,6 @@ impl UserStore { Status::SignedOut => { 
current_user_tx.send(None).await.ok(); this.update(cx, |this, cx| { - this.accepted_tos_at = None; cx.emit(Event::PrivateUserInfoUpdated); cx.notify(); this.clear_contacts() @@ -791,19 +783,6 @@ impl UserStore { .set_authenticated_user_info(Some(response.user.metrics_id.clone()), staff); } - let accepted_tos_at = { - #[cfg(debug_assertions)] - if std::env::var("ZED_IGNORE_ACCEPTED_TOS").is_ok() { - None - } else { - response.user.accepted_tos_at - } - - #[cfg(not(debug_assertions))] - response.user.accepted_tos_at - }; - - self.accepted_tos_at = Some(accepted_tos_at); self.model_request_usage = Some(ModelRequestUsage(RequestUsage { limit: response.plan.usage.model_requests.limit, amount: response.plan.usage.model_requests.used as i32, @@ -846,32 +825,6 @@ impl UserStore { self.current_user.clone() } - pub fn has_accepted_terms_of_service(&self) -> bool { - self.accepted_tos_at - .is_some_and(|accepted_tos_at| accepted_tos_at.is_some()) - } - - pub fn accept_terms_of_service(&self, cx: &Context) -> Task> { - if self.current_user().is_none() { - return Task::ready(Err(anyhow!("no current user"))); - }; - - let client = self.client.clone(); - cx.spawn(async move |this, cx| -> anyhow::Result<()> { - let client = client.upgrade().context("client not found")?; - let response = client - .cloud_client() - .accept_terms_of_service() - .await - .context("error accepting tos")?; - this.update(cx, |this, cx| { - this.accepted_tos_at = Some(response.user.accepted_tos_at); - cx.emit(Event::PrivateUserInfoUpdated); - })?; - Ok(()) - }) - } - fn load_users( &self, request: impl RequestMessage, diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index 92417d8319..7fd96fcef0 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -102,13 +102,7 @@ impl CloudApiClient { let credentials = credentials.as_ref().context("no credentials provided")?; let authorization_header = format!("{} {}", credentials.user_id, credentials.access_token); - Ok(cx.spawn(async move |cx| { - let handle = cx - .update(|cx| Tokio::handle(cx)) - .ok() - .context("failed to get Tokio handle")?; - let _guard = handle.enter(); - + Ok(Tokio::spawn_result(cx, async move { let ws = WebSocket::connect(connect_url) .with_request( request::Builder::new() @@ -121,34 +115,6 @@ impl CloudApiClient { })) } - pub async fn accept_terms_of_service(&self) -> Result { - let request = self.build_request( - Request::builder().method(Method::POST).uri( - self.http_client - .build_zed_cloud_url("/client/terms_of_service/accept", &[])? - .as_ref(), - ), - AsyncBody::default(), - )?; - - let mut response = self.http_client.send(request).await?; - - if !response.status().is_success() { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - anyhow::bail!( - "Failed to accept terms of service.\nStatus: {:?}\nBody: {body}", - response.status() - ) - } - - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - Ok(serde_json::from_str(&body)?) 
- } - pub async fn create_llm_token( &self, system_id: Option, diff --git a/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql b/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql new file mode 100644 index 0000000000..89a42ab82b --- /dev/null +++ b/crates/collab/migrations/20250821133754_add_orb_subscription_status_and_period_to_billing_subscriptions.sql @@ -0,0 +1,4 @@ +alter table billing_subscriptions + add column orb_subscription_status text, + add column orb_current_billing_period_start_date timestamp without time zone, + add column orb_current_billing_period_end_date timestamp without time zone; diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 06eb68610f..73f327166a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -400,6 +400,8 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(multi_lsp_query) + .add_request_handler(lsp_query) + .add_message_handler(broadcast_project_message_from_host::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) @@ -910,7 +912,9 @@ impl Server { user_id=field::Empty, login=field::Empty, impersonator=field::Empty, + // todo(lsp) remove after Zed Stable hits v0.204.x multi_lsp_query_request=field::Empty, + lsp_query_request=field::Empty, release_channel=field::Empty, { TOTAL_DURATION_MS }=field::Empty, { PROCESSING_DURATION_MS }=field::Empty, @@ -2356,6 +2360,7 @@ where Ok(()) } +// todo(lsp) remove after Zed Stable hits v0.204.x async fn multi_lsp_query( request: MultiLspQuery, response: Response, @@ -2366,6 +2371,21 @@ async fn multi_lsp_query( forward_mutating_project_request(request, response, session).await } +async fn lsp_query( + request: proto::LspQuery, + response: Response, + session: MessageContext, +) -> Result<()> { + let (name, should_write) = request.query_name_and_write_permissions(); + tracing::Span::current().record("lsp_query_request", name); + tracing::info!("lsp_query message received"); + if should_write { + forward_mutating_project_request(request, response, session).await + } else { + forward_read_only_project_request(request, response, session).await + } +} + /// Notify other participants that a new buffer has been created async fn create_buffer_for_peer( request: proto::CreateBufferForPeer, diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 1b0c581983..59d66f1821 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -15,13 +15,14 @@ use editor::{ }, }; use fs::Fs; -use futures::{StreamExt, lock::Mutex}; +use futures::{SinkExt, StreamExt, channel::mpsc, lock::Mutex}; use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext}; use indoc::indoc; use language::{ FakeLspAdapter, language_settings::{AllLanguageSettings, InlayHintSettings}, }; +use lsp::LSP_REQUEST_TIMEOUT; use project::{ ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT, lsp_store::lsp_ext_command::{ExpandedMacro, LspExtExpandMacro}, @@ -1017,6 +1018,211 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T }) } +#[gpui::test] +async fn test_slow_lsp_server(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { + let mut server = 
TestServer::start(cx_a.executor()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + cx_b.update(editor::init); + + let command_name = "test_command"; + let capabilities = lsp::ServerCapabilities { + code_lens_provider: Some(lsp::CodeLensOptions { + resolve_provider: None, + }), + execute_command_provider: Some(lsp::ExecuteCommandOptions { + commands: vec![command_name.to_string()], + ..lsp::ExecuteCommandOptions::default() + }), + ..lsp::ServerCapabilities::default() + }; + client_a.language_registry().add(rust_lang()); + let mut fake_language_servers = client_a.language_registry().register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: capabilities.clone(), + ..FakeLspAdapter::default() + }, + ); + client_b.language_registry().add(rust_lang()); + client_b.language_registry().register_fake_lsp_adapter( + "Rust", + FakeLspAdapter { + capabilities, + ..FakeLspAdapter::default() + }, + ); + + client_a + .fs() + .insert_tree( + path!("/dir"), + json!({ + "one.rs": "const ONE: usize = 1;" + }), + ) + .await; + let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await; + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let editor_b = workspace_b + .update_in(cx_b, |workspace, window, cx| { + workspace.open_path((worktree_id, "one.rs"), None, true, window, cx) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + let (lsp_store_b, buffer_b) = editor_b.update(cx_b, |editor, cx| { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + let buffer = editor.buffer().read(cx).as_singleton().unwrap(); + (lsp_store, buffer) + }); + let fake_language_server = fake_language_servers.next().await.unwrap(); + cx_a.run_until_parked(); + cx_b.run_until_parked(); + + let long_request_time = LSP_REQUEST_TIMEOUT / 2; + let (request_started_tx, mut request_started_rx) = mpsc::unbounded(); + let requests_started = Arc::new(AtomicUsize::new(0)); + let requests_completed = Arc::new(AtomicUsize::new(0)); + let _lens_requests = fake_language_server + .set_request_handler::({ + let request_started_tx = request_started_tx.clone(); + let requests_started = requests_started.clone(); + let requests_completed = requests_completed.clone(); + move |params, cx| { + let mut request_started_tx = request_started_tx.clone(); + let requests_started = requests_started.clone(); + let requests_completed = requests_completed.clone(); + async move { + assert_eq!( + params.text_document.uri.as_str(), + uri!("file:///dir/one.rs") + ); + requests_started.fetch_add(1, atomic::Ordering::Release); + request_started_tx.send(()).await.unwrap(); + cx.background_executor().timer(long_request_time).await; + let i = requests_completed.fetch_add(1, atomic::Ordering::Release) + 1; + Ok(Some(vec![lsp::CodeLens { + range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 9)), + command: Some(lsp::Command { + title: format!("LSP Command {i}"), + command: command_name.to_string(), + arguments: None, + }), + data: None, + }])) + } + } + }); + + // Move cursor to a location, this should trigger the code lens call. 
+ editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([7..7]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 1, + "Selection change should have initiated the first request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + let _first_task = lsp_store_b.update(cx_b, |lsp_store, cx| { + lsp_store + .forget_code_lens_task(buffer_b.read(cx).remote_id()) + .expect("Should have the fetch task started") + }); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([1..1]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 2, + "Selection change should have initiated the second request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + let _second_task = lsp_store_b.update(cx_b, |lsp_store, cx| { + lsp_store + .forget_code_lens_task(buffer_b.read(cx).remote_id()) + .expect("Should have the fetch task started for the 2nd time") + }); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([2..2]) + }); + }); + let () = request_started_rx.next().await.unwrap(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 3, + "Selection change should have initiated the third request" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 0, + "Slow requests should be running still" + ); + + _first_task.await.unwrap(); + _second_task.await.unwrap(); + cx_b.run_until_parked(); + assert_eq!( + requests_started.load(atomic::Ordering::Acquire), + 3, + "No selection changes should trigger no more code lens requests" + ); + assert_eq!( + requests_completed.load(atomic::Ordering::Acquire), + 3, + "After enough time, all 3 LSP requests should have been served by the language server" + ); + let resulting_lens_actions = editor_b + .update(cx_b, |editor, cx| { + let lsp_store = editor.project().unwrap().read(cx).lsp_store(); + lsp_store.update(cx, |lsp_store, cx| { + lsp_store.code_lens_actions(&buffer_b, cx) + }) + }) + .await + .unwrap() + .unwrap(); + assert_eq!( + resulting_lens_actions.len(), + 1, + "Should have fetched one code lens action, but got: {resulting_lens_actions:?}" + ); + assert_eq!( + resulting_lens_actions.first().unwrap().lsp_action.title(), + "LSP Command 3", + "Only the final code lens action should be in the data" + ) +} + #[gpui::test(iterations = 10)] async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.executor()).await; diff --git a/crates/collab/src/tests/following_tests.rs b/crates/collab/src/tests/following_tests.rs index d9fd8ffeb2..1e0c915bcb 100644 --- a/crates/collab/src/tests/following_tests.rs +++ b/crates/collab/src/tests/following_tests.rs @@ -970,7 +970,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T // the follow. 
workspace_b.update_in(cx_b, |workspace, window, cx| { workspace.active_pane().update(cx, |pane, cx| { - pane.activate_prev_item(true, window, cx); + pane.activate_previous_item(&Default::default(), window, cx); }); }); executor.run_until_parked(); @@ -1073,7 +1073,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T // Client A cycles through some tabs. workspace_a.update_in(cx_a, |workspace, window, cx| { workspace.active_pane().update(cx, |pane, cx| { - pane.activate_prev_item(true, window, cx); + pane.activate_previous_item(&Default::default(), window, cx); }); }); executor.run_until_parked(); @@ -1117,7 +1117,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T workspace_a.update_in(cx_a, |workspace, window, cx| { workspace.active_pane().update(cx, |pane, cx| { - pane.activate_prev_item(true, window, cx); + pane.activate_previous_item(&Default::default(), window, cx); }); }); executor.run_until_parked(); @@ -1164,7 +1164,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T workspace_a.update_in(cx_a, |workspace, window, cx| { workspace.active_pane().update(cx, |pane, cx| { - pane.activate_prev_item(true, window, cx); + pane.activate_previous_item(&Default::default(), window, cx); }); }); executor.run_until_parked(); diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index e01736f0ef..5c73253048 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -4850,6 +4850,7 @@ async fn test_definition( let definitions_1 = project_b .update(cx_b, |p, cx| p.definitions(&buffer_b, 23, cx)) .await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!( @@ -4885,6 +4886,7 @@ async fn test_definition( let definitions_2 = project_b .update(cx_b, |p, cx| p.definitions(&buffer_b, 33, cx)) .await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!(definitions_2.len(), 1); @@ -4922,6 +4924,7 @@ async fn test_definition( let type_definitions = project_b .update(cx_b, |p, cx| p.type_definitions(&buffer_b, 7, cx)) .await + .unwrap() .unwrap(); cx_b.read(|cx| { assert_eq!( @@ -5060,7 +5063,7 @@ async fn test_references( ]))) .unwrap(); - let references = references.await.unwrap(); + let references = references.await.unwrap().unwrap(); executor.run_until_parked(); project_b.read_with(cx_b, |project, cx| { // User is informed that a request is no longer pending. @@ -5104,7 +5107,7 @@ async fn test_references( lsp_response_tx .unbounded_send(Err(anyhow!("can't find references"))) .unwrap(); - assert_eq!(references.await.unwrap(), []); + assert_eq!(references.await.unwrap().unwrap(), []); // User is informed that the request is no longer pending. executor.run_until_parked(); @@ -5505,7 +5508,8 @@ async fn test_lsp_hover( // Request hover information as the guest. 
let mut hovers = project_b .update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx)) - .await; + .await + .unwrap(); assert_eq!( hovers.len(), 2, @@ -5764,7 +5768,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it( definitions = project_b.update(cx_b, |p, cx| p.definitions(&buffer_b1, 23, cx)); } - let definitions = definitions.await.unwrap(); + let definitions = definitions.await.unwrap().unwrap(); assert_eq!( definitions.len(), 1, diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index cd37549783..d85a6610a5 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2905,6 +2905,8 @@ impl CollabPanel { h_flex().absolute().right(rems(0.)).h_full().child( h_flex() .h_full() + .bg(cx.theme().colors().background) + .rounded_l_sm() .gap_1() .px_1() .child( @@ -2920,8 +2922,7 @@ impl CollabPanel { .on_click(cx.listener(move |this, _, window, cx| { this.join_channel_chat(channel_id, window, cx) })) - .tooltip(Tooltip::text("Open channel chat")) - .visible_on_hover(""), + .tooltip(Tooltip::text("Open channel chat")), ) .child( IconButton::new("channel_notes", IconName::Reader) @@ -2936,9 +2937,9 @@ impl CollabPanel { .on_click(cx.listener(move |this, _, window, cx| { this.open_channel_notes(channel_id, window, cx) })) - .tooltip(Tooltip::text("Open channel notes")) - .visible_on_hover(""), - ), + .tooltip(Tooltip::text("Open channel notes")), + ) + .visible_on_hover(""), ), ) .tooltip({ diff --git a/crates/command_palette/src/persistence.rs b/crates/command_palette/src/persistence.rs index 5be97c36bc..01cf403083 100644 --- a/crates/command_palette/src/persistence.rs +++ b/crates/command_palette/src/persistence.rs @@ -1,7 +1,10 @@ use anyhow::Result; use db::{ - define_connection, query, - sqlez::{bindable::Column, statement::Statement}, + query, + sqlez::{ + bindable::Column, domain::Domain, statement::Statement, + thread_safe_connection::ThreadSafeConnection, + }, sqlez_macros::sql, }; use serde::{Deserialize, Serialize}; @@ -50,8 +53,11 @@ impl Column for SerializedCommandInvocation { } } -define_connection!(pub static ref COMMAND_PALETTE_HISTORY: CommandPaletteDB<()> = - &[sql!( +pub struct CommandPaletteDB(ThreadSafeConnection); + +impl Domain for CommandPaletteDB { + const NAME: &str = stringify!(CommandPaletteDB); + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE IF NOT EXISTS command_invocations( id INTEGER PRIMARY KEY AUTOINCREMENT, command_name TEXT NOT NULL, @@ -59,7 +65,9 @@ define_connection!(pub static ref COMMAND_PALETTE_HISTORY: CommandPaletteDB<()> last_invoked INTEGER DEFAULT (unixepoch()) NOT NULL ) STRICT; )]; -); +} + +db::static_connection!(COMMAND_PALETTE_HISTORY, CommandPaletteDB, []); impl CommandPaletteDB { pub async fn write_command_invocation( diff --git a/crates/context_server/src/test.rs b/crates/context_server/src/test.rs index dedf589664..008542ab24 100644 --- a/crates/context_server/src/test.rs +++ b/crates/context_server/src/test.rs @@ -1,6 +1,6 @@ use anyhow::Context as _; use collections::HashMap; -use futures::{Stream, StreamExt as _, lock::Mutex}; +use futures::{FutureExt, Stream, StreamExt as _, future::BoxFuture, lock::Mutex}; use gpui::BackgroundExecutor; use std::{pin::Pin, sync::Arc}; @@ -14,9 +14,12 @@ pub fn create_fake_transport( executor: BackgroundExecutor, ) -> FakeTransport { let name = name.into(); - FakeTransport::new(executor).on_request::(move |_params| { - create_initialize_response(name.clone()) - }) + FakeTransport::new(executor).on_request::( + move 
|_params| { + let name = name.clone(); + async move { create_initialize_response(name.clone()) } + }, + ) } fn create_initialize_response(server_name: String) -> InitializeResponse { @@ -32,8 +35,10 @@ fn create_initialize_response(server_name: String) -> InitializeResponse { } pub struct FakeTransport { - request_handlers: - HashMap<&'static str, Arc serde_json::Value + Send + Sync>>, + request_handlers: HashMap< + &'static str, + Arc BoxFuture<'static, serde_json::Value>>, + >, tx: futures::channel::mpsc::UnboundedSender, rx: Arc>>, executor: BackgroundExecutor, @@ -50,18 +55,25 @@ impl FakeTransport { } } - pub fn on_request( + pub fn on_request( mut self, - handler: impl Fn(T::Params) -> T::Response + Send + Sync + 'static, - ) -> Self { + handler: impl 'static + Send + Sync + Fn(T::Params) -> Fut, + ) -> Self + where + T: crate::types::Request, + Fut: 'static + Send + Future, + { self.request_handlers.insert( T::METHOD, Arc::new(move |value| { - let params = value.get("params").expect("Missing parameters").clone(); + let params = value + .get("params") + .cloned() + .unwrap_or(serde_json::Value::Null); let params: T::Params = serde_json::from_value(params).expect("Invalid parameters received"); let response = handler(params); - serde_json::to_value(response).unwrap() + async move { serde_json::to_value(response.await).unwrap() }.boxed() }), ); self @@ -77,7 +89,7 @@ impl Transport for FakeTransport { if let Some(method) = msg.get("method") { let method = method.as_str().expect("Invalid method received"); if let Some(handler) = self.request_handlers.get(method) { - let payload = handler(msg); + let payload = handler(msg).await; let response = serde_json::json!({ "jsonrpc": "2.0", "id": id, diff --git a/crates/copilot/src/copilot_completion_provider.rs b/crates/copilot/src/copilot_completion_provider.rs index 9308500ed4..52d75175e5 100644 --- a/crates/copilot/src/copilot_completion_provider.rs +++ b/crates/copilot/src/copilot_completion_provider.rs @@ -301,6 +301,7 @@ mod tests { init_test(cx, |settings| { settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Disabled, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 0, lsp_insert_mode: LspInsertMode::Insert, @@ -533,6 +534,7 @@ mod tests { init_test(cx, |settings| { settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Disabled, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 0, lsp_insert_mode: LspInsertMode::Insert, diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index f12913d1cb..370f0bb5f6 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -6,6 +6,7 @@ edition.workspace = true license = "GPL-3.0-or-later" [dependencies] +bincode.workspace = true crash-handler.workspace = true log.workspace = true minidumper.workspace = true @@ -14,6 +15,7 @@ release_channel.workspace = true smol.workspace = true serde.workspace = true serde_json.workspace = true +system_specs.workspace = true workspace-hack.workspace = true [target.'cfg(target_os = "macos")'.dependencies] diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 4e4b69f639..f7bc96bff9 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -4,6 +4,8 @@ use minidumper::{Client, LoopAction, MinidumpBinary}; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; +#[cfg(target_os = "macos")] +use std::sync::atomic::AtomicU32; use std::{ env, fs::{self, File}, @@ -26,6 +28,9 @@ pub 
static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60); const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); +#[cfg(target_os = "macos")] +static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); + pub async fn init(crash_init: InitCrashHandler) { if *RELEASE_CHANNEL == ReleaseChannel::Dev && env::var("ZED_GENERATE_MINIDUMPS").is_err() { return; @@ -110,9 +115,10 @@ unsafe fn suspend_all_other_threads() { mach2::task::task_threads(task, &raw mut threads, &raw mut count); } let current = unsafe { mach2::mach_init::mach_thread_self() }; + let panic_thread = PANIC_THREAD_ID.load(Ordering::SeqCst); for i in 0..count { let t = unsafe { *threads.add(i as usize) }; - if t != current { + if t != current && t != panic_thread { unsafe { mach2::thread_act::thread_suspend(t) }; } } @@ -121,6 +127,7 @@ unsafe fn suspend_all_other_threads() { pub struct CrashServer { initialization_params: OnceLock, panic_info: OnceLock, + active_gpu: OnceLock, has_connection: Arc, } @@ -129,6 +136,8 @@ pub struct CrashInfo { pub init: InitCrashHandler, pub panic: Option, pub minidump_error: Option, + pub gpus: Vec, + pub active_gpu: Option, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -137,7 +146,6 @@ pub struct InitCrashHandler { pub zed_version: String, pub release_channel: String, pub commit_sha: String, - // pub gpu: String, } #[derive(Deserialize, Serialize, Debug, Clone)] @@ -172,6 +180,18 @@ impl minidumper::ServerHandler for CrashServer { Err(e) => Some(format!("{e:?}")), }; + #[cfg(not(any(target_os = "linux", target_os = "freebsd")))] + let gpus = vec![]; + + #[cfg(any(target_os = "linux", target_os = "freebsd"))] + let gpus = match system_specs::read_gpu_info_from_sys_class_drm() { + Ok(gpus) => gpus, + Err(err) => { + log::warn!("Failed to collect GPU information for crash report: {err}"); + vec![] + } + }; + let crash_info = CrashInfo { init: self .initialization_params @@ -180,6 +200,8 @@ impl minidumper::ServerHandler for CrashServer { .clone(), panic: self.panic_info.get().cloned(), minidump_error, + active_gpu: self.active_gpu.get().cloned(), + gpus, }; let crash_data_path = paths::logs_dir() @@ -205,6 +227,13 @@ impl minidumper::ServerHandler for CrashServer { serde_json::from_slice::(&buffer).expect("invalid panic data"); self.panic_info.set(panic_data).expect("already panicked"); } + 3 => { + let gpu_specs: system_specs::GpuSpecs = + bincode::deserialize(&buffer).expect("gpu specs"); + self.active_gpu + .set(gpu_specs) + .expect("already set active gpu"); + } _ => { panic!("invalid message kind"); } @@ -238,6 +267,13 @@ pub fn handle_panic(message: String, span: Option<&Location>) { ) .ok(); log::error!("triggering a crash to generate a minidump..."); + + #[cfg(target_os = "macos")] + PANIC_THREAD_ID.store( + unsafe { mach2::mach_init::mach_thread_self() }, + Ordering::SeqCst, + ); + #[cfg(target_os = "linux")] CrashHandler.simulate_signal(crash_handler::Signal::Trap as u32); #[cfg(not(target_os = "linux"))] @@ -274,6 +310,7 @@ pub fn crash_server(socket: &Path) { initialization_params: OnceLock::new(), panic_info: OnceLock::new(), has_connection, + active_gpu: OnceLock::new(), }), &shutdown, Some(CRASH_HANDLER_PING_TIMEOUT), diff --git a/crates/db/src/db.rs b/crates/db/src/db.rs index 8b790cbec8..0802bd8bb7 100644 --- a/crates/db/src/db.rs +++ b/crates/db/src/db.rs @@ -110,11 +110,14 @@ pub async fn open_test_db(db_name: &str) -> ThreadSafeConnection { } /// Implements a basic DB wrapper for a 
given domain +/// +/// Arguments: +/// - static variable name for connection +/// - type of connection wrapper +/// - dependencies, whose migrations should be run prior to this domain's migrations #[macro_export] -macro_rules! define_connection { - (pub static ref $id:ident: $t:ident<()> = $migrations:expr; $($global:ident)?) => { - pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection); - +macro_rules! static_connection { + ($id:ident, $t:ident, [ $($d:ty),* ] $(, $global:ident)?) => { impl ::std::ops::Deref for $t { type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection; @@ -123,16 +126,6 @@ macro_rules! define_connection { } } - impl $crate::sqlez::domain::Domain for $t { - fn name() -> &'static str { - stringify!($t) - } - - fn migrations() -> &'static [&'static str] { - $migrations - } - } - impl $t { #[cfg(any(test, feature = "test-support"))] pub async fn open_test_db(name: &'static str) -> Self { @@ -142,7 +135,8 @@ macro_rules! define_connection { #[cfg(any(test, feature = "test-support"))] pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_test_db::<$t>(stringify!($id)))) + #[allow(unused_parens)] + $t($crate::smol::block_on($crate::open_test_db::<($($d,)* $t)>(stringify!($id)))) }); #[cfg(not(any(test, feature = "test-support")))] @@ -153,46 +147,10 @@ macro_rules! define_connection { } else { $crate::RELEASE_CHANNEL.dev_name() }; - $t($crate::smol::block_on($crate::open_db::<$t>(db_dir, scope))) + #[allow(unused_parens)] + $t($crate::smol::block_on($crate::open_db::<($($d,)* $t)>(db_dir, scope))) }); - }; - (pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr; $($global:ident)?) => { - pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection); - - impl ::std::ops::Deref for $t { - type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection; - - fn deref(&self) -> &Self::Target { - &self.0 - } - } - - impl $crate::sqlez::domain::Domain for $t { - fn name() -> &'static str { - stringify!($t) - } - - fn migrations() -> &'static [&'static str] { - $migrations - } - } - - #[cfg(any(test, feature = "test-support"))] - pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - $t($crate::smol::block_on($crate::open_test_db::<($($d),+, $t)>(stringify!($id)))) - }); - - #[cfg(not(any(test, feature = "test-support")))] - pub static $id: std::sync::LazyLock<$t> = std::sync::LazyLock::new(|| { - let db_dir = $crate::database_dir(); - let scope = if false $(|| stringify!($global) == "global")? 
{ - "global" - } else { - $crate::RELEASE_CHANNEL.dev_name() - }; - $t($crate::smol::block_on($crate::open_db::<($($d),+, $t)>(db_dir, scope))) - }); - }; + } } pub fn write_and_log(cx: &App, db_write: impl FnOnce() -> F + Send + 'static) @@ -219,17 +177,12 @@ mod tests { enum BadDB {} impl Domain for BadDB { - fn name() -> &'static str { - "db_tests" - } - - fn migrations() -> &'static [&'static str] { - &[ - sql!(CREATE TABLE test(value);), - // failure because test already exists - sql!(CREATE TABLE test(value);), - ] - } + const NAME: &str = "db_tests"; + const MIGRATIONS: &[&str] = &[ + sql!(CREATE TABLE test(value);), + // failure because test already exists + sql!(CREATE TABLE test(value);), + ]; } let tempdir = tempfile::Builder::new() @@ -251,25 +204,15 @@ mod tests { enum CorruptedDB {} impl Domain for CorruptedDB { - fn name() -> &'static str { - "db_tests" - } - - fn migrations() -> &'static [&'static str] { - &[sql!(CREATE TABLE test(value);)] - } + const NAME: &str = "db_tests"; + const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test(value);)]; } enum GoodDB {} impl Domain for GoodDB { - fn name() -> &'static str { - "db_tests" //Notice same name - } - - fn migrations() -> &'static [&'static str] { - &[sql!(CREATE TABLE test2(value);)] //But different migration - } + const NAME: &str = "db_tests"; //Notice same name + const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test2(value);)]; } let tempdir = tempfile::Builder::new() @@ -305,25 +248,16 @@ mod tests { enum CorruptedDB {} impl Domain for CorruptedDB { - fn name() -> &'static str { - "db_tests" - } + const NAME: &str = "db_tests"; - fn migrations() -> &'static [&'static str] { - &[sql!(CREATE TABLE test(value);)] - } + const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test(value);)]; } enum GoodDB {} impl Domain for GoodDB { - fn name() -> &'static str { - "db_tests" //Notice same name - } - - fn migrations() -> &'static [&'static str] { - &[sql!(CREATE TABLE test2(value);)] //But different migration - } + const NAME: &str = "db_tests"; //Notice same name + const MIGRATIONS: &[&str] = &[sql!(CREATE TABLE test2(value);)]; // But different migration } let tempdir = tempfile::Builder::new() diff --git a/crates/db/src/kvp.rs b/crates/db/src/kvp.rs index 256b789c9b..8ea877b35b 100644 --- a/crates/db/src/kvp.rs +++ b/crates/db/src/kvp.rs @@ -2,16 +2,26 @@ use gpui::App; use sqlez_macros::sql; use util::ResultExt as _; -use crate::{define_connection, query, write_and_log}; +use crate::{ + query, + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + write_and_log, +}; -define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> = - &[sql!( +pub struct KeyValueStore(crate::sqlez::thread_safe_connection::ThreadSafeConnection); + +impl Domain for KeyValueStore { + const NAME: &str = stringify!(KeyValueStore); + + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE IF NOT EXISTS kv_store( key TEXT PRIMARY KEY, value TEXT NOT NULL ) STRICT; )]; -); +} + +crate::static_connection!(KEY_VALUE_STORE, KeyValueStore, []); pub trait Dismissable { const KEY: &'static str; @@ -91,15 +101,19 @@ mod tests { } } -define_connection!(pub static ref GLOBAL_KEY_VALUE_STORE: GlobalKeyValueStore<()> = - &[sql!( +pub struct GlobalKeyValueStore(ThreadSafeConnection); + +impl Domain for GlobalKeyValueStore { + const NAME: &str = stringify!(GlobalKeyValueStore); + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE IF NOT EXISTS kv_store( key TEXT PRIMARY KEY, value TEXT NOT NULL ) STRICT; )]; - global -); +} + 
+crate::static_connection!(GLOBAL_KEY_VALUE_STORE, GlobalKeyValueStore, [], global); impl GlobalKeyValueStore { query! { diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 0574091851..9991395f35 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -916,7 +916,10 @@ impl RunningState { let task_store = project.read(cx).task_store().downgrade(); let weak_project = project.downgrade(); let weak_workspace = workspace.downgrade(); - let is_local = project.read(cx).is_local(); + let ssh_info = project + .read(cx) + .ssh_client() + .and_then(|it| it.read(cx).ssh_info()); cx.spawn_in(window, async move |this, cx| { let DebugScenario { @@ -1000,7 +1003,7 @@ impl RunningState { None }; - let builder = ShellBuilder::new(is_local, &task.resolved.shell); + let builder = ShellBuilder::new(ssh_info.as_ref().map(|info| &*info.shell), &task.resolved.shell); let command_label = builder.command_label(&task.resolved.command_label); let (command, args) = builder.build(task.resolved.command.clone(), &task.resolved.args); diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 53b5792e10..fd678078e8 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -18,7 +18,6 @@ collections.workspace = true component.workspace = true ctor.workspace = true editor.workspace = true -futures.workspace = true gpui.workspace = true indoc.workspace = true language.workspace = true diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index 2e20118381..1c27e820a0 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -13,7 +13,6 @@ use editor::{ DEFAULT_MULTIBUFFER_CONTEXT, Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey, display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId}, }; -use futures::future::join_all; use gpui::{ AnyElement, AnyView, App, AsyncApp, Context, Entity, EventEmitter, FocusHandle, Focusable, Global, InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, @@ -24,7 +23,6 @@ use language::{ }; use project::{ DiagnosticSummary, Project, ProjectPath, - lsp_store::rust_analyzer_ext::{cancel_flycheck, run_flycheck}, project_settings::{DiagnosticSeverity, ProjectSettings}, }; use settings::Settings; @@ -79,17 +77,10 @@ pub(crate) struct ProjectDiagnosticsEditor { paths_to_update: BTreeSet, include_warnings: bool, update_excerpts_task: Option>>, - cargo_diagnostics_fetch: CargoDiagnosticsFetchState, diagnostic_summary_update: Task<()>, _subscription: Subscription, } -struct CargoDiagnosticsFetchState { - fetch_task: Option>, - cancel_task: Option>, - diagnostic_sources: Arc>, -} - impl EventEmitter for ProjectDiagnosticsEditor {} const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50); @@ -260,11 +251,7 @@ impl ProjectDiagnosticsEditor { ) }); this.diagnostics.clear(); - this.update_all_diagnostics(false, window, cx); - }) - .detach(); - cx.observe_release(&cx.entity(), |editor, _, cx| { - editor.stop_cargo_diagnostics_fetch(cx); + this.update_all_excerpts(window, cx); }) .detach(); @@ -281,15 +268,10 @@ impl ProjectDiagnosticsEditor { editor, paths_to_update: Default::default(), update_excerpts_task: None, - cargo_diagnostics_fetch: CargoDiagnosticsFetchState { - fetch_task: None, - cancel_task: None, - diagnostic_sources: Arc::new(Vec::new()), - }, diagnostic_summary_update: Task::ready(()), _subscription: 
project_event_subscription, }; - this.update_all_diagnostics(true, window, cx); + this.update_all_excerpts(window, cx); this } @@ -373,20 +355,10 @@ impl ProjectDiagnosticsEditor { window: &mut Window, cx: &mut Context, ) { - let fetch_cargo_diagnostics = ProjectSettings::get_global(cx) - .diagnostics - .fetch_cargo_diagnostics(); - - if fetch_cargo_diagnostics { - if self.cargo_diagnostics_fetch.fetch_task.is_some() { - self.stop_cargo_diagnostics_fetch(cx); - } else { - self.update_all_diagnostics(false, window, cx); - } - } else if self.update_excerpts_task.is_some() { + if self.update_excerpts_task.is_some() { self.update_excerpts_task = None; } else { - self.update_all_diagnostics(false, window, cx); + self.update_all_excerpts(window, cx); } cx.notify(); } @@ -404,73 +376,6 @@ impl ProjectDiagnosticsEditor { } } - fn update_all_diagnostics( - &mut self, - first_launch: bool, - window: &mut Window, - cx: &mut Context, - ) { - let cargo_diagnostics_sources = self.cargo_diagnostics_sources(cx); - if cargo_diagnostics_sources.is_empty() { - self.update_all_excerpts(window, cx); - } else if first_launch && !self.summary.is_empty() { - self.update_all_excerpts(window, cx); - } else { - self.fetch_cargo_diagnostics(Arc::new(cargo_diagnostics_sources), cx); - } - } - - fn fetch_cargo_diagnostics( - &mut self, - diagnostics_sources: Arc>, - cx: &mut Context, - ) { - let project = self.project.clone(); - self.cargo_diagnostics_fetch.cancel_task = None; - self.cargo_diagnostics_fetch.fetch_task = None; - self.cargo_diagnostics_fetch.diagnostic_sources = diagnostics_sources.clone(); - if self.cargo_diagnostics_fetch.diagnostic_sources.is_empty() { - return; - } - - self.cargo_diagnostics_fetch.fetch_task = Some(cx.spawn(async move |editor, cx| { - let mut fetch_tasks = Vec::new(); - for buffer_path in diagnostics_sources.iter().cloned() { - if cx - .update(|cx| { - fetch_tasks.push(run_flycheck(project.clone(), buffer_path, cx)); - }) - .is_err() - { - break; - } - } - - let _ = join_all(fetch_tasks).await; - editor - .update(cx, |editor, _| { - editor.cargo_diagnostics_fetch.fetch_task = None; - }) - .ok(); - })); - } - - fn stop_cargo_diagnostics_fetch(&mut self, cx: &mut App) { - self.cargo_diagnostics_fetch.fetch_task = None; - let mut cancel_gasks = Vec::new(); - for buffer_path in std::mem::take(&mut self.cargo_diagnostics_fetch.diagnostic_sources) - .iter() - .cloned() - { - cancel_gasks.push(cancel_flycheck(self.project.clone(), buffer_path, cx)); - } - - self.cargo_diagnostics_fetch.cancel_task = Some(cx.background_spawn(async move { - let _ = join_all(cancel_gasks).await; - log::info!("Finished fetching cargo diagnostics"); - })); - } - /// Enqueue an update of all excerpts. Updates all paths that either /// currently have diagnostics or are currently present in this view. 
fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context) { @@ -695,30 +600,6 @@ impl ProjectDiagnosticsEditor { }) }) } - - pub fn cargo_diagnostics_sources(&self, cx: &App) -> Vec { - let fetch_cargo_diagnostics = ProjectSettings::get_global(cx) - .diagnostics - .fetch_cargo_diagnostics(); - if !fetch_cargo_diagnostics { - return Vec::new(); - } - self.project - .read(cx) - .worktrees(cx) - .filter_map(|worktree| { - let _cargo_toml_entry = worktree.read(cx).entry_for_path("Cargo.toml")?; - let rust_file_entry = worktree.read(cx).entries(false, 0).find(|entry| { - entry - .path - .extension() - .and_then(|extension| extension.to_str()) - == Some("rs") - })?; - self.project.read(cx).path_for_entry(rust_file_entry.id, cx) - }) - .collect() - } } impl Focusable for ProjectDiagnosticsEditor { diff --git a/crates/diagnostics/src/toolbar_controls.rs b/crates/diagnostics/src/toolbar_controls.rs index e77b80115f..404db39164 100644 --- a/crates/diagnostics/src/toolbar_controls.rs +++ b/crates/diagnostics/src/toolbar_controls.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use crate::{ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh}; use gpui::{Context, Entity, EventEmitter, ParentElement, Render, WeakEntity, Window}; use ui::prelude::*; @@ -15,26 +13,18 @@ impl Render for ToolbarControls { let mut include_warnings = false; let mut has_stale_excerpts = false; let mut is_updating = false; - let cargo_diagnostics_sources = Arc::new(self.diagnostics().map_or(Vec::new(), |editor| { - editor.read(cx).cargo_diagnostics_sources(cx) - })); - let fetch_cargo_diagnostics = !cargo_diagnostics_sources.is_empty(); if let Some(editor) = self.diagnostics() { let diagnostics = editor.read(cx); include_warnings = diagnostics.include_warnings; has_stale_excerpts = !diagnostics.paths_to_update.is_empty(); - is_updating = if fetch_cargo_diagnostics { - diagnostics.cargo_diagnostics_fetch.fetch_task.is_some() - } else { - diagnostics.update_excerpts_task.is_some() - || diagnostics - .project - .read(cx) - .language_servers_running_disk_based_diagnostics(cx) - .next() - .is_some() - }; + is_updating = diagnostics.update_excerpts_task.is_some() + || diagnostics + .project + .read(cx) + .language_servers_running_disk_based_diagnostics(cx) + .next() + .is_some(); } let tooltip = if include_warnings { @@ -64,7 +54,6 @@ impl Render for ToolbarControls { .on_click(cx.listener(move |toolbar_controls, _, _, cx| { if let Some(diagnostics) = toolbar_controls.diagnostics() { diagnostics.update(cx, |diagnostics, cx| { - diagnostics.stop_cargo_diagnostics_fetch(cx); diagnostics.update_excerpts_task = None; cx.notify(); }); @@ -76,7 +65,7 @@ impl Render for ToolbarControls { IconButton::new("refresh-diagnostics", IconName::ArrowCircle) .icon_color(Color::Info) .shape(IconButtonShape::Square) - .disabled(!has_stale_excerpts && !fetch_cargo_diagnostics) + .disabled(!has_stale_excerpts) .tooltip(Tooltip::for_action_title( "Refresh diagnostics", &ToggleDiagnosticsRefresh, @@ -84,17 +73,8 @@ impl Render for ToolbarControls { .on_click(cx.listener({ move |toolbar_controls, _, window, cx| { if let Some(diagnostics) = toolbar_controls.diagnostics() { - let cargo_diagnostics_sources = - Arc::clone(&cargo_diagnostics_sources); diagnostics.update(cx, move |diagnostics, cx| { - if fetch_cargo_diagnostics { - diagnostics.fetch_cargo_diagnostics( - cargo_diagnostics_sources, - cx, - ); - } else { - diagnostics.update_all_excerpts(window, cx); - } + diagnostics.update_all_excerpts(window, cx); }); } } diff --git 
a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index 33158577c4..c900eb692a 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -99,6 +99,7 @@ fn handle_preprocessing() -> Result<()> { let mut errors = HashSet::::new(); handle_frontmatter(&mut book, &mut errors); + template_big_table_of_actions(&mut book); template_and_validate_keybindings(&mut book, &mut errors); template_and_validate_actions(&mut book, &mut errors); @@ -147,6 +148,18 @@ fn handle_frontmatter(book: &mut Book, errors: &mut HashSet) }); } +fn template_big_table_of_actions(book: &mut Book) { + for_each_chapter_mut(book, |chapter| { + let needle = "{#ACTIONS_TABLE#}"; + if let Some(start) = chapter.content.rfind(needle) { + chapter.content.replace_range( + start..start + needle.len(), + &generate_big_table_of_actions(), + ); + } + }); +} + fn template_and_validate_keybindings(book: &mut Book, errors: &mut HashSet) { let regex = Regex::new(r"\{#kb (.*?)\}").unwrap(); @@ -277,6 +290,7 @@ struct ActionDef { name: &'static str, human_name: String, deprecated_aliases: &'static [&'static str], + docs: Option<&'static str>, } fn dump_all_gpui_actions() -> Vec { @@ -285,6 +299,7 @@ fn dump_all_gpui_actions() -> Vec { name: action.name, human_name: command_palette::humanize_action_name(action.name), deprecated_aliases: action.deprecated_aliases, + docs: action.documentation, }) .collect::>(); @@ -418,3 +433,54 @@ fn title_regex() -> &'static Regex { static TITLE_REGEX: OnceLock = OnceLock::new(); TITLE_REGEX.get_or_init(|| Regex::new(r"\s*(.*?)\s*").unwrap()) } + +fn generate_big_table_of_actions() -> String { + let actions = &*ALL_ACTIONS; + let mut output = String::new(); + + let mut actions_sorted = actions.iter().collect::>(); + actions_sorted.sort_by_key(|a| a.name); + + // Start the definition list with custom styling for better spacing + output.push_str("
\n"); + + for action in actions_sorted.into_iter() { + // Add the humanized action name as the term with margin + output.push_str( + "
", + ); + output.push_str(&action.human_name); + output.push_str("
\n"); + + // Add the definition with keymap name and description + output.push_str("
\n"); + + // Add the description, escaping HTML if needed + if let Some(description) = action.docs { + output.push_str( + &description + .replace("&", "&") + .replace("<", "<") + .replace(">", ">"), + ); + output.push_str("
\n"); + } + output.push_str("Keymap Name: "); + output.push_str(action.name); + output.push_str("
\n"); + if !action.deprecated_aliases.is_empty() { + output.push_str("Deprecated Aliases:"); + for alias in action.deprecated_aliases.iter() { + output.push_str(""); + output.push_str(alias); + output.push_str(", "); + } + } + output.push_str("\n
\n"); + } + + // Close the definition list + output.push_str("
\n"); + + output +} diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 964f202934..6b695af1ae 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -89,9 +89,6 @@ pub trait EditPredictionProvider: 'static + Sized { debounce: bool, cx: &mut Context, ); - fn needs_terms_acceptance(&self, _cx: &App) -> bool { - false - } fn cycle( &mut self, buffer: Entity, @@ -124,7 +121,6 @@ pub trait EditPredictionProviderHandle { fn data_collection_state(&self, cx: &App) -> DataCollectionState; fn usage(&self, cx: &App) -> Option; fn toggle_data_collection(&self, cx: &mut App); - fn needs_terms_acceptance(&self, cx: &App) -> bool; fn is_refreshing(&self, cx: &App) -> bool; fn refresh( &self, @@ -196,10 +192,6 @@ where self.read(cx).is_enabled(buffer, cursor_position, cx) } - fn needs_terms_acceptance(&self, cx: &App) -> bool { - self.read(cx).needs_terms_acceptance(cx) - } - fn is_refreshing(&self, cx: &App) -> bool { self.read(cx).is_refreshing() } diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 4f69af7ee4..0e3fe8cb1a 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -242,13 +242,9 @@ impl Render for EditPredictionButton { IconName::ZedPredictDisabled }; - if zeta::should_show_upsell_modal(&self.user_store, cx) { + if zeta::should_show_upsell_modal() { let tooltip_meta = if self.user_store.read(cx).current_user().is_some() { - if self.user_store.read(cx).has_accepted_terms_of_service() { - "Choose a Plan" - } else { - "Accept the Terms of Service" - } + "Choose a Plan" } else { "Sign In" }; diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 1e0cdc34ac..b073fe7be7 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -290,7 +290,10 @@ pub enum Block { ExcerptBoundary { excerpt: ExcerptInfo, height: u32, - starts_new_buffer: bool, + }, + BufferHeader { + excerpt: ExcerptInfo, + height: u32, }, } @@ -303,27 +306,37 @@ impl Block { .. } => BlockId::ExcerptBoundary(next_excerpt.id), Block::FoldedBuffer { first_excerpt, .. } => BlockId::FoldedBuffer(first_excerpt.id), + Block::BufferHeader { + excerpt: next_excerpt, + .. + } => BlockId::ExcerptBoundary(next_excerpt.id), } } pub fn has_height(&self) -> bool { match self { Block::Custom(block) => block.height.is_some(), - Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => true, + Block::ExcerptBoundary { .. } + | Block::FoldedBuffer { .. } + | Block::BufferHeader { .. } => true, } } pub fn height(&self) -> u32 { match self { Block::Custom(block) => block.height.unwrap_or(0), - Block::ExcerptBoundary { height, .. } | Block::FoldedBuffer { height, .. } => *height, + Block::ExcerptBoundary { height, .. } + | Block::FoldedBuffer { height, .. } + | Block::BufferHeader { height, .. } => *height, } } pub fn style(&self) -> BlockStyle { match self { Block::Custom(block) => block.style, - Block::ExcerptBoundary { .. } | Block::FoldedBuffer { .. } => BlockStyle::Sticky, + Block::ExcerptBoundary { .. } + | Block::FoldedBuffer { .. } + | Block::BufferHeader { .. } => BlockStyle::Sticky, } } @@ -332,6 +345,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Above(_)), Block::FoldedBuffer { .. 
} => false, Block::ExcerptBoundary { .. } => true, + Block::BufferHeader { .. } => true, } } @@ -340,6 +354,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Near(_)), Block::FoldedBuffer { .. } => false, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -351,6 +366,7 @@ impl Block { ), Block::FoldedBuffer { .. } => false, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -359,6 +375,7 @@ impl Block { Block::Custom(block) => matches!(block.placement, BlockPlacement::Replace(_)), Block::FoldedBuffer { .. } => true, Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => false, } } @@ -367,6 +384,7 @@ impl Block { Block::Custom(_) => false, Block::FoldedBuffer { .. } => true, Block::ExcerptBoundary { .. } => true, + Block::BufferHeader { .. } => true, } } @@ -374,9 +392,8 @@ impl Block { match self { Block::Custom(_) => false, Block::FoldedBuffer { .. } => true, - Block::ExcerptBoundary { - starts_new_buffer, .. - } => *starts_new_buffer, + Block::ExcerptBoundary { .. } => false, + Block::BufferHeader { .. } => true, } } } @@ -393,14 +410,14 @@ impl Debug for Block { .field("first_excerpt", &first_excerpt) .field("height", height) .finish(), - Self::ExcerptBoundary { - starts_new_buffer, - excerpt, - height, - } => f + Self::ExcerptBoundary { excerpt, height } => f .debug_struct("ExcerptBoundary") .field("excerpt", excerpt) - .field("starts_new_buffer", starts_new_buffer) + .field("height", height) + .finish(), + Self::BufferHeader { excerpt, height } => f + .debug_struct("BufferHeader") + .field("excerpt", excerpt) .field("height", height) .finish(), } @@ -662,13 +679,11 @@ impl BlockMap { }), ); - if buffer.show_headers() { - blocks_in_edit.extend(self.header_and_footer_blocks( - buffer, - (start_bound, end_bound), - wrap_snapshot, - )); - } + blocks_in_edit.extend(self.header_and_footer_blocks( + buffer, + (start_bound, end_bound), + wrap_snapshot, + )); BlockMap::sort_blocks(&mut blocks_in_edit); @@ -771,7 +786,7 @@ impl BlockMap { if self.buffers_with_disabled_headers.contains(&new_buffer_id) { continue; } - if self.folded_buffers.contains(&new_buffer_id) { + if self.folded_buffers.contains(&new_buffer_id) && buffer.show_headers() { let mut last_excerpt_end_row = first_excerpt.end_row; while let Some(next_boundary) = boundaries.peek() { @@ -804,20 +819,24 @@ impl BlockMap { } } - if new_buffer_id.is_some() { + let starts_new_buffer = new_buffer_id.is_some(); + let block = if starts_new_buffer && buffer.show_headers() { height += self.buffer_header_height; - } else { + Block::BufferHeader { + excerpt: excerpt_boundary.next, + height, + } + } else if excerpt_boundary.prev.is_some() { height += self.excerpt_header_height; - } - - return Some(( - BlockPlacement::Above(WrapRow(wrap_row)), Block::ExcerptBoundary { excerpt: excerpt_boundary.next, height, - starts_new_buffer: new_buffer_id.is_some(), - }, - )); + } + } else { + continue; + }; + + return Some((BlockPlacement::Above(WrapRow(wrap_row)), block)); } }) } @@ -842,13 +861,25 @@ impl BlockMap { ( Block::ExcerptBoundary { excerpt: excerpt_a, .. + } + | Block::BufferHeader { + excerpt: excerpt_a, .. }, Block::ExcerptBoundary { excerpt: excerpt_b, .. + } + | Block::BufferHeader { + excerpt: excerpt_b, .. }, ) => Some(excerpt_a.id).cmp(&Some(excerpt_b.id)), - (Block::ExcerptBoundary { .. }, Block::Custom(_)) => Ordering::Less, - (Block::Custom(_), Block::ExcerptBoundary { .. 
}) => Ordering::Greater, + ( + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, + Block::Custom(_), + ) => Ordering::Less, + ( + Block::Custom(_), + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. }, + ) => Ordering::Greater, (Block::Custom(block_a), Block::Custom(block_b)) => block_a .priority .cmp(&block_b.priority) @@ -1377,7 +1408,9 @@ impl BlockSnapshot { while let Some(transform) = cursor.item() { match &transform.block { - Some(Block::ExcerptBoundary { excerpt, .. }) => { + Some( + Block::ExcerptBoundary { excerpt, .. } | Block::BufferHeader { excerpt, .. }, + ) => { return Some(StickyHeaderExcerpt { excerpt }); } Some(block) if block.is_buffer_header() => return None, @@ -3183,9 +3216,9 @@ mod tests { // so we special case row 0 to assume a leading '\n'. // // Linehood is the birthright of strings. - let mut input_text_lines = input_text.split('\n').enumerate().peekable(); + let input_text_lines = input_text.split('\n').enumerate().peekable(); let mut block_row = 0; - while let Some((wrap_row, input_line)) = input_text_lines.next() { + for (wrap_row, input_line) in input_text_lines { let wrap_row = wrap_row as u32; let multibuffer_row = wraps_snapshot .to_point(WrapPoint::new(wrap_row, 0), Bias::Left) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 2136d5f4b3..29e009fdf8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -253,7 +253,6 @@ pub type RenderDiffHunkControlsFn = Arc< enum ReportEditorEvent { Saved { auto_saved: bool }, EditorOpened, - ZetaTosClicked, Closed, } @@ -262,7 +261,6 @@ impl ReportEditorEvent { match self { Self::Saved { .. } => "Editor Saved", Self::EditorOpened => "Editor Opened", - Self::ZetaTosClicked => "Edit Prediction Provider ToS Clicked", Self::Closed => "Editor Closed", } } @@ -1900,6 +1898,60 @@ impl Editor { editor.update_lsp_data(false, Some(*buffer_id), window, cx); } } + + project::Event::EntryRenamed(transaction) => { + let Some(workspace) = editor.workspace() else { + return; + }; + let Some(active_editor) = workspace.read(cx).active_item_as::(cx) + else { + return; + }; + if active_editor.entity_id() == cx.entity_id() { + let edited_buffers_already_open = { + let other_editors: Vec> = workspace + .read(cx) + .panes() + .iter() + .flat_map(|pane| pane.read(cx).items_of_type::()) + .filter(|editor| editor.entity_id() != cx.entity_id()) + .collect(); + + transaction.0.keys().all(|buffer| { + other_editors.iter().any(|editor| { + let multi_buffer = editor.read(cx).buffer(); + multi_buffer.read(cx).is_singleton() + && multi_buffer.read(cx).as_singleton().map_or( + false, + |singleton| { + singleton.entity_id() == buffer.entity_id() + }, + ) + }) + }) + }; + + if !edited_buffers_already_open { + let workspace = workspace.downgrade(); + let transaction = transaction.clone(); + cx.defer_in(window, move |_, window, cx| { + cx.spawn_in(window, async move |editor, cx| { + Self::open_project_transaction( + &editor, + workspace, + transaction, + "Rename".to_string(), + cx, + ) + .await + .ok() + }) + .detach(); + }); + } + } + } + _ => {} }, )); @@ -4876,11 +4928,7 @@ impl Editor { cx: &mut Context, ) -> bool { let position = self.selections.newest_anchor().head(); - let multibuffer = self.buffer.read(cx); - let Some(buffer) = position - .buffer_id - .and_then(|buffer_id| multibuffer.buffer(buffer_id)) - else { + let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else { return false; }; @@ -5526,6 +5574,11 @@ impl Editor { .as_ref() .is_none_or(|query| 
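The block_map change above replaces the starts_new_buffer flag on ExcerptBoundary with a dedicated BufferHeader variant that call sites match explicitly. A minimal sketch of that shape, with stand-in types rather than the real editor definitions:

#[derive(Debug)]
struct ExcerptInfo {
    id: u64,
}

#[derive(Debug)]
enum Block {
    ExcerptBoundary { excerpt: ExcerptInfo, height: u32 },
    BufferHeader { excerpt: ExcerptInfo, height: u32 },
}

impl Block {
    // Call sites that used to read `starts_new_buffer` now match on the variant.
    fn is_buffer_header(&self) -> bool {
        matches!(self, Block::BufferHeader { .. })
    }

    fn height(&self) -> u32 {
        match self {
            Block::ExcerptBoundary { height, .. } | Block::BufferHeader { height, .. } => *height,
        }
    }
}

fn main() {
    let header = Block::BufferHeader {
        excerpt: ExcerptInfo { id: 1 },
        height: 2,
    };
    assert!(header.is_buffer_header());
    assert_eq!(header.height(), 2);
    println!("{:?}", header);
}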
!query.chars().any(|c| c.is_digit(10))); + let omit_word_completions = match &query { + Some(query) => query.chars().count() < completion_settings.words_min_length, + None => completion_settings.words_min_length != 0, + }; + let (mut words, provider_responses) = match &provider { Some(provider) => { let provider_responses = provider.completions( @@ -5537,9 +5590,11 @@ impl Editor { cx, ); - let words = match completion_settings.words { - WordsCompletionMode::Disabled => Task::ready(BTreeMap::default()), - WordsCompletionMode::Enabled | WordsCompletionMode::Fallback => cx + let words = match (omit_word_completions, completion_settings.words) { + (true, _) | (_, WordsCompletionMode::Disabled) => { + Task::ready(BTreeMap::default()) + } + (false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx .background_spawn(async move { buffer_snapshot.words_in_range(WordsQuery { fuzzy_contents: None, @@ -5551,16 +5606,20 @@ impl Editor { (words, provider_responses) } - None => ( - cx.background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, - range: word_search_range, - skip_digits, + None => { + let words = if omit_word_completions { + Task::ready(BTreeMap::default()) + } else { + cx.background_spawn(async move { + buffer_snapshot.words_in_range(WordsQuery { + fuzzy_contents: None, + range: word_search_range, + skip_digits, + }) }) - }), - Task::ready(Ok(Vec::new())), - ), + }; + (words, Task::ready(Ok(Vec::new()))) + } }; let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -5844,7 +5903,7 @@ impl Editor { multibuffer_anchor.start.to_offset(&snapshot) ..multibuffer_anchor.end.to_offset(&snapshot) }; - if newest_anchor.head().buffer_id != Some(buffer.remote_id()) { + if snapshot.buffer_id_for_anchor(newest_anchor.head()) != Some(buffer.remote_id()) { return None; } @@ -6286,7 +6345,7 @@ impl Editor { } pub async fn open_project_transaction( - this: &WeakEntity, + editor: &WeakEntity, workspace: WeakEntity, transaction: ProjectTransaction, title: String, @@ -6304,7 +6363,7 @@ impl Editor { if let Some((buffer, transaction)) = entries.first() { if entries.len() == 1 { - let excerpt = this.update(cx, |editor, cx| { + let excerpt = editor.update(cx, |editor, cx| { editor .buffer() .read(cx) @@ -6701,7 +6760,6 @@ impl Editor { return; } - let buffer_id = cursor_position.buffer_id; let buffer = this.buffer.read(cx); if buffer .text_anchor_for_position(cursor_position, cx) @@ -6714,8 +6772,8 @@ impl Editor { let mut write_ranges = Vec::new(); let mut read_ranges = Vec::new(); for highlight in highlights { - for (excerpt_id, excerpt_range) in - buffer.excerpts_for_buffer(cursor_buffer.read(cx).remote_id(), cx) + let buffer_id = cursor_buffer.read(cx).remote_id(); + for (excerpt_id, excerpt_range) in buffer.excerpts_for_buffer(buffer_id, cx) { let start = highlight .range @@ -6730,12 +6788,12 @@ impl Editor { } let range = Anchor { - buffer_id, + buffer_id: Some(buffer_id), excerpt_id, text_anchor: start, diff_base_anchor: None, }..Anchor { - buffer_id, + buffer_id: Some(buffer_id), excerpt_id, text_anchor: end, diff_base_anchor: None, @@ -9120,45 +9178,6 @@ impl Editor { let provider = self.edit_prediction_provider.as_ref()?; let provider_icon = Self::get_prediction_provider_icon_name(&self.edit_prediction_provider); - if provider.provider.needs_terms_acceptance(cx) { - return Some( - h_flex() - .min_w(min_width) - .flex_1() - .px_2() - .py_1() - .gap_3() - .elevation_2(cx) - .hover(|style| 
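The omit_word_completions check above gates word completions behind the new words_min_length setting. A small self-contained sketch of that rule, assuming a plain usize threshold in place of the CompletionSettings field:

fn omit_word_completions(query: Option<&str>, words_min_length: usize) -> bool {
    match query {
        Some(query) => query.chars().count() < words_min_length,
        None => words_min_length != 0,
    }
}

fn main() {
    assert!(omit_word_completions(Some("wo"), 3)); // below the threshold: words omitted
    assert!(!omit_word_completions(Some("wow"), 3)); // threshold met: words offered
    assert!(!omit_word_completions(None, 0)); // no query and a zero threshold: not gated
}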
style.bg(cx.theme().colors().element_hover)) - .id("accept-terms") - .cursor_pointer() - .on_mouse_down(MouseButton::Left, |_, window, _| window.prevent_default()) - .on_click(cx.listener(|this, _event, window, cx| { - cx.stop_propagation(); - this.report_editor_event(ReportEditorEvent::ZetaTosClicked, None, cx); - window.dispatch_action( - zed_actions::OpenZedPredictOnboarding.boxed_clone(), - cx, - ); - })) - .child( - h_flex() - .flex_1() - .gap_2() - .child(Icon::new(provider_icon)) - .child(Label::new("Accept Terms of Service")) - .child(div().w_full()) - .child( - Icon::new(IconName::ArrowUpRight) - .color(Color::Muted) - .size(IconSize::Small), - ) - .into_any_element(), - ) - .into_any(), - ); - } - let is_refreshing = provider.provider.is_refreshing(cx); fn pending_completion_container(icon: IconName) -> Div { @@ -9500,17 +9519,21 @@ impl Editor { selection: Range, cx: &mut Context, ) { - let buffer_id = match (&selection.start.buffer_id, &selection.end.buffer_id) { - (Some(a), Some(b)) if a == b => a, - _ => { - log::error!("expected anchor range to have matching buffer IDs"); - return; - } - }; - let multi_buffer = self.buffer().read(cx); - let Some(buffer) = multi_buffer.buffer(*buffer_id) else { + let Some((_, buffer, _)) = self + .buffer() + .read(cx) + .excerpt_containing(selection.start, cx) + else { return; }; + let Some((_, end_buffer, _)) = self.buffer().read(cx).excerpt_containing(selection.end, cx) + else { + return; + }; + if buffer != end_buffer { + log::error!("expected anchor range to have matching buffer IDs"); + return; + } let id = post_inc(&mut self.next_completion_id); let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -9756,6 +9779,9 @@ impl Editor { } pub fn backspace(&mut self, _: &Backspace, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.select_autoclose_pair(window, cx); @@ -9849,6 +9875,9 @@ impl Editor { } pub fn delete(&mut self, _: &Delete, window: &mut Window, cx: &mut Context) { + if self.read_only(cx) { + return; + } self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); self.transact(window, cx, |this, window, cx| { this.change_selections(Default::default(), window, cx, |s| { @@ -10597,16 +10626,12 @@ impl Editor { snapshot: &EditorSnapshot, cx: &mut Context, ) -> Option<(Anchor, Breakpoint)> { - let project = self.project.clone()?; - - let buffer_id = breakpoint_position.buffer_id.or_else(|| { - snapshot - .buffer_snapshot - .buffer_id_for_excerpt(breakpoint_position.excerpt_id) - })?; + let buffer = self + .buffer + .read(cx) + .buffer_for_anchor(breakpoint_position, cx)?; let enclosing_excerpt = breakpoint_position.excerpt_id; - let buffer = project.read(cx).buffer_for_id(buffer_id, cx)?; let buffer_snapshot = buffer.read(cx).snapshot(); let row = buffer_snapshot @@ -10779,21 +10804,11 @@ impl Editor { return; }; - let Some(buffer_id) = breakpoint_position.buffer_id.or_else(|| { - if breakpoint_position == Anchor::min() { - self.buffer() - .read(cx) - .excerpt_buffer_ids() - .into_iter() - .next() - } else { - None - } - }) else { - return; - }; - - let Some(buffer) = self.buffer().read(cx).buffer(buffer_id) else { + let Some(buffer) = self + .buffer + .read(cx) + .buffer_for_anchor(breakpoint_position, cx) + else { return; }; @@ -11021,7 +11036,7 @@ impl Editor { let mut col = 0; let mut changed = false; - while let Some(ch) = chars.next() { + for ch in 
chars.by_ref() { match ch { ' ' => { reindented_line.push(' '); @@ -11077,7 +11092,7 @@ impl Editor { let mut first_non_indent_char = None; let mut changed = false; - while let Some(ch) = chars.next() { + for ch in chars.by_ref() { match ch { ' ' => { // Keep track of spaces. Append \t when we reach tab_size @@ -15436,7 +15451,8 @@ impl Editor { return; }; - let Some(buffer_id) = buffer.anchor_after(next_diagnostic.range.start).buffer_id else { + let next_diagnostic_start = buffer.anchor_after(next_diagnostic.range.start); + let Some(buffer_id) = buffer.buffer_id_for_anchor(next_diagnostic_start) else { return; }; self.change_selections(Default::default(), window, cx, |s| { @@ -15714,7 +15730,9 @@ impl Editor { }; cx.spawn_in(window, async move |editor, cx| { - let definitions = definitions.await?; + let Some(definitions) = definitions.await? else { + return Ok(Navigated::No); + }; let navigated = editor .update_in(cx, |editor, window, cx| { editor.navigate_to_hover_links( @@ -16056,7 +16074,9 @@ impl Editor { } }); - let locations = references.await?; + let Some(locations) = references.await? else { + return anyhow::Ok(Navigated::No); + }; if locations.is_empty() { return anyhow::Ok(Navigated::No); } @@ -20425,11 +20445,8 @@ impl Editor { .range_to_buffer_ranges_with_deleted_hunks(selection.range()) { if let Some(anchor) = anchor { - // selection is in a deleted hunk - let Some(buffer_id) = anchor.buffer_id else { - continue; - }; - let Some(buffer_handle) = multi_buffer.buffer(buffer_id) else { + let Some(buffer_handle) = multi_buffer.buffer_for_anchor(anchor, cx) + else { continue; }; let offset = text::ToOffset::to_offset( @@ -21841,7 +21858,7 @@ pub trait SemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>>; + ) -> Option>>>; fn inline_values( &self, @@ -21880,7 +21897,7 @@ pub trait SemanticsProvider { position: text::Anchor, kind: GotoDefinitionKind, cx: &mut App, - ) -> Option>>>; + ) -> Option>>>>; fn range_for_rename( &self, @@ -21993,7 +22010,13 @@ impl CodeActionProvider for Entity { Ok(code_lens_actions .context("code lens fetch")? .into_iter() - .chain(code_actions.context("code action fetch")?) + .flatten() + .chain( + code_actions + .context("code action fetch")? 
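Several provider calls in this hunk now return an outer Option (no response) around the list of results, and callers flatten it instead of assuming a list. A rough, project-independent illustration of that calling pattern with a toy function:

fn definitions_response(server_responded: bool) -> Option<Vec<&'static str>> {
    server_responded.then(|| vec!["target.rs:10"])
}

fn main() {
    // No response is handled the same way as an empty list of locations.
    let locations: Vec<_> = definitions_response(false).into_iter().flatten().collect();
    assert!(locations.is_empty());

    let locations: Vec<_> = definitions_response(true).into_iter().flatten().collect();
    assert_eq!(locations, vec!["target.rs:10"]);
}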
+ .into_iter() + .flatten(), + ) .collect()) }) }) @@ -22288,7 +22311,7 @@ impl SemanticsProvider for Entity { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>> { + ) -> Option>>> { Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) } @@ -22309,7 +22332,7 @@ impl SemanticsProvider for Entity { position: text::Anchor, kind: GotoDefinitionKind, cx: &mut App, - ) -> Option>>> { + ) -> Option>>>> { Some(self.update(cx, |project, cx| match kind { GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx), GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx), diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 96261fdb2c..2cfdb92593 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -57,7 +57,9 @@ use util::{ use workspace::{ CloseActiveItem, CloseAllItems, CloseOtherItems, MoveItemToPaneInDirection, NavigationEntry, OpenOptions, ViewId, + invalid_buffer_view::InvalidBufferView, item::{FollowEvent, FollowableItem, Item, ItemHandle, SaveOptions}, + register_project_item, }; #[gpui::test] @@ -12237,6 +12239,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) { settings.defaults.completions = Some(CompletionSettings { lsp_insert_mode, words: WordsCompletionMode::Disabled, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 0, }); @@ -12295,6 +12298,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) update_test_language_settings(&mut cx, |settings| { settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Disabled, + words_min_length: 0, // set the opposite here to ensure that the action is overriding the default behavior lsp_insert_mode: LspInsertMode::Insert, lsp: true, @@ -12331,6 +12335,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) update_test_language_settings(&mut cx, |settings| { settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Disabled, + words_min_length: 0, // set the opposite here to ensure that the action is overriding the default behavior lsp_insert_mode: LspInsertMode::Replace, lsp: true, @@ -13072,6 +13077,7 @@ async fn test_word_completion(cx: &mut TestAppContext) { init_test(cx, |language_settings| { language_settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Fallback, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 10, lsp_insert_mode: LspInsertMode::Insert, @@ -13168,6 +13174,7 @@ async fn test_word_completions_do_not_duplicate_lsp_ones(cx: &mut TestAppContext init_test(cx, |language_settings| { language_settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Enabled, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 0, lsp_insert_mode: LspInsertMode::Insert, @@ -13231,6 +13238,7 @@ async fn test_word_completions_continue_on_typing(cx: &mut TestAppContext) { init_test(cx, |language_settings| { language_settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Disabled, + words_min_length: 0, lsp: true, lsp_fetch_timeout_ms: 0, lsp_insert_mode: LspInsertMode::Insert, @@ -13304,6 +13312,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { init_test(cx, |language_settings| { language_settings.defaults.completions = Some(CompletionSettings { words: WordsCompletionMode::Fallback, + words_min_length: 0, lsp: false, lsp_fetch_timeout_ms: 0, 
lsp_insert_mode: LspInsertMode::Insert, @@ -13361,6 +13370,56 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_word_completions_do_not_show_before_threshold(cx: &mut TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.completions = Some(CompletionSettings { + words: WordsCompletionMode::Enabled, + words_min_length: 3, + lsp: true, + lsp_fetch_timeout_ms: 0, + lsp_insert_mode: LspInsertMode::Insert, + }); + }); + + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + cx.set_state(indoc! {"ˇ + wow + wowen + wowser + "}); + cx.simulate_keystroke("w"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!( + "expected completion menu to be hidden, as words completion threshold is not met" + ); + } + }); + + cx.simulate_keystroke("o"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!( + "expected completion menu to be hidden, as words completion threshold is not met still" + ); + } + }); + + cx.simulate_keystroke("w"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if let Some(CodeContextMenu::Completions(menu)) = editor.context_menu.borrow_mut().as_ref() + { + assert_eq!(completion_menu_entries(menu), &["wowen", "wowser"], "After word completion threshold is met, matching words should be shown, excluding the already typed word"); + } else { + panic!("expected completion menu to be open after the word completions threshold is met"); + } + }); +} + fn gen_text_edit(params: &CompletionParams, text: &str) -> Option { let position = || lsp::Position { line: params.text_document_position.position.line, @@ -22656,7 +22715,7 @@ async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { .await .unwrap(); pane.update_in(cx, |pane, window, cx| { - pane.navigate_backward(window, cx); + pane.navigate_backward(&Default::default(), window, cx); }); cx.run_until_parked(); pane.update(cx, |pane, cx| { @@ -24243,7 +24302,7 @@ async fn test_document_colors(cx: &mut TestAppContext) { workspace .update(cx, |workspace, window, cx| { workspace.active_pane().update(cx, |pane, cx| { - pane.navigate_backward(window, cx); + pane.navigate_backward(&Default::default(), window, cx); }) }) .unwrap(); @@ -24291,6 +24350,41 @@ async fn test_newline_replacement_in_single_line(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_non_utf_8_opens(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + cx.update(|cx| { + register_project_item::(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root1", json!({})).await; + fs.insert_file("/root1/one.pdf", vec![0xff, 0xfe, 0xfd]) + .await; + + let project = Project::test(fs, ["/root1".as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let worktree_id = project.update(cx, |project, cx| { + project.worktrees(cx).next().unwrap().read(cx).id() + }); + + let handle = workspace + .update_in(cx, |workspace, window, cx| { + let project_path = (worktree_id, "one.pdf"); + workspace.open_path(project_path, None, true, window, cx) + }) + .await + .unwrap(); + + assert_eq!( + handle.to_any().entity_type(), + TypeId::of::() + ); +} + #[track_caller] fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec { editor diff --git 
a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 416f35d7a7..4f3580da07 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -74,6 +74,7 @@ use std::{ fmt::{self, Write}, iter, mem, ops::{Deref, Range}, + path::{self, Path}, rc::Rc, sync::Arc, time::{Duration, Instant}, @@ -89,8 +90,8 @@ use unicode_segmentation::UnicodeSegmentation; use util::post_inc; use util::{RangeExt, ResultExt, debug_panic}; use workspace::{ - CollaboratorId, OpenInTerminal, OpenTerminal, RevealInProjectPanel, Workspace, item::Item, - notifications::NotifyTaskExt, + CollaboratorId, ItemSettings, OpenInTerminal, OpenTerminal, RevealInProjectPanel, Workspace, + item::Item, notifications::NotifyTaskExt, }; /// Determines what kinds of highlights should be applied to a lines background. @@ -2749,7 +2750,10 @@ impl EditorElement { let mut block_offset = 0; let mut found_excerpt_header = false; for (_, block) in snapshot.blocks_in_range(prev_line..row_range.start) { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. } + ) { found_excerpt_header = true; break; } @@ -2766,7 +2770,10 @@ impl EditorElement { let mut block_height = 0; let mut found_excerpt_header = false; for (_, block) in snapshot.blocks_in_range(row_range.end..cons_line) { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. } + ) { found_excerpt_header = true; } block_height += block.height(); @@ -3452,42 +3459,41 @@ impl EditorElement { .into_any_element() } - Block::ExcerptBoundary { - excerpt, - height, - starts_new_buffer, - .. - } => { + Block::ExcerptBoundary { .. } => { let color = cx.theme().colors().clone(); let mut result = v_flex().id(block_id).w_full(); + result = result.child( + h_flex().relative().child( + div() + .top(line_height / 2.) + .absolute() + .w_full() + .h_px() + .bg(color.border_variant), + ), + ); + + result.into_any() + } + + Block::BufferHeader { excerpt, height } => { + let mut result = v_flex().id(block_id).w_full(); + let jump_data = header_jump_data(snapshot, block_row_start, *height, excerpt); - if *starts_new_buffer { - if sticky_header_excerpt_id != Some(excerpt.id) { - let selected = selected_buffer_ids.contains(&excerpt.buffer_id); + if sticky_header_excerpt_id != Some(excerpt.id) { + let selected = selected_buffer_ids.contains(&excerpt.buffer_id); - result = result.child(div().pr(editor_margins.right).child( - self.render_buffer_header( - excerpt, false, selected, false, jump_data, window, cx, - ), - )); - } else { - result = - result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height())); - } - } else { - result = result.child( - h_flex().relative().child( - div() - .top(line_height / 2.) 
- .absolute() - .w_full() - .h_px() - .bg(color.border_variant), + result = result.child(div().pr(editor_margins.right).child( + self.render_buffer_header( + excerpt, false, selected, false, jump_data, window, cx, ), - ); - }; + )); + } else { + result = + result.child(div().h(FILE_HEADER_HEIGHT as f32 * window.line_height())); + } result.into_any() } @@ -3597,171 +3603,187 @@ impl EditorElement { let focus_handle = editor.focus_handle(cx); let colors = cx.theme().colors(); - let header = - div() - .p_1() - .w_full() - .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) - .child( - h_flex() - .size_full() - .gap_2() - .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) - .pl_0p5() - .pr_5() - .rounded_sm() - .when(is_sticky, |el| el.shadow_md()) - .border_1() - .map(|div| { - let border_color = if is_selected - && is_folded - && focus_handle.contains_focused(window, cx) - { - colors.border_focused - } else { - colors.border - }; - div.border_color(border_color) - }) - .bg(colors.editor_subheader_background) - .hover(|style| style.bg(colors.element_hover)) - .map(|header| { - let editor = self.editor.clone(); - let buffer_id = for_excerpt.buffer_id; - let toggle_chevron_icon = - FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path); - header.child( - div() - .hover(|style| style.bg(colors.element_selected)) - .rounded_xs() - .child( - ButtonLike::new("toggle-buffer-fold") - .style(ui::ButtonStyle::Transparent) - .height(px(28.).into()) - .width(px(28.)) - .children(toggle_chevron_icon) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::with_meta_in( - "Toggle Excerpt Fold", - Some(&ToggleFold), - "Alt+click to toggle all", - &focus_handle, + let header = div() + .p_1() + .w_full() + .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) + .child( + h_flex() + .size_full() + .gap_2() + .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) + .pl_0p5() + .pr_5() + .rounded_sm() + .when(is_sticky, |el| el.shadow_md()) + .border_1() + .map(|div| { + let border_color = if is_selected + && is_folded + && focus_handle.contains_focused(window, cx) + { + colors.border_focused + } else { + colors.border + }; + div.border_color(border_color) + }) + .bg(colors.editor_subheader_background) + .hover(|style| style.bg(colors.element_hover)) + .map(|header| { + let editor = self.editor.clone(); + let buffer_id = for_excerpt.buffer_id; + let toggle_chevron_icon = + FileIcons::get_chevron_icon(!is_folded, cx).map(Icon::from_path); + header.child( + div() + .hover(|style| style.bg(colors.element_selected)) + .rounded_xs() + .child( + ButtonLike::new("toggle-buffer-fold") + .style(ui::ButtonStyle::Transparent) + .height(px(28.).into()) + .width(px(28.)) + .children(toggle_chevron_icon) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |window, cx| { + Tooltip::with_meta_in( + "Toggle Excerpt Fold", + Some(&ToggleFold), + "Alt+click to toggle all", + &focus_handle, + window, + cx, + ) + } + }) + .on_click(move |event, window, cx| { + if event.modifiers().alt { + // Alt+click toggles all buffers + editor.update(cx, |editor, cx| { + editor.toggle_fold_all( + &ToggleFoldAll, window, cx, - ) - } - }) - .on_click(move |event, window, cx| { - if event.modifiers().alt { - // Alt+click toggles all buffers + ); + }); + } else { + // Regular click toggles single buffer + if is_folded { editor.update(cx, |editor, cx| { - editor.toggle_fold_all( - &ToggleFoldAll, - window, - cx, - ); + editor.unfold_buffer(buffer_id, cx); }); } else { - // 
Regular click toggles single buffer - if is_folded { - editor.update(cx, |editor, cx| { - editor.unfold_buffer(buffer_id, cx); - }); - } else { - editor.update(cx, |editor, cx| { - editor.fold_buffer(buffer_id, cx); - }); - } + editor.update(cx, |editor, cx| { + editor.fold_buffer(buffer_id, cx); + }); } - }), - ), - ) - }) - .children( - editor - .addons - .values() - .filter_map(|addon| { - addon.render_buffer_header_controls(for_excerpt, window, cx) - }) - .take(1), + } + }), + ), ) - .children(indicator) - .child( - h_flex() - .cursor_pointer() - .id("path header block") - .size_full() - .justify_between() - .overflow_hidden() - .child( - h_flex() - .gap_2() - .child( - Label::new( - filename - .map(SharedString::from) - .unwrap_or_else(|| "untitled".into()), - ) - .single_line() - .when_some(file_status, |el, status| { - el.color(if status.is_conflicted() { - Color::Conflict - } else if status.is_modified() { - Color::Modified - } else if status.is_deleted() { - Color::Disabled - } else { - Color::Created - }) - .when(status.is_deleted(), |el| el.strikethrough()) - }), - ) - .when_some(parent_path, |then, path| { - then.child(div().child(path).text_color( - if file_status.is_some_and(FileStatus::is_deleted) { - colors.text_disabled - } else { - colors.text_muted + }) + .children( + editor + .addons + .values() + .filter_map(|addon| { + addon.render_buffer_header_controls(for_excerpt, window, cx) + }) + .take(1), + ) + .child( + h_flex() + .size(Pixels(12.0)) + .justify_center() + .children(indicator), + ) + .child( + h_flex() + .cursor_pointer() + .id("path header block") + .size_full() + .justify_between() + .overflow_hidden() + .child( + h_flex() + .gap_2() + .map(|path_header| { + let filename = filename + .map(SharedString::from) + .unwrap_or_else(|| "untitled".into()); + + path_header + .when(ItemSettings::get_global(cx).file_icons, |el| { + let path = path::Path::new(filename.as_str()); + let icon = FileIcons::get_icon(path, cx) + .unwrap_or_default(); + let icon = + Icon::from_path(icon).color(Color::Muted); + el.child(icon) + }) + .child(Label::new(filename).single_line().when_some( + file_status, + |el, status| { + el.color(if status.is_conflicted() { + Color::Conflict + } else if status.is_modified() { + Color::Modified + } else if status.is_deleted() { + Color::Disabled + } else { + Color::Created + }) + .when(status.is_deleted(), |el| { + el.strikethrough() + }) }, )) - }), - ) - .when( - can_open_excerpts && is_selected && relative_path.is_some(), - |el| { - el.child( - h_flex() - .id("jump-to-file-button") - .gap_2p5() - .child(Label::new("Jump To File")) - .children( - KeyBinding::for_action_in( - &OpenExcerpts, - &focus_handle, - window, - cx, - ) - .map(|binding| binding.into_any_element()), - ), - ) - }, - ) - .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) - .on_click(window.listener_for(&self.editor, { - move |editor, e: &ClickEvent, window, cx| { - editor.open_excerpts_common( - Some(jump_data.clone()), - e.modifiers().secondary(), - window, - cx, - ); - } - })), - ), - ); + }) + .when_some(parent_path, |then, path| { + then.child(div().child(path).text_color( + if file_status.is_some_and(FileStatus::is_deleted) { + colors.text_disabled + } else { + colors.text_muted + }, + )) + }), + ) + .when( + can_open_excerpts && is_selected && relative_path.is_some(), + |el| { + el.child( + h_flex() + .id("jump-to-file-button") + .gap_2p5() + .child(Label::new("Jump To File")) + .children( + KeyBinding::for_action_in( + &OpenExcerpts, + &focus_handle, + 
window, + cx, + ) + .map(|binding| binding.into_any_element()), + ), + ) + }, + ) + .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) + .on_click(window.listener_for(&self.editor, { + move |editor, e: &ClickEvent, window, cx| { + editor.open_excerpts_common( + Some(jump_data.clone()), + e.modifiers().secondary(), + window, + cx, + ); + } + })), + ), + ); let file = for_excerpt.buffer.file().cloned(); let editor = self.editor.clone(); @@ -3777,25 +3799,31 @@ impl EditorElement { && let Some(worktree) = project.read(cx).worktree_for_id(file.worktree_id(cx), cx) { + let worktree = worktree.read(cx); let relative_path = file.path(); - let entry_for_path = worktree.read(cx).entry_for_path(relative_path); - let abs_path = entry_for_path.and_then(|e| e.canonical_path.as_deref()); - let has_relative_path = - worktree.read(cx).root_entry().is_some_and(Entry::is_dir); + let entry_for_path = worktree.entry_for_path(relative_path); + let abs_path = entry_for_path.map(|e| { + e.canonical_path.as_deref().map_or_else( + || worktree.abs_path().join(relative_path), + Path::to_path_buf, + ) + }); + let has_relative_path = worktree.root_entry().is_some_and(Entry::is_dir); - let parent_abs_path = - abs_path.and_then(|abs_path| Some(abs_path.parent()?.to_path_buf())); + let parent_abs_path = abs_path + .as_ref() + .and_then(|abs_path| Some(abs_path.parent()?.to_path_buf())); let relative_path = has_relative_path .then_some(relative_path) .map(ToOwned::to_owned); let visible_in_project_panel = - relative_path.is_some() && worktree.read(cx).is_visible(); + relative_path.is_some() && worktree.is_visible(); let reveal_in_project_panel = entry_for_path .filter(|_| visible_in_project_panel) .map(|entry| entry.id); menu = menu - .when_some(abs_path.map(ToOwned::to_owned), |menu, abs_path| { + .when_some(abs_path, |menu, abs_path| { menu.entry( "Copy Path", Some(Box::new(zed_actions::workspace::CopyPath)), @@ -5708,7 +5736,10 @@ impl EditorElement { let end_row_in_current_excerpt = snapshot .blocks_in_range(start_row..end_row) .find_map(|(start_row, block)| { - if matches!(block, Block::ExcerptBoundary { .. }) { + if matches!( + block, + Block::ExcerptBoundary { .. } | Block::BufferHeader { .. 
} + ) { Some(start_row) } else { None diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index e38197283d..aa4e616924 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -1,6 +1,7 @@ use crate::{Editor, RangeToAnchorExt}; -use gpui::{Context, Window}; +use gpui::{Context, HighlightStyle, Window}; use language::CursorShape; +use theme::ActiveTheme; enum MatchingBracketHighlight {} @@ -9,7 +10,7 @@ pub fn refresh_matching_bracket_highlights( window: &mut Window, cx: &mut Context, ) { - editor.clear_background_highlights::(cx); + editor.clear_highlights::(cx); let newest_selection = editor.selections.newest::(cx); // Don't highlight brackets if the selection isn't empty @@ -35,12 +36,19 @@ pub fn refresh_matching_bracket_highlights( .buffer_snapshot .innermost_enclosing_bracket_ranges(head..tail, None) { - editor.highlight_background::( - &[ + editor.highlight_text::( + vec![ opening_range.to_anchors(&snapshot.buffer_snapshot), closing_range.to_anchors(&snapshot.buffer_snapshot), ], - |theme| theme.colors().editor_document_highlight_bracket_background, + HighlightStyle { + background_color: Some( + cx.theme() + .colors() + .editor_document_highlight_bracket_background, + ), + ..Default::default() + }, cx, ) } @@ -104,7 +112,7 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test«(»"Test argument"«)» { another_test(1, 2, 3); } @@ -115,7 +123,7 @@ mod tests { another_test(1, ˇ2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") { another_test«(»1, 2, 3«)»; } @@ -126,7 +134,7 @@ mod tests { anotherˇ_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") «{» another_test(1, 2, 3); «}» @@ -138,7 +146,7 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" + cx.assert_editor_text_highlights::(indoc! {r#" pub fn test("Test argument") { another_test(1, 2, 3); } @@ -150,8 +158,8 @@ mod tests { another_test(1, 2, 3); } "#}); - cx.assert_editor_background_highlights::(indoc! {r#" - pub fn test("Test argument") { + cx.assert_editor_text_highlights::(indoc! 
{r#" + pub fn test«("Test argument") { another_test(1, 2, 3); } "#}); diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 04e66a234c..94f49f601a 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -321,7 +321,10 @@ pub fn update_inlay_link_and_hover_points( if let Some(cached_hint) = inlay_hint_cache.hint_by_id(excerpt_id, hovered_hint.id) { match cached_hint.resolve_state { ResolveState::CanResolve(_, _) => { - if let Some(buffer_id) = previous_valid_anchor.buffer_id { + if let Some(buffer_id) = snapshot + .buffer_snapshot + .buffer_id_for_anchor(previous_valid_anchor) + { inlay_hint_cache.spawn_hint_resolve( buffer_id, excerpt_id, @@ -559,7 +562,7 @@ pub fn show_link_definition( provider.definitions(&buffer, buffer_position, preferred_kind, cx) })?; if let Some(task) = task { - task.await.ok().map(|definition_result| { + task.await.ok().flatten().map(|definition_result| { ( definition_result.iter().find_map(|link| { link.origin.as_ref().and_then(|origin| { diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index 28a09e947f..fab5345787 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -428,7 +428,7 @@ fn show_hover( }; let hovers_response = if let Some(hover_request) = hover_request { - hover_request.await + hover_request.await.unwrap_or_default() } else { Vec::new() }; diff --git a/crates/editor/src/indent_guides.rs b/crates/editor/src/indent_guides.rs index a1de2b604b..23717eeb15 100644 --- a/crates/editor/src/indent_guides.rs +++ b/crates/editor/src/indent_guides.rs @@ -164,8 +164,8 @@ pub fn indent_guides_in_range( let end_anchor = snapshot.buffer_snapshot.anchor_after(end_offset); let mut fold_ranges = Vec::>::new(); - let mut folds = snapshot.folds_in_range(start_offset..end_offset).peekable(); - while let Some(fold) = folds.next() { + let folds = snapshot.folds_in_range(start_offset..end_offset).peekable(); + for fold in folds { let start = fold.range.start.to_point(&snapshot.buffer_snapshot); let end = fold.range.end.to_point(&snapshot.buffer_snapshot); if let Some(last_range) = fold_ranges.last_mut() diff --git a/crates/editor/src/inlay_hint_cache.rs b/crates/editor/src/inlay_hint_cache.rs index cea0e32d7f..dbf5ac95b7 100644 --- a/crates/editor/src/inlay_hint_cache.rs +++ b/crates/editor/src/inlay_hint_cache.rs @@ -475,10 +475,7 @@ impl InlayHintCache { let excerpt_cached_hints = excerpt_cached_hints.read(); let mut excerpt_cache = excerpt_cached_hints.ordered_hints.iter().fuse().peekable(); shown_excerpt_hints_to_remove.retain(|(shown_anchor, shown_hint_id)| { - let Some(buffer) = shown_anchor - .buffer_id - .and_then(|buffer_id| multi_buffer.buffer(buffer_id)) - else { + let Some(buffer) = multi_buffer.buffer_for_anchor(*shown_anchor, cx) else { return false; }; let buffer_snapshot = buffer.read(cx).snapshot(); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 62889c638f..b7110190fd 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -42,6 +42,7 @@ use ui::{IconDecorationKind, prelude::*}; use util::{ResultExt, TryFutureExt, paths::PathExt}; use workspace::{ CollaboratorId, ItemId, ItemNavHistory, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, + invalid_buffer_view::InvalidBufferView, item::{FollowableItem, Item, ItemEvent, ProjectItem, SaveOptions}, searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle}, }; @@ -103,9 +104,9 @@ impl FollowableItem for Editor { 
multibuffer = MultiBuffer::new(project.read(cx).capability()); let mut sorted_excerpts = state.excerpts.clone(); sorted_excerpts.sort_by_key(|e| e.id); - let mut sorted_excerpts = sorted_excerpts.into_iter().peekable(); + let sorted_excerpts = sorted_excerpts.into_iter().peekable(); - while let Some(excerpt) = sorted_excerpts.next() { + for excerpt in sorted_excerpts { let Ok(buffer_id) = BufferId::new(excerpt.buffer_id) else { continue; }; @@ -1401,6 +1402,16 @@ impl ProjectItem for Editor { editor } + + fn for_broken_project_item( + abs_path: &Path, + is_local: bool, + e: &anyhow::Error, + window: &mut Window, + cx: &mut App, + ) -> Option { + Some(InvalidBufferView::new(abs_path, is_local, e, window, cx)) + } } fn clip_ranges<'a>( diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index aaf9032b04..4f1313797f 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -72,7 +72,7 @@ pub(super) fn refresh_linked_ranges( // Throw away selections spanning multiple buffers. continue; } - if let Some(buffer) = end_position.buffer_id.and_then(|id| buffer.buffer(id)) { + if let Some(buffer) = buffer.buffer_for_anchor(end_position, cx) { applicable_selections.push(( buffer, start_position.text_anchor, diff --git a/crates/editor/src/mouse_context_menu.rs b/crates/editor/src/mouse_context_menu.rs index 5cf22de537..3bc334c54c 100644 --- a/crates/editor/src/mouse_context_menu.rs +++ b/crates/editor/src/mouse_context_menu.rs @@ -190,14 +190,16 @@ pub fn deploy_context_menu( .all::(cx) .into_iter() .any(|s| !s.is_empty()); - let has_git_repo = anchor.buffer_id.is_some_and(|buffer_id| { - project - .read(cx) - .git_store() - .read(cx) - .repository_and_path_for_buffer_id(buffer_id, cx) - .is_some() - }); + let has_git_repo = buffer + .buffer_id_for_anchor(anchor) + .is_some_and(|buffer_id| { + project + .read(cx) + .git_store() + .read(cx) + .repository_and_path_for_buffer_id(buffer_id, cx) + .is_some() + }); let evaluate_selection = window.is_action_available(&EvaluateSelectedText, cx); let run_to_cursor = window.is_action_available(&RunToCursor, cx); diff --git a/crates/editor/src/persistence.rs b/crates/editor/src/persistence.rs index 88fde53947..ec7c149b4e 100644 --- a/crates/editor/src/persistence.rs +++ b/crates/editor/src/persistence.rs @@ -1,13 +1,17 @@ use anyhow::Result; -use db::sqlez::bindable::{Bind, Column, StaticColumnCount}; -use db::sqlez::statement::Statement; +use db::{ + query, + sqlez::{ + bindable::{Bind, Column, StaticColumnCount}, + domain::Domain, + statement::Statement, + }, + sqlez_macros::sql, +}; use fs::MTime; use itertools::Itertools as _; use std::path::PathBuf; -use db::sqlez_macros::sql; -use db::{define_connection, query}; - use workspace::{ItemId, WorkspaceDb, WorkspaceId}; #[derive(Clone, Debug, PartialEq, Default)] @@ -83,7 +87,11 @@ impl Column for SerializedEditor { } } -define_connection!( +pub struct EditorDb(db::sqlez::thread_safe_connection::ThreadSafeConnection); + +impl Domain for EditorDb { + const NAME: &str = stringify!(EditorDb); + // Current schema shape using pseudo-rust syntax: // editors( // item_id: usize, @@ -113,7 +121,8 @@ define_connection!( // start: usize, // end: usize, // ) - pub static ref DB: EditorDb = &[ + + const MIGRATIONS: &[&str] = &[ sql! 
( CREATE TABLE editors( item_id INTEGER NOT NULL, @@ -189,7 +198,9 @@ define_connection!( ) STRICT; ), ]; -); +} + +db::static_connection!(DB, EditorDb, [WorkspaceDb]); // https://www.sqlite.org/limits.html // > <..> the maximum value of a host parameter number is SQLITE_MAX_VARIABLE_NUMBER, diff --git a/crates/editor/src/proposed_changes_editor.rs b/crates/editor/src/proposed_changes_editor.rs index c79feccb4b..2d4710a8d4 100644 --- a/crates/editor/src/proposed_changes_editor.rs +++ b/crates/editor/src/proposed_changes_editor.rs @@ -431,7 +431,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { buffer: &Entity, position: text::Anchor, cx: &mut App, - ) -> Option>> { + ) -> Option>>> { let buffer = self.to_base(buffer, &[position], cx)?; self.0.hover(&buffer, position, cx) } @@ -490,7 +490,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider { position: text::Anchor, kind: crate::GotoDefinitionKind, cx: &mut App, - ) -> Option>>> { + ) -> Option>>>> { let buffer = self.to_base(buffer, &[position], cx)?; self.0.definitions(&buffer, position, kind, cx) } diff --git a/crates/editor/src/rust_analyzer_ext.rs b/crates/editor/src/rust_analyzer_ext.rs index e3d83ab160..cf74ee0a9e 100644 --- a/crates/editor/src/rust_analyzer_ext.rs +++ b/crates/editor/src/rust_analyzer_ext.rs @@ -26,6 +26,17 @@ fn is_rust_language(language: &Language) -> bool { } pub fn apply_related_actions(editor: &Entity, window: &mut Window, cx: &mut App) { + if editor.read(cx).project().is_some_and(|project| { + project + .read(cx) + .language_server_statuses(cx) + .any(|(_, status)| status.name == RUST_ANALYZER_NAME) + }) { + register_action(editor, window, cancel_flycheck_action); + register_action(editor, window, run_flycheck_action); + register_action(editor, window, clear_flycheck_action); + } + if editor .read(cx) .buffer() @@ -38,9 +49,6 @@ pub fn apply_related_actions(editor: &Entity, window: &mut Window, cx: & register_action(editor, window, go_to_parent_module); register_action(editor, window, expand_macro_recursively); register_action(editor, window, open_docs); - register_action(editor, window, cancel_flycheck_action); - register_action(editor, window, run_flycheck_action); - register_action(editor, window, clear_flycheck_action); } } @@ -309,7 +317,7 @@ fn cancel_flycheck_action( let Some(project) = &editor.project else { return; }; - let Some(buffer_id) = editor + let buffer_id = editor .selections .disjoint_anchors() .iter() @@ -321,10 +329,7 @@ fn cancel_flycheck_action( .read(cx) .entry_id(cx)?; project.path_for_entry(entry_id, cx) - }) - else { - return; - }; + }); cancel_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx); } @@ -337,7 +342,7 @@ fn run_flycheck_action( let Some(project) = &editor.project else { return; }; - let Some(buffer_id) = editor + let buffer_id = editor .selections .disjoint_anchors() .iter() @@ -349,10 +354,7 @@ fn run_flycheck_action( .read(cx) .entry_id(cx)?; project.path_for_entry(entry_id, cx) - }) - else { - return; - }; + }); run_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx); } @@ -365,7 +367,7 @@ fn clear_flycheck_action( let Some(project) = &editor.project else { return; }; - let Some(buffer_id) = editor + let buffer_id = editor .selections .disjoint_anchors() .iter() @@ -377,9 +379,6 @@ fn clear_flycheck_action( .read(cx) .entry_id(cx)?; project.path_for_entry(entry_id, cx) - }) - else { - return; - }; + }); clear_flycheck(project.clone(), buffer_id, cx).detach_and_log_err(cx); } diff --git 
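The persistence change above moves from the define_connection! macro to an explicit Domain implementation with a MIGRATIONS list plus db::static_connection!. A toy version of that shape, using stand-in trait and connection types rather than the real db crate items:

struct ThreadSafeConnection;

trait Domain {
    const NAME: &'static str;
    const MIGRATIONS: &'static [&'static str];
}

struct EditorDb(ThreadSafeConnection);

impl Domain for EditorDb {
    const NAME: &'static str = "EditorDb";
    // Each entry is one SQL migration, applied in order when the database is opened.
    const MIGRATIONS: &'static [&'static str] =
        &["CREATE TABLE editors(item_id INTEGER NOT NULL) STRICT;"];
}

fn main() {
    println!("{} has {} migration(s)", EditorDb::NAME, EditorDb::MIGRATIONS.len());
}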
a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 5c9800ab55..cb21f35d7e 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -182,7 +182,9 @@ impl Editor { let signature_help = task.await; editor .update(cx, |editor, cx| { - let Some(mut signature_help) = signature_help.into_iter().next() else { + let Some(mut signature_help) = + signature_help.unwrap_or_default().into_iter().next() + else { editor .signature_help_state .hide(SignatureHelpHiddenBy::AutoClose); diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index d388e8f3b7..960fecf59a 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -230,26 +230,23 @@ pub fn editor_content_with_blocks(editor: &Entity, cx: &mut VisualTestCo lines[row as usize].push_str("§ -----"); } } - Block::ExcerptBoundary { - excerpt, - height, - starts_new_buffer, - } => { - if starts_new_buffer { - lines[row.0 as usize].push_str(&cx.update(|_, cx| { - format!( - "§ {}", - excerpt - .buffer - .file() - .unwrap() - .file_name(cx) - .to_string_lossy() - ) - })); - } else { - lines[row.0 as usize].push_str("§ -----") + Block::ExcerptBoundary { height, .. } => { + for row in row.0..row.0 + height { + lines[row as usize].push_str("§ -----"); } + } + Block::BufferHeader { excerpt, height } => { + lines[row.0 as usize].push_str(&cx.update(|_, cx| { + format!( + "§ {}", + excerpt + .buffer + .file() + .unwrap() + .file_name(cx) + .to_string_lossy() + ) + })); for row in row.0 + 1..row.0 + height { lines[row as usize].push_str("§ -----"); } diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index c5a072eea1..9e0504abca 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -706,7 +706,7 @@ fn print_report( println!("Average thread score: {average_thread_score}%"); } - println!(""); + println!(); print_h2("CUMULATIVE TOOL METRICS"); println!("{}", cumulative_tool_metrics); diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 074cb121d3..c6e4e0b6ec 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -913,9 +913,9 @@ impl RequestMarkdown { for tool in &request.tools { write!(&mut tools, "# {}\n\n", tool.name).unwrap(); write!(&mut tools, "{}\n\n", tool.description).unwrap(); - write!( + writeln!( &mut tools, - "{}\n", + "{}", MarkdownCodeBlock { tag: "json", text: &format!("{:#}", tool.input_schema) diff --git a/crates/feedback/Cargo.toml b/crates/feedback/Cargo.toml index 3a2c1fd713..db872f7a15 100644 --- a/crates/feedback/Cargo.toml +++ b/crates/feedback/Cargo.toml @@ -15,13 +15,9 @@ path = "src/feedback.rs" test-support = [] [dependencies] -client.workspace = true gpui.workspace = true -human_bytes = "0.4.1" menu.workspace = true -release_channel.workspace = true -serde.workspace = true -sysinfo.workspace = true +system_specs.workspace = true ui.workspace = true urlencoding.workspace = true util.workspace = true diff --git a/crates/feedback/src/feedback.rs b/crates/feedback/src/feedback.rs index 40c2707d34..3822dd7ba3 100644 --- a/crates/feedback/src/feedback.rs +++ b/crates/feedback/src/feedback.rs @@ -1,18 +1,14 @@ use gpui::{App, ClipboardItem, PromptLevel, actions}; -use system_specs::SystemSpecs; +use system_specs::{CopySystemSpecsIntoClipboard, SystemSpecs}; use util::ResultExt; use workspace::Workspace; use zed_actions::feedback::FileBugReport; pub mod feedback_modal; -pub mod system_specs; - actions!( zed, [ - /// Copies system specifications to the clipboard for bug reports. 
- CopySystemSpecsIntoClipboard, /// Opens email client to send feedback to Zed support. EmailZed, /// Opens the Zed repository on GitHub. diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 8aaaa04729..7512152324 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -1401,13 +1401,16 @@ impl PickerDelegate for FileFinderDelegate { #[cfg(windows)] let raw_query = raw_query.trim().to_owned().replace("/", "\\"); #[cfg(not(windows))] - let raw_query = raw_query.trim().to_owned(); + let raw_query = raw_query.trim(); - let file_query_end = if path_position.path.to_str().unwrap_or(&raw_query) == raw_query { + let raw_query = raw_query.trim_end_matches(':').to_owned(); + let path = path_position.path.to_str(); + let path_trimmed = path.unwrap_or(&raw_query).trim_end_matches(':'); + let file_query_end = if path_trimmed == raw_query { None } else { // Safe to unwrap as we won't get here when the unwrap in if fails - Some(path_position.path.to_str().unwrap().len()) + Some(path.unwrap().len()) }; let query = FileSearchQuery { diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 8203d1b1fd..cd0f203d6a 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -218,6 +218,7 @@ async fn test_matching_paths(cx: &mut TestAppContext) { " ndan ", " band ", "a bandana", + "bandana:", ] { picker .update_in(cx, |picker, window, cx| { @@ -252,6 +253,53 @@ async fn test_matching_paths(cx: &mut TestAppContext) { } } +#[gpui::test] +async fn test_matching_paths_with_colon(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/root"), + json!({ + "a": { + "foo:bar.rs": "", + "foo.rs": "", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await; + + let (picker, _, cx) = build_find_picker(project, cx); + + // 'foo:' matches both files + cx.simulate_input("foo:"); + picker.update(cx, |picker, _| { + assert_eq!(picker.delegate.matches.len(), 3); + assert_match_at_position(picker, 0, "foo.rs"); + assert_match_at_position(picker, 1, "foo:bar.rs"); + }); + + // 'foo:b' matches one of the files + cx.simulate_input("b"); + picker.update(cx, |picker, _| { + assert_eq!(picker.delegate.matches.len(), 2); + assert_match_at_position(picker, 0, "foo:bar.rs"); + }); + + cx.dispatch_action(editor::actions::Backspace); + + // 'foo:1' matches both files, specifying which row to jump to + cx.simulate_input("1"); + picker.update(cx, |picker, _| { + assert_eq!(picker.delegate.matches.len(), 3); + assert_match_at_position(picker, 0, "foo.rs"); + assert_match_at_position(picker, 1, "foo:bar.rs"); + }); +} + #[gpui::test] async fn test_unicode_paths(cx: &mut TestAppContext) { let app_state = init_test(cx); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 9c125d2c47..fd12dafa98 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -916,7 +916,7 @@ impl GitRepository for RealGitRepository { .context("no stdin for git cat-file subprocess")?; let mut stdin = BufWriter::new(stdin); for rev in &revs { - write!(&mut stdin, "{rev}\n")?; + writeln!(&mut stdin, "{rev}")?; } stdin.flush()?; drop(stdin); diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 4ecb4a8829..958a609a09 100644 --- a/crates/git_ui/src/git_panel.rs +++ 
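The file finder change above trims a trailing ':' before comparing the typed query against the parsed path, so queries like "foo:" or "foo:12" still match files named foo.rs or foo:bar.rs. A rough sketch of that comparison using plain &str in place of the PathWithPosition machinery:

fn file_query_end(parsed_path: &str, raw_query: &str) -> Option<usize> {
    let raw_query = raw_query.trim().trim_end_matches(':');
    let path_trimmed = parsed_path.trim_end_matches(':');
    if path_trimmed == raw_query {
        None // the whole query is a path, with no row/column suffix
    } else {
        Some(parsed_path.len()) // suffix present: only this prefix is the file query
    }
}

fn main() {
    assert_eq!(file_query_end("foo", "foo:"), None);
    assert_eq!(file_query_end("foo", "foo:12"), Some(3));
}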
b/crates/git_ui/src/git_panel.rs @@ -4466,7 +4466,7 @@ fn current_language_model(cx: &Context<'_, GitPanel>) -> Option Flatten for Result { } /// Information about the GPU GPUI is running on. -#[derive(Default, Debug)] +#[derive(Default, Debug, serde::Serialize, serde::Deserialize, Clone)] pub struct GpuSpecs { /// Whether the GPU is really a fake (like `llvmpipe`) running on the CPU. pub is_software_emulated: bool, diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index e5b9c020d5..f554dea128 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -9,10 +9,8 @@ use parking::Parker; use parking_lot::Mutex; use util::ResultExt; use windows::{ - Foundation::TimeSpan, System::Threading::{ - ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemOptions, - WorkItemPriority, + ThreadPool, ThreadPoolTimer, TimerElapsedHandler, WorkItemHandler, WorkItemPriority, }, Win32::{ Foundation::{LPARAM, WPARAM}, @@ -56,12 +54,7 @@ impl WindowsDispatcher { Ok(()) }) }; - ThreadPool::RunWithPriorityAndOptionsAsync( - &handler, - WorkItemPriority::High, - WorkItemOptions::TimeSliced, - ) - .log_err(); + ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err(); } fn dispatch_on_threadpool_after(&self, runnable: Runnable, duration: Duration) { @@ -72,12 +65,7 @@ impl WindowsDispatcher { Ok(()) }) }; - let delay = TimeSpan { - // A time period expressed in 100-nanosecond units. - // 10,000,000 ticks per second - Duration: (duration.as_nanos() / 100) as i64, - }; - ThreadPoolTimer::CreateTimer(&handler, delay).log_err(); + ThreadPoolTimer::CreateTimer(&handler, duration.into()).log_err(); } } diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index b13b9915f1..6202e05fb3 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -1,5 +1,6 @@ use std::{ cell::RefCell, + ffi::OsStr, mem::ManuallyDrop, path::{Path, PathBuf}, rc::Rc, @@ -460,13 +461,15 @@ impl Platform for WindowsPlatform { } fn open_url(&self, url: &str) { + if url.is_empty() { + return; + } let url_string = url.to_string(); self.background_executor() .spawn(async move { - if url_string.is_empty() { - return; - } - open_target(url_string.as_str()); + open_target(&url_string) + .with_context(|| format!("Opening url: {}", url_string)) + .log_err(); }) .detach(); } @@ -514,37 +517,29 @@ impl Platform for WindowsPlatform { } fn reveal_path(&self, path: &Path) { - let Ok(file_full_path) = path.canonicalize() else { - log::error!("unable to parse file path"); + if path.as_os_str().is_empty() { return; - }; + } + let path = path.to_path_buf(); self.background_executor() .spawn(async move { - let Some(path) = file_full_path.to_str() else { - return; - }; - if path.is_empty() { - return; - } - open_target_in_explorer(path); + open_target_in_explorer(&path) + .with_context(|| format!("Revealing path {} in explorer", path.display())) + .log_err(); }) .detach(); } fn open_with_system(&self, path: &Path) { - let Ok(full_path) = path.canonicalize() else { - log::error!("unable to parse file full path: {}", path.display()); + if path.as_os_str().is_empty() { return; - }; + } + let path = path.to_path_buf(); self.background_executor() .spawn(async move { - let Some(full_path_str) = full_path.to_str() else { - return; - }; - if full_path_str.is_empty() { - return; - }; - open_target(full_path_str); + 
open_target(&path) + .with_context(|| format!("Opening {} with system", path.display())) + .log_err(); }) .detach(); } @@ -735,39 +730,67 @@ pub(crate) struct WindowCreationInfo { pub(crate) disable_direct_composition: bool, } -fn open_target(target: &str) { - unsafe { - let ret = ShellExecuteW( +fn open_target(target: impl AsRef) -> Result<()> { + let target = target.as_ref(); + let ret = unsafe { + ShellExecuteW( None, windows::core::w!("open"), &HSTRING::from(target), None, None, SW_SHOWDEFAULT, - ); - if ret.0 as isize <= 32 { - log::error!("Unable to open target: {}", std::io::Error::last_os_error()); - } + ) + }; + if ret.0 as isize <= 32 { + Err(anyhow::anyhow!( + "Unable to open target: {}", + std::io::Error::last_os_error() + )) + } else { + Ok(()) } } -fn open_target_in_explorer(target: &str) { +fn open_target_in_explorer(target: &Path) -> Result<()> { + let dir = target.parent().context("No parent folder found")?; + let desktop = unsafe { SHGetDesktopFolder()? }; + + let mut dir_item = std::ptr::null_mut(); unsafe { - let ret = ShellExecuteW( + desktop.ParseDisplayName( + HWND::default(), None, - windows::core::w!("open"), - windows::core::w!("explorer.exe"), - &HSTRING::from(format!("/select,{}", target).as_str()), + &HSTRING::from(dir), None, - SW_SHOWDEFAULT, - ); - if ret.0 as isize <= 32 { - log::error!( - "Unable to open target in explorer: {}", - std::io::Error::last_os_error() - ); - } + &mut dir_item, + std::ptr::null_mut(), + )?; } + + let mut file_item = std::ptr::null_mut(); + unsafe { + desktop.ParseDisplayName( + HWND::default(), + None, + &HSTRING::from(target), + None, + &mut file_item, + std::ptr::null_mut(), + )?; + } + + let highlight = [file_item as *const _]; + unsafe { SHOpenFolderAndSelectItems(dir_item as _, Some(&highlight), 0) }.or_else(|err| { + if err.code().0 == ERROR_FILE_NOT_FOUND.0 as i32 { + // On some systems, the above call mysteriously fails with "file not + // found" even though the file is there. In these cases, ShellExecute() + // seems to work as a fallback (although it won't select the file). + open_target(dir).context("Opening target parent folder") + } else { + Err(anyhow::anyhow!("Can not open target path: {}", err)) + } + }) } fn file_open_dialog( diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index f198bb7718..58386ad1f5 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -164,7 +164,6 @@ impl TaffyLayoutEngine { // for (a, b) in self.get_edges(id)? 
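The open_target helper above follows the ShellExecuteW convention that a return value greater than 32 means success. A tiny sketch of that mapping, with a plain integer standing in for the Win32 result:

use std::io;

fn check_shell_execute(ret: isize) -> io::Result<()> {
    if ret <= 32 {
        Err(io::Error::last_os_error())
    } else {
        Ok(())
    }
}

fn main() {
    assert!(check_shell_execute(42).is_ok());
    assert!(check_shell_execute(2).is_err()); // 2 = SE_ERR_FNF ("file not found")
}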
{ // println!("N{} --> N{}", u64::from(a), u64::from(b)); // } - // println!(""); // if !self.computed_layouts.insert(id) { diff --git a/crates/gpui_macros/tests/derive_inspector_reflection.rs b/crates/gpui_macros/tests/derive_inspector_reflection.rs index aab44a70ce..a0adcb7801 100644 --- a/crates/gpui_macros/tests/derive_inspector_reflection.rs +++ b/crates/gpui_macros/tests/derive_inspector_reflection.rs @@ -34,13 +34,6 @@ trait Transform: Clone { /// Adds one to the value fn add_one(self) -> Self; - - /// cfg attributes are respected - #[cfg(all())] - fn cfg_included(self) -> Self; - - #[cfg(any())] - fn cfg_omitted(self) -> Self; } #[derive(Debug, Clone, PartialEq)] @@ -70,10 +63,6 @@ impl Transform for Number { fn add_one(self) -> Self { Number(self.0 + 1) } - - fn cfg_included(self) -> Self { - Number(self.0) - } } #[test] @@ -83,14 +72,13 @@ fn test_derive_inspector_reflection() { // Get all methods that match the pattern fn(self) -> Self or fn(mut self) -> Self let methods = methods::(); - assert_eq!(methods.len(), 6); + assert_eq!(methods.len(), 5); let method_names: Vec<_> = methods.iter().map(|m| m.name).collect(); assert!(method_names.contains(&"double")); assert!(method_names.contains(&"triple")); assert!(method_names.contains(&"increment")); assert!(method_names.contains(&"quadruple")); assert!(method_names.contains(&"add_one")); - assert!(method_names.contains(&"cfg_included")); // Invoke methods by name let num = Number(5); diff --git a/crates/gpui_tokio/Cargo.toml b/crates/gpui_tokio/Cargo.toml index 46d5eafd5a..2d4abf4063 100644 --- a/crates/gpui_tokio/Cargo.toml +++ b/crates/gpui_tokio/Cargo.toml @@ -13,6 +13,7 @@ path = "src/gpui_tokio.rs" doctest = false [dependencies] +anyhow.workspace = true util.workspace = true gpui.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } diff --git a/crates/gpui_tokio/src/gpui_tokio.rs b/crates/gpui_tokio/src/gpui_tokio.rs index fffe18a616..8384f2a88e 100644 --- a/crates/gpui_tokio/src/gpui_tokio.rs +++ b/crates/gpui_tokio/src/gpui_tokio.rs @@ -52,6 +52,28 @@ impl Tokio { }) } + /// Spawns the given future on Tokio's thread pool, and returns it via a GPUI task + /// Note that the Tokio task will be cancelled if the GPUI task is dropped + pub fn spawn_result(cx: &C, f: Fut) -> C::Result>> + where + C: AppContext, + Fut: Future> + Send + 'static, + R: Send + 'static, + { + cx.read_global(|tokio: &GlobalTokio, cx| { + let join_handle = tokio.runtime.spawn(f); + let abort_handle = join_handle.abort_handle(); + let cancel = defer(move || { + abort_handle.abort(); + }); + cx.background_spawn(async move { + let result = join_handle.await?; + drop(cancel); + result + }) + }) + } + pub fn handle(cx: &App) -> tokio::runtime::Handle { GlobalTokio::global(cx).runtime.handle().clone() } diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 38f02c2206..4fc6039fd7 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -34,6 +34,7 @@ pub enum IconName { ArrowRightLeft, ArrowUp, ArrowUpRight, + Attach, AudioOff, AudioOn, Backspace, @@ -164,6 +165,7 @@ pub enum IconName { PageDown, PageUp, Pencil, + PencilUnavailable, Person, Pin, PlayOutlined, diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index b96557b391..2dca57424b 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -401,12 +401,19 @@ pub fn init(cx: &mut App) { mod persistence { use std::path::PathBuf; - use db::{define_connection, 
query, sqlez_macros::sql}; + use db::{ + query, + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, + }; use workspace::{ItemId, WorkspaceDb, WorkspaceId}; - define_connection! { - pub static ref IMAGE_VIEWER: ImageViewerDb = - &[sql!( + pub struct ImageViewerDb(ThreadSafeConnection); + + impl Domain for ImageViewerDb { + const NAME: &str = stringify!(ImageViewerDb); + + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE image_viewers ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -417,9 +424,11 @@ mod persistence { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT; - )]; + )]; } + db::static_connection!(IMAGE_VIEWER, ImageViewerDb, [WorkspaceDb]); + impl ImageViewerDb { query! { pub async fn save_image_path( diff --git a/crates/inspector_ui/Cargo.toml b/crates/inspector_ui/Cargo.toml index 8e55a8a477..cefe888974 100644 --- a/crates/inspector_ui/Cargo.toml +++ b/crates/inspector_ui/Cargo.toml @@ -24,6 +24,7 @@ serde_json_lenient.workspace = true theme.workspace = true ui.workspace = true util.workspace = true +util_macros.workspace = true workspace-hack.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/inspector_ui/src/div_inspector.rs b/crates/inspector_ui/src/div_inspector.rs index 0c2b16b9f4..c3d687e57a 100644 --- a/crates/inspector_ui/src/div_inspector.rs +++ b/crates/inspector_ui/src/div_inspector.rs @@ -25,7 +25,7 @@ use util::split_str_with_ranges; /// Path used for unsaved buffer that contains style json. To support the json language server, this /// matches the name used in the generated schemas. -const ZED_INSPECTOR_STYLE_JSON: &str = "/zed-inspector-style.json"; +const ZED_INSPECTOR_STYLE_JSON: &str = util_macros::path!("/zed-inspector-style.json"); pub(crate) struct DivInspector { state: State, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index b106110c33..4ddc2b3018 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1569,11 +1569,21 @@ impl Buffer { self.send_operation(op, true, cx); } - pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> { - let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else { - return None; - }; - Some(&self.diagnostics[idx].1) + pub fn buffer_diagnostics( + &self, + for_server: Option, + ) -> Vec<&DiagnosticEntry> { + match for_server { + Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) { + Ok(idx) => self.diagnostics[idx].1.iter().collect(), + Err(_) => Vec::new(), + }, + None => self + .diagnostics + .iter() + .flat_map(|(_, diagnostic_set)| diagnostic_set.iter()) + .collect(), + } } fn request_autoindent(&mut self, cx: &mut Context) { diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 90a59ce066..0f82d3997f 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -5,7 +5,7 @@ use anyhow::Result; use collections::{FxHashMap, HashMap, HashSet}; use ec4rs::{ Properties as EditorconfigProperties, - property::{FinalNewline, IndentSize, IndentStyle, TabWidth, TrimTrailingWs}, + property::{FinalNewline, IndentSize, IndentStyle, MaxLineLen, TabWidth, TrimTrailingWs}, }; use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder}; use gpui::{App, Modifiers}; @@ -350,6 +350,12 @@ pub struct CompletionSettings { /// Default: `fallback` #[serde(default = "default_words_completion_mode")] pub 
words: WordsCompletionMode, + /// How many characters have to be in the completion query to automatically show the word-based completions. + /// Below that length, it's still possible to trigger the word-based completion manually with the corresponding editor command. + /// + /// Default: 3 + #[serde(default = "default_3")] + pub words_min_length: usize, /// Whether to fetch LSP completions or not. /// /// Default: true @@ -359,7 +365,7 @@ /// When set to 0, waits indefinitely. /// /// Default: 0 - #[serde(default = "default_lsp_fetch_timeout_ms")] + #[serde(default)] pub lsp_fetch_timeout_ms: u64, /// Controls how LSP completions are inserted. /// @@ -405,8 +411,8 @@ fn default_lsp_insert_mode() -> LspInsertMode { LspInsertMode::ReplaceSuffix } -fn default_lsp_fetch_timeout_ms() -> u64 { - 0 +fn default_3() -> usize { + 3 } /// The settings for a particular language. @@ -1131,6 +1137,10 @@ impl AllLanguageSettings { } fn merge_with_editorconfig(settings: &mut LanguageSettings, cfg: &EditorconfigProperties) { + let preferred_line_length = cfg.get::<MaxLineLen>().ok().and_then(|v| match v { + MaxLineLen::Value(u) => Some(u as u32), + MaxLineLen::Off => None, + }); let tab_size = cfg.get::<IndentSize>().ok().and_then(|v| match v { IndentSize::Value(u) => NonZeroU32::new(u as u32), IndentSize::UseTabWidth => cfg.get::<TabWidth>().ok().and_then(|w| match w { @@ -1158,6 +1168,7 @@ *target = value; } } + merge(&mut settings.preferred_line_length, preferred_line_length); merge(&mut settings.tab_size, tab_size); merge(&mut settings.hard_tabs, hard_tabs); merge( @@ -1463,6 +1474,7 @@ impl settings::Settings for AllLanguageSettings { } else { d.completions = Some(CompletionSettings { words: mode, + words_min_length: 3, lsp: true, lsp_fetch_timeout_ms: 0, lsp_insert_mode: LspInsertMode::ReplaceSuffix, diff --git a/crates/language/src/text_diff.rs b/crates/language/src/text_diff.rs index cb2242a6b1..11d8a070d2 100644 --- a/crates/language/src/text_diff.rs +++ b/crates/language/src/text_diff.rs @@ -186,7 +186,7 @@ fn tokenize(text: &str, language_scope: Option<LanguageScope>) -> impl Iterator< let mut prev = None; let mut start_ix = 0; iter::from_fn(move || { - while let Some((ix, c)) = chars.next() { + for (ix, c) in chars.by_ref() { let mut token = None; let kind = classifier.kind(c); if let Some((prev_char, prev_kind)) = prev diff --git a/crates/language/src/toolchain.rs b/crates/language/src/toolchain.rs index 979513bc96..73c142c8ca 100644 --- a/crates/language/src/toolchain.rs +++ b/crates/language/src/toolchain.rs @@ -96,7 +96,7 @@ impl LanguageToolchainStore for T { } type DefaultIndex = usize; -#[derive(Default, Clone)] +#[derive(Default, Clone, Debug)] pub struct ToolchainList { pub toolchains: Vec<Toolchain>, pub default: Option<DefaultIndex>, diff --git a/crates/language_model/src/fake_provider.rs b/crates/language_model/src/fake_provider.rs index ebfd37d16c..b06a475f93 100644 --- a/crates/language_model/src/fake_provider.rs +++ b/crates/language_model/src/fake_provider.rs @@ -4,12 +4,16 @@ use crate::{ LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, }; +use anyhow::anyhow; use futures::{FutureExt, channel::mpsc, future::BoxFuture, stream::BoxStream}; use gpui::{AnyView, App, AsyncApp, Entity, Task, Window}; use http_client::Result; use parking_lot::Mutex; use smol::stream::StreamExt; -use std::sync::Arc; +use std::sync::{ + Arc, + atomic::{AtomicBool, Ordering::SeqCst},
+}; #[derive(Clone)] pub struct FakeLanguageModelProvider { @@ -106,6 +110,7 @@ pub struct FakeLanguageModel { >, )>, >, + forbid_requests: AtomicBool, } impl Default for FakeLanguageModel { @@ -114,11 +119,20 @@ impl Default for FakeLanguageModel { provider_id: LanguageModelProviderId::from("fake".to_string()), provider_name: LanguageModelProviderName::from("Fake".to_string()), current_completion_txs: Mutex::new(Vec::new()), + forbid_requests: AtomicBool::new(false), } } } impl FakeLanguageModel { + pub fn allow_requests(&self) { + self.forbid_requests.store(false, SeqCst); + } + + pub fn forbid_requests(&self) { + self.forbid_requests.store(true, SeqCst); + } + pub fn pending_completions(&self) -> Vec { self.current_completion_txs .lock() @@ -251,9 +265,18 @@ impl LanguageModel for FakeLanguageModel { LanguageModelCompletionError, >, > { - let (tx, rx) = mpsc::unbounded(); - self.current_completion_txs.lock().push((request, tx)); - async move { Ok(rx.boxed()) }.boxed() + if self.forbid_requests.load(SeqCst) { + async move { + Err(LanguageModelCompletionError::Other(anyhow!( + "requests are forbidden" + ))) + } + .boxed() + } else { + let (tx, rx) = mpsc::unbounded(); + self.current_completion_txs.lock().push((request, tx)); + async move { Ok(rx.boxed()) }.boxed() + } } fn as_fake(&self) -> &Self { diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 158bebcbbf..d5313b6a3a 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -14,7 +14,7 @@ use client::Client; use cloud_llm_client::{CompletionMode, CompletionRequestStatus}; use futures::FutureExt; use futures::{StreamExt, future::BoxFuture, stream::BoxStream}; -use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window}; +use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window}; use http_client::{StatusCode, http}; use icons::IconName; use parking_lot::Mutex; @@ -640,24 +640,14 @@ pub trait LanguageModelProvider: 'static { window: &mut Window, cx: &mut App, ) -> AnyView; - fn must_accept_terms(&self, _cx: &App) -> bool { - false - } - fn render_accept_terms( - &self, - _view: LanguageModelProviderTosView, - _cx: &mut App, - ) -> Option { - None - } fn reset_credentials(&self, cx: &mut App) -> Task>; } -#[derive(Default, Clone, Copy)] +#[derive(Default, Clone)] pub enum ConfigurationViewTargetAgent { #[default] ZedAgent, - Other(&'static str), + Other(SharedString), } #[derive(PartialEq, Eq)] diff --git a/crates/language_model/src/registry.rs b/crates/language_model/src/registry.rs index 8f52f8c1c3..c7693a64c7 100644 --- a/crates/language_model/src/registry.rs +++ b/crates/language_model/src/registry.rs @@ -6,7 +6,6 @@ use collections::BTreeMap; use gpui::{App, Context, Entity, EventEmitter, Global, prelude::*}; use std::{str::FromStr, sync::Arc}; use thiserror::Error; -use util::maybe; pub fn init(cx: &mut App) { let registry = cx.new(|_cx| LanguageModelRegistry::default()); @@ -25,9 +24,6 @@ pub enum ConfigurationError { ModelNotFound, #[error("{} LLM provider is not configured.", .0.name().0)] ProviderNotAuthenticated(Arc), - #[error("Using the {} LLM provider requires accepting the Terms of Service.", - .0.name().0)] - ProviderPendingTermsAcceptance(Arc), } impl std::fmt::Debug for ConfigurationError { @@ -38,9 +34,6 @@ impl std::fmt::Debug for ConfigurationError { Self::ProviderNotAuthenticated(provider) => { write!(f, "ProviderNotAuthenticated({})", provider.id()) } - 
Self::ProviderPendingTermsAcceptance(provider) => { - write!(f, "ProviderPendingTermsAcceptance({})", provider.id()) - } } } } @@ -48,7 +41,9 @@ impl std::fmt::Debug for ConfigurationError { #[derive(Default)] pub struct LanguageModelRegistry { default_model: Option, - default_fast_model: Option, + /// This model is automatically configured by a user's environment after + /// authenticating all providers. It's only used when default_model is not available. + environment_fallback_model: Option, inline_assistant_model: Option, commit_message_model: Option, thread_summary_model: Option, @@ -104,9 +99,6 @@ impl ConfiguredModel { pub enum Event { DefaultModelChanged, - InlineAssistantModelChanged, - CommitMessageModelChanged, - ThreadSummaryModelChanged, ProviderStateChanged(LanguageModelProviderId), AddedProvider(LanguageModelProviderId), RemovedProvider(LanguageModelProviderId), @@ -200,12 +192,6 @@ impl LanguageModelRegistry { return Some(ConfigurationError::ProviderNotAuthenticated(model.provider)); } - if model.provider.must_accept_terms(cx) { - return Some(ConfigurationError::ProviderPendingTermsAcceptance( - model.provider, - )); - } - None } @@ -238,7 +224,7 @@ impl LanguageModelRegistry { cx: &mut Context, ) { let configured_model = model.and_then(|model| self.select_model(model, cx)); - self.set_inline_assistant_model(configured_model, cx); + self.set_inline_assistant_model(configured_model); } pub fn select_commit_message_model( @@ -247,7 +233,7 @@ impl LanguageModelRegistry { cx: &mut Context, ) { let configured_model = model.and_then(|model| self.select_model(model, cx)); - self.set_commit_message_model(configured_model, cx); + self.set_commit_message_model(configured_model); } pub fn select_thread_summary_model( @@ -256,7 +242,7 @@ impl LanguageModelRegistry { cx: &mut Context, ) { let configured_model = model.and_then(|model| self.select_model(model, cx)); - self.set_thread_summary_model(configured_model, cx); + self.set_thread_summary_model(configured_model); } /// Selects and sets the inline alternatives for language models based on @@ -290,68 +276,60 @@ impl LanguageModelRegistry { } pub fn set_default_model(&mut self, model: Option, cx: &mut Context) { - match (self.default_model.as_ref(), model.as_ref()) { + match (self.default_model(), model.as_ref()) { (Some(old), Some(new)) if old.is_same_as(new) => {} (None, None) => {} _ => cx.emit(Event::DefaultModelChanged), } - self.default_fast_model = maybe!({ - let provider = &model.as_ref()?.provider; - let fast_model = provider.default_fast_model(cx)?; - Some(ConfiguredModel { - provider: provider.clone(), - model: fast_model, - }) - }); self.default_model = model; } - pub fn set_inline_assistant_model( + pub fn set_environment_fallback_model( &mut self, model: Option, cx: &mut Context, ) { - match (self.inline_assistant_model.as_ref(), model.as_ref()) { - (Some(old), Some(new)) if old.is_same_as(new) => {} - (None, None) => {} - _ => cx.emit(Event::InlineAssistantModelChanged), + if self.default_model.is_none() { + match (self.environment_fallback_model.as_ref(), model.as_ref()) { + (Some(old), Some(new)) if old.is_same_as(new) => {} + (None, None) => {} + _ => cx.emit(Event::DefaultModelChanged), + } } + self.environment_fallback_model = model; + } + + pub fn set_inline_assistant_model(&mut self, model: Option) { self.inline_assistant_model = model; } - pub fn set_commit_message_model( - &mut self, - model: Option, - cx: &mut Context, - ) { - match (self.commit_message_model.as_ref(), model.as_ref()) { - (Some(old), Some(new)) 
if old.is_same_as(new) => {} - (None, None) => {} - _ => cx.emit(Event::CommitMessageModelChanged), - } + pub fn set_commit_message_model(&mut self, model: Option<ConfiguredModel>) { self.commit_message_model = model; } - pub fn set_thread_summary_model( - &mut self, - model: Option<ConfiguredModel>, - cx: &mut Context<Self>, - ) { - match (self.thread_summary_model.as_ref(), model.as_ref()) { - (Some(old), Some(new)) if old.is_same_as(new) => {} - (None, None) => {} - _ => cx.emit(Event::ThreadSummaryModelChanged), - } + pub fn set_thread_summary_model(&mut self, model: Option<ConfiguredModel>) { self.thread_summary_model = model; } + #[track_caller] pub fn default_model(&self) -> Option<ConfiguredModel> { #[cfg(debug_assertions)] if std::env::var("ZED_SIMULATE_NO_LLM_PROVIDER").is_ok() { return None; } - self.default_model.clone() + self.default_model + .clone() + .or_else(|| self.environment_fallback_model.clone()) + } + + pub fn default_fast_model(&self, cx: &App) -> Option<ConfiguredModel> { + let provider = self.default_model()?.provider; + let fast_model = provider.default_fast_model(cx)?; + Some(ConfiguredModel { + provider, + model: fast_model, + }) } pub fn inline_assistant_model(&self) -> Option<ConfiguredModel> { @@ -365,7 +343,7 @@ .or_else(|| self.default_model.clone()) } - pub fn commit_message_model(&self) -> Option<ConfiguredModel> { + pub fn commit_message_model(&self, cx: &App) -> Option<ConfiguredModel> { #[cfg(debug_assertions)] if std::env::var("ZED_SIMULATE_NO_LLM_PROVIDER").is_ok() { return None; @@ -373,11 +351,11 @@ self.commit_message_model .clone() - .or_else(|| self.default_fast_model.clone()) + .or_else(|| self.default_fast_model(cx)) .or_else(|| self.default_model.clone()) } - pub fn thread_summary_model(&self) -> Option<ConfiguredModel> { + pub fn thread_summary_model(&self, cx: &App) -> Option<ConfiguredModel> { #[cfg(debug_assertions)] if std::env::var("ZED_SIMULATE_NO_LLM_PROVIDER").is_ok() { return None; @@ -385,7 +363,7 @@ self.thread_summary_model .clone() - .or_else(|| self.default_fast_model.clone()) + .or_else(|| self.default_fast_model(cx)) .or_else(|| self.default_model.clone()) } @@ -422,4 +400,34 @@ mod tests { let providers = registry.read(cx).providers(); assert!(providers.is_empty()); } + + #[gpui::test] + async fn test_configure_environment_fallback_model(cx: &mut gpui::TestAppContext) { + let registry = cx.new(|_| LanguageModelRegistry::default()); + + let provider = FakeLanguageModelProvider::default(); + registry.update(cx, |registry, cx| { + registry.register_provider(provider.clone(), cx); + }); + + cx.update(|cx| provider.authenticate(cx)).await.unwrap(); + + registry.update(cx, |registry, cx| { + let provider = registry.provider(&provider.id()).unwrap(); + + registry.set_environment_fallback_model( + Some(ConfiguredModel { + provider: provider.clone(), + model: provider.default_model(cx).unwrap(), + }), + cx, + ); + + let default_model = registry.default_model().unwrap(); + let fallback_model = registry.environment_fallback_model.clone().unwrap(); + + assert_eq!(default_model.model.id(), fallback_model.model.id()); + assert_eq!(default_model.provider.id(), fallback_model.provider.id()); + }); + } } diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index ad4e593d4f..114de9028b 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -45,6 +45,7 @@ ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } open_router = { workspace = true, features = ["schemars"] } partial-json-fixer.workspace = true
+project.workspace = true release_channel.workspace = true schemars.workspace = true serde.workspace = true diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 738b72b0c9..beed306e74 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -3,8 +3,12 @@ use std::sync::Arc; use ::settings::{Settings, SettingsStore}; use client::{Client, UserStore}; use collections::HashSet; -use gpui::{App, Context, Entity}; -use language_model::{LanguageModelProviderId, LanguageModelRegistry}; +use futures::future; +use gpui::{App, AppContext as _, Context, Entity}; +use language_model::{ + AuthenticateError, ConfiguredModel, LanguageModelProviderId, LanguageModelRegistry, +}; +use project::DisableAiSettings; use provider::deepseek::DeepSeekLanguageModelProvider; pub mod provider; @@ -13,7 +17,7 @@ pub mod ui; use crate::provider::anthropic::AnthropicLanguageModelProvider; use crate::provider::bedrock::BedrockLanguageModelProvider; -use crate::provider::cloud::CloudLanguageModelProvider; +use crate::provider::cloud::{self, CloudLanguageModelProvider}; use crate::provider::copilot_chat::CopilotChatLanguageModelProvider; use crate::provider::google::GoogleLanguageModelProvider; use crate::provider::lmstudio::LmStudioLanguageModelProvider; @@ -48,6 +52,13 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { cx, ); }); + + let mut already_authenticated = false; + if !DisableAiSettings::get_global(cx).disable_ai { + authenticate_all_providers(registry.clone(), cx); + already_authenticated = true; + } + cx.observe_global::(move |cx| { let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx) .openai_compatible @@ -65,6 +76,12 @@ pub fn init(user_store: Entity, client: Arc, cx: &mut App) { ); }); openai_compatible_providers = openai_compatible_providers_new; + already_authenticated = false; + } + + if !DisableAiSettings::get_global(cx).disable_ai && !already_authenticated { + authenticate_all_providers(registry.clone(), cx); + already_authenticated = true; } }) .detach(); @@ -151,3 +168,83 @@ fn register_language_model_providers( registry.register_provider(XAiLanguageModelProvider::new(client.http_client(), cx), cx); registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx); } + +/// Authenticates all providers in the [`LanguageModelRegistry`]. +/// +/// We do this so that we can populate the language selector with all of the +/// models from the configured providers. +/// +/// This function won't do anything if AI is disabled. +fn authenticate_all_providers(registry: Entity, cx: &mut App) { + let providers_to_authenticate = registry + .read(cx) + .providers() + .iter() + .map(|provider| (provider.id(), provider.name(), provider.authenticate(cx))) + .collect::>(); + + let mut tasks = Vec::with_capacity(providers_to_authenticate.len()); + + for (provider_id, provider_name, authenticate_task) in providers_to_authenticate { + tasks.push(cx.background_spawn(async move { + if let Err(err) = authenticate_task.await { + if matches!(err, AuthenticateError::CredentialsNotFound) { + // Since we're authenticating these providers in the + // background for the purposes of populating the + // language selector, we don't care about providers + // where the credentials are not found. + } else { + // Some providers have noisy failure states that we + // don't want to spam the logs with every time the + // language model selector is initialized. 
+ // + // Ideally these should have more clear failure modes + // that we know are safe to ignore here, like what we do + // with `CredentialsNotFound` above. + match provider_id.0.as_ref() { + "lmstudio" | "ollama" => { + // LM Studio and Ollama both make fetch requests to the local APIs to determine if they are "authenticated". + // + // These fail noisily, so we don't log them. + } + "copilot_chat" => { + // Copilot Chat returns an error if Copilot is not enabled, so we don't log those errors. + } + _ => { + log::error!( + "Failed to authenticate provider: {}: {err}", + provider_name.0 + ); + } + } + } + } + })); + } + + let all_authenticated_future = future::join_all(tasks); + + cx.spawn(async move |cx| { + all_authenticated_future.await; + + registry + .update(cx, |registry, cx| { + let cloud_provider = registry.provider(&cloud::PROVIDER_ID); + let fallback_model = cloud_provider + .iter() + .chain(registry.providers().iter()) + .find(|provider| provider.is_authenticated(cx)) + .and_then(|provider| { + Some(ConfiguredModel { + provider: provider.clone(), + model: provider + .default_model(cx) + .or_else(|| provider.recommended_models(cx).first().cloned())?, + }) + }); + registry.set_environment_fallback_model(fallback_model, cx); + }) + .ok(); + }) + .detach(); +} diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 0d061c0587..c492edeaf5 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -1041,9 +1041,9 @@ impl Render for ConfigurationView { v_flex() .size_full() .on_action(cx.listener(Self::save_api_key)) - .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match self.target_agent { - ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic", - ConfigurationViewTargetAgent::Other(agent) => agent, + .child(Label::new(format!("To use {}, you need to add an API key. 
Follow these steps:", match &self.target_agent { + ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Anthropic".into(), + ConfigurationViewTargetAgent::Other(agent) => agent.clone(), }))) .child( List::new() diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index b1b5ff3eb3..fb6e2fb1e4 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -23,9 +23,9 @@ use language_model::{ AuthenticateError, LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, - LanguageModelProviderState, LanguageModelProviderTosView, LanguageModelRequest, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LlmApiToken, - ModelRequestLimitReachedError, PaymentRequiredError, RateLimiter, RefreshLlmTokenListener, + LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, + LanguageModelToolSchemaFormat, LlmApiToken, ModelRequestLimitReachedError, + PaymentRequiredError, RateLimiter, RefreshLlmTokenListener, }; use release_channel::AppVersion; use schemars::JsonSchema; @@ -44,8 +44,8 @@ use crate::provider::anthropic::{AnthropicEventMapper, count_anthropic_tokens, i use crate::provider::google::{GoogleEventMapper, into_google}; use crate::provider::open_ai::{OpenAiEventMapper, count_open_ai_tokens, into_open_ai}; -const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID; -const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME; +pub const PROVIDER_ID: LanguageModelProviderId = language_model::ZED_CLOUD_PROVIDER_ID; +pub const PROVIDER_NAME: LanguageModelProviderName = language_model::ZED_CLOUD_PROVIDER_NAME; #[derive(Default, Clone, Debug, PartialEq)] pub struct ZedDotDevSettings { @@ -118,7 +118,6 @@ pub struct State { llm_api_token: LlmApiToken, user_store: Entity, status: client::Status, - accept_terms_of_service_task: Option>>, models: Vec>, default_model: Option>, default_fast_model: Option>, @@ -142,13 +141,12 @@ impl State { llm_api_token: LlmApiToken::default(), user_store, status, - accept_terms_of_service_task: None, models: Vec::new(), default_model: None, default_fast_model: None, recommended_models: Vec::new(), _fetch_models_task: cx.spawn(async move |this, cx| { - maybe!(async move { + maybe!(async { let (client, llm_api_token) = this .read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?; @@ -197,24 +195,6 @@ impl State { state.update(cx, |_, cx| cx.notify()) }) } - - fn has_accepted_terms_of_service(&self, cx: &App) -> bool { - self.user_store.read(cx).has_accepted_terms_of_service() - } - - fn accept_terms_of_service(&mut self, cx: &mut Context) { - let user_store = self.user_store.clone(); - self.accept_terms_of_service_task = Some(cx.spawn(async move |this, cx| { - let _ = user_store - .update(cx, |store, cx| store.accept_terms_of_service(cx))? 
- .await; - this.update(cx, |this, cx| { - this.accept_terms_of_service_task = None; - cx.notify() - }) - })); - } - fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context) { let mut models = Vec::new(); @@ -384,7 +364,7 @@ impl LanguageModelProvider for CloudLanguageModelProvider { fn is_authenticated(&self, cx: &App) -> bool { let state = self.state.read(cx); - !state.is_signed_out(cx) && state.has_accepted_terms_of_service(cx) + !state.is_signed_out(cx) } fn authenticate(&self, _cx: &mut App) -> Task> { @@ -401,112 +381,11 @@ impl LanguageModelProvider for CloudLanguageModelProvider { .into() } - fn must_accept_terms(&self, cx: &App) -> bool { - !self.state.read(cx).has_accepted_terms_of_service(cx) - } - - fn render_accept_terms( - &self, - view: LanguageModelProviderTosView, - cx: &mut App, - ) -> Option { - let state = self.state.read(cx); - if state.has_accepted_terms_of_service(cx) { - return None; - } - Some( - render_accept_terms(view, state.accept_terms_of_service_task.is_some(), { - let state = self.state.clone(); - move |_window, cx| { - state.update(cx, |state, cx| state.accept_terms_of_service(cx)); - } - }) - .into_any_element(), - ) - } - fn reset_credentials(&self, _cx: &mut App) -> Task> { Task::ready(Ok(())) } } -fn render_accept_terms( - view_kind: LanguageModelProviderTosView, - accept_terms_of_service_in_progress: bool, - accept_terms_callback: impl Fn(&mut Window, &mut App) + 'static, -) -> impl IntoElement { - let thread_fresh_start = matches!(view_kind, LanguageModelProviderTosView::ThreadFreshStart); - let thread_empty_state = matches!(view_kind, LanguageModelProviderTosView::ThreadEmptyState); - - let terms_button = Button::new("terms_of_service", "Terms of Service") - .style(ButtonStyle::Subtle) - .icon(IconName::ArrowUpRight) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .when(thread_empty_state, |this| this.label_size(LabelSize::Small)) - .on_click(move |_, _window, cx| cx.open_url("https://zed.dev/terms-of-service")); - - let button_container = h_flex().child( - Button::new("accept_terms", "I accept the Terms of Service") - .when(!thread_empty_state, |this| { - this.full_width() - .style(ButtonStyle::Tinted(TintColor::Accent)) - .icon(IconName::Check) - .icon_position(IconPosition::Start) - .icon_size(IconSize::Small) - }) - .when(thread_empty_state, |this| { - this.style(ButtonStyle::Tinted(TintColor::Warning)) - .label_size(LabelSize::Small) - }) - .disabled(accept_terms_of_service_in_progress) - .on_click(move |_, window, cx| (accept_terms_callback)(window, cx)), - ); - - if thread_empty_state { - h_flex() - .w_full() - .flex_wrap() - .justify_between() - .child( - h_flex() - .child( - Label::new("To start using Zed AI, please read and accept the") - .size(LabelSize::Small), - ) - .child(terms_button), - ) - .child(button_container) - } else { - v_flex() - .w_full() - .gap_2() - .child( - h_flex() - .flex_wrap() - .when(thread_fresh_start, |this| this.justify_center()) - .child(Label::new( - "To start using Zed AI, please read and accept the", - )) - .child(terms_button), - ) - .child({ - match view_kind { - LanguageModelProviderTosView::TextThreadPopup => { - button_container.w_full().justify_end() - } - LanguageModelProviderTosView::Configuration => { - button_container.w_full().justify_start() - } - LanguageModelProviderTosView::ThreadFreshStart => { - button_container.w_full().justify_center() - } - LanguageModelProviderTosView::ThreadEmptyState => div().w_0(), - } - }) - } -} - pub struct CloudLanguageModel { id: 
LanguageModelId, model: Arc, @@ -1107,10 +986,7 @@ struct ZedAiConfiguration { plan: Option, subscription_period: Option<(DateTime, DateTime)>, eligible_for_trial: bool, - has_accepted_terms_of_service: bool, account_too_young: bool, - accept_terms_of_service_in_progress: bool, - accept_terms_of_service_callback: Arc, sign_in_callback: Arc, } @@ -1176,58 +1052,30 @@ impl RenderOnce for ZedAiConfiguration { ); } - v_flex() - .gap_2() - .w_full() - .when(!self.has_accepted_terms_of_service, |this| { - this.child(render_accept_terms( - LanguageModelProviderTosView::Configuration, - self.accept_terms_of_service_in_progress, - { - let callback = self.accept_terms_of_service_callback.clone(); - move |window, cx| (callback)(window, cx) - }, - )) - }) - .map(|this| { - if self.has_accepted_terms_of_service && self.account_too_young { - this.child(young_account_banner).child( - Button::new("upgrade", "Upgrade to Pro") - .style(ui::ButtonStyle::Tinted(ui::TintColor::Accent)) - .full_width() - .on_click(|_, _, cx| { - cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx)) - }), - ) - } else if self.has_accepted_terms_of_service { - this.text_sm() - .child(subscription_text) - .child(manage_subscription_buttons) - } else { - this - } - }) - .when(self.has_accepted_terms_of_service, |this| this) + v_flex().gap_2().w_full().map(|this| { + if self.account_too_young { + this.child(young_account_banner).child( + Button::new("upgrade", "Upgrade to Pro") + .style(ui::ButtonStyle::Tinted(ui::TintColor::Accent)) + .full_width() + .on_click(|_, _, cx| cx.open_url(&zed_urls::upgrade_to_zed_pro_url(cx))), + ) + } else { + this.text_sm() + .child(subscription_text) + .child(manage_subscription_buttons) + } + }) } } struct ConfigurationView { state: Entity, - accept_terms_of_service_callback: Arc, sign_in_callback: Arc, } impl ConfigurationView { fn new(state: Entity) -> Self { - let accept_terms_of_service_callback = Arc::new({ - let state = state.clone(); - move |_window: &mut Window, cx: &mut App| { - state.update(cx, |state, cx| { - state.accept_terms_of_service(cx); - }); - } - }); - let sign_in_callback = Arc::new({ let state = state.clone(); move |_window: &mut Window, cx: &mut App| { @@ -1239,7 +1087,6 @@ impl ConfigurationView { Self { state, - accept_terms_of_service_callback, sign_in_callback, } } @@ -1255,10 +1102,7 @@ impl Render for ConfigurationView { plan: user_store.plan(), subscription_period: user_store.subscription_period(), eligible_for_trial: user_store.trial_started_at().is_none(), - has_accepted_terms_of_service: state.has_accepted_terms_of_service(cx), account_too_young: user_store.account_too_young(), - accept_terms_of_service_in_progress: state.accept_terms_of_service_task.is_some(), - accept_terms_of_service_callback: self.accept_terms_of_service_callback.clone(), sign_in_callback: self.sign_in_callback.clone(), } } @@ -1283,7 +1127,6 @@ impl Component for ZedAiConfiguration { plan: Option, eligible_for_trial: bool, account_too_young: bool, - has_accepted_terms_of_service: bool, ) -> AnyElement { ZedAiConfiguration { is_connected, @@ -1292,10 +1135,7 @@ impl Component for ZedAiConfiguration { .is_some() .then(|| (Utc::now(), Utc::now() + chrono::Duration::days(7))), eligible_for_trial, - has_accepted_terms_of_service, account_too_young, - accept_terms_of_service_in_progress: false, - accept_terms_of_service_callback: Arc::new(|_, _| {}), sign_in_callback: Arc::new(|_, _| {}), } .into_any_element() @@ -1306,33 +1146,30 @@ impl Component for ZedAiConfiguration { .p_4() .gap_4() .children(vec![ 
- single_example( - "Not connected", - configuration(false, None, false, false, true), - ), + single_example("Not connected", configuration(false, None, false, false)), single_example( "Accept Terms of Service", - configuration(true, None, true, false, false), + configuration(true, None, true, false), ), single_example( "No Plan - Not eligible for trial", - configuration(true, None, false, false, true), + configuration(true, None, false, false), ), single_example( "No Plan - Eligible for trial", - configuration(true, None, true, false, true), + configuration(true, None, true, false), ), single_example( "Free Plan", - configuration(true, Some(Plan::ZedFree), true, false, true), + configuration(true, Some(Plan::ZedFree), true, false), ), single_example( "Zed Pro Trial Plan", - configuration(true, Some(Plan::ZedProTrial), true, false, true), + configuration(true, Some(Plan::ZedProTrial), true, false), ), single_example( "Zed Pro Plan", - configuration(true, Some(Plan::ZedPro), true, false, true), + configuration(true, Some(Plan::ZedPro), true, false), ), ]) .into_any_element(), diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs index 1ac12b4cd4..f252ab7aa3 100644 --- a/crates/language_models/src/provider/google.rs +++ b/crates/language_models/src/provider/google.rs @@ -12,9 +12,9 @@ use gpui::{ }; use http_client::HttpClient; use language_model::{ - AuthenticateError, LanguageModelCompletionError, LanguageModelCompletionEvent, - LanguageModelToolChoice, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, MessageContent, StopReason, + AuthenticateError, ConfigurationViewTargetAgent, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelToolChoice, LanguageModelToolSchemaFormat, + LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason, }; use language_model::{ LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider, @@ -37,6 +37,8 @@ use util::ResultExt; use crate::AllLanguageModelSettings; use crate::ui::InstructionListItem; +use super::anthropic::ApiKey; + const PROVIDER_ID: LanguageModelProviderId = language_model::GOOGLE_PROVIDER_ID; const PROVIDER_NAME: LanguageModelProviderName = language_model::GOOGLE_PROVIDER_NAME; @@ -198,6 +200,33 @@ impl GoogleLanguageModelProvider { request_limiter: RateLimiter::new(4), }) } + + pub fn api_key(cx: &mut App) -> Task> { + let credentials_provider = ::global(cx); + let api_url = AllLanguageModelSettings::get_global(cx) + .google + .api_url + .clone(); + + if let Ok(key) = std::env::var(GEMINI_API_KEY_VAR) { + Task::ready(Ok(ApiKey { + key, + from_env: true, + })) + } else { + cx.spawn(async move |cx| { + let (_, api_key) = credentials_provider + .read_credentials(&api_url, cx) + .await? 
+ .ok_or(AuthenticateError::CredentialsNotFound)?; + + Ok(ApiKey { + key: String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, + from_env: false, + }) + }) + } + } } impl LanguageModelProviderState for GoogleLanguageModelProvider { @@ -279,11 +308,11 @@ impl LanguageModelProvider for GoogleLanguageModelProvider { fn configuration_view( &self, - _target_agent: language_model::ConfigurationViewTargetAgent, + target_agent: language_model::ConfigurationViewTargetAgent, window: &mut Window, cx: &mut App, ) -> AnyView { - cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + cx.new(|cx| ConfigurationView::new(self.state.clone(), target_agent, window, cx)) .into() } @@ -776,11 +805,17 @@ fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage { struct ConfigurationView { api_key_editor: Entity, state: gpui::Entity, + target_agent: language_model::ConfigurationViewTargetAgent, load_credentials_task: Option>, } impl ConfigurationView { - fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + fn new( + state: gpui::Entity, + target_agent: language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut Context, + ) -> Self { cx.observe(&state, |_, _, cx| { cx.notify(); }) @@ -810,6 +845,7 @@ impl ConfigurationView { editor.set_placeholder_text("AIzaSy...", cx); editor }), + target_agent, state, load_credentials_task, } @@ -885,7 +921,10 @@ impl Render for ConfigurationView { v_flex() .size_full() .on_action(cx.listener(Self::save_api_key)) - .child(Label::new("To use Zed's agent with Google AI, you need to add an API key. Follow these steps:")) + .child(Label::new(format!("To use {}, you need to add an API key. Follow these steps:", match &self.target_agent { + ConfigurationViewTargetAgent::ZedAgent => "Zed's agent with Google AI".into(), + ConfigurationViewTargetAgent::Other(agent) => agent.clone(), + }))) .child( List::new() .child(InstructionListItem::new( diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 43c0365291..d5206c1f26 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -1743,6 +1743,5 @@ pub enum Event { } impl EventEmitter for LogStore {} -impl EventEmitter for LspLogView {} impl EventEmitter for LspLogView {} impl EventEmitter for LspLogView {} diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index 9d5ebbaf71..ebeac7efff 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -231,6 +231,7 @@ "implements" "interface" "keyof" + "module" "namespace" "private" "protected" @@ -250,4 +251,4 @@ (jsx_closing_element ([""]) @punctuation.bracket.jsx) (jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) (jsx_attribute "=" @punctuation.delimiter.jsx) -(jsx_text) @text.jsx \ No newline at end of file +(jsx_text) @text.jsx diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm index 7baba5f227..dbec1937b1 100644 --- a/crates/languages/src/javascript/injections.scm +++ b/crates/languages/src/javascript/injections.scm @@ -11,6 +11,21 @@ (#set! injection.language "css")) ) +(call_expression + function: (member_expression + object: (identifier) @_obj (#eq? @_obj "styled") + property: (property_identifier)) + arguments: (template_string (string_fragment) @injection.content + (#set! 
injection.language "css")) +) + +(call_expression + function: (call_expression + function: (identifier) @_name (#eq? @_name "styled")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) +) + (call_expression function: (identifier) @_name (#eq? @_name "html") arguments: (template_string) @injection.content diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index c6c7357148..3e8dce756b 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -510,20 +510,6 @@ impl LspAdapter for RustLspAdapter { } } - let cargo_diagnostics_fetched_separately = ProjectSettings::get_global(cx) - .diagnostics - .fetch_cargo_diagnostics(); - if cargo_diagnostics_fetched_separately { - let disable_check_on_save = json!({ - "checkOnSave": false, - }); - if let Some(initialization_options) = &mut original.initialization_options { - merge_json_value_into(disable_check_on_save, initialization_options); - } else { - original.initialization_options = Some(disable_check_on_save); - } - } - Ok(original) } } diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index 5e2fbbf63a..f7cb987831 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -237,6 +237,7 @@ "implements" "interface" "keyof" + "module" "namespace" "private" "protected" @@ -256,4 +257,4 @@ (jsx_closing_element ([""]) @punctuation.bracket.jsx) (jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) (jsx_attribute "=" @punctuation.delimiter.jsx) -(jsx_text) @text.jsx \ No newline at end of file +(jsx_text) @text.jsx diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm index 48da80995b..9eec01cc89 100644 --- a/crates/languages/src/tsx/injections.scm +++ b/crates/languages/src/tsx/injections.scm @@ -11,6 +11,21 @@ (#set! injection.language "css")) ) +(call_expression + function: (member_expression + object: (identifier) @_obj (#eq? @_obj "styled") + property: (property_identifier)) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) +) + +(call_expression + function: (call_expression + function: (identifier) @_name (#eq? @_name "styled")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) +) + (call_expression function: (identifier) @_name (#eq? @_name "html") arguments: (template_string (string_fragment) @injection.content diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index af37ef6415..84cbbae77d 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -248,6 +248,7 @@ "is" "keyof" "let" + "module" "namespace" "new" "of" @@ -272,4 +273,4 @@ "while" "with" "yield" -] @keyword \ No newline at end of file +] @keyword diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm index 7affdc5b75..1ca1e9ad59 100644 --- a/crates/languages/src/typescript/injections.scm +++ b/crates/languages/src/typescript/injections.scm @@ -15,6 +15,21 @@ (#set! injection.language "css")) ) +(call_expression + function: (member_expression + object: (identifier) @_obj (#eq? @_obj "styled") + property: (property_identifier)) + arguments: (template_string (string_fragment) @injection.content + (#set! 
injection.language "css")) +) + +(call_expression + function: (call_expression + function: (identifier) @_name (#eq? @_name "styled")) + arguments: (template_string (string_fragment) @injection.content + (#set! injection.language "css")) +) + (call_expression function: (identifier) @_name (#eq? @_name "html") arguments: (template_string) @injection.content diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index 58059967b7..3575325ac0 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -25,6 +25,7 @@ async-trait.workspace = true collections.workspace = true cpal.workspace = true futures.workspace = true +audio.workspace = true gpui = { workspace = true, features = ["screen-capture", "x11", "wayland", "windows-manifest"] } gpui_tokio.workspace = true http_client_tls.workspace = true @@ -35,6 +36,7 @@ nanoid.workspace = true parking_lot.workspace = true postage.workspace = true smallvec.workspace = true +settings.workspace = true tokio-tungstenite.workspace = true util.workspace = true workspace-hack.workspace = true diff --git a/crates/livekit_client/src/lib.rs b/crates/livekit_client/src/lib.rs index e3934410e1..055aa3704e 100644 --- a/crates/livekit_client/src/lib.rs +++ b/crates/livekit_client/src/lib.rs @@ -24,8 +24,11 @@ mod livekit_client; )))] pub use livekit_client::*; -// If you need proper LSP in livekit_client you've got to comment out -// the mocks and test +// If you need proper LSP in livekit_client you've got to comment +// - the cfg blocks above +// - the mods: mock_client & test and their conditional blocks +// - the pub use mock_client::* and their conditional blocks + #[cfg(any( test, feature = "test-support", diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index adeea4f512..0751b014f4 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,15 +1,16 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; +use audio::AudioSettings; use collections::HashMap; use futures::{SinkExt, channel::mpsc}; use gpui::{App, AsyncApp, ScreenCaptureSource, ScreenCaptureStream, Task}; use gpui_tokio::Tokio; +use log::info; use playback::capture_local_video_track; +use settings::Settings; mod playback; -#[cfg(feature = "record-microphone")] -mod record; use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication}; pub use playback::AudioStream; @@ -125,9 +126,14 @@ impl Room { pub fn play_remote_audio_track( &self, track: &RemoteAudioTrack, - _cx: &App, + cx: &mut App, ) -> Result { - Ok(self.playback.play_remote_audio_track(&track.0)) + if AudioSettings::get_global(cx).rodio_audio { + info!("Using experimental.rodio_audio audio pipeline"); + playback::play_remote_audio_track(&track.0, cx) + } else { + Ok(self.playback.play_remote_audio_track(&track.0)) + } } } diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index e13fb7bd81..d6b64dbaca 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -18,13 +18,16 @@ use livekit::webrtc::{ video_stream::native::NativeVideoStream, }; use parking_lot::Mutex; +use rodio::Source; use std::cell::RefCell; use std::sync::Weak; -use std::sync::atomic::{self, AtomicI32}; +use std::sync::atomic::{AtomicBool, AtomicI32, Ordering}; use std::time::Duration; use std::{borrow::Cow, collections::VecDeque, sync::Arc, thread}; 
use util::{ResultExt as _, maybe}; +mod source; + pub(crate) struct AudioStack { executor: BackgroundExecutor, apm: Arc>, @@ -40,6 +43,29 @@ pub(crate) struct AudioStack { const SAMPLE_RATE: u32 = 48000; const NUM_CHANNELS: u32 = 2; +pub(crate) fn play_remote_audio_track( + track: &livekit::track::RemoteAudioTrack, + cx: &mut gpui::App, +) -> Result { + let stop_handle = Arc::new(AtomicBool::new(false)); + let stop_handle_clone = stop_handle.clone(); + let stream = source::LiveKitStream::new(cx.background_executor(), track) + .stoppable() + .periodic_access(Duration::from_millis(50), move |s| { + if stop_handle.load(Ordering::Relaxed) { + s.stop(); + } + }); + audio::Audio::play_source(stream, cx).context("Could not play audio")?; + + let on_drop = util::defer(move || { + stop_handle_clone.store(true, Ordering::Relaxed); + }); + Ok(AudioStream::Output { + _drop: Box::new(on_drop), + }) +} + impl AudioStack { pub(crate) fn new(executor: BackgroundExecutor) -> Self { let apm = Arc::new(Mutex::new(apm::AudioProcessingModule::new( @@ -61,7 +87,7 @@ impl AudioStack { ) -> AudioStream { let output_task = self.start_output(); - let next_ssrc = self.next_ssrc.fetch_add(1, atomic::Ordering::Relaxed); + let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed); let source = AudioMixerSource { ssrc: next_ssrc, sample_rate: SAMPLE_RATE, @@ -97,6 +123,23 @@ impl AudioStack { } } + fn start_output(&self) -> Arc> { + if let Some(task) = self._output_task.borrow().upgrade() { + return task; + } + let task = Arc::new(self.executor.spawn({ + let apm = self.apm.clone(); + let mixer = self.mixer.clone(); + async move { + Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS) + .await + .log_err(); + } + })); + *self._output_task.borrow_mut() = Arc::downgrade(&task); + task + } + pub(crate) fn capture_local_microphone_track( &self, ) -> Result<(crate::LocalAudioTrack, AudioStream)> { @@ -139,23 +182,6 @@ impl AudioStack { )) } - fn start_output(&self) -> Arc> { - if let Some(task) = self._output_task.borrow().upgrade() { - return task; - } - let task = Arc::new(self.executor.spawn({ - let apm = self.apm.clone(); - let mixer = self.mixer.clone(); - async move { - Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS) - .await - .log_err(); - } - })); - *self._output_task.borrow_mut() = Arc::downgrade(&task); - task - } - async fn play_output( apm: Arc>, mixer: Arc>, diff --git a/crates/livekit_client/src/livekit_client/playback/source.rs b/crates/livekit_client/src/livekit_client/playback/source.rs new file mode 100644 index 0000000000..021640247d --- /dev/null +++ b/crates/livekit_client/src/livekit_client/playback/source.rs @@ -0,0 +1,67 @@ +use futures::StreamExt; +use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame}; +use livekit::track::RemoteAudioTrack; +use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter}; + +use crate::livekit_client::playback::{NUM_CHANNELS, SAMPLE_RATE}; + +fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer { + let samples = frame.data.iter().copied(); + let samples = SampleTypeConverter::<_, _>::new(samples); + let samples: Vec = samples.collect(); + SamplesBuffer::new(frame.num_channels as u16, frame.sample_rate, samples) +} + +pub struct LiveKitStream { + // shared_buffer: SharedBuffer, + inner: rodio::queue::SourcesQueueOutput, + _receiver_task: gpui::Task<()>, +} + +impl LiveKitStream { + pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self { + let mut stream = + 
NativeAudioStream::new(track.rtc_track(), SAMPLE_RATE as i32, NUM_CHANNELS as i32); + let (queue_input, queue_output) = rodio::queue::queue(true); + // spawn rtc stream + let receiver_task = executor.spawn({ + async move { + while let Some(frame) = stream.next().await { + let samples = frame_to_samplesbuffer(frame); + queue_input.append(samples); + } + } + }); + + LiveKitStream { + _receiver_task: receiver_task, + inner: queue_output, + } + } +} + +impl Iterator for LiveKitStream { + type Item = rodio::Sample; + + fn next(&mut self) -> Option { + self.inner.next() + } +} + +impl Source for LiveKitStream { + fn current_span_len(&self) -> Option { + self.inner.current_span_len() + } + + fn channels(&self) -> rodio::ChannelCount { + self.inner.channels() + } + + fn sample_rate(&self) -> rodio::SampleRate { + self.inner.sample_rate() + } + + fn total_duration(&self) -> Option { + self.inner.total_duration() + } +} diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index ce9e2fe229..942225d098 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -45,7 +45,7 @@ use util::{ConnectionResult, ResultExt, TryFutureExt, redact}; const JSON_RPC_VERSION: &str = "2.0"; const CONTENT_LEN_HEADER: &str = "Content-Length: "; -const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2); +pub const LSP_REQUEST_TIMEOUT: Duration = Duration::from_secs(60 * 2); const SERVER_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5); type NotificationHandler = Box, Value, &mut AsyncApp)>; diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 755506bd12..f16da45d79 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -1085,11 +1085,11 @@ impl Element for MarkdownElement { ); el.child( h_flex() - .w_5() + .w_4() .absolute() - .top_1() - .right_1() - .justify_center() + .top_1p5() + .right_1p5() + .justify_end() .child(codeblock), ) }); @@ -1115,11 +1115,12 @@ impl Element for MarkdownElement { cx, ); el.child( - div() + h_flex() + .w_4() .absolute() .top_0() .right_0() - .w_5() + .justify_end() .visible_on_hover("code_block") .child(codeblock), ) @@ -1320,6 +1321,7 @@ fn render_copy_code_block_button( ) .icon_color(Color::Muted) .icon_size(IconSize::Small) + .style(ButtonStyle::Filled) .shape(ui::IconButtonShape::Square) .tooltip(Tooltip::text("Copy Code")) .on_click({ diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 60e9c14c34..e27cbf868a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -835,7 +835,7 @@ impl MultiBuffer { this.convert_edits_to_buffer_edits(edits, &snapshot, &original_indent_columns); drop(snapshot); - let mut buffer_ids = Vec::new(); + let mut buffer_ids = Vec::with_capacity(buffer_edits.len()); for (buffer_id, mut edits) in buffer_edits { buffer_ids.push(buffer_id); edits.sort_by_key(|edit| edit.range.start); @@ -2196,6 +2196,15 @@ impl MultiBuffer { }) } + pub fn buffer_for_anchor(&self, anchor: Anchor, cx: &App) -> Option> { + if let Some(buffer_id) = anchor.buffer_id { + self.buffer(buffer_id) + } else { + let (_, buffer, _) = self.excerpt_containing(anchor, cx)?; + Some(buffer) + } + } + // If point is at the end of the buffer, the last excerpt is returned pub fn point_to_buffer_offset( &self, @@ -5228,15 +5237,6 @@ impl MultiBufferSnapshot { excerpt_offset += ExcerptOffset::new(offset_in_transform); }; - if let Some((excerpt_id, buffer_id, buffer)) = self.as_singleton() { - return Anchor { - buffer_id: 
Some(buffer_id), - excerpt_id: *excerpt_id, - text_anchor: buffer.anchor_at(excerpt_offset.value, bias), - diff_base_anchor, - }; - } - let mut excerpts = self .excerpts .cursor::>>(&()); @@ -5260,10 +5260,17 @@ impl MultiBufferSnapshot { text_anchor, diff_base_anchor, } - } else if excerpt_offset.is_zero() && bias == Bias::Left { - Anchor::min() } else { - Anchor::max() + let mut anchor = if excerpt_offset.is_zero() && bias == Bias::Left { + Anchor::min() + } else { + Anchor::max() + }; + // TODO this is a hack, remove it + if let Some((excerpt_id, _, _)) = self.as_singleton() { + anchor.excerpt_id = *excerpt_id; + } + anchor } } @@ -6305,6 +6312,14 @@ impl MultiBufferSnapshot { }) } + pub fn buffer_id_for_anchor(&self, anchor: Anchor) -> Option { + if let Some(id) = anchor.buffer_id { + return Some(id); + } + let excerpt = self.excerpt_containing(anchor..anchor)?; + Some(excerpt.buffer_id()) + } + pub fn selections_in_range<'a>( &'a self, range: &'a Range, @@ -6983,7 +6998,7 @@ impl Excerpt { } fn contains(&self, anchor: &Anchor) -> bool { - Some(self.buffer_id) == anchor.buffer_id + (anchor.buffer_id == None || anchor.buffer_id == Some(self.buffer_id)) && self .range .context diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index 598ee0f9cb..61b4b0520f 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -2250,11 +2250,11 @@ impl ReferenceMultibuffer { let base_buffer = diff.base_text(); let mut offset = buffer_range.start; - let mut hunks = diff + let hunks = diff .hunks_intersecting_range(excerpt.range.clone(), buffer, cx) .peekable(); - while let Some(hunk) = hunks.next() { + for hunk in hunks { // Ignore hunks that are outside the excerpt range. let mut hunk_range = hunk.buffer_range.to_offset(buffer); diff --git a/crates/onboarding/src/ai_setup_page.rs b/crates/onboarding/src/ai_setup_page.rs index 672bcf1cd9..54c49bc72a 100644 --- a/crates/onboarding/src/ai_setup_page.rs +++ b/crates/onboarding/src/ai_setup_page.rs @@ -283,17 +283,13 @@ pub(crate) fn render_ai_setup_page( v_flex() .mt_2() .gap_6() - .child({ - let mut ai_upsell_card = - AiUpsellCard::new(client, &user_store, user_store.read(cx).plan(), cx); - - ai_upsell_card.tab_index = Some({ - tab_index += 1; - tab_index - 1 - }); - - ai_upsell_card - }) + .child( + AiUpsellCard::new(client, &user_store, user_store.read(cx).plan(), cx) + .tab_index(Some({ + tab_index += 1; + tab_index - 1 + })), + ) .child(render_llm_provider_section( &mut tab_index, workspace, diff --git a/crates/onboarding/src/editing_page.rs b/crates/onboarding/src/editing_page.rs index 8fae695854..47dfd84894 100644 --- a/crates/onboarding/src/editing_page.rs +++ b/crates/onboarding/src/editing_page.rs @@ -606,7 +606,7 @@ fn render_popular_settings_section( cx: &mut App, ) -> impl IntoElement { const LIGATURE_TOOLTIP: &str = - "Font ligatures combine two characters into one. For example, turning =/= into ≠."; + "Font ligatures combine two characters into one. 
For example, turning != into ≠."; v_flex() .pt_6() diff --git a/crates/onboarding/src/onboarding.rs b/crates/onboarding/src/onboarding.rs index 884374a72f..873dd63201 100644 --- a/crates/onboarding/src/onboarding.rs +++ b/crates/onboarding/src/onboarding.rs @@ -850,13 +850,19 @@ impl workspace::SerializableItem for Onboarding { } mod persistence { - use db::{define_connection, query, sqlez_macros::sql}; + use db::{ + query, + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, + }; use workspace::WorkspaceDb; - define_connection! { - pub static ref ONBOARDING_PAGES: OnboardingPagesDb = - &[ - sql!( + pub struct OnboardingPagesDb(ThreadSafeConnection); + + impl Domain for OnboardingPagesDb { + const NAME: &str = stringify!(OnboardingPagesDb); + + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE onboarding_pages ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -866,10 +872,11 @@ mod persistence { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT; - ), - ]; + )]; } + db::static_connection!(ONBOARDING_PAGES, OnboardingPagesDb, [WorkspaceDb]); + impl OnboardingPagesDb { query! { pub async fn save_onboarding_page( diff --git a/crates/onboarding/src/welcome.rs b/crates/onboarding/src/welcome.rs index 3fe9c32a48..8ff55d812b 100644 --- a/crates/onboarding/src/welcome.rs +++ b/crates/onboarding/src/welcome.rs @@ -414,13 +414,19 @@ impl workspace::SerializableItem for WelcomePage { } mod persistence { - use db::{define_connection, query, sqlez_macros::sql}; + use db::{ + query, + sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, + }; use workspace::WorkspaceDb; - define_connection! { - pub static ref WELCOME_PAGES: WelcomePagesDb = - &[ - sql!( + pub struct WelcomePagesDb(ThreadSafeConnection); + + impl Domain for WelcomePagesDb { + const NAME: &str = stringify!(WelcomePagesDb); + + const MIGRATIONS: &[&str] = (&[sql!( CREATE TABLE welcome_pages ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -430,10 +436,11 @@ mod persistence { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT; - ), - ]; + )]); } + db::static_connection!(WELCOME_PAGES, WelcomePagesDb, [WorkspaceDb]); + impl WelcomePagesDb { query! 
{ pub async fn save_welcome_page( diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index acf6ec434a..08be82b830 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -446,7 +446,6 @@ pub enum ResponseStreamResult { #[derive(Serialize, Deserialize, Debug)] pub struct ResponseStreamEvent { - pub model: String, pub choices: Vec, pub usage: Option, } diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 59c43f945f..10698cead8 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -4393,12 +4393,13 @@ impl OutlinePanel { }) .filter(|(match_range, _)| { let editor = active_editor.read(cx); - if let Some(buffer_id) = match_range.start.buffer_id + let snapshot = editor.buffer().read(cx).snapshot(cx); + if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.start) && editor.is_buffer_folded(buffer_id, cx) { return false; } - if let Some(buffer_id) = match_range.start.buffer_id + if let Some(buffer_id) = snapshot.buffer_id_for_anchor(match_range.end) && editor.is_buffer_folded(buffer_id, cx) { return false; diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index a171b193d0..295bad6e59 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -88,9 +88,18 @@ pub enum BufferStoreEvent { }, } -#[derive(Default, Debug)] +#[derive(Default, Debug, Clone)] pub struct ProjectTransaction(pub HashMap, language::Transaction>); +impl PartialEq for ProjectTransaction { + fn eq(&self, other: &Self) -> bool { + self.0.len() == other.0.len() + && self.0.iter().all(|(buffer, transaction)| { + other.0.get(buffer).is_some_and(|t| t.id == transaction.id) + }) + } +} + impl EventEmitter for BufferStore {} impl RemoteBufferStore { diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 834bf2c2d2..2906c32ff4 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -34,7 +34,7 @@ use http_client::HttpClient; use language::{Buffer, LanguageToolchainStore, language_settings::InlayHintKind}; use node_runtime::NodeRuntime; -use remote::{SshRemoteClient, ssh_session::SshArgs}; +use remote::{SshInfo, SshRemoteClient, ssh_session::SshArgs}; use rpc::{ AnyProtoClient, TypedEnvelope, proto::{self}, @@ -254,14 +254,18 @@ impl DapStore { cx.spawn(async move |_, cx| { let response = request.await?; let binary = DebugAdapterBinary::from_proto(response)?; - let (mut ssh_command, envs, path_style) = + let (mut ssh_command, envs, path_style, ssh_shell) = ssh_client.read_with(cx, |ssh, _| { - let (SshArgs { arguments, envs }, path_style) = - ssh.ssh_info().context("SSH arguments not found")?; + let SshInfo { + args: SshArgs { arguments, envs }, + path_style, + shell, + } = ssh.ssh_info().context("SSH arguments not found")?; anyhow::Ok(( SshCommand { arguments }, envs.unwrap_or_default(), path_style, + shell, )) })??; @@ -280,6 +284,7 @@ impl DapStore { } let (program, args) = wrap_for_ssh( + &ssh_shell, &ssh_command, binary .command diff --git a/crates/project/src/debugger/locators/cargo.rs b/crates/project/src/debugger/locators/cargo.rs index 3e28fac8af..b2f9580f9c 100644 --- a/crates/project/src/debugger/locators/cargo.rs +++ b/crates/project/src/debugger/locators/cargo.rs @@ -117,7 +117,7 @@ impl DapLocator for CargoLocator { .cwd .clone() .context("Couldn't get cwd from debug config which is needed for 
locators")?; - let builder = ShellBuilder::new(true, &build_config.shell).non_interactive(); + let builder = ShellBuilder::new(None, &build_config.shell).non_interactive(); let (program, args) = builder.build( Some("cargo".into()), &build_config @@ -126,7 +126,7 @@ impl DapLocator for CargoLocator { .cloned() .take_while(|arg| arg != "--") .chain(Some("--message-format=json".to_owned())) - .collect(), + .collect::>(), ); let mut child = util::command::new_smol_command(program) .args(args) diff --git a/crates/project/src/git_store/git_traversal.rs b/crates/project/src/git_store/git_traversal.rs index 9eadaeac82..eee492e482 100644 --- a/crates/project/src/git_store/git_traversal.rs +++ b/crates/project/src/git_store/git_traversal.rs @@ -42,8 +42,8 @@ impl<'a> GitTraversal<'a> { // other_repo/ // .git/ // our_query.txt - let mut query = path.ancestors(); - while let Some(query) = query.next() { + let query = path.ancestors(); + for query in query { let (_, snapshot) = self .repo_root_to_snapshot .range(Path::new("")..=query) diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index c90d85358a..ce7a871d1a 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -3444,8 +3444,7 @@ impl LspCommand for GetCodeLens { capabilities .server_capabilities .code_lens_provider - .as_ref() - .is_some_and(|code_lens_options| code_lens_options.resolve_provider.unwrap_or(false)) + .is_some() } fn to_lsp( diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 1b46117897..deebaedd74 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -72,10 +72,11 @@ use lsp::{ AdapterServerCapabilities, CodeActionKind, CompletionContext, DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher, - LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, - LanguageServerName, LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType, - OneOf, RenameFilesParams, SymbolKind, TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles, - WorkDoneProgressCancelParams, WorkspaceFolder, notification::DidRenameFiles, + LSP_REQUEST_TIMEOUT, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, + LanguageServerId, LanguageServerName, LanguageServerSelector, LspRequestFuture, + MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, + TextDocumentSyncSaveOptions, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, + WorkspaceFolder, notification::DidRenameFiles, }; use node_runtime::read_package_installed_version; use parking_lot::Mutex; @@ -84,7 +85,7 @@ use rand::prelude::*; use rpc::{ AnyProtoClient, - proto::{FromProto, ToProto}, + proto::{FromProto, LspRequestId, LspRequestMessage as _, ToProto}, }; use serde::Serialize; use settings::{Settings, SettingsLocation, SettingsStore}; @@ -92,7 +93,7 @@ use sha2::{Digest, Sha256}; use smol::channel::Sender; use snippet::Snippet; use std::{ - any::Any, + any::{Any, TypeId}, borrow::Cow, cell::RefCell, cmp::{Ordering, Reverse}, @@ -3490,6 +3491,7 @@ pub struct LspStore { pub(super) lsp_server_capabilities: HashMap, lsp_document_colors: HashMap, lsp_code_lens: HashMap, + running_lsp_requests: HashMap>)>, } #[derive(Debug, Default, Clone)] @@ -3499,7 +3501,7 @@ pub struct DocumentColors { } type DocumentColorTask = Shared>>>; -type CodeLensTask = Shared, Arc>>>; 
+type CodeLensTask = Shared>, Arc>>>; #[derive(Debug, Default)] struct DocumentColorData { @@ -3579,6 +3581,8 @@ struct CoreSymbol { impl LspStore { pub fn init(client: &AnyProtoClient) { + client.add_entity_request_handler(Self::handle_lsp_query); + client.add_entity_message_handler(Self::handle_lsp_query_response); client.add_entity_request_handler(Self::handle_multi_lsp_query); client.add_entity_request_handler(Self::handle_restart_language_servers); client.add_entity_request_handler(Self::handle_stop_language_servers); @@ -3758,6 +3762,7 @@ impl LspStore { lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), lsp_code_lens: HashMap::default(), + running_lsp_requests: HashMap::default(), active_entry: None, _maintain_workspace_config, _maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx), @@ -3819,6 +3824,7 @@ impl LspStore { lsp_server_capabilities: HashMap::default(), lsp_document_colors: HashMap::default(), lsp_code_lens: HashMap::default(), + running_lsp_requests: HashMap::default(), active_entry: None, _maintain_workspace_config, @@ -4381,8 +4387,6 @@ impl LspStore { } } - // TODO: remove MultiLspQuery: instead, the proto handler should pick appropriate server(s) - // Then, use `send_lsp_proto_request` or analogue for most of the LSP proto requests and inline this check inside fn is_capable_for_proto_request( &self, buffer: &Entity, @@ -4639,7 +4643,6 @@ impl LspStore { Some((file, language, raw_buffer.remote_id())) }) .sorted_by_key(|(file, _, _)| Reverse(file.worktree.read(cx).is_visible())); - for (file, language, buffer_id) in buffers { let worktree_id = file.worktree_id(cx); let Some(worktree) = local @@ -4681,7 +4684,6 @@ impl LspStore { cx, ) .collect::>(); - for node in nodes { let server_id = node.server_id_or_init(|disposition| { let path = &disposition.path; @@ -5233,154 +5235,130 @@ impl LspStore { pub fn definitions( &mut self, - buffer_handle: &Entity, + buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetDefinitions { position }; - if !self.is_capable_for_proto_request(buffer_handle, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer_handle.read(cx).remote_id().into(), - version: serialize_version(&buffer_handle.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDefinition( - request.to_proto(project_id, buffer_handle.read(cx)), - )), - }); - let buffer = buffer_handle.clone(); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); + let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetDefinitionResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|definitions_response| { - GetDefinitions { position }.response_from_proto( - definitions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetDefinitions { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? + .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let definitions_task = self.request_multiple_lsp_locally( - buffer_handle, + buffer, Some(position), GetDefinitions { position }, cx, ); cx.background_spawn(async move { - Ok(definitions_task - .await - .into_iter() - .flat_map(|(_, definitions)| definitions) - .dedup() - .collect()) + Ok(Some( + definitions_task + .await + .into_iter() + .flat_map(|(_, definitions)| definitions) + .dedup() + .collect(), + )) }) } } pub fn declarations( &mut self, - buffer_handle: &Entity, + buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetDeclarations { position }; - if !self.is_capable_for_proto_request(buffer_handle, &request, cx) { - return Task::ready(Ok(Vec::new())); + if !self.is_capable_for_proto_request(buffer, &request, cx) { + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer_handle.read(cx).remote_id().into(), - version: serialize_version(&buffer_handle.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDeclaration( - request.to_proto(project_id, buffer_handle.read(cx)), - )), - }); - let buffer = buffer_handle.clone(); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); + let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetDeclarationResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|declarations_response| { - GetDeclarations { position }.response_from_proto( - declarations_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetDeclarations { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let declarations_task = self.request_multiple_lsp_locally( - buffer_handle, + buffer, Some(position), GetDeclarations { position }, cx, ); cx.background_spawn(async move { - Ok(declarations_task - .await - .into_iter() - .flat_map(|(_, declarations)| declarations) - .dedup() - .collect()) + Ok(Some( + declarations_task + .await + .into_iter() + .flat_map(|(_, declarations)| declarations) + .dedup() + .collect(), + )) }) } } @@ -5390,59 +5368,45 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetTypeDefinitions { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetTypeDefinition( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetTypeDefinitionResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|type_definitions_response| { - GetTypeDefinitions { position }.response_from_proto( - type_definitions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetTypeDefinitions { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let type_definitions_task = self.request_multiple_lsp_locally( @@ -5452,12 +5416,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(type_definitions_task - .await - .into_iter() - .flat_map(|(_, type_definitions)| type_definitions) - .dedup() - .collect()) + Ok(Some( + type_definitions_task + .await + .into_iter() + .flat_map(|(_, type_definitions)| type_definitions) + .dedup() + .collect(), + )) }) } } @@ -5467,59 +5433,45 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetImplementations { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetImplementation( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetImplementationResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|implementations_response| { - GetImplementations { position }.response_from_proto( - implementations_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetImplementations { position }.response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(), + )) }) } else { let implementations_task = self.request_multiple_lsp_locally( @@ -5529,12 +5481,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(implementations_task - .await - .into_iter() - .flat_map(|(_, implementations)| implementations) - .dedup() - .collect()) + Ok(Some( + implementations_task + .await + .into_iter() + .flat_map(|(_, implementations)| implementations) + .dedup() + .collect(), + )) }) } } @@ -5544,59 +5498,44 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetReferences { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetReferences( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); + }; + let Some(responses) = request_task.await? else { + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetReferencesResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|references_response| { - GetReferences { position }.response_from_proto( - references_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) - .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .dedup() - .collect()) + let locations = join_all(responses.payload.into_iter().map(|lsp_response| { + GetReferences { position }.response_from_proto( + lsp_response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) + .await + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .dedup() + .collect(); + Ok(Some(locations)) }) } else { let references_task = self.request_multiple_lsp_locally( @@ -5606,12 +5545,14 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(references_task - .await - .into_iter() - .flat_map(|(_, references)| references) - .dedup() - .collect()) + Ok(Some( + references_task + .await + .into_iter() + .flat_map(|(_, references)| references) + .dedup() + .collect(), + )) }) } } @@ -5622,65 +5563,51 @@ impl LspStore { range: Range, kinds: Option>, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetCodeActions { range: range.clone(), kinds: kinds.clone(), }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(Vec::new())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetCodeActions( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { let Some(project) = weak_project.upgrade() else { - return Ok(Vec::new()); + return Ok(None); }; - let responses = request_task.await?.responses; - let actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetCodeActionsResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|code_actions_response| { - GetCodeActions { - range: range.clone(), - kinds: kinds.clone(), - } - .response_from_proto( - code_actions_response, - project.clone(), - buffer.clone(), - cx.clone(), - ) - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + let actions = join_all(responses.payload.into_iter().map(|response| { + GetCodeActions { + range: range.clone(), + kinds: kinds.clone(), + } + .response_from_proto( + response.response, + project.clone(), + buffer.clone(), + cx.clone(), + ) + })) .await; - Ok(actions - .into_iter() - .collect::>>>()? - .into_iter() - .flatten() - .collect()) + Ok(Some( + actions + .into_iter() + .collect::>>>()? 
+ .into_iter() + .flatten() + .collect(), + )) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -5690,11 +5617,13 @@ impl LspStore { cx, ); cx.background_spawn(async move { - Ok(all_actions_task - .await - .into_iter() - .flat_map(|(_, actions)| actions) - .collect()) + Ok(Some( + all_actions_task + .await + .into_iter() + .flat_map(|(_, actions)| actions) + .collect(), + )) }) } } @@ -5719,8 +5648,10 @@ impl LspStore { != cached_data.lens.keys().copied().collect() }); if !has_different_servers { - return Task::ready(Ok(cached_data.lens.values().flatten().cloned().collect())) - .shared(); + return Task::ready(Ok(Some( + cached_data.lens.values().flatten().cloned().collect(), + ))) + .shared(); } } @@ -5758,17 +5689,19 @@ impl LspStore { lsp_store .update(cx, |lsp_store, _| { let lsp_data = lsp_store.lsp_code_lens.entry(buffer_id).or_default(); - if lsp_data.lens_for_version == query_version_queried_for { - lsp_data.lens.extend(fetched_lens.clone()); - } else if !lsp_data - .lens_for_version - .changed_since(&query_version_queried_for) - { - lsp_data.lens_for_version = query_version_queried_for; - lsp_data.lens = fetched_lens.clone(); + if let Some(fetched_lens) = fetched_lens { + if lsp_data.lens_for_version == query_version_queried_for { + lsp_data.lens.extend(fetched_lens); + } else if !lsp_data + .lens_for_version + .changed_since(&query_version_queried_for) + { + lsp_data.lens_for_version = query_version_queried_for; + lsp_data.lens = fetched_lens; + } } lsp_data.update = None; - lsp_data.lens.values().flatten().cloned().collect() + Some(lsp_data.lens.values().flatten().cloned().collect()) }) .map_err(Arc::new) }) @@ -5781,64 +5714,40 @@ impl LspStore { &mut self, buffer: &Entity, cx: &mut Context, - ) -> Task>>> { + ) -> Task>>>> { if let Some((upstream_client, project_id)) = self.upstream_client() { let request = GetCodeLens; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(HashMap::default())); + return Task::ready(Ok(None)); } - let request_task = upstream_client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), + let request_task = upstream_client.request_lsp( project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetCodeLens( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_lsp_store, cx| { let Some(lsp_store) = weak_lsp_store.upgrade() else { - return Ok(HashMap::default()); + return Ok(None); }; - let responses = request_task.await?.responses; - let code_lens_actions = join_all( - responses - .into_iter() - .filter_map(|lsp_response| { - let response = match lsp_response.response? 
{ - proto::lsp_response::Response::GetCodeLensResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }?; - let server_id = LanguageServerId::from_proto(lsp_response.server_id); - Some((server_id, response)) - }) - .map(|(server_id, code_lens_response)| { - let lsp_store = lsp_store.clone(); - let buffer = buffer.clone(); - let cx = cx.clone(); - async move { - ( - server_id, - GetCodeLens - .response_from_proto( - code_lens_response, - lsp_store, - buffer, - cx, - ) - .await, - ) - } - }), - ) + let Some(responses) = request_task.await? else { + return Ok(None); + }; + + let code_lens_actions = join_all(responses.payload.into_iter().map(|response| { + let lsp_store = lsp_store.clone(); + let buffer = buffer.clone(); + let cx = cx.clone(); + async move { + ( + LanguageServerId::from_proto(response.server_id), + GetCodeLens + .response_from_proto(response.response, lsp_store, buffer, cx) + .await, + ) + } + })) .await; let mut has_errors = false; @@ -5857,14 +5766,14 @@ impl LspStore { !has_errors || !code_lens_actions.is_empty(), "Failed to fetch code lens" ); - Ok(code_lens_actions) + Ok(Some(code_lens_actions)) }) } else { let code_lens_actions_task = self.request_multiple_lsp_locally(buffer, None::, GetCodeLens, cx); - cx.background_spawn( - async move { Ok(code_lens_actions_task.await.into_iter().collect()) }, - ) + cx.background_spawn(async move { + Ok(Some(code_lens_actions_task.await.into_iter().collect())) + }) } } @@ -6480,48 +6389,23 @@ impl LspStore { let buffer_id = buffer.read(cx).remote_id(); if let Some((client, upstream_project_id)) = self.upstream_client() { - if !self.is_capable_for_proto_request( - &buffer, - &GetDocumentDiagnostics { - previous_result_id: None, - }, - cx, - ) { + let request = GetDocumentDiagnostics { + previous_result_id: None, + }; + if !self.is_capable_for_proto_request(&buffer, &request, cx) { return Task::ready(Ok(None)); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer_id.to_proto(), - version: serialize_version(&buffer.read(cx).version()), - project_id: upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDocumentDiagnostics( - proto::GetDocumentDiagnostics { - project_id: upstream_project_id, - buffer_id: buffer_id.to_proto(), - version: serialize_version(&buffer.read(cx).version()), - }, - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); cx.background_spawn(async move { - let _proto_responses = request_task - .await? - .responses - .into_iter() - .filter_map(|lsp_response| match lsp_response.response? { - proto::lsp_response::Response::GetDocumentDiagnosticsResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .collect::>(); // Proto requests cause the diagnostics to be pulled from language server(s) on the local side // and then, buffer state updated with the diagnostics received, which will be later propagated to the client. // Do not attempt to further process the dummy responses here. 
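// Illustrative sketch (not from the patch above) of how callers can treat the
// Option-wrapped results these query methods now return: `Ok(None)` means no capable
// language server handled the request (or it was superseded), which is distinct from
// an empty result set. `T` stands in for the concrete response type.
fn describe_lsp_result<T>(result: anyhow::Result<Option<Vec<T>>>) -> String {
    match result {
        Ok(Some(items)) => format!("{} result(s)", items.len()),
        Ok(None) => "skipped: no capable language server responded".to_string(),
        Err(error) => format!("request failed: {error:#}"),
    }
}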
+ let _response = request_task.await?; Ok(None) }) } else { @@ -6806,16 +6690,18 @@ impl LspStore { .update(cx, |lsp_store, _| { let lsp_data = lsp_store.lsp_document_colors.entry(buffer_id).or_default(); - if lsp_data.colors_for_version == query_version_queried_for { - lsp_data.colors.extend(fetched_colors.clone()); - lsp_data.cache_version += 1; - } else if !lsp_data - .colors_for_version - .changed_since(&query_version_queried_for) - { - lsp_data.colors_for_version = query_version_queried_for; - lsp_data.colors = fetched_colors.clone(); - lsp_data.cache_version += 1; + if let Some(fetched_colors) = fetched_colors { + if lsp_data.colors_for_version == query_version_queried_for { + lsp_data.colors.extend(fetched_colors); + lsp_data.cache_version += 1; + } else if !lsp_data + .colors_for_version + .changed_since(&query_version_queried_for) + { + lsp_data.colors_for_version = query_version_queried_for; + lsp_data.colors = fetched_colors; + lsp_data.cache_version += 1; + } } lsp_data.colors_update = None; let colors = lsp_data @@ -6840,56 +6726,45 @@ impl LspStore { &mut self, buffer: &Entity, cx: &mut Context, - ) -> Task>>> { + ) -> Task>>>> { if let Some((client, project_id)) = self.upstream_client() { let request = GetDocumentColor {}; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Ok(HashMap::default())); + return Task::ready(Ok(None)); } - let request_task = client.request(proto::MultiLspQuery { + let request_task = client.request_lsp( project_id, - buffer_id: buffer.read(cx).remote_id().to_proto(), - version: serialize_version(&buffer.read(cx).version()), - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetDocumentColor( - request.to_proto(project_id, buffer.read(cx)), - )), - }); + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); - cx.spawn(async move |project, cx| { - let Some(project) = project.upgrade() else { - return Ok(HashMap::default()); + cx.spawn(async move |lsp_store, cx| { + let Some(project) = lsp_store.upgrade() else { + return Ok(None); }; let colors = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetDocumentColorResponse(response) => { - Some(( - LanguageServerId::from_proto(lsp_response.server_id), - response, - )) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|(server_id, color_response)| { + .map(|color_response| { let response = request.response_from_proto( - color_response, + color_response.response, project.clone(), buffer.clone(), cx.clone(), ); - async move { (server_id, response.await.log_err().unwrap_or_default()) } + async move { + ( + LanguageServerId::from_proto(color_response.server_id), + response.await.log_err().unwrap_or_default(), + ) + } }), ) .await @@ -6900,23 +6775,25 @@ impl LspStore { .extend(colors); acc }); - Ok(colors) + Ok(Some(colors)) }) } else { let document_colors_task = self.request_multiple_lsp_locally(buffer, None::, GetDocumentColor, cx); cx.background_spawn(async move { - Ok(document_colors_task - .await - .into_iter() - .fold(HashMap::default(), |mut acc, (server_id, colors)| { - acc.entry(server_id) - .or_insert_with(HashSet::default) - .extend(colors); - acc - }) - .into_iter() - .collect()) + Ok(Some( + document_colors_task + .await + .into_iter() + .fold(HashMap::default(), |mut acc, (server_id, colors)| { + acc.entry(server_id) + .or_insert_with(HashSet::default) + .extend(colors); + acc + }) + .into_iter() + .collect(), + )) }) } } @@ -6926,49 +6803,34 @@ impl LspStore { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); if let Some((client, upstream_project_id)) = self.upstream_client() { let request = GetSignatureHelp { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Vec::new()); + return Task::ready(None); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), - project_id: upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetSignatureHelp( - request.to_proto(upstream_project_id, buffer.read(cx)), - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { - return Vec::new(); - }; - join_all( + let project = weak_project.upgrade()?; + let signatures = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetSignatureHelpResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|signature_response| { + .map(|response| { let response = GetSignatureHelp { position }.response_from_proto( - signature_response, + response.response, project.clone(), buffer.clone(), cx.clone(), @@ -6979,7 +6841,8 @@ impl LspStore { .await .into_iter() .flatten() - .collect() + .collect(); + Some(signatures) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -6989,11 +6852,13 @@ impl LspStore { cx, ); cx.background_spawn(async move { - all_actions_task - .await - .into_iter() - .flat_map(|(_, actions)| actions) - .collect::>() + Some( + all_actions_task + .await + .into_iter() + .flat_map(|(_, actions)| actions) + .collect::>(), + ) }) } } @@ -7003,47 +6868,32 @@ impl LspStore { buffer: &Entity, position: PointUtf16, cx: &mut Context, - ) -> Task> { + ) -> Task>> { if let Some((client, upstream_project_id)) = self.upstream_client() { let request = GetHover { position }; if !self.is_capable_for_proto_request(buffer, &request, cx) { - return Task::ready(Vec::new()); + return Task::ready(None); } - let request_task = client.request(proto::MultiLspQuery { - buffer_id: buffer.read(cx).remote_id().into(), - version: serialize_version(&buffer.read(cx).version()), - project_id: upstream_project_id, - strategy: Some(proto::multi_lsp_query::Strategy::All( - proto::AllLanguageServers {}, - )), - request: Some(proto::multi_lsp_query::Request::GetHover( - request.to_proto(upstream_project_id, buffer.read(cx)), - )), - }); + let request_task = client.request_lsp( + upstream_project_id, + LSP_REQUEST_TIMEOUT, + cx.background_executor().clone(), + request.to_proto(upstream_project_id, buffer.read(cx)), + ); let buffer = buffer.clone(); cx.spawn(async move |weak_project, cx| { - let Some(project) = weak_project.upgrade() else { - return Vec::new(); - }; - join_all( + let project = weak_project.upgrade()?; + let hovers = join_all( request_task .await .log_err() - .map(|response| response.responses) + .flatten() + .map(|response| response.payload) .unwrap_or_default() .into_iter() - .filter_map(|lsp_response| match lsp_response.response? 
{ - proto::lsp_response::Response::GetHoverResponse(response) => { - Some(response) - } - unexpected => { - debug_panic!("Unexpected response: {unexpected:?}"); - None - } - }) - .map(|hover_response| { + .map(|response| { let response = GetHover { position }.response_from_proto( - hover_response, + response.response, project.clone(), buffer.clone(), cx.clone(), @@ -7060,7 +6910,8 @@ impl LspStore { .await .into_iter() .flatten() - .collect() + .collect(); + Some(hovers) }) } else { let all_actions_task = self.request_multiple_lsp_locally( @@ -7070,11 +6921,13 @@ impl LspStore { cx, ); cx.background_spawn(async move { - all_actions_task - .await - .into_iter() - .filter_map(|(_, hover)| remove_empty_hover_blocks(hover?)) - .collect::>() + Some( + all_actions_task + .await + .into_iter() + .filter_map(|(_, hover)| remove_empty_hover_blocks(hover?)) + .collect::>(), + ) }) } } @@ -7735,19 +7588,16 @@ impl LspStore { let snapshot = buffer_handle.read(cx).snapshot(); let buffer = buffer_handle.read(cx); let reused_diagnostics = buffer - .get_diagnostics(server_id) - .into_iter() - .flat_map(|diag| { - diag.iter() - .filter(|v| merge(buffer, &v.diagnostic, cx)) - .map(|v| { - let start = Unclipped(v.range.start.to_point_utf16(&snapshot)); - let end = Unclipped(v.range.end.to_point_utf16(&snapshot)); - DiagnosticEntry { - range: start..end, - diagnostic: v.diagnostic.clone(), - } - }) + .buffer_diagnostics(Some(server_id)) + .iter() + .filter(|v| merge(buffer, &v.diagnostic, cx)) + .map(|v| { + let start = Unclipped(v.range.start.to_point_utf16(&snapshot)); + let end = Unclipped(v.range.end.to_point_utf16(&snapshot)); + DiagnosticEntry { + range: start..end, + diagnostic: v.diagnostic.clone(), + } }) .collect::>(); @@ -8137,6 +7987,203 @@ impl LspStore { })? } + async fn handle_lsp_query( + lsp_store: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, + ) -> Result { + use proto::lsp_query::Request; + let sender_id = envelope.original_sender_id().unwrap_or_default(); + let lsp_query = envelope.payload; + let lsp_request_id = LspRequestId(lsp_query.lsp_request_id); + match lsp_query.request.context("invalid LSP query request")? 
{ + Request::GetReferences(get_references) => { + let position = get_references.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_references, + position, + cx.clone(), + ) + .await?; + } + Request::GetDocumentColor(get_document_color) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_document_color, + None, + cx.clone(), + ) + .await?; + } + Request::GetHover(get_hover) => { + let position = get_hover.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_hover, + position, + cx.clone(), + ) + .await?; + } + Request::GetCodeActions(get_code_actions) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_code_actions, + None, + cx.clone(), + ) + .await?; + } + Request::GetSignatureHelp(get_signature_help) => { + let position = get_signature_help + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_signature_help, + position, + cx.clone(), + ) + .await?; + } + Request::GetCodeLens(get_code_lens) => { + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_code_lens, + None, + cx.clone(), + ) + .await?; + } + Request::GetDefinition(get_definition) => { + let position = get_definition.position.clone().and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_definition, + position, + cx.clone(), + ) + .await?; + } + Request::GetDeclaration(get_declaration) => { + let position = get_declaration + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_declaration, + position, + cx.clone(), + ) + .await?; + } + Request::GetTypeDefinition(get_type_definition) => { + let position = get_type_definition + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_type_definition, + position, + cx.clone(), + ) + .await?; + } + Request::GetImplementation(get_implementation) => { + let position = get_implementation + .position + .clone() + .and_then(deserialize_anchor); + Self::query_lsp_locally::( + lsp_store, + sender_id, + lsp_request_id, + get_implementation, + position, + cx.clone(), + ) + .await?; + } + // Diagnostics pull synchronizes internally via the buffer state, and cannot be handled generically as the other requests. + Request::GetDocumentDiagnostics(get_document_diagnostics) => { + let buffer_id = BufferId::new(get_document_diagnostics.buffer_id())?; + let version = deserialize_version(get_document_diagnostics.buffer_version()); + let buffer = lsp_store.update(&mut cx, |this, cx| { + this.buffer_store.read(cx).get_existing(buffer_id) + })??; + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_version(version.clone()) + })? 
+ .await?; + lsp_store.update(&mut cx, |lsp_store, cx| { + let existing_queries = lsp_store + .running_lsp_requests + .entry(TypeId::of::()) + .or_default(); + if ::ProtoRequest::stop_previous_requests( + ) || buffer.read(cx).version.changed_since(&existing_queries.0) + { + existing_queries.1.clear(); + } + existing_queries.1.insert( + lsp_request_id, + cx.spawn(async move |lsp_store, cx| { + let diagnostics_pull = lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.pull_diagnostics_for_buffer(buffer, cx) + }) + .ok(); + if let Some(diagnostics_pull) = diagnostics_pull { + match diagnostics_pull.await { + Ok(()) => {} + Err(e) => log::error!("Failed to pull diagnostics: {e:#}"), + }; + } + }), + ); + })?; + } + } + Ok(proto::Ack {}) + } + + async fn handle_lsp_query_response( + lsp_store: Entity, + envelope: TypedEnvelope, + cx: AsyncApp, + ) -> Result<()> { + lsp_store.read_with(&cx, |lsp_store, _| { + if let Some((upstream_client, _)) = lsp_store.upstream_client() { + upstream_client.handle_lsp_response(envelope.clone()); + } + })?; + Ok(()) + } + + // todo(lsp) remove after Zed Stable hits v0.204.x async fn handle_multi_lsp_query( lsp_store: Entity, envelope: TypedEnvelope, @@ -8710,7 +8757,7 @@ impl LspStore { (root_path.join(&old_path), root_path.join(&new_path)) }; - Self::will_rename_entry( + let _transaction = Self::will_rename_entry( this.downgrade(), worktree_id, &old_abs_path, @@ -8979,13 +9026,22 @@ impl LspStore { lsp_store.update(&mut cx, |lsp_store, cx| { if let Some(server) = lsp_store.language_server_for_id(server_id) { let text_document = if envelope.payload.current_file_only { - let buffer_id = BufferId::new(envelope.payload.buffer_id)?; - lsp_store - .buffer_store() - .read(cx) - .get(buffer_id) - .and_then(|buffer| Some(buffer.read(cx).file()?.as_local()?.abs_path(cx))) - .map(|path| make_text_document_identifier(&path)) + let buffer_id = envelope + .payload + .buffer_id + .map(|id| BufferId::new(id)) + .transpose()?; + buffer_id + .and_then(|buffer_id| { + lsp_store + .buffer_store() + .read(cx) + .get(buffer_id) + .and_then(|buffer| { + Some(buffer.read(cx).file()?.as_local()?.abs_path(cx)) + }) + .map(|path| make_text_document_identifier(&path)) + }) .transpose()? } else { None @@ -9172,7 +9228,7 @@ impl LspStore { new_path: &Path, is_dir: bool, cx: AsyncApp, - ) -> Task<()> { + ) -> Task { let old_uri = lsp::Url::from_file_path(old_path).ok().map(String::from); let new_uri = lsp::Url::from_file_path(new_path).ok().map(String::from); cx.spawn(async move |cx| { @@ -9205,7 +9261,7 @@ impl LspStore { .log_err() .flatten()?; - LocalLspStore::deserialize_workspace_edit( + let transaction = LocalLspStore::deserialize_workspace_edit( this.upgrade()?, edit, false, @@ -9213,8 +9269,8 @@ impl LspStore { cx, ) .await - .ok(); - Some(()) + .ok()?; + Some(transaction) } }); tasks.push(apply_edit); @@ -9224,11 +9280,17 @@ impl LspStore { }) .ok() .flatten(); + let mut merged_transaction = ProjectTransaction::default(); for task in tasks { // Await on tasks sequentially so that the order of application of edits is deterministic // (at least with regards to the order of registration of language servers) - task.await; + if let Some(transaction) = task.await { + for (buffer, buffer_transaction) in transaction.0 { + merged_transaction.0.insert(buffer, buffer_transaction); + } + } } + merged_transaction }) } @@ -11641,12 +11703,11 @@ impl LspStore { // Ignore payload since we notify clients of setting changes unconditionally, relying on them pulling the latest settings. 
} "workspace/symbol" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.workspace_symbol_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.workspace_symbol_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "workspace/fileOperations" => { if let Some(options) = reg.register_options { @@ -11670,12 +11731,11 @@ impl LspStore { } } "textDocument/rangeFormatting" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.document_range_formatting_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.document_range_formatting_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/onTypeFormatting" => { if let Some(options) = reg @@ -11690,36 +11750,32 @@ impl LspStore { } } "textDocument/formatting" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.document_formatting_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.document_formatting_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/rename" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.rename_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.rename_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/inlayHint" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.inlay_hint_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.inlay_hint_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/documentSymbol" => { - if let Some(options) = parse_register_capabilities(reg)? { - server.update_capabilities(|capabilities| { - capabilities.document_symbol_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.document_symbol_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/codeAction" => { if let Some(options) = reg @@ -11735,12 +11791,11 @@ impl LspStore { } } "textDocument/definition" => { - if let Some(options) = parse_register_capabilities(reg)? 
{ - server.update_capabilities(|capabilities| { - capabilities.definition_provider = Some(options); - }); - notify_server_capabilities_updated(&server, cx); - } + let options = parse_register_capabilities(reg)?; + server.update_capabilities(|capabilities| { + capabilities.definition_provider = Some(options); + }); + notify_server_capabilities_updated(&server, cx); } "textDocument/completion" => { if let Some(caps) = reg @@ -11848,7 +11903,7 @@ impl LspStore { notify_server_capabilities_updated(&server, cx); } } - "textDocument/colorProvider" => { + "textDocument/documentColor" => { if let Some(caps) = reg .register_options .map(serde_json::from_value) @@ -11999,7 +12054,7 @@ impl LspStore { }); notify_server_capabilities_updated(&server, cx); } - "textDocument/colorProvider" => { + "textDocument/documentColor" => { server.update_capabilities(|capabilities| { capabilities.color_provider = None; }); @@ -12012,6 +12067,88 @@ impl LspStore { Ok(()) } + async fn query_lsp_locally( + lsp_store: Entity, + sender_id: proto::PeerId, + lsp_request_id: LspRequestId, + proto_request: T::ProtoRequest, + position: Option, + mut cx: AsyncApp, + ) -> Result<()> + where + T: LspCommand + Clone, + T::ProtoRequest: proto::LspRequestMessage, + ::Response: + Into<::Response>, + { + let buffer_id = BufferId::new(proto_request.buffer_id())?; + let version = deserialize_version(proto_request.buffer_version()); + let buffer = lsp_store.update(&mut cx, |this, cx| { + this.buffer_store.read(cx).get_existing(buffer_id) + })??; + buffer + .update(&mut cx, |buffer, _| { + buffer.wait_for_version(version.clone()) + })? + .await?; + let buffer_version = buffer.read_with(&cx, |buffer, _| buffer.version())?; + let request = + T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?; + lsp_store.update(&mut cx, |lsp_store, cx| { + let request_task = + lsp_store.request_multiple_lsp_locally(&buffer, position, request, cx); + let existing_queries = lsp_store + .running_lsp_requests + .entry(TypeId::of::()) + .or_default(); + if T::ProtoRequest::stop_previous_requests() + || buffer_version.changed_since(&existing_queries.0) + { + existing_queries.1.clear(); + } + existing_queries.1.insert( + lsp_request_id, + cx.spawn(async move |lsp_store, cx| { + let response = request_task.await; + lsp_store + .update(cx, |lsp_store, cx| { + if let Some((client, project_id)) = lsp_store.downstream_client.clone() + { + let response = response + .into_iter() + .map(|(server_id, response)| { + ( + server_id.to_proto(), + T::response_to_proto( + response, + lsp_store, + sender_id, + &buffer_version, + cx, + ) + .into(), + ) + }) + .collect::>(); + match client.send_lsp_response::( + project_id, + lsp_request_id, + response, + ) { + Ok(()) => {} + Err(e) => { + log::error!("Failed to send LSP response: {e:#}",) + } + } + } + }) + .ok(); + }), + ); + })?; + Ok(()) + } + fn take_text_document_sync_options( capabilities: &mut lsp::ServerCapabilities, ) -> lsp::TextDocumentSyncOptions { @@ -12025,16 +12162,22 @@ impl LspStore { None => lsp::TextDocumentSyncOptions::default(), } } + + #[cfg(any(test, feature = "test-support"))] + pub fn forget_code_lens_task(&mut self, buffer_id: BufferId) -> Option { + let data = self.lsp_code_lens.get_mut(&buffer_id)?; + Some(data.update.take()?.1) + } } // Registration with registerOptions as null, should fallback to true. 
// https://github.com/microsoft/vscode-languageserver-node/blob/d90a87f9557a0df9142cfb33e251cfa6fe27d970/client/src/common/client.ts#L2133 fn parse_register_capabilities( reg: lsp::Registration, -) -> anyhow::Result>> { +) -> Result> { Ok(match reg.register_options { - Some(options) => Some(OneOf::Right(serde_json::from_value::(options)?)), - None => Some(OneOf::Left(true)), + Some(options) => OneOf::Right(serde_json::from_value::(options)?), + None => OneOf::Left(true), }) } @@ -13149,10 +13292,10 @@ fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) { let mut offset_map = vec![0; label.text.len() + 1]; let mut last_char_was_space = false; let mut new_idx = 0; - let mut chars = label.text.char_indices().fuse(); + let chars = label.text.char_indices().fuse(); let mut newlines_removed = false; - while let Some((idx, c)) = chars.next() { + for (idx, c) in chars { offset_map[idx] = new_idx; match c { diff --git a/crates/project/src/lsp_store/rust_analyzer_ext.rs b/crates/project/src/lsp_store/rust_analyzer_ext.rs index e5e6338d3c..54f63220b1 100644 --- a/crates/project/src/lsp_store/rust_analyzer_ext.rs +++ b/crates/project/src/lsp_store/rust_analyzer_ext.rs @@ -1,8 +1,8 @@ use ::serde::{Deserialize, Serialize}; use anyhow::Context as _; -use gpui::{App, Entity, Task, WeakEntity}; -use language::ServerHealth; -use lsp::{LanguageServer, LanguageServerName}; +use gpui::{App, AsyncApp, Entity, Task, WeakEntity}; +use language::{Buffer, ServerHealth}; +use lsp::{LanguageServer, LanguageServerId, LanguageServerName}; use rpc::proto; use crate::{LspStore, LspStoreEvent, Project, ProjectPath, lsp_store}; @@ -83,31 +83,32 @@ pub fn register_notifications(lsp_store: WeakEntity, language_server: pub fn cancel_flycheck( project: Entity, - buffer_path: ProjectPath, + buffer_path: Option, cx: &mut App, ) -> Task> { let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); let lsp_store = project.read(cx).lsp_store(); - let buffer = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - buffer_store.open_buffer(buffer_path, cx) + let buffer = buffer_path.map(|buffer_path| { + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.open_buffer(buffer_path, cx) + }) }) }); cx.spawn(async move |cx| { - let buffer = buffer.await?; - let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| { - project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) - })? 
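The reworked `parse_register_capabilities` above now always yields a capability: a registration whose `registerOptions` is `null` falls back to `OneOf::Left(true)` (matching the vscode-languageserver-node behavior linked above) instead of being wrapped in an extra `Option`. A minimal standalone sketch of that fallback, with a simplified `OneOf` standing in for the lsp-types enum:

use serde::de::DeserializeOwned;

#[derive(Debug, PartialEq)]
enum OneOf<A, B> {
    Left(A),
    Right(B),
}

// Simplified stand-in for the real helper: options present -> parse them,
// options absent -> the capability is simply enabled.
fn parse_register_capabilities<T: DeserializeOwned>(
    register_options: Option<serde_json::Value>,
) -> serde_json::Result<OneOf<bool, T>> {
    Ok(match register_options {
        Some(options) => OneOf::Right(serde_json::from_value(options)?),
        None => OneOf::Left(true),
    })
}

fn main() {
    let parsed: OneOf<bool, serde_json::Value> = parse_register_capabilities(None).unwrap();
    assert_eq!(parsed, OneOf::Left(true));
}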
+ let buffer = match buffer { + Some(buffer) => Some(buffer.await?), + None => None, + }; + let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx) else { return Ok(()); }; - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?; if let Some((client, project_id)) = upstream_client { let request = proto::LspExtCancelFlycheck { project_id, - buffer_id, language_server_id: rust_analyzer_server.to_proto(), }; client @@ -130,28 +131,33 @@ pub fn cancel_flycheck( pub fn run_flycheck( project: Entity, - buffer_path: ProjectPath, + buffer_path: Option, cx: &mut App, ) -> Task> { let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); let lsp_store = project.read(cx).lsp_store(); - let buffer = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - buffer_store.open_buffer(buffer_path, cx) + let buffer = buffer_path.map(|buffer_path| { + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.open_buffer(buffer_path, cx) + }) }) }); cx.spawn(async move |cx| { - let buffer = buffer.await?; - let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| { - project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) - })? + let buffer = match buffer { + Some(buffer) => Some(buffer.await?), + None => None, + }; + let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx) else { return Ok(()); }; - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?; if let Some((client, project_id)) = upstream_client { + let buffer_id = buffer + .map(|buffer| buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())) + .transpose()?; let request = proto::LspExtRunFlycheck { project_id, buffer_id, @@ -182,31 +188,32 @@ pub fn run_flycheck( pub fn clear_flycheck( project: Entity, - buffer_path: ProjectPath, + buffer_path: Option, cx: &mut App, ) -> Task> { let upstream_client = project.read(cx).lsp_store().read(cx).upstream_client(); let lsp_store = project.read(cx).lsp_store(); - let buffer = project.update(cx, |project, cx| { - project.buffer_store().update(cx, |buffer_store, cx| { - buffer_store.open_buffer(buffer_path, cx) + let buffer = buffer_path.map(|buffer_path| { + project.update(cx, |project, cx| { + project.buffer_store().update(cx, |buffer_store, cx| { + buffer_store.open_buffer(buffer_path, cx) + }) }) }); cx.spawn(async move |cx| { - let buffer = buffer.await?; - let Some(rust_analyzer_server) = project.read_with(cx, |project, cx| { - project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) - })? 
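With `cancel_flycheck`, `run_flycheck`, and `clear_flycheck` now taking `Option<ProjectPath>`, opening the buffer becomes optional as well: the pending open is an `Option` of a task that still needs awaiting and error propagation. A minimal sketch of that await-an-optional-future pattern, using the futures crate's executor purely to make the example runnable:

use std::future::Future;

// The buffer is only opened when a path was supplied, so the pending open is
// an Option<impl Future> that still has to be awaited and error-propagated.
async fn open_optional<T, E>(
    pending: Option<impl Future<Output = Result<T, E>>>,
) -> Result<Option<T>, E> {
    match pending {
        Some(open) => Ok(Some(open.await?)),
        None => Ok(None),
    }
}

fn main() {
    let pending = Some(async { Ok::<_, std::io::Error>(42) });
    let opened = futures::executor::block_on(open_optional(pending)).unwrap();
    assert_eq!(opened, Some(42));

    let none: Option<std::future::Ready<Result<i32, std::io::Error>>> = None;
    assert_eq!(futures::executor::block_on(open_optional(none)).unwrap(), None);
}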
+ let buffer = match buffer { + Some(buffer) => Some(buffer.await?), + None => None, + }; + let Some(rust_analyzer_server) = find_rust_analyzer_server(&project, buffer.as_ref(), cx) else { return Ok(()); }; - let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id().to_proto())?; if let Some((client, project_id)) = upstream_client { let request = proto::LspExtClearFlycheck { project_id, - buffer_id, language_server_id: rust_analyzer_server.to_proto(), }; client @@ -226,3 +233,40 @@ pub fn clear_flycheck( anyhow::Ok(()) }) } + +fn find_rust_analyzer_server( + project: &Entity, + buffer: Option<&Entity>, + cx: &mut AsyncApp, +) -> Option { + project + .read_with(cx, |project, cx| { + buffer + .and_then(|buffer| { + project.language_server_id_for_name(buffer.read(cx), &RUST_ANALYZER_NAME, cx) + }) + // If no rust-analyzer found for the current buffer (e.g. `settings.json`), fall back to the project lookup + // and use project's rust-analyzer if it's the only one. + .or_else(|| { + let rust_analyzer_servers = project + .lsp_store() + .read(cx) + .language_server_statuses + .iter() + .filter_map(|(server_id, server_status)| { + if server_status.name == RUST_ANALYZER_NAME { + Some(*server_id) + } else { + None + } + }) + .collect::>(); + if rust_analyzer_servers.len() == 1 { + rust_analyzer_servers.first().copied() + } else { + None + } + }) + }) + .ok()? +} diff --git a/crates/project/src/manifest_tree/server_tree.rs b/crates/project/src/manifest_tree/server_tree.rs index 5e5f4bab49..48e2007d47 100644 --- a/crates/project/src/manifest_tree/server_tree.rs +++ b/crates/project/src/manifest_tree/server_tree.rs @@ -181,6 +181,7 @@ impl LanguageServerTree { &root_path.path, language_name.clone(), ); + ( Arc::new(InnerTreeNode::new( adapter.name(), @@ -408,6 +409,7 @@ impl ServerTreeRebase { if live_node.id.get().is_some() { return Some(node); } + let disposition = &live_node.disposition; let Some((existing_node, _)) = self .old_contents diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e47c020a42..9fd4eed641 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -327,6 +327,7 @@ pub enum Event { RevealInProjectPanel(ProjectEntryId), SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>), ExpandedAllForEntry(WorktreeId, ProjectEntryId), + EntryRenamed(ProjectTransaction), AgentLocationChanged, } @@ -2119,7 +2120,7 @@ impl Project { let is_root_entry = self.entry_is_worktree_root(entry_id, cx); let lsp_store = self.lsp_store().downgrade(); - cx.spawn(async move |_, cx| { + cx.spawn(async move |project, cx| { let (old_abs_path, new_abs_path) = { let root_path = worktree.read_with(cx, |this, _| this.abs_path())?; let new_abs_path = if is_root_entry { @@ -2129,7 +2130,7 @@ impl Project { }; (root_path.join(&old_path), new_abs_path) }; - LspStore::will_rename_entry( + let transaction = LspStore::will_rename_entry( lsp_store.clone(), worktree_id, &old_abs_path, @@ -2145,6 +2146,12 @@ impl Project { })? 
.await?; + project + .update(cx, |_, cx| { + cx.emit(Event::EntryRenamed(transaction)); + }) + .ok(); + lsp_store .read_with(cx, |this, _| { this.did_rename_entry(worktree_id, &old_abs_path, &new_abs_path, is_dir); @@ -3415,7 +3422,7 @@ impl Project { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let position = position.to_point_utf16(buffer.read(cx)); let guard = self.retain_remotely_created_models(cx); let task = self.lsp_store.update(cx, |lsp_store, cx| { @@ -3433,7 +3440,7 @@ impl Project { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let position = position.to_point_utf16(buffer.read(cx)); let guard = self.retain_remotely_created_models(cx); let task = self.lsp_store.update(cx, |lsp_store, cx| { @@ -3451,7 +3458,7 @@ impl Project { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let position = position.to_point_utf16(buffer.read(cx)); let guard = self.retain_remotely_created_models(cx); let task = self.lsp_store.update(cx, |lsp_store, cx| { @@ -3469,7 +3476,7 @@ impl Project { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let position = position.to_point_utf16(buffer.read(cx)); let guard = self.retain_remotely_created_models(cx); let task = self.lsp_store.update(cx, |lsp_store, cx| { @@ -3487,7 +3494,7 @@ impl Project { buffer: &Entity, position: T, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let position = position.to_point_utf16(buffer.read(cx)); let guard = self.retain_remotely_created_models(cx); let task = self.lsp_store.update(cx, |lsp_store, cx| { @@ -3585,23 +3592,12 @@ impl Project { }) } - pub fn signature_help( - &self, - buffer: &Entity, - position: T, - cx: &mut Context, - ) -> Task> { - self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.signature_help(buffer, position, cx) - }) - } - pub fn hover( &self, buffer: &Entity, position: T, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let position = position.to_point_utf16(buffer.read(cx)); self.lsp_store .update(cx, |lsp_store, cx| lsp_store.hover(buffer, position, cx)) @@ -3637,7 +3633,7 @@ impl Project { range: Range, kinds: Option>, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let buffer = buffer_handle.read(cx); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); self.lsp_store.update(cx, |lsp_store, cx| { @@ -3650,7 +3646,7 @@ impl Project { buffer: &Entity, range: Range, cx: &mut Context, - ) -> Task>> { + ) -> Task>>> { let snapshot = buffer.read(cx).snapshot(); let range = range.to_point(&snapshot); let range_start = snapshot.anchor_before(range.start); @@ -3668,16 +3664,18 @@ impl Project { let mut code_lens_actions = code_lens_actions .await .map_err(|e| anyhow!("code lens fetch failed: {e:#}"))?; - code_lens_actions.retain(|code_lens_action| { - range - .start - .cmp(&code_lens_action.range.start, &snapshot) - .is_ge() - && range - .end - .cmp(&code_lens_action.range.end, &snapshot) - .is_le() - }); + if let Some(code_lens_actions) = &mut code_lens_actions { + code_lens_actions.retain(|code_lens_action| { + range + .start + .cmp(&code_lens_action.range.start, &snapshot) + .is_ge() + && range + .end + .cmp(&code_lens_action.range.end, &snapshot) + .is_le() + }); + } Ok(code_lens_actions) }) } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index a6fea4059c..4447c25129 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -181,17 +181,6 @@ pub 
struct DiagnosticsSettings { /// Settings for showing inline diagnostics. pub inline: InlineDiagnosticsSettings, - - /// Configuration, related to Rust language diagnostics. - pub cargo: Option, -} - -impl DiagnosticsSettings { - pub fn fetch_cargo_diagnostics(&self) -> bool { - self.cargo - .as_ref() - .is_some_and(|cargo_diagnostics| cargo_diagnostics.fetch_cargo_diagnostics) - } } #[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)] @@ -258,7 +247,6 @@ impl Default for DiagnosticsSettings { include_warnings: true, lsp_pull_diagnostics: LspPullDiagnosticsSettings::default(), inline: InlineDiagnosticsSettings::default(), - cargo: None, } } } @@ -292,16 +280,6 @@ impl Default for GlobalLspSettings { } } -#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] -pub struct CargoDiagnosticsSettings { - /// When enabled, Zed disables rust-analyzer's check on save and starts to query - /// Cargo diagnostics separately. - /// - /// Default: false - #[serde(default)] - pub fetch_cargo_diagnostics: bool, -} - #[derive( Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, JsonSchema, )] diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 8b0b21fcd6..6dcd07482e 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -4,6 +4,7 @@ use crate::{ Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation, *, }; +use async_trait::async_trait; use buffer_diff::{ BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks, @@ -21,7 +22,8 @@ use http_client::Url; use itertools::Itertools; use language::{ Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter, - LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint, + LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider, + ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainLister, language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings}, tree_sitter_rust, tree_sitter_typescript, }; @@ -140,8 +142,10 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { end_of_line = lf insert_final_newline = true trim_trailing_whitespace = true + max_line_length = 120 [*.js] tab_width = 10 + max_line_length = off "#, ".zed": { "settings.json": r#"{ @@ -149,7 +153,8 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { "hard_tabs": false, "ensure_final_newline_on_save": false, "remove_trailing_whitespace_on_save": false, - "soft_wrap": "editor_width" + "preferred_line_length": 64, + "soft_wrap": "editor_width", }"#, }, "a.rs": "fn a() {\n A\n}", @@ -157,6 +162,7 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { ".editorconfig": r#" [*.rs] indent_size = 2 + max_line_length = off, "#, "b.rs": "fn b() {\n B\n}", }, @@ -205,6 +211,7 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { assert_eq!(settings_a.hard_tabs, true); assert_eq!(settings_a.ensure_final_newline_on_save, true); assert_eq!(settings_a.remove_trailing_whitespace_on_save, true); + assert_eq!(settings_a.preferred_line_length, 120); // .editorconfig in b/ overrides .editorconfig in root assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2)); @@ -212,6 +219,10 @@ async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) { // "indent_size" is not set, so "tab_width" is used 
assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10)); + // When max_line_length is "off", default to .zed/settings.json + assert_eq!(settings_b.preferred_line_length, 64); + assert_eq!(settings_c.preferred_line_length, 64); + // README.md should not be affected by .editorconfig's globe "*.rs" assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8)); }); @@ -587,6 +598,203 @@ async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_running_multiple_instances_of_a_single_server_in_one_worktree( + cx: &mut gpui::TestAppContext, +) { + pub(crate) struct PyprojectTomlManifestProvider; + + impl ManifestProvider for PyprojectTomlManifestProvider { + fn name(&self) -> ManifestName { + SharedString::new_static("pyproject.toml").into() + } + + fn search( + &self, + ManifestQuery { + path, + depth, + delegate, + }: ManifestQuery, + ) -> Option> { + for path in path.ancestors().take(depth) { + let p = path.join("pyproject.toml"); + if delegate.exists(&p, Some(false)) { + return Some(path.into()); + } + } + + None + } + } + + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + path!("/the-root"), + json!({ + ".zed": { + "settings.json": r#" + { + "languages": { + "Python": { + "language_servers": ["ty"] + } + } + }"# + }, + "project-a": { + ".venv": {}, + "file.py": "", + "pyproject.toml": "" + }, + "project-b": { + ".venv": {}, + "source_file.py":"", + "another_file.py": "", + "pyproject.toml": "" + } + }), + ) + .await; + cx.update(|cx| { + ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider)) + }); + + let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + let _fake_python_server = language_registry.register_fake_lsp( + "Python", + FakeLspAdapter { + name: "ty", + capabilities: lsp::ServerCapabilities { + ..Default::default() + }, + ..Default::default() + }, + ); + + language_registry.add(python_lang(fs.clone())); + let (first_buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + let servers = project.update(cx, |project, cx| { + project.lsp_store.update(cx, |this, cx| { + first_buffer.update(cx, |buffer, cx| { + this.language_servers_for_local_buffer(buffer, cx) + .map(|(adapter, server)| (adapter.clone(), server.clone())) + .collect::>() + }) + }) + }); + cx.executor().run_until_parked(); + assert_eq!(servers.len(), 1); + let (adapter, server) = servers.into_iter().next().unwrap(); + assert_eq!(adapter.name(), LanguageServerName::new_static("ty")); + assert_eq!(server.server_id(), LanguageServerId(0)); + // `workspace_folders` are set to the rooting point. 
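The test's `PyprojectTomlManifestProvider` above roots a language server by walking up at most `depth` ancestors until it finds a directory containing `pyproject.toml`. A self-contained sketch of that lookup, with a plain closure standing in for the worktree delegate's existence check:

use std::path::{Path, PathBuf};

// Walk up at most `depth` ancestors and return the first directory that
// contains the manifest file; `exists` stands in for the worktree delegate.
fn find_manifest_root(
    path: &Path,
    depth: usize,
    exists: impl Fn(&Path) -> bool,
) -> Option<PathBuf> {
    path.ancestors()
        .take(depth)
        .find(|ancestor| exists(&ancestor.join("pyproject.toml")))
        .map(PathBuf::from)
}

fn main() {
    let tracked = [PathBuf::from("project-a/pyproject.toml")];
    let exists = |candidate: &Path| tracked.iter().any(|path| path.as_path() == candidate);
    assert_eq!(
        find_manifest_root(Path::new("project-a/src"), 3, exists),
        Some(PathBuf::from("project-a"))
    );
    assert_eq!(find_manifest_root(Path::new("elsewhere"), 3, exists), None);
}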
+ assert_eq!( + server.workspace_folders(), + BTreeSet::from_iter( + [Url::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter() + ) + ); + + let (second_project_buffer, _other_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx) + }) + .await + .unwrap(); + cx.executor().run_until_parked(); + let servers = project.update(cx, |project, cx| { + project.lsp_store.update(cx, |this, cx| { + second_project_buffer.update(cx, |buffer, cx| { + this.language_servers_for_local_buffer(buffer, cx) + .map(|(adapter, server)| (adapter.clone(), server.clone())) + .collect::>() + }) + }) + }); + cx.executor().run_until_parked(); + assert_eq!(servers.len(), 1); + let (adapter, server) = servers.into_iter().next().unwrap(); + assert_eq!(adapter.name(), LanguageServerName::new_static("ty")); + // We're not using venvs at all here, so both folders should fall under the same root. + assert_eq!(server.server_id(), LanguageServerId(0)); + // Now, let's select a different toolchain for one of subprojects. + let (available_toolchains_for_b, root_path) = project + .update(cx, |this, cx| { + let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id(); + this.available_toolchains( + ProjectPath { + worktree_id, + path: Arc::from("project-b/source_file.py".as_ref()), + }, + LanguageName::new("Python"), + cx, + ) + }) + .await + .expect("A toolchain to be discovered"); + assert_eq!(root_path.as_ref(), Path::new("project-b")); + assert_eq!(available_toolchains_for_b.toolchains().len(), 1); + let currently_active_toolchain = project + .update(cx, |this, cx| { + let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id(); + this.active_toolchain( + ProjectPath { + worktree_id, + path: Arc::from("project-b/source_file.py".as_ref()), + }, + LanguageName::new("Python"), + cx, + ) + }) + .await; + + assert!(currently_active_toolchain.is_none()); + let _ = project + .update(cx, |this, cx| { + let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id(); + this.activate_toolchain( + ProjectPath { + worktree_id, + path: root_path, + }, + available_toolchains_for_b + .toolchains + .into_iter() + .next() + .unwrap(), + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + let servers = project.update(cx, |project, cx| { + project.lsp_store.update(cx, |this, cx| { + second_project_buffer.update(cx, |buffer, cx| { + this.language_servers_for_local_buffer(buffer, cx) + .map(|(adapter, server)| (adapter.clone(), server.clone())) + .collect::>() + }) + }) + }); + cx.executor().run_until_parked(); + assert_eq!(servers.len(), 1); + let (adapter, server) = servers.into_iter().next().unwrap(); + assert_eq!(adapter.name(), LanguageServerName::new_static("ty")); + // There's a new language server in town. 
+ assert_eq!(server.server_id(), LanguageServerId(1)); +} + #[gpui::test] async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { init_test(cx); @@ -3005,6 +3213,7 @@ async fn test_definition(cx: &mut gpui::TestAppContext) { let mut definitions = project .update(cx, |project, cx| project.definitions(&buffer, 22, cx)) .await + .unwrap() .unwrap(); // Assert no new language server started @@ -3519,7 +3728,7 @@ async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) { .next() .await; - let action = actions.await.unwrap()[0].clone(); + let action = actions.await.unwrap().unwrap()[0].clone(); let apply = project.update(cx, |project, cx| { project.apply_code_action(buffer.clone(), action, true, cx) }); @@ -6110,6 +6319,7 @@ async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) { hover_task .await .into_iter() + .flatten() .map(|hover| hover.contents.iter().map(|block| &block.text).join("|")) .sorted() .collect::>(), @@ -6183,6 +6393,7 @@ async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) { hover_task .await .into_iter() + .flatten() .map(|hover| hover.contents.iter().map(|block| &block.text).join("|")) .sorted() .collect::>(), @@ -6261,7 +6472,7 @@ async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) { .await .expect("The code action request should have been triggered"); - let code_actions = code_actions_task.await.unwrap(); + let code_actions = code_actions_task.await.unwrap().unwrap(); assert_eq!(code_actions.len(), 1); assert_eq!( code_actions[0].lsp_action.action_kind(), @@ -6420,6 +6631,7 @@ async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) { code_actions_task .await .unwrap() + .unwrap() .into_iter() .map(|code_action| code_action.lsp_action.title().to_owned()) .sorted() @@ -8969,6 +9181,65 @@ fn rust_lang() -> Arc { )) } +fn python_lang(fs: Arc) -> Arc { + struct PythonMootToolchainLister(Arc); + #[async_trait] + impl ToolchainLister for PythonMootToolchainLister { + async fn list( + &self, + worktree_root: PathBuf, + subroot_relative_path: Option>, + _: Option>, + ) -> ToolchainList { + // This lister will always return a path .venv directories within ancestors + let ancestors = subroot_relative_path + .into_iter() + .flat_map(|path| path.ancestors().map(ToOwned::to_owned).collect::>()); + let mut toolchains = vec![]; + for ancestor in ancestors { + let venv_path = worktree_root.join(ancestor).join(".venv"); + if self.0.is_dir(&venv_path).await { + toolchains.push(Toolchain { + name: SharedString::new("Python Venv"), + path: venv_path.to_string_lossy().into_owned().into(), + language_name: LanguageName(SharedString::new_static("Python")), + as_json: serde_json::Value::Null, + }) + } + } + ToolchainList { + toolchains, + ..Default::default() + } + } + // Returns a term which we should use in UI to refer to a toolchain. + fn term(&self) -> SharedString { + SharedString::new_static("virtual environment") + } + /// Returns the name of the manifest file for this toolchain. + fn manifest_name(&self) -> ManifestName { + SharedString::new_static("pyproject.toml").into() + } + } + Arc::new( + Language::new( + LanguageConfig { + name: "Python".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["py".to_string()], + ..Default::default() + }, + ..Default::default() + }, + None, // We're not testing Python parsing with this language. 
+ ) + .with_manifest(Some(ManifestName::from(SharedString::new_static( + "pyproject.toml", + )))) + .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))), + ) +} + fn typescript_lang() -> Arc { Arc::new(Language::new( LanguageConfig { diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index e9582e73fd..b009b357fe 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -4,7 +4,7 @@ use collections::HashMap; use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity}; use itertools::Itertools; use language::LanguageName; -use remote::ssh_session::SshArgs; +use remote::{SshInfo, ssh_session::SshArgs}; use settings::{Settings, SettingsLocation}; use smol::channel::bounded; use std::{ @@ -13,7 +13,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use task::{DEFAULT_REMOTE_SHELL, Shell, ShellBuilder, SpawnInTerminal}; +use task::{Shell, ShellBuilder, SpawnInTerminal}; use terminal::{ TaskState, TaskStatus, Terminal, TerminalBuilder, terminal_settings::{self, ActivateScript, TerminalSettings, VenvSettings}, @@ -58,11 +58,13 @@ impl SshCommand { } } +#[derive(Debug)] pub struct SshDetails { pub host: String, pub ssh_command: SshCommand, pub envs: Option>, pub path_style: PathStyle, + pub shell: String, } impl Project { @@ -87,12 +89,18 @@ impl Project { pub fn ssh_details(&self, cx: &App) -> Option { if let Some(ssh_client) = &self.ssh_client { let ssh_client = ssh_client.read(cx); - if let Some((SshArgs { arguments, envs }, path_style)) = ssh_client.ssh_info() { + if let Some(SshInfo { + args: SshArgs { arguments, envs }, + path_style, + shell, + }) = ssh_client.ssh_info() + { return Some(SshDetails { host: ssh_client.connection_options().host, ssh_command: SshCommand { arguments }, envs, path_style, + shell, }); } } @@ -165,7 +173,9 @@ impl Project { let ssh_details = self.ssh_details(cx); let settings = self.terminal_settings(&path, cx).clone(); - let builder = ShellBuilder::new(ssh_details.is_none(), &settings.shell).non_interactive(); + let builder = + ShellBuilder::new(ssh_details.as_ref().map(|ssh| &*ssh.shell), &settings.shell) + .non_interactive(); let (command, args) = builder.build(Some(command), &Vec::new()); let mut env = self @@ -180,9 +190,11 @@ impl Project { ssh_command, envs, path_style, + shell, .. 
}) => { let (command, args) = wrap_for_ssh( + &shell, &ssh_command, Some((&command, &args)), path.as_deref(), @@ -280,6 +292,7 @@ impl Project { ssh_command, envs, path_style, + shell, }) => { log::debug!("Connecting to a remote server: {ssh_command:?}"); @@ -291,6 +304,7 @@ impl Project { .or_insert_with(|| "xterm-256color".to_string()); let (program, args) = wrap_for_ssh( + &shell, &ssh_command, None, path.as_deref(), @@ -343,11 +357,13 @@ impl Project { ssh_command, envs, path_style, + shell, }) => { log::debug!("Connecting to a remote server: {ssh_command:?}"); env.entry("TERM".to_string()) .or_insert_with(|| "xterm-256color".to_string()); let (program, args) = wrap_for_ssh( + &shell, &ssh_command, spawn_task .command @@ -637,6 +653,7 @@ impl Project { } pub fn wrap_for_ssh( + shell: &str, ssh_command: &SshCommand, command: Option<(&String, &Vec)>, path: Option<&Path>, @@ -645,16 +662,11 @@ pub fn wrap_for_ssh( path_style: PathStyle, ) -> (String, Vec) { let to_run = if let Some((command, args)) = command { - // DEFAULT_REMOTE_SHELL is '"${SHELL:-sh}"' so must not be escaped - let command: Option> = if command == DEFAULT_REMOTE_SHELL { - Some(command.into()) - } else { - shlex::try_quote(command).ok() - }; + let command: Option> = shlex::try_quote(command).ok(); let args = args.iter().filter_map(|arg| shlex::try_quote(arg).ok()); command.into_iter().chain(args).join(" ") } else { - "exec ${SHELL:-sh} -l".to_string() + format!("exec {shell} -l") }; let mut env_changes = String::new(); @@ -688,7 +700,7 @@ pub fn wrap_for_ssh( } else { format!("cd; {env_changes} {to_run}") }; - let shell_invocation = format!("sh -c {}", shlex::try_quote(&commands).unwrap()); + let shell_invocation = format!("{shell} -c {}", shlex::try_quote(&commands).unwrap()); let program = "ssh".to_string(); let mut args = ssh_command.arguments.clone(); diff --git a/crates/project/src/toolchain_store.rs b/crates/project/src/toolchain_store.rs index 05531ebe9a..ac87e64248 100644 --- a/crates/project/src/toolchain_store.rs +++ b/crates/project/src/toolchain_store.rs @@ -34,7 +34,10 @@ enum ToolchainStoreInner { Entity, #[allow(dead_code)] Subscription, ), - Remote(Entity), + Remote( + Entity, + #[allow(dead_code)] Subscription, + ), } impl EventEmitter for ToolchainStore {} @@ -65,10 +68,12 @@ impl ToolchainStore { Self(ToolchainStoreInner::Local(entity, subscription)) } - pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut App) -> Self { - Self(ToolchainStoreInner::Remote( - cx.new(|_| RemoteToolchainStore { client, project_id }), - )) + pub(super) fn remote(project_id: u64, client: AnyProtoClient, cx: &mut Context) -> Self { + let entity = cx.new(|_| RemoteToolchainStore { client, project_id }); + let _subscription = cx.subscribe(&entity, |_, _, e: &ToolchainStoreEvent, cx| { + cx.emit(e.clone()) + }); + Self(ToolchainStoreInner::Remote(entity, _subscription)) } pub(crate) fn activate_toolchain( &self, @@ -80,8 +85,8 @@ impl ToolchainStore { ToolchainStoreInner::Local(local, _) => { local.update(cx, |this, cx| this.activate_toolchain(path, toolchain, cx)) } - ToolchainStoreInner::Remote(remote) => { - remote.read(cx).activate_toolchain(path, toolchain, cx) + ToolchainStoreInner::Remote(remote, _) => { + remote.update(cx, |this, cx| this.activate_toolchain(path, toolchain, cx)) } } } @@ -95,7 +100,7 @@ impl ToolchainStore { ToolchainStoreInner::Local(local, _) => { local.update(cx, |this, cx| this.list_toolchains(path, language_name, cx)) } - ToolchainStoreInner::Remote(remote) => { + 
ToolchainStoreInner::Remote(remote, _) => { remote.read(cx).list_toolchains(path, language_name, cx) } } @@ -112,7 +117,7 @@ impl ToolchainStore { &path.path, language_name, )), - ToolchainStoreInner::Remote(remote) => { + ToolchainStoreInner::Remote(remote, _) => { remote.read(cx).active_toolchain(path, language_name, cx) } } @@ -234,13 +239,13 @@ impl ToolchainStore { pub fn as_language_toolchain_store(&self) -> Arc { match &self.0 { ToolchainStoreInner::Local(local, _) => Arc::new(LocalStore(local.downgrade())), - ToolchainStoreInner::Remote(remote) => Arc::new(RemoteStore(remote.downgrade())), + ToolchainStoreInner::Remote(remote, _) => Arc::new(RemoteStore(remote.downgrade())), } } pub fn as_local_store(&self) -> Option<&Entity> { match &self.0 { ToolchainStoreInner::Local(local, _) => Some(local), - ToolchainStoreInner::Remote(_) => None, + ToolchainStoreInner::Remote(_, _) => None, } } } @@ -415,6 +420,8 @@ impl LocalToolchainStore { .cloned() } } + +impl EventEmitter for RemoteToolchainStore {} struct RemoteToolchainStore { client: AnyProtoClient, project_id: u64, @@ -425,27 +432,37 @@ impl RemoteToolchainStore { &self, project_path: ProjectPath, toolchain: Toolchain, - cx: &App, + cx: &mut Context, ) -> Task> { let project_id = self.project_id; let client = self.client.clone(); - cx.background_spawn(async move { - let path = PathBuf::from(toolchain.path.to_string()); - let _ = client - .request(proto::ActivateToolchain { - project_id, - worktree_id: project_path.worktree_id.to_proto(), - language_name: toolchain.language_name.into(), - toolchain: Some(proto::Toolchain { - name: toolchain.name.into(), - path: path.to_proto(), - raw_json: toolchain.as_json.to_string(), - }), - path: Some(project_path.path.to_string_lossy().into_owned()), + cx.spawn(async move |this, cx| { + let did_activate = cx + .background_spawn(async move { + let path = PathBuf::from(toolchain.path.to_string()); + let _ = client + .request(proto::ActivateToolchain { + project_id, + worktree_id: project_path.worktree_id.to_proto(), + language_name: toolchain.language_name.into(), + toolchain: Some(proto::Toolchain { + name: toolchain.name.into(), + path: path.to_proto(), + raw_json: toolchain.as_json.to_string(), + }), + path: Some(project_path.path.to_string_lossy().into_owned()), + }) + .await + .log_err()?; + Some(()) }) - .await - .log_err()?; - Some(()) + .await; + did_activate.and_then(|_| { + this.update(cx, |_, cx| { + cx.emit(ToolchainStoreEvent::ToolchainActivated); + }) + .ok() + }) }) } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 52ec7a9880..c99f5f8172 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -69,6 +69,7 @@ use workspace::{ notifications::{DetachAndPromptErr, NotifyTaskExt}, }; use worktree::CreatedEntry; +use zed_actions::workspace::OpenWithSystem; const PROJECT_PANEL_KEY: &str = "ProjectPanel"; const NEW_ENTRY_ID: ProjectEntryId = ProjectEntryId::MAX; @@ -255,8 +256,6 @@ actions!( RevealInFileManager, /// Removes the selected folder from the project. RemoveFromProject, - /// Opens the selected file with the system's default application. - OpenWithSystem, /// Cuts the selected file or directory. Cut, /// Pastes the previously cut or copied item. 
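The toolchain store changes above make the Remote variant hold a Subscription to its inner entity and re-emit its `ToolchainStoreEvent`s, so code observing the wrapper sees a remote `ToolchainActivated` just as it would for the local variant. A rough, gpui-free model of that forwarding (the `Emitter` type here is invented for illustration and is not part of the codebase):

use std::cell::RefCell;
use std::rc::Rc;

#[derive(Clone, Debug, PartialEq)]
enum ToolchainStoreEvent {
    ToolchainActivated,
}

type Listener = Box<dyn Fn(&ToolchainStoreEvent)>;

#[derive(Default)]
struct Emitter {
    listeners: RefCell<Vec<Listener>>,
}

impl Emitter {
    fn subscribe(&self, listener: impl Fn(&ToolchainStoreEvent) + 'static) {
        self.listeners.borrow_mut().push(Box::new(listener));
    }
    fn emit(&self, event: ToolchainStoreEvent) {
        for listener in self.listeners.borrow().iter() {
            listener(&event);
        }
    }
}

fn main() {
    let inner = Rc::new(Emitter::default()); // the remote store entity
    let outer = Rc::new(Emitter::default()); // the ToolchainStore wrapper

    // The wrapper's "subscription": forward whatever the inner store emits.
    let forward_to = outer.clone();
    inner.subscribe(move |event| forward_to.emit(event.clone()));

    let seen = Rc::new(RefCell::new(Vec::new()));
    let sink = seen.clone();
    outer.subscribe(move |event| sink.borrow_mut().push(event.clone()));

    inner.emit(ToolchainStoreEvent::ToolchainActivated);
    assert_eq!(*seen.borrow(), vec![ToolchainStoreEvent::ToolchainActivated]);
}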
diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index ea9647feff..473ef5c38c 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -753,28 +753,47 @@ message TextEdit { PointUtf16 lsp_range_end = 3; } -message MultiLspQuery { +message LspQuery { uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - oneof strategy { - AllLanguageServers all = 4; - } + uint64 lsp_request_id = 2; oneof request { + GetReferences get_references = 3; + GetDocumentColor get_document_color = 4; GetHover get_hover = 5; GetCodeActions get_code_actions = 6; GetSignatureHelp get_signature_help = 7; GetCodeLens get_code_lens = 8; GetDocumentDiagnostics get_document_diagnostics = 9; - GetDocumentColor get_document_color = 10; - GetDefinition get_definition = 11; - GetDeclaration get_declaration = 12; - GetTypeDefinition get_type_definition = 13; - GetImplementation get_implementation = 14; - GetReferences get_references = 15; + GetDefinition get_definition = 10; + GetDeclaration get_declaration = 11; + GetTypeDefinition get_type_definition = 12; + GetImplementation get_implementation = 13; } } +message LspQueryResponse { + uint64 project_id = 1; + uint64 lsp_request_id = 2; + repeated LspResponse responses = 3; +} + +message LspResponse { + oneof response { + GetHoverResponse get_hover_response = 1; + GetCodeActionsResponse get_code_actions_response = 2; + GetSignatureHelpResponse get_signature_help_response = 3; + GetCodeLensResponse get_code_lens_response = 4; + GetDocumentDiagnosticsResponse get_document_diagnostics_response = 5; + GetDocumentColorResponse get_document_color_response = 6; + GetDefinitionResponse get_definition_response = 8; + GetDeclarationResponse get_declaration_response = 9; + GetTypeDefinitionResponse get_type_definition_response = 10; + GetImplementationResponse get_implementation_response = 11; + GetReferencesResponse get_references_response = 12; + } + uint64 server_id = 7; +} + message AllLanguageServers {} message LanguageServerSelector { @@ -798,27 +817,6 @@ message StopLanguageServers { bool all = 4; } -message MultiLspQueryResponse { - repeated LspResponse responses = 1; -} - -message LspResponse { - oneof response { - GetHoverResponse get_hover_response = 1; - GetCodeActionsResponse get_code_actions_response = 2; - GetSignatureHelpResponse get_signature_help_response = 3; - GetCodeLensResponse get_code_lens_response = 4; - GetDocumentDiagnosticsResponse get_document_diagnostics_response = 5; - GetDocumentColorResponse get_document_color_response = 6; - GetDefinitionResponse get_definition_response = 8; - GetDeclarationResponse get_declaration_response = 9; - GetTypeDefinitionResponse get_type_definition_response = 10; - GetImplementationResponse get_implementation_response = 11; - GetReferencesResponse get_references_response = 12; - } - uint64 server_id = 7; -} - message LspExtRunnables { uint64 project_id = 1; uint64 buffer_id = 2; @@ -836,21 +834,19 @@ message LspRunnable { message LspExtCancelFlycheck { uint64 project_id = 1; - uint64 buffer_id = 2; - uint64 language_server_id = 3; + uint64 language_server_id = 2; } message LspExtRunFlycheck { uint64 project_id = 1; - uint64 buffer_id = 2; + optional uint64 buffer_id = 2; uint64 language_server_id = 3; bool current_file_only = 4; } message LspExtClearFlycheck { uint64 project_id = 1; - uint64 buffer_id = 2; - uint64 language_server_id = 3; + uint64 language_server_id = 2; } message LspDiagnosticRelatedInformation { @@ -909,3 +905,30 @@ message 
PullWorkspaceDiagnostics { uint64 project_id = 1; uint64 server_id = 2; } + +// todo(lsp) remove after Zed Stable hits v0.204.x +message MultiLspQuery { + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + oneof strategy { + AllLanguageServers all = 4; + } + oneof request { + GetHover get_hover = 5; + GetCodeActions get_code_actions = 6; + GetSignatureHelp get_signature_help = 7; + GetCodeLens get_code_lens = 8; + GetDocumentDiagnostics get_document_diagnostics = 9; + GetDocumentColor get_document_color = 10; + GetDefinition get_definition = 11; + GetDeclaration get_declaration = 12; + GetTypeDefinition get_type_definition = 13; + GetImplementation get_implementation = 14; + GetReferences get_references = 15; + } +} + +message MultiLspQueryResponse { + repeated LspResponse responses = 1; +} diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 310fcf584e..70689bcd63 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -393,7 +393,10 @@ message Envelope { GetCrashFilesResponse get_crash_files_response = 362; GitClone git_clone = 363; - GitCloneResponse git_clone_response = 364; // current max + GitCloneResponse git_clone_response = 364; + + LspQuery lsp_query = 365; + LspQueryResponse lsp_query_response = 366; // current max } reserved 87 to 88; diff --git a/crates/proto/src/macros.rs b/crates/proto/src/macros.rs index 2ce0c0df25..59e984d7db 100644 --- a/crates/proto/src/macros.rs +++ b/crates/proto/src/macros.rs @@ -69,3 +69,32 @@ macro_rules! entity_messages { })* }; } + +#[macro_export] +macro_rules! lsp_messages { + ($(($request_name:ident, $response_name:ident, $stop_previous_requests:expr)),* $(,)?) => { + $(impl LspRequestMessage for $request_name { + type Response = $response_name; + + fn to_proto_query(self) -> $crate::lsp_query::Request { + $crate::lsp_query::Request::$request_name(self) + } + + fn response_to_proto_query(response: Self::Response) -> $crate::lsp_response::Response { + $crate::lsp_response::Response::$response_name(response) + } + + fn buffer_id(&self) -> u64 { + self.buffer_id + } + + fn buffer_version(&self) -> &[$crate::VectorClockEntry] { + &self.version + } + + fn stop_previous_requests() -> bool { + $stop_previous_requests + } + })* + }; +} diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 802db09590..d38e54685f 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -169,6 +169,9 @@ messages!( (MarkNotificationRead, Foreground), (MoveChannel, Foreground), (ReorderChannel, Foreground), + (LspQuery, Background), + (LspQueryResponse, Background), + // todo(lsp) remove after Zed Stable hits v0.204.x (MultiLspQuery, Background), (MultiLspQueryResponse, Background), (OnTypeFormatting, Background), @@ -426,7 +429,10 @@ request_messages!( (SetRoomParticipantRole, Ack), (BlameBuffer, BlameBufferResponse), (RejoinRemoteProjects, RejoinRemoteProjectsResponse), + // todo(lsp) remove after Zed Stable hits v0.204.x (MultiLspQuery, MultiLspQueryResponse), + (LspQuery, Ack), + (LspQueryResponse, Ack), (RestartLanguageServers, Ack), (StopLanguageServers, Ack), (OpenContext, OpenContextResponse), @@ -478,6 +484,20 @@ request_messages!( (GitClone, GitCloneResponse) ); +lsp_messages!( + (GetReferences, GetReferencesResponse, true), + (GetDocumentColor, GetDocumentColorResponse, true), + (GetHover, GetHoverResponse, true), + (GetCodeActions, GetCodeActionsResponse, true), + (GetSignatureHelp, GetSignatureHelpResponse, true), + (GetCodeLens, 
GetCodeLensResponse, true), + (GetDocumentDiagnostics, GetDocumentDiagnosticsResponse, true), + (GetDefinition, GetDefinitionResponse, true), + (GetDeclaration, GetDeclarationResponse, true), + (GetTypeDefinition, GetTypeDefinitionResponse, true), + (GetImplementation, GetImplementationResponse, true), +); + entity_messages!( {project_id, ShareProject}, AddProjectCollaborator, @@ -520,6 +540,9 @@ entity_messages!( LeaveProject, LinkedEditingRange, LoadCommitDiff, + LspQuery, + LspQueryResponse, + // todo(lsp) remove after Zed Stable hits v0.204.x MultiLspQuery, RestartLanguageServers, StopLanguageServers, @@ -777,6 +800,28 @@ pub fn split_repository_update( }]) } +impl LspQuery { + pub fn query_name_and_write_permissions(&self) -> (&str, bool) { + match self.request { + Some(lsp_query::Request::GetHover(_)) => ("GetHover", false), + Some(lsp_query::Request::GetCodeActions(_)) => ("GetCodeActions", true), + Some(lsp_query::Request::GetSignatureHelp(_)) => ("GetSignatureHelp", false), + Some(lsp_query::Request::GetCodeLens(_)) => ("GetCodeLens", true), + Some(lsp_query::Request::GetDocumentDiagnostics(_)) => { + ("GetDocumentDiagnostics", false) + } + Some(lsp_query::Request::GetDefinition(_)) => ("GetDefinition", false), + Some(lsp_query::Request::GetDeclaration(_)) => ("GetDeclaration", false), + Some(lsp_query::Request::GetTypeDefinition(_)) => ("GetTypeDefinition", false), + Some(lsp_query::Request::GetImplementation(_)) => ("GetImplementation", false), + Some(lsp_query::Request::GetReferences(_)) => ("GetReferences", false), + Some(lsp_query::Request::GetDocumentColor(_)) => ("GetDocumentColor", false), + None => ("", true), + } + } +} + +// todo(lsp) remove after Zed Stable hits v0.204.x impl MultiLspQuery { pub fn request_str(&self) -> &str { match self.request { diff --git a/crates/proto/src/typed_envelope.rs b/crates/proto/src/typed_envelope.rs index 381a6379dc..f677a3b967 100644 --- a/crates/proto/src/typed_envelope.rs +++ b/crates/proto/src/typed_envelope.rs @@ -31,6 +31,58 @@ pub trait RequestMessage: EnvelopedMessage { type Response: EnvelopedMessage; } +/// A trait to bind LSP request and responses for the proto layer. +/// Should be used for every LSP request that has to traverse through the proto layer. +/// +/// `lsp_messages` macro in the same crate provides a convenient way to implement this. +pub trait LspRequestMessage: EnvelopedMessage { + type Response: EnvelopedMessage; + + fn to_proto_query(self) -> crate::lsp_query::Request; + + fn response_to_proto_query(response: Self::Response) -> crate::lsp_response::Response; + + fn buffer_id(&self) -> u64; + + fn buffer_version(&self) -> &[crate::VectorClockEntry]; + + /// Whether to deduplicate the requests, or keep the previous ones running when another + /// request of the same kind is processed. + fn stop_previous_requests() -> bool; +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct LspRequestId(pub u64); + +/// A response from a single language server. +/// There could be multiple responses for a single LSP request, +/// from different servers. 
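The `lsp_messages!` macro and `LspRequestMessage` trait introduced above bind each proto request to its response type and record whether newer requests of that kind supersede the ones already in flight. A toy version of the same pattern; the real macro additionally wires the messages into the `lsp_query`/`lsp_response` oneofs and exposes the buffer id and version, which this sketch omits:

trait LspRequestMessage {
    type Response;
    fn stop_previous_requests() -> bool;
}

macro_rules! lsp_messages {
    ($(($request:ident, $response:ident, $stop_previous:expr)),* $(,)?) => {
        $(impl LspRequestMessage for $request {
            type Response = $response;
            fn stop_previous_requests() -> bool {
                $stop_previous
            }
        })*
    };
}

struct GetHover;
struct GetHoverResponse;
struct GetReferences;
struct GetReferencesResponse;

lsp_messages!(
    (GetHover, GetHoverResponse, true),
    (GetReferences, GetReferencesResponse, true),
);

fn main() {
    assert!(GetHover::stop_previous_requests());
    assert!(GetReferences::stop_previous_requests());
}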
+pub struct ProtoLspResponse { + pub server_id: u64, + pub response: R, +} + +impl ProtoLspResponse> { + pub fn into_response(self) -> Result> { + let envelope = self + .response + .into_any() + .downcast::>() + .map_err(|_| { + anyhow::anyhow!( + "cannot downcast LspResponse to {} for message {}", + T::Response::NAME, + T::NAME, + ) + })?; + + Ok(ProtoLspResponse { + server_id: self.server_id, + response: envelope.payload, + }) + } +} + pub trait AnyTypedEnvelope: Any + Send + Sync { fn payload_type_id(&self) -> TypeId; fn payload_type_name(&self) -> &'static str; diff --git a/crates/recent_projects/src/disconnected_overlay.rs b/crates/recent_projects/src/disconnected_overlay.rs index dd4d788cfd..8ffe0ef07c 100644 --- a/crates/recent_projects/src/disconnected_overlay.rs +++ b/crates/recent_projects/src/disconnected_overlay.rs @@ -1,5 +1,3 @@ -use std::path::PathBuf; - use gpui::{ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, WeakEntity}; use project::project_settings::ProjectSettings; use remote::SshConnectionOptions; @@ -103,17 +101,17 @@ impl DisconnectedOverlay { return; }; - let Some(ssh_project) = workspace.read(cx).serialized_ssh_project() else { - return; - }; - let Some(window_handle) = window.window_handle().downcast::() else { return; }; let app_state = workspace.read(cx).app_state().clone(); - - let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); + let paths = workspace + .read(cx) + .root_paths(cx) + .iter() + .map(|path| path.to_path_buf()) + .collect(); cx.spawn_in(window, async move |_, cx| { open_ssh_project( diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 2093e96cae..fa57b588cd 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -19,15 +19,12 @@ use picker::{ pub use remote_servers::RemoteServerProjects; use settings::Settings; pub use ssh_connections::SshSettings; -use std::{ - path::{Path, PathBuf}, - sync::Arc, -}; +use std::{path::Path, sync::Arc}; use ui::{KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, tooltip_container}; use util::{ResultExt, paths::PathExt}; use workspace::{ - CloseIntent, HistoryManager, ModalView, OpenOptions, SerializedWorkspaceLocation, WORKSPACE_DB, - Workspace, WorkspaceId, with_active_or_new_workspace, + CloseIntent, HistoryManager, ModalView, OpenOptions, PathList, SerializedWorkspaceLocation, + WORKSPACE_DB, Workspace, WorkspaceId, with_active_or_new_workspace, }; use zed_actions::{OpenRecent, OpenRemote}; @@ -154,7 +151,7 @@ impl Render for RecentProjects { pub struct RecentProjectsDelegate { workspace: WeakEntity, - workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>, + workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation, PathList)>, selected_match_index: usize, matches: Vec, render_paths: bool, @@ -178,12 +175,15 @@ impl RecentProjectsDelegate { } } - pub fn set_workspaces(&mut self, workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation)>) { + pub fn set_workspaces( + &mut self, + workspaces: Vec<(WorkspaceId, SerializedWorkspaceLocation, PathList)>, + ) { self.workspaces = workspaces; self.has_any_non_local_projects = !self .workspaces .iter() - .all(|(_, location)| matches!(location, SerializedWorkspaceLocation::Local(_, _))); + .all(|(_, location, _)| matches!(location, SerializedWorkspaceLocation::Local)); } } impl EventEmitter for RecentProjectsDelegate {} @@ -236,15 +236,14 @@ impl PickerDelegate for RecentProjectsDelegate { .workspaces 
.iter() .enumerate() - .filter(|(_, (id, _))| !self.is_current_workspace(*id, cx)) - .map(|(id, (_, location))| { - let combined_string = location - .sorted_paths() + .filter(|(_, (id, _, _))| !self.is_current_workspace(*id, cx)) + .map(|(id, (_, _, paths))| { + let combined_string = paths + .paths() .iter() .map(|path| path.compact().to_string_lossy().into_owned()) .collect::>() .join(""); - StringMatchCandidate::new(id, &combined_string) }) .collect::>(); @@ -279,7 +278,7 @@ impl PickerDelegate for RecentProjectsDelegate { .get(self.selected_index()) .zip(self.workspace.upgrade()) { - let (candidate_workspace_id, candidate_workspace_location) = + let (candidate_workspace_id, candidate_workspace_location, candidate_workspace_paths) = &self.workspaces[selected_match.candidate_id]; let replace_current_window = if self.create_new_window { secondary @@ -292,8 +291,8 @@ impl PickerDelegate for RecentProjectsDelegate { Task::ready(Ok(())) } else { match candidate_workspace_location { - SerializedWorkspaceLocation::Local(paths, _) => { - let paths = paths.paths().to_vec(); + SerializedWorkspaceLocation::Local => { + let paths = candidate_workspace_paths.paths().to_vec(); if replace_current_window { cx.spawn_in(window, async move |workspace, cx| { let continue_replacing = workspace @@ -321,7 +320,7 @@ impl PickerDelegate for RecentProjectsDelegate { workspace.open_workspace_for_paths(false, paths, window, cx) } } - SerializedWorkspaceLocation::Ssh(ssh_project) => { + SerializedWorkspaceLocation::Ssh(connection) => { let app_state = workspace.app_state().clone(); let replace_window = if replace_current_window { @@ -337,12 +336,12 @@ impl PickerDelegate for RecentProjectsDelegate { let connection_options = SshSettings::get_global(cx) .connection_options_for( - ssh_project.host.clone(), - ssh_project.port, - ssh_project.user.clone(), + connection.host.clone(), + connection.port, + connection.user.clone(), ); - let paths = ssh_project.paths.iter().map(PathBuf::from).collect(); + let paths = candidate_workspace_paths.paths().to_vec(); cx.spawn_in(window, async move |_, cx| { open_ssh_project( @@ -383,12 +382,12 @@ impl PickerDelegate for RecentProjectsDelegate { ) -> Option { let hit = self.matches.get(ix)?; - let (_, location) = self.workspaces.get(hit.candidate_id)?; + let (_, location, paths) = self.workspaces.get(hit.candidate_id)?; let mut path_start_offset = 0; - let (match_labels, paths): (Vec<_>, Vec<_>) = location - .sorted_paths() + let (match_labels, paths): (Vec<_>, Vec<_>) = paths + .paths() .iter() .map(|p| p.compact()) .map(|path| { @@ -416,11 +415,9 @@ impl PickerDelegate for RecentProjectsDelegate { .gap_3() .when(self.has_any_non_local_projects, |this| { this.child(match location { - SerializedWorkspaceLocation::Local(_, _) => { - Icon::new(IconName::Screen) - .color(Color::Muted) - .into_any_element() - } + SerializedWorkspaceLocation::Local => Icon::new(IconName::Screen) + .color(Color::Muted) + .into_any_element(), SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server) .color(Color::Muted) .into_any_element(), @@ -568,7 +565,7 @@ impl RecentProjectsDelegate { cx: &mut Context>, ) { if let Some(selected_match) = self.matches.get(ix) { - let (workspace_id, _) = self.workspaces[selected_match.candidate_id]; + let (workspace_id, _, _) = self.workspaces[selected_match.candidate_id]; cx.spawn_in(window, async move |this, cx| { let _ = WORKSPACE_DB.delete_workspace_by_id(workspace_id).await; let workspaces = WORKSPACE_DB @@ -707,7 +704,8 @@ mod tests { }]; 
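In the recent-projects picker above, workspace entries are now `(WorkspaceId, SerializedWorkspaceLocation, PathList)` tuples: match candidates are built from the `PathList` and the current workspace is filtered out before matching. A condensed model of that candidate construction, dropping the location enum for brevity (not an exact copy of the delegate code):

use std::path::PathBuf;

#[derive(Clone, Copy, PartialEq, Eq)]
struct WorkspaceId(u64);

// Paths now live beside the location instead of inside the Local variant.
struct PathList(Vec<PathBuf>);

fn match_candidates(
    workspaces: &[(WorkspaceId, PathList)],
    current: WorkspaceId,
) -> Vec<(usize, String)> {
    workspaces
        .iter()
        .enumerate()
        .filter(|(_, (id, _))| *id != current)
        .map(|(index, (_, paths))| {
            let combined = paths
                .0
                .iter()
                .map(|path| path.to_string_lossy().into_owned())
                .collect::<String>();
            (index, combined)
        })
        .collect()
}

fn main() {
    let workspaces = vec![
        (WorkspaceId(1), PathList(vec![PathBuf::from("/work/zed")])),
        (WorkspaceId(2), PathList(vec![PathBuf::from("/srv/app")])),
    ];
    assert_eq!(
        match_candidates(&workspaces, WorkspaceId(1)),
        vec![(1, "/srv/app".to_string())]
    );
}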
delegate.set_workspaces(vec![( WorkspaceId::default(), - SerializedWorkspaceLocation::from_local_paths(vec![path!("/test/path/")]), + SerializedWorkspaceLocation::Local, + PathList::new(&[path!("/test/path")]), )]); }); }) diff --git a/crates/remote/src/remote.rs b/crates/remote/src/remote.rs index 43eb59c0ae..71895f1678 100644 --- a/crates/remote/src/remote.rs +++ b/crates/remote/src/remote.rs @@ -4,6 +4,6 @@ pub mod proxy; pub mod ssh_session; pub use ssh_session::{ - ConnectionState, SshClientDelegate, SshConnectionOptions, SshPlatform, SshRemoteClient, - SshRemoteEvent, + ConnectionState, SshClientDelegate, SshConnectionOptions, SshInfo, SshPlatform, + SshRemoteClient, SshRemoteEvent, }; diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index a26f4be661..6794018470 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -52,11 +52,6 @@ use util::{ paths::{PathStyle, RemotePathBuf}, }; -#[derive( - Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, -)] -pub struct SshProjectId(pub u64); - #[derive(Clone)] pub struct SshSocket { connection_options: SshConnectionOptions, @@ -89,11 +84,19 @@ pub struct SshConnectionOptions { pub upload_binary_over_ssh: bool, } +#[derive(Debug, Clone, PartialEq, Eq)] pub struct SshArgs { pub arguments: Vec, pub envs: Option>, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SshInfo { + pub args: SshArgs, + pub path_style: PathStyle, + pub shell: String, +} + #[macro_export] macro_rules! shell_script { ($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{ @@ -442,7 +445,7 @@ impl SshSocket { } async fn platform(&self) -> Result { - let uname = self.run_command("sh", &["-c", "uname -sm"]).await?; + let uname = self.run_command("sh", &["-lc", "uname -sm"]).await?; let Some((os, arch)) = uname.split_once(" ") else { anyhow::bail!("unknown uname: {uname:?}") }; @@ -471,6 +474,16 @@ impl SshSocket { Ok(SshPlatform { os, arch }) } + + async fn shell(&self) -> String { + match self.run_command("sh", &["-lc", "echo $SHELL"]).await { + Ok(shell) => shell.trim().to_owned(), + Err(e) => { + log::error!("Failed to get shell: {e}"); + "sh".to_owned() + } + } + } } const MAX_MISSED_HEARTBEATS: usize = 5; @@ -1152,12 +1165,16 @@ impl SshRemoteClient { cx.notify(); } - pub fn ssh_info(&self) -> Option<(SshArgs, PathStyle)> { + pub fn ssh_info(&self) -> Option { self.state .lock() .as_ref() .and_then(|state| state.ssh_connection()) - .map(|ssh_connection| (ssh_connection.ssh_args(), ssh_connection.path_style())) + .map(|ssh_connection| SshInfo { + args: ssh_connection.ssh_args(), + path_style: ssh_connection.path_style(), + shell: ssh_connection.shell(), + }) } pub fn upload_directory( @@ -1392,6 +1409,7 @@ trait RemoteConnection: Send + Sync { fn ssh_args(&self) -> SshArgs; fn connection_options(&self) -> SshConnectionOptions; fn path_style(&self) -> PathStyle; + fn shell(&self) -> String; #[cfg(any(test, feature = "test-support"))] fn simulate_disconnect(&self, _: &AsyncApp) {} @@ -1403,6 +1421,7 @@ struct SshRemoteConnection { remote_binary_path: Option, ssh_platform: SshPlatform, ssh_path_style: PathStyle, + ssh_shell: String, _temp_dir: TempDir, } @@ -1429,6 +1448,10 @@ impl RemoteConnection for SshRemoteConnection { self.socket.connection_options.clone() } + fn shell(&self) -> String { + self.ssh_shell.clone() + } + fn upload_directory( &self, src_path: PathBuf, @@ -1510,7 +1533,7 @@ impl RemoteConnection for SshRemoteConnection { let ssh_proxy_process = 
match self .socket - .ssh_command("sh", &["-c", &start_proxy_command]) + .ssh_command("sh", &["-lc", &start_proxy_command]) // IMPORTANT: we kill this process when we drop the task that uses it. .kill_on_drop(true) .spawn() @@ -1642,6 +1665,7 @@ impl SshRemoteConnection { "windows" => PathStyle::Windows, _ => PathStyle::Posix, }; + let ssh_shell = socket.shell().await; let mut this = Self { socket, @@ -1650,6 +1674,7 @@ impl SshRemoteConnection { remote_binary_path: None, ssh_path_style, ssh_platform, + ssh_shell, }; let (release_channel, version, commit) = cx.update(|cx| { @@ -1885,7 +1910,7 @@ impl SshRemoteConnection { .run_command( "sh", &[ - "-c", + "-lc", &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()), ], ) @@ -1963,7 +1988,7 @@ impl SshRemoteConnection { .run_command( "sh", &[ - "-c", + "-lc", &shell_script!("mkdir -p {parent}", parent = parent.to_string().as_ref()), ], ) @@ -2011,7 +2036,7 @@ impl SshRemoteConnection { dst_path = &dst_path.to_string() ) }; - self.socket.run_command("sh", &["-c", &script]).await?; + self.socket.run_command("sh", &["-lc", &script]).await?; Ok(()) } @@ -2686,6 +2711,10 @@ mod fake { fn path_style(&self) -> PathStyle { PathStyle::current() } + + fn shell(&self) -> String { + "sh".to_owned() + } } pub(super) struct Delegate; diff --git a/crates/remote_server/Cargo.toml b/crates/remote_server/Cargo.toml index dcec9f6fe0..5dbb9a2771 100644 --- a/crates/remote_server/Cargo.toml +++ b/crates/remote_server/Cargo.toml @@ -65,6 +65,7 @@ telemetry_events.workspace = true util.workspace = true watch.workspace = true worktree.workspace = true +thiserror.workspace = true [target.'cfg(not(windows))'.dependencies] crashes.workspace = true diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index 83caebe62f..6216ff7728 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -46,6 +46,9 @@ pub struct HeadlessProject { pub languages: Arc, pub extensions: Entity, pub git_store: Entity, + // Used mostly to keep alive the toolchain store for RPC handlers. + // Local variant is used within LSP store, but that's a separate entity. 
+ pub _toolchain_store: Entity, } pub struct HeadlessAppState { @@ -269,6 +272,7 @@ impl HeadlessProject { languages, extensions, git_store, + _toolchain_store: toolchain_store, } } diff --git a/crates/remote_server/src/main.rs b/crates/remote_server/src/main.rs index 03b0c3eda3..368c7cb639 100644 --- a/crates/remote_server/src/main.rs +++ b/crates/remote_server/src/main.rs @@ -1,6 +1,7 @@ #![cfg_attr(target_os = "windows", allow(unused, dead_code))] -use clap::{Parser, Subcommand}; +use clap::Parser; +use remote_server::Commands; use std::path::PathBuf; #[derive(Parser)] @@ -21,105 +22,34 @@ struct Cli { printenv: bool, } -#[derive(Subcommand)] -enum Commands { - Run { - #[arg(long)] - log_file: PathBuf, - #[arg(long)] - pid_file: PathBuf, - #[arg(long)] - stdin_socket: PathBuf, - #[arg(long)] - stdout_socket: PathBuf, - #[arg(long)] - stderr_socket: PathBuf, - }, - Proxy { - #[arg(long)] - reconnect: bool, - #[arg(long)] - identifier: String, - }, - Version, -} - #[cfg(windows)] fn main() { unimplemented!() } #[cfg(not(windows))] -fn main() { - use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; - use remote::proxy::ProxyLaunchError; - use remote_server::unix::{execute_proxy, execute_run}; - +fn main() -> anyhow::Result<()> { let cli = Cli::parse(); if let Some(socket_path) = &cli.askpass { askpass::main(socket_path); - return; + return Ok(()); } if let Some(socket) = &cli.crash_handler { crashes::crash_server(socket.as_path()); - return; + return Ok(()); } if cli.printenv { util::shell_env::print_env(); - return; + return Ok(()); } - let result = match cli.command { - Some(Commands::Run { - log_file, - pid_file, - stdin_socket, - stdout_socket, - stderr_socket, - }) => execute_run( - log_file, - pid_file, - stdin_socket, - stdout_socket, - stderr_socket, - ), - Some(Commands::Proxy { - identifier, - reconnect, - }) => match execute_proxy(identifier, reconnect) { - Ok(_) => Ok(()), - Err(err) => { - if let Some(err) = err.downcast_ref::() { - std::process::exit(err.to_exit_code()); - } - Err(err) - } - }, - Some(Commands::Version) => { - let release_channel = *RELEASE_CHANNEL; - match release_channel { - ReleaseChannel::Stable | ReleaseChannel::Preview => { - println!("{}", env!("ZED_PKG_VERSION")) - } - ReleaseChannel::Nightly | ReleaseChannel::Dev => { - println!( - "{}", - option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name()) - ) - } - }; - std::process::exit(0); - } - None => { - eprintln!("usage: remote "); - std::process::exit(1); - } - }; - if let Err(error) = result { - log::error!("exiting due to error: {}", error); + if let Some(command) = cli.command { + remote_server::run(command) + } else { + eprintln!("usage: remote "); std::process::exit(1); } } diff --git a/crates/remote_server/src/remote_server.rs b/crates/remote_server/src/remote_server.rs index 52003969af..c14a4828ac 100644 --- a/crates/remote_server/src/remote_server.rs +++ b/crates/remote_server/src/remote_server.rs @@ -6,4 +6,78 @@ pub mod unix; #[cfg(test)] mod remote_editing_tests; +use clap::Subcommand; +use std::path::PathBuf; + pub use headless_project::{HeadlessAppState, HeadlessProject}; + +#[derive(Subcommand)] +pub enum Commands { + Run { + #[arg(long)] + log_file: PathBuf, + #[arg(long)] + pid_file: PathBuf, + #[arg(long)] + stdin_socket: PathBuf, + #[arg(long)] + stdout_socket: PathBuf, + #[arg(long)] + stderr_socket: PathBuf, + }, + Proxy { + #[arg(long)] + reconnect: bool, + #[arg(long)] + identifier: String, + }, + Version, +} + +#[cfg(not(windows))] +pub fn run(command: Commands) -> 
anyhow::Result<()> { + use anyhow::Context; + use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; + use unix::{ExecuteProxyError, execute_proxy, execute_run}; + + match command { + Commands::Run { + log_file, + pid_file, + stdin_socket, + stdout_socket, + stderr_socket, + } => execute_run( + log_file, + pid_file, + stdin_socket, + stdout_socket, + stderr_socket, + ), + Commands::Proxy { + identifier, + reconnect, + } => execute_proxy(identifier, reconnect) + .inspect_err(|err| { + if let ExecuteProxyError::ServerNotRunning(err) = err { + std::process::exit(err.to_exit_code()); + } + }) + .context("running proxy on the remote server"), + Commands::Version => { + let release_channel = *RELEASE_CHANNEL; + match release_channel { + ReleaseChannel::Stable | ReleaseChannel::Preview => { + println!("{}", env!("ZED_PKG_VERSION")) + } + ReleaseChannel::Nightly | ReleaseChannel::Dev => { + println!( + "{}", + option_env!("ZED_COMMIT_SHA").unwrap_or(release_channel.dev_name()) + ) + } + }; + Ok(()) + } + } +} diff --git a/crates/remote_server/src/unix.rs b/crates/remote_server/src/unix.rs index b8a7351552..c6d1566d60 100644 --- a/crates/remote_server/src/unix.rs +++ b/crates/remote_server/src/unix.rs @@ -36,6 +36,7 @@ use smol::Async; use smol::{net::unix::UnixListener, stream::StreamExt as _}; use std::ffi::OsStr; use std::ops::ControlFlow; +use std::process::ExitStatus; use std::str::FromStr; use std::sync::LazyLock; use std::{env, thread}; @@ -46,6 +47,7 @@ use std::{ sync::Arc, }; use telemetry_events::LocationData; +use thiserror::Error; use util::ResultExt; pub static VERSION: LazyLock<&str> = LazyLock::new(|| match *RELEASE_CHANNEL { @@ -526,7 +528,23 @@ pub fn execute_run( Ok(()) } -#[derive(Clone)] +#[derive(Debug, Error)] +pub(crate) enum ServerPathError { + #[error("Failed to create server_dir `{path}`")] + CreateServerDir { + #[source] + source: std::io::Error, + path: PathBuf, + }, + #[error("Failed to create logs_dir `{path}`")] + CreateLogsDir { + #[source] + source: std::io::Error, + path: PathBuf, + }, +} + +#[derive(Clone, Debug)] struct ServerPaths { log_file: PathBuf, pid_file: PathBuf, @@ -536,10 +554,19 @@ struct ServerPaths { } impl ServerPaths { - fn new(identifier: &str) -> Result { + fn new(identifier: &str) -> Result { let server_dir = paths::remote_server_state_dir().join(identifier); - std::fs::create_dir_all(&server_dir)?; - std::fs::create_dir_all(&logs_dir())?; + std::fs::create_dir_all(&server_dir).map_err(|source| { + ServerPathError::CreateServerDir { + source, + path: server_dir.clone(), + } + })?; + let log_dir = logs_dir(); + std::fs::create_dir_all(log_dir).map_err(|source| ServerPathError::CreateLogsDir { + source: source, + path: log_dir.clone(), + })?; let pid_file = server_dir.join("server.pid"); let stdin_socket = server_dir.join("stdin.sock"); @@ -557,7 +584,43 @@ impl ServerPaths { } } -pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> { +#[derive(Debug, Error)] +pub(crate) enum ExecuteProxyError { + #[error("Failed to init server paths")] + ServerPath(#[from] ServerPathError), + + #[error(transparent)] + ServerNotRunning(#[from] ProxyLaunchError), + + #[error("Failed to check PidFile '{path}'")] + CheckPidFile { + #[source] + source: CheckPidError, + path: PathBuf, + }, + + #[error("Failed to kill existing server with pid '{pid}'")] + KillRunningServer { + #[source] + source: std::io::Error, + pid: u32, + }, + + #[error("failed to spawn server")] + SpawnServer(#[source] SpawnServerError), + + #[error("stdin_task failed")] 
+ StdinTask(#[source] anyhow::Error), + #[error("stdout_task failed")] + StdoutTask(#[source] anyhow::Error), + #[error("stderr_task failed")] + StderrTask(#[source] anyhow::Error), +} + +pub(crate) fn execute_proxy( + identifier: String, + is_reconnecting: bool, +) -> Result<(), ExecuteProxyError> { init_logging_proxy(); let server_paths = ServerPaths::new(&identifier)?; @@ -574,12 +637,19 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> { log::info!("starting proxy process. PID: {}", std::process::id()); - let server_pid = check_pid_file(&server_paths.pid_file)?; + let server_pid = check_pid_file(&server_paths.pid_file).map_err(|source| { + ExecuteProxyError::CheckPidFile { + source, + path: server_paths.pid_file.clone(), + } + })?; let server_running = server_pid.is_some(); if is_reconnecting { if !server_running { log::error!("attempted to reconnect, but no server running"); - anyhow::bail!(ProxyLaunchError::ServerNotRunning); + return Err(ExecuteProxyError::ServerNotRunning( + ProxyLaunchError::ServerNotRunning, + )); } } else { if let Some(pid) = server_pid { @@ -590,7 +660,7 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> { kill_running_server(pid, &server_paths)?; } - spawn_server(&server_paths)?; + spawn_server(&server_paths).map_err(ExecuteProxyError::SpawnServer)?; }; let stdin_task = smol::spawn(async move { @@ -630,9 +700,9 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> { if let Err(forwarding_result) = smol::block_on(async move { futures::select! { - result = stdin_task.fuse() => result.context("stdin_task failed"), - result = stdout_task.fuse() => result.context("stdout_task failed"), - result = stderr_task.fuse() => result.context("stderr_task failed"), + result = stdin_task.fuse() => result.map_err(ExecuteProxyError::StdinTask), + result = stdout_task.fuse() => result.map_err(ExecuteProxyError::StdoutTask), + result = stderr_task.fuse() => result.map_err(ExecuteProxyError::StderrTask), } }) { log::error!( @@ -645,12 +715,12 @@ pub fn execute_proxy(identifier: String, is_reconnecting: bool) -> Result<()> { Ok(()) } -fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<()> { +fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<(), ExecuteProxyError> { log::info!("killing existing server with PID {}", pid); std::process::Command::new("kill") .arg(pid.to_string()) .output() - .context("failed to kill existing server")?; + .map_err(|source| ExecuteProxyError::KillRunningServer { source, pid })?; for file in [ &paths.pid_file, @@ -664,18 +734,39 @@ fn kill_running_server(pid: u32, paths: &ServerPaths) -> Result<()> { Ok(()) } -fn spawn_server(paths: &ServerPaths) -> Result<()> { +#[derive(Debug, Error)] +pub(crate) enum SpawnServerError { + #[error("failed to remove stdin socket")] + RemoveStdinSocket(#[source] std::io::Error), + + #[error("failed to remove stdout socket")] + RemoveStdoutSocket(#[source] std::io::Error), + + #[error("failed to remove stderr socket")] + RemoveStderrSocket(#[source] std::io::Error), + + #[error("failed to get current_exe")] + CurrentExe(#[source] std::io::Error), + + #[error("failed to launch server process")] + ProcessStatus(#[source] std::io::Error), + + #[error("failed to launch and detach server process: {status}\n{paths}")] + LaunchStatus { status: ExitStatus, paths: String }, +} + +fn spawn_server(paths: &ServerPaths) -> Result<(), SpawnServerError> { if paths.stdin_socket.exists() { - 
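// --- Editorial aside (not part of the patch) ---
// The unix.rs hunks above swap anyhow `.context(...)` error handling for dedicated
// `thiserror` enums (ServerPathError, ExecuteProxyError, SpawnServerError), keeping
// the underlying io::Error as a `#[source]` while adding typed context. A minimal
// sketch of that pattern; the `SketchError::RemoveSocket` variant and function here
// are illustrative, not the exact items from the patch.
use std::path::{Path, PathBuf};
use thiserror::Error;

#[derive(Debug, Error)]
enum SketchError {
    #[error("failed to remove socket `{path}`")]
    RemoveSocket {
        #[source]
        source: std::io::Error,
        path: PathBuf,
    },
}

fn remove_socket(path: &Path) -> Result<(), SketchError> {
    // map_err wraps the io failure in a typed variant, mirroring how
    // ServerPaths::new and spawn_server wrap theirs above.
    std::fs::remove_file(path).map_err(|source| SketchError::RemoveSocket {
        source,
        path: path.to_path_buf(),
    })
}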
std::fs::remove_file(&paths.stdin_socket)?; + std::fs::remove_file(&paths.stdin_socket).map_err(SpawnServerError::RemoveStdinSocket)?; } if paths.stdout_socket.exists() { - std::fs::remove_file(&paths.stdout_socket)?; + std::fs::remove_file(&paths.stdout_socket).map_err(SpawnServerError::RemoveStdoutSocket)?; } if paths.stderr_socket.exists() { - std::fs::remove_file(&paths.stderr_socket)?; + std::fs::remove_file(&paths.stderr_socket).map_err(SpawnServerError::RemoveStderrSocket)?; } - let binary_name = std::env::current_exe()?; + let binary_name = std::env::current_exe().map_err(SpawnServerError::CurrentExe)?; let mut server_process = std::process::Command::new(binary_name); server_process .arg("run") @@ -692,11 +783,17 @@ fn spawn_server(paths: &ServerPaths) -> Result<()> { let status = server_process .status() - .context("failed to launch server process")?; - anyhow::ensure!( - status.success(), - "failed to launch and detach server process" - ); + .map_err(SpawnServerError::ProcessStatus)?; + + if !status.success() { + return Err(SpawnServerError::LaunchStatus { + status, + paths: format!( + "log file: {:?}, pid file: {:?}", + paths.log_file, paths.pid_file, + ), + }); + } let mut total_time_waited = std::time::Duration::from_secs(0); let wait_duration = std::time::Duration::from_millis(20); @@ -717,7 +814,15 @@ fn spawn_server(paths: &ServerPaths) -> Result<()> { Ok(()) } -fn check_pid_file(path: &Path) -> Result> { +#[derive(Debug, Error)] +#[error("Failed to remove PID file for missing process (pid `{pid}`")] +pub(crate) struct CheckPidError { + #[source] + source: std::io::Error, + pid: u32, +} + +fn check_pid_file(path: &Path) -> Result, CheckPidError> { let Some(pid) = std::fs::read_to_string(&path) .ok() .and_then(|contents| contents.parse::().ok()) @@ -742,7 +847,7 @@ fn check_pid_file(path: &Path) -> Result> { log::debug!( "Found PID file, but process with that PID does not exist. Removing PID file." 
); - std::fs::remove_file(&path).context("Failed to remove PID file")?; + std::fs::remove_file(&path).map_err(|source| CheckPidError { source, pid })?; Ok(None) } } diff --git a/crates/rpc/src/proto_client.rs b/crates/rpc/src/proto_client.rs index 05b6bd1439..a90797ff5d 100644 --- a/crates/rpc/src/proto_client.rs +++ b/crates/rpc/src/proto_client.rs @@ -1,35 +1,48 @@ -use anyhow::Context; +use anyhow::{Context, Result}; use collections::HashMap; use futures::{ Future, FutureExt as _, + channel::oneshot, future::{BoxFuture, LocalBoxFuture}, }; -use gpui::{AnyEntity, AnyWeakEntity, AsyncApp, Entity}; +use gpui::{AnyEntity, AnyWeakEntity, AsyncApp, BackgroundExecutor, Entity, FutureExt as _}; +use parking_lot::Mutex; use proto::{ - AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, RequestMessage, TypedEnvelope, - error::ErrorExt as _, + AnyTypedEnvelope, EntityMessage, Envelope, EnvelopedMessage, LspRequestId, LspRequestMessage, + RequestMessage, TypedEnvelope, error::ErrorExt as _, }; use std::{ any::{Any, TypeId}, - sync::{Arc, Weak}, + sync::{ + Arc, OnceLock, + atomic::{self, AtomicU64}, + }, + time::Duration, }; #[derive(Clone)] -pub struct AnyProtoClient(Arc); +pub struct AnyProtoClient(Arc); -impl AnyProtoClient { - pub fn downgrade(&self) -> AnyWeakProtoClient { - AnyWeakProtoClient(Arc::downgrade(&self.0)) - } -} +type RequestIds = Arc< + Mutex< + HashMap< + LspRequestId, + oneshot::Sender< + Result< + Option>>>>, + >, + >, + >, + >, +>; -#[derive(Clone)] -pub struct AnyWeakProtoClient(Weak); +static NEXT_LSP_REQUEST_ID: OnceLock> = OnceLock::new(); +static REQUEST_IDS: OnceLock = OnceLock::new(); -impl AnyWeakProtoClient { - pub fn upgrade(&self) -> Option { - self.0.upgrade().map(AnyProtoClient) - } +struct State { + client: Arc, + next_lsp_request_id: Arc, + request_ids: RequestIds, } pub trait ProtoClient: Send + Sync { @@ -37,11 +50,11 @@ pub trait ProtoClient: Send + Sync { &self, envelope: Envelope, request_type: &'static str, - ) -> BoxFuture<'static, anyhow::Result>; + ) -> BoxFuture<'static, Result>; - fn send(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>; + fn send(&self, envelope: Envelope, message_type: &'static str) -> Result<()>; - fn send_response(&self, envelope: Envelope, message_type: &'static str) -> anyhow::Result<()>; + fn send_response(&self, envelope: Envelope, message_type: &'static str) -> Result<()>; fn message_handler_set(&self) -> &parking_lot::Mutex; @@ -65,7 +78,7 @@ pub type ProtoMessageHandler = Arc< Box, AnyProtoClient, AsyncApp, - ) -> LocalBoxFuture<'static, anyhow::Result<()>>, + ) -> LocalBoxFuture<'static, Result<()>>, >; impl ProtoMessageHandlerSet { @@ -113,7 +126,7 @@ impl ProtoMessageHandlerSet { message: Box, client: AnyProtoClient, cx: AsyncApp, - ) -> Option>> { + ) -> Option>> { let payload_type_id = message.payload_type_id(); let mut this = this.lock(); let handler = this.message_handlers.get(&payload_type_id)?.clone(); @@ -169,43 +182,195 @@ where T: ProtoClient + 'static, { fn from(client: Arc) -> Self { - Self(client) + Self::new(client) } } impl AnyProtoClient { pub fn new(client: Arc) -> Self { - Self(client) + Self(Arc::new(State { + client, + next_lsp_request_id: NEXT_LSP_REQUEST_ID + .get_or_init(|| Arc::new(AtomicU64::new(0))) + .clone(), + request_ids: REQUEST_IDS.get_or_init(RequestIds::default).clone(), + })) } pub fn is_via_collab(&self) -> bool { - self.0.is_via_collab() + self.0.client.is_via_collab() } pub fn request( &self, request: T, - ) -> impl Future> + use { + ) -> impl 
Future> + use { let envelope = request.into_envelope(0, None, None); - let response = self.0.request(envelope, T::NAME); + let response = self.0.client.request(envelope, T::NAME); async move { T::Response::from_envelope(response.await?) .context("received response of the wrong type") } } - pub fn send(&self, request: T) -> anyhow::Result<()> { + pub fn send(&self, request: T) -> Result<()> { let envelope = request.into_envelope(0, None, None); - self.0.send(envelope, T::NAME) + self.0.client.send(envelope, T::NAME) } - pub fn send_response( - &self, - request_id: u32, - request: T, - ) -> anyhow::Result<()> { + pub fn send_response(&self, request_id: u32, request: T) -> Result<()> { let envelope = request.into_envelope(0, Some(request_id), None); - self.0.send(envelope, T::NAME) + self.0.client.send(envelope, T::NAME) + } + + pub fn request_lsp( + &self, + project_id: u64, + timeout: Duration, + executor: BackgroundExecutor, + request: T, + ) -> impl Future< + Output = Result>>>>, + > + use + where + T: LspRequestMessage, + { + let new_id = LspRequestId( + self.0 + .next_lsp_request_id + .fetch_add(1, atomic::Ordering::Acquire), + ); + let (tx, rx) = oneshot::channel(); + { + self.0.request_ids.lock().insert(new_id, tx); + } + + let query = proto::LspQuery { + project_id, + lsp_request_id: new_id.0, + request: Some(request.to_proto_query()), + }; + let request = self.request(query); + let request_ids = self.0.request_ids.clone(); + async move { + match request.await { + Ok(_request_enqueued) => {} + Err(e) => { + request_ids.lock().remove(&new_id); + return Err(e).context("sending LSP proto request"); + } + } + + let response = rx.with_timeout(timeout, &executor).await; + { + request_ids.lock().remove(&new_id); + } + match response { + Ok(Ok(response)) => { + let response = response + .context("waiting for LSP proto response")? + .map(|response| { + anyhow::Ok(TypedEnvelope { + payload: response + .payload + .into_iter() + .map(|lsp_response| lsp_response.into_response::()) + .collect::>>()?, + sender_id: response.sender_id, + original_sender_id: response.original_sender_id, + message_id: response.message_id, + received_at: response.received_at, + }) + }) + .transpose() + .context("converting LSP proto response")?; + Ok(response) + } + Err(_cancelled_due_timeout) => Ok(None), + Ok(Err(_channel_dropped)) => Ok(None), + } + } + } + + pub fn send_lsp_response( + &self, + project_id: u64, + lsp_request_id: LspRequestId, + server_responses: HashMap, + ) -> Result<()> { + self.send(proto::LspQueryResponse { + project_id, + lsp_request_id: lsp_request_id.0, + responses: server_responses + .into_iter() + .map(|(server_id, response)| proto::LspResponse { + server_id, + response: Some(T::response_to_proto_query(response)), + }) + .collect(), + }) + } + + pub fn handle_lsp_response(&self, mut envelope: TypedEnvelope) { + let request_id = LspRequestId(envelope.payload.lsp_request_id); + let mut response_senders = self.0.request_ids.lock(); + if let Some(tx) = response_senders.remove(&request_id) { + let responses = envelope.payload.responses.drain(..).collect::>(); + tx.send(Ok(Some(proto::TypedEnvelope { + sender_id: envelope.sender_id, + original_sender_id: envelope.original_sender_id, + message_id: envelope.message_id, + received_at: envelope.received_at, + payload: responses + .into_iter() + .filter_map(|response| { + use proto::lsp_response::Response; + + let server_id = response.server_id; + let response = match response.response? 
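// --- Editorial aside (not part of the patch) ---
// request_lsp above correlates an outgoing LspQuery with its later LspQueryResponse:
// it takes a fresh id from an AtomicU64, parks a oneshot sender in a shared map, and
// handle_lsp_response later completes that sender (with a timeout on the waiting
// side). A much-simplified sketch of the correlation idea; the types and names here
// are illustrative, not the real proto messages.
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};

use futures::channel::oneshot;
use parking_lot::Mutex;

type Pending = Arc<Mutex<HashMap<u64, oneshot::Sender<String>>>>;

fn start_request(next_id: &AtomicU64, pending: &Pending) -> (u64, oneshot::Receiver<String>) {
    // Allocate a request id and register where the response should be delivered.
    let id = next_id.fetch_add(1, Ordering::SeqCst);
    let (tx, rx) = oneshot::channel();
    pending.lock().insert(id, tx);
    (id, rx)
}

fn complete_request(pending: &Pending, id: u64, payload: String) {
    // Mirrors handle_lsp_response: remove the sender and fulfil it. A dropped
    // receiver (for example, the requester timed out) just makes `send` return Err.
    if let Some(tx) = pending.lock().remove(&id) {
        let _ = tx.send(payload);
    }
}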
{ + Response::GetReferencesResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetDocumentColorResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetHoverResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetCodeActionsResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetSignatureHelpResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetCodeLensResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetDocumentDiagnosticsResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetDefinitionResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetDeclarationResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetTypeDefinitionResponse(response) => { + to_any_envelope(&envelope, response) + } + Response::GetImplementationResponse(response) => { + to_any_envelope(&envelope, response) + } + }; + Some(proto::ProtoLspResponse { + server_id, + response, + }) + }) + .collect(), + }))) + .ok(); + } } pub fn add_request_handler(&self, entity: gpui::WeakEntity, handler: H) @@ -213,31 +378,35 @@ impl AnyProtoClient { M: RequestMessage, E: 'static, H: 'static + Sync + Fn(Entity, TypedEnvelope, AsyncApp) -> F + Send + Sync, - F: 'static + Future>, + F: 'static + Future>, { - self.0.message_handler_set().lock().add_message_handler( - TypeId::of::(), - entity.into(), - Arc::new(move |entity, envelope, client, cx| { - let entity = entity.downcast::().unwrap(); - let envelope = envelope.into_any().downcast::>().unwrap(); - let request_id = envelope.message_id(); - handler(entity, *envelope, cx) - .then(move |result| async move { - match result { - Ok(response) => { - client.send_response(request_id, response)?; - Ok(()) + self.0 + .client + .message_handler_set() + .lock() + .add_message_handler( + TypeId::of::(), + entity.into(), + Arc::new(move |entity, envelope, client, cx| { + let entity = entity.downcast::().unwrap(); + let envelope = envelope.into_any().downcast::>().unwrap(); + let request_id = envelope.message_id(); + handler(entity, *envelope, cx) + .then(move |result| async move { + match result { + Ok(response) => { + client.send_response(request_id, response)?; + Ok(()) + } + Err(error) => { + client.send_response(request_id, error.to_proto())?; + Err(error) + } } - Err(error) => { - client.send_response(request_id, error.to_proto())?; - Err(error) - } - } - }) - .boxed_local() - }), - ) + }) + .boxed_local() + }), + ) } pub fn add_entity_request_handler(&self, handler: H) @@ -245,7 +414,7 @@ impl AnyProtoClient { M: EnvelopedMessage + RequestMessage + EntityMessage, E: 'static, H: 'static + Sync + Send + Fn(gpui::Entity, TypedEnvelope, AsyncApp) -> F, - F: 'static + Future>, + F: 'static + Future>, { let message_type_id = TypeId::of::(); let entity_type_id = TypeId::of::(); @@ -257,6 +426,7 @@ impl AnyProtoClient { .remote_entity_id() }; self.0 + .client .message_handler_set() .lock() .add_entity_message_handler( @@ -290,7 +460,7 @@ impl AnyProtoClient { M: EnvelopedMessage + EntityMessage, E: 'static, H: 'static + Sync + Send + Fn(gpui::Entity, TypedEnvelope, AsyncApp) -> F, - F: 'static + Future>, + F: 'static + Future>, { let message_type_id = TypeId::of::(); let entity_type_id = TypeId::of::(); @@ -302,6 +472,7 @@ impl AnyProtoClient { .remote_entity_id() }; self.0 + .client .message_handler_set() .lock() .add_entity_message_handler( @@ -319,7 +490,7 @@ impl 
AnyProtoClient { pub fn subscribe_to_entity(&self, remote_id: u64, entity: &Entity) { let id = (TypeId::of::(), remote_id); - let mut message_handlers = self.0.message_handler_set().lock(); + let mut message_handlers = self.0.client.message_handler_set().lock(); if message_handlers .entities_by_type_and_remote_id .contains_key(&id) @@ -335,3 +506,16 @@ impl AnyProtoClient { ); } } + +fn to_any_envelope( + envelope: &TypedEnvelope, + response: T, +) -> Box { + Box::new(proto::TypedEnvelope { + sender_id: envelope.sender_id, + original_sender_id: envelope.original_sender_id, + message_id: envelope.message_id, + received_at: envelope.received_at, + payload: response, + }) as Box<_> +} diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index c4ba9b5154..8ac12588af 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -3905,7 +3905,7 @@ pub mod tests { assert_eq!(workspace.active_pane(), &second_pane); second_pane.update(cx, |this, cx| { assert_eq!(this.active_item_index(), 1); - this.activate_prev_item(false, window, cx); + this.activate_previous_item(&Default::default(), window, cx); assert_eq!(this.active_item_index(), 0); }); workspace.activate_pane_in_direction(workspace::SplitDirection::Left, window, cx); @@ -3940,7 +3940,9 @@ pub mod tests { // Focus the second pane's non-search item window .update(cx, |_workspace, window, cx| { - second_pane.update(cx, |pane, cx| pane.activate_next_item(true, window, cx)); + second_pane.update(cx, |pane, cx| { + pane.activate_next_item(&Default::default(), window, cx) + }); }) .unwrap(); diff --git a/crates/settings/src/settings_json.rs b/crates/settings/src/settings_json.rs index 8080ec8d5f..f112ec811d 100644 --- a/crates/settings/src/settings_json.rs +++ b/crates/settings/src/settings_json.rs @@ -209,7 +209,7 @@ fn replace_value_in_json_text( if ch == ',' { removal_end = existing_value_range.end + offset + 1; // Also consume whitespace after the comma - while let Some((_, next_ch)) = chars.next() { + for (_, next_ch) in chars.by_ref() { if next_ch.is_whitespace() { removal_end += next_ch.len_utf8(); } else { diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 211db46c6c..3deaed8b9d 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -60,6 +60,11 @@ pub trait Settings: 'static + Send + Sync { /// The logic for combining together values from one or more JSON files into the /// final value for this setting. 
+ /// + /// # Warning + /// `Self::FileContent` deserialized field names should match with `Self` deserialized field names + /// otherwise the field won't be deserialized properly and you will get the error: + /// "A default setting must be added to the `default.json` file" fn load(sources: SettingsSources, cx: &mut App) -> Result where Self: Sized; diff --git a/crates/settings_ui/src/keybindings.rs b/crates/settings_ui/src/keybindings.rs index 9a2d33ef7c..288f59c8e0 100644 --- a/crates/settings_ui/src/keybindings.rs +++ b/crates/settings_ui/src/keybindings.rs @@ -12,9 +12,11 @@ use fs::Fs; use fuzzy::{StringMatch, StringMatchCandidate}; use gpui::{ Action, AppContext as _, AsyncApp, Axis, ClickEvent, Context, DismissEvent, Entity, - EventEmitter, FocusHandle, Focusable, Global, IsZero, KeyContext, Keystroke, MouseButton, - Point, ScrollStrategy, ScrollWheelEvent, Stateful, StyledText, Subscription, Task, - TextStyleRefinement, WeakEntity, actions, anchored, deferred, div, + EventEmitter, FocusHandle, Focusable, Global, IsZero, + KeyBindingContextPredicate::{And, Descendant, Equal, Identifier, Not, NotEqual, Or}, + KeyContext, Keystroke, MouseButton, Point, ScrollStrategy, ScrollWheelEvent, Stateful, + StyledText, Subscription, Task, TextStyleRefinement, WeakEntity, actions, anchored, deferred, + div, }; use language::{Language, LanguageConfig, ToOffset as _}; use notifications::status_toast::{StatusToast, ToastIcon}; @@ -182,15 +184,6 @@ struct KeybindConflict { remaining_conflict_amount: usize, } -impl KeybindConflict { - fn from_iter<'a>(mut indices: impl Iterator) -> Option { - indices.next().map(|origin| Self { - first_conflict_index: origin.index, - remaining_conflict_amount: indices.count(), - }) - } -} - #[derive(Clone, Copy, PartialEq)] struct ConflictOrigin { override_source: KeybindSource, @@ -238,13 +231,21 @@ impl ConflictOrigin { #[derive(Default)] struct ConflictState { conflicts: Vec>, - keybind_mapping: HashMap>, + keybind_mapping: ConflictKeybindMapping, has_user_conflicts: bool, } +type ConflictKeybindMapping = HashMap< + Vec, + Vec<( + Option, + Vec, + )>, +>; + impl ConflictState { fn new(key_bindings: &[ProcessedBinding]) -> Self { - let mut action_keybind_mapping: HashMap<_, Vec> = HashMap::default(); + let mut action_keybind_mapping = ConflictKeybindMapping::default(); let mut largest_index = 0; for (index, binding) in key_bindings @@ -252,29 +253,48 @@ impl ConflictState { .enumerate() .flat_map(|(index, binding)| Some(index).zip(binding.keybind_information())) { - action_keybind_mapping - .entry(binding.get_action_mapping()) - .or_default() - .push(ConflictOrigin::new(binding.source, index)); + let mapping = binding.get_action_mapping(); + let predicate = mapping + .context + .and_then(|ctx| gpui::KeyBindingContextPredicate::parse(&ctx).ok()); + let entry = action_keybind_mapping + .entry(mapping.keystrokes) + .or_default(); + let origin = ConflictOrigin::new(binding.source, index); + if let Some((_, origins)) = + entry + .iter_mut() + .find(|(other_predicate, _)| match (&predicate, other_predicate) { + (None, None) => true, + (Some(a), Some(b)) => normalized_ctx_eq(a, b), + _ => false, + }) + { + origins.push(origin); + } else { + entry.push((predicate, vec![origin])); + } largest_index = index; } let mut conflicts = vec![None; largest_index + 1]; let mut has_user_conflicts = false; - for indices in action_keybind_mapping.values_mut() { - indices.sort_unstable_by_key(|origin| origin.override_source); - let Some((fst, snd)) = indices.get(0).zip(indices.get(1)) 
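// --- Editorial aside (not part of the patch) ---
// Illustrating the warning added to the `Settings::load` docs above: the field names
// that `Self` and `Self::FileContent` expose to serde need to line up, since the
// defaults come in from default.json through the `FileContent` type and are then
// read back into `Self`. A minimal sketch with a hypothetical setting (not from the
// patch); the failure message quoted in the comment is the one named in the docs.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct CursorSettings {
    // Must stay `blink_rate` (or carry a matching #[serde(rename)]) ...
    blink_rate: f32,
}

#[derive(Debug, Default, Deserialize)]
struct CursorSettingsContent {
    // ... as the optional field here. If this were named e.g. `cursor_blink_rate`,
    // the default from default.json would never reach `CursorSettings::blink_rate`
    // and loading would fail with "A default setting must be added to the
    // `default.json` file".
    blink_rate: Option<f32>,
}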
else { - continue; - }; + for entries in action_keybind_mapping.values_mut() { + for (_, indices) in entries.iter_mut() { + indices.sort_unstable_by_key(|origin| origin.override_source); + let Some((fst, snd)) = indices.get(0).zip(indices.get(1)) else { + continue; + }; - for origin in indices.iter() { - conflicts[origin.index] = - origin.get_conflict_with(if origin == fst { snd } else { fst }) + for origin in indices.iter() { + conflicts[origin.index] = + origin.get_conflict_with(if origin == fst { snd } else { fst }) + } + + has_user_conflicts |= fst.override_source == KeybindSource::User + && snd.override_source == KeybindSource::User; } - - has_user_conflicts |= fst.override_source == KeybindSource::User - && snd.override_source == KeybindSource::User; } Self { @@ -289,15 +309,34 @@ impl ConflictState { action_mapping: &ActionMapping, keybind_idx: Option, ) -> Option { - self.keybind_mapping - .get(action_mapping) - .and_then(|indices| { - KeybindConflict::from_iter( - indices + let ActionMapping { + keystrokes, + context, + } = action_mapping; + let predicate = context + .as_deref() + .and_then(|ctx| gpui::KeyBindingContextPredicate::parse(&ctx).ok()); + self.keybind_mapping.get(keystrokes).and_then(|entries| { + entries + .iter() + .find_map(|(other_predicate, indices)| { + match (&predicate, other_predicate) { + (None, None) => true, + (Some(pred), Some(other)) => normalized_ctx_eq(pred, other), + _ => false, + } + .then_some(indices) + }) + .and_then(|indices| { + let mut indices = indices .iter() - .filter(|&conflict| Some(conflict.index) != keybind_idx), - ) - }) + .filter(|&conflict| Some(conflict.index) != keybind_idx); + indices.next().map(|origin| KeybindConflict { + first_conflict_index: origin.index, + remaining_conflict_amount: indices.count(), + }) + }) + }) } fn conflict_for_idx(&self, idx: usize) -> Option { @@ -3089,29 +3128,29 @@ fn collect_contexts_from_assets() -> Vec { queue.push(root_context); while let Some(context) = queue.pop() { match context { - gpui::KeyBindingContextPredicate::Identifier(ident) => { + Identifier(ident) => { contexts.insert(ident); } - gpui::KeyBindingContextPredicate::Equal(ident_a, ident_b) => { + Equal(ident_a, ident_b) => { contexts.insert(ident_a); contexts.insert(ident_b); } - gpui::KeyBindingContextPredicate::NotEqual(ident_a, ident_b) => { + NotEqual(ident_a, ident_b) => { contexts.insert(ident_a); contexts.insert(ident_b); } - gpui::KeyBindingContextPredicate::Descendant(ctx_a, ctx_b) => { + Descendant(ctx_a, ctx_b) => { queue.push(*ctx_a); queue.push(*ctx_b); } - gpui::KeyBindingContextPredicate::Not(ctx) => { + Not(ctx) => { queue.push(*ctx); } - gpui::KeyBindingContextPredicate::And(ctx_a, ctx_b) => { + And(ctx_a, ctx_b) => { queue.push(*ctx_a); queue.push(*ctx_b); } - gpui::KeyBindingContextPredicate::Or(ctx_a, ctx_b) => { + Or(ctx_a, ctx_b) => { queue.push(*ctx_a); queue.push(*ctx_b); } @@ -3126,6 +3165,127 @@ fn collect_contexts_from_assets() -> Vec { contexts } +fn normalized_ctx_eq( + a: &gpui::KeyBindingContextPredicate, + b: &gpui::KeyBindingContextPredicate, +) -> bool { + use gpui::KeyBindingContextPredicate::*; + return match (a, b) { + (Identifier(_), Identifier(_)) => a == b, + (Equal(a_left, a_right), Equal(b_left, b_right)) => { + (a_left == b_left && a_right == b_right) || (a_left == b_right && a_right == b_left) + } + (NotEqual(a_left, a_right), NotEqual(b_left, b_right)) => { + (a_left == b_left && a_right == b_right) || (a_left == b_right && a_right == b_left) + } + (Descendant(a_parent, a_child), 
Descendant(b_parent, b_child)) => { + normalized_ctx_eq(a_parent, b_parent) && normalized_ctx_eq(a_child, b_child) + } + (Not(a_expr), Not(b_expr)) => normalized_ctx_eq(a_expr, b_expr), + // Handle double negation: !(!a) == a + (Not(a_expr), b) if matches!(a_expr.as_ref(), Not(_)) => { + let Not(a_inner) = a_expr.as_ref() else { + unreachable!(); + }; + normalized_ctx_eq(b, a_inner) + } + (a, Not(b_expr)) if matches!(b_expr.as_ref(), Not(_)) => { + let Not(b_inner) = b_expr.as_ref() else { + unreachable!(); + }; + normalized_ctx_eq(a, b_inner) + } + (And(a_left, a_right), And(b_left, b_right)) + if matches!(a_left.as_ref(), And(_, _)) + || matches!(a_right.as_ref(), And(_, _)) + || matches!(b_left.as_ref(), And(_, _)) + || matches!(b_right.as_ref(), And(_, _)) => + { + let mut a_operands = Vec::new(); + flatten_and(a, &mut a_operands); + let mut b_operands = Vec::new(); + flatten_and(b, &mut b_operands); + compare_operand_sets(&a_operands, &b_operands) + } + (And(a_left, a_right), And(b_left, b_right)) => { + (normalized_ctx_eq(a_left, b_left) && normalized_ctx_eq(a_right, b_right)) + || (normalized_ctx_eq(a_left, b_right) && normalized_ctx_eq(a_right, b_left)) + } + (Or(a_left, a_right), Or(b_left, b_right)) + if matches!(a_left.as_ref(), Or(_, _)) + || matches!(a_right.as_ref(), Or(_, _)) + || matches!(b_left.as_ref(), Or(_, _)) + || matches!(b_right.as_ref(), Or(_, _)) => + { + let mut a_operands = Vec::new(); + flatten_or(a, &mut a_operands); + let mut b_operands = Vec::new(); + flatten_or(b, &mut b_operands); + compare_operand_sets(&a_operands, &b_operands) + } + (Or(a_left, a_right), Or(b_left, b_right)) => { + (normalized_ctx_eq(a_left, b_left) && normalized_ctx_eq(a_right, b_right)) + || (normalized_ctx_eq(a_left, b_right) && normalized_ctx_eq(a_right, b_left)) + } + _ => false, + }; + + fn flatten_and<'a>( + pred: &'a gpui::KeyBindingContextPredicate, + operands: &mut Vec<&'a gpui::KeyBindingContextPredicate>, + ) { + use gpui::KeyBindingContextPredicate::*; + match pred { + And(left, right) => { + flatten_and(left, operands); + flatten_and(right, operands); + } + _ => operands.push(pred), + } + } + + fn flatten_or<'a>( + pred: &'a gpui::KeyBindingContextPredicate, + operands: &mut Vec<&'a gpui::KeyBindingContextPredicate>, + ) { + use gpui::KeyBindingContextPredicate::*; + match pred { + Or(left, right) => { + flatten_or(left, operands); + flatten_or(right, operands); + } + _ => operands.push(pred), + } + } + + fn compare_operand_sets( + a: &[&gpui::KeyBindingContextPredicate], + b: &[&gpui::KeyBindingContextPredicate], + ) -> bool { + if a.len() != b.len() { + return false; + } + + // For each operand in a, find a matching operand in b + let mut b_matched = vec![false; b.len()]; + for a_operand in a { + let mut found = false; + for (b_idx, b_operand) in b.iter().enumerate() { + if !b_matched[b_idx] && normalized_ctx_eq(a_operand, b_operand) { + b_matched[b_idx] = true; + found = true; + break; + } + } + if !found { + return false; + } + } + + true + } +} + impl SerializableItem for KeymapEditor { fn serialized_item_kind() -> &'static str { "KeymapEditor" @@ -3188,12 +3348,15 @@ impl SerializableItem for KeymapEditor { } mod persistence { - use db::{define_connection, query, sqlez_macros::sql}; + use db::{query, sqlez::domain::Domain, sqlez_macros::sql}; use workspace::WorkspaceDb; - define_connection! 
{ - pub static ref KEYBINDING_EDITORS: KeybindingEditorDb = - &[sql!( + pub struct KeybindingEditorDb(db::sqlez::thread_safe_connection::ThreadSafeConnection); + + impl Domain for KeybindingEditorDb { + const NAME: &str = stringify!(KeybindingEditorDb); + + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE keybinding_editors ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -3202,9 +3365,11 @@ mod persistence { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT; - )]; + )]; } + db::static_connection!(KEYBINDING_EDITORS, KeybindingEditorDb, [WorkspaceDb]); + impl KeybindingEditorDb { query! { pub async fn save_keybinding_editor( @@ -3228,3 +3393,152 @@ mod persistence { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn normalized_ctx_cmp() { + #[track_caller] + fn cmp(a: &str, b: &str) -> bool { + let a = gpui::KeyBindingContextPredicate::parse(a) + .expect("Failed to parse keybinding context a"); + let b = gpui::KeyBindingContextPredicate::parse(b) + .expect("Failed to parse keybinding context b"); + normalized_ctx_eq(&a, &b) + } + + // Basic equality - identical expressions + assert!(cmp("a && b", "a && b")); + assert!(cmp("a || b", "a || b")); + assert!(cmp("a == b", "a == b")); + assert!(cmp("a != b", "a != b")); + assert!(cmp("a > b", "a > b")); + assert!(cmp("!a", "!a")); + + // AND operator - associative/commutative + assert!(cmp("a && b", "b && a")); + assert!(cmp("a && b && c", "c && b && a")); + assert!(cmp("a && b && c", "b && a && c")); + assert!(cmp("a && b && c && d", "d && c && b && a")); + + // OR operator - associative/commutative + assert!(cmp("a || b", "b || a")); + assert!(cmp("a || b || c", "c || b || a")); + assert!(cmp("a || b || c", "b || a || c")); + assert!(cmp("a || b || c || d", "d || c || b || a")); + + // Equality operator - associative/commutative + assert!(cmp("a == b", "b == a")); + assert!(cmp("x == y", "y == x")); + + // Inequality operator - associative/commutative + assert!(cmp("a != b", "b != a")); + assert!(cmp("x != y", "y != x")); + + // Complex nested expressions with associative operators + assert!(cmp("(a && b) || c", "c || (a && b)")); + assert!(cmp("(a && b) || c", "c || (b && a)")); + assert!(cmp("(a || b) && c", "c && (a || b)")); + assert!(cmp("(a || b) && c", "c && (b || a)")); + assert!(cmp("(a && b) || (c && d)", "(c && d) || (a && b)")); + assert!(cmp("(a && b) || (c && d)", "(d && c) || (b && a)")); + + // Multiple levels of nesting + assert!(cmp("((a && b) || c) && d", "d && ((a && b) || c)")); + assert!(cmp("((a && b) || c) && d", "d && (c || (b && a))")); + assert!(cmp("a && (b || (c && d))", "(b || (c && d)) && a")); + assert!(cmp("a && (b || (c && d))", "(b || (d && c)) && a")); + + // Negation with associative operators + assert!(cmp("!a && b", "b && !a")); + assert!(cmp("!a || b", "b || !a")); + assert!(cmp("!(a && b) || c", "c || !(a && b)")); + assert!(cmp("!(a && b) || c", "c || !(b && a)")); + + // Descendant operator (>) - NOT associative/commutative + assert!(cmp("a > b", "a > b")); + assert!(!cmp("a > b", "b > a")); + assert!(!cmp("a > b > c", "c > b > a")); + assert!(!cmp("a > b > c", "a > c > b")); + + // Mixed operators with descendant + assert!(cmp("(a > b) && c", "c && (a > b)")); + assert!(!cmp("(a > b) && c", "c && (b > a)")); + assert!(cmp("(a > b) || (c > d)", "(c > d) || (a > b)")); + assert!(!cmp("(a > b) || (c > d)", "(b > a) || (d > c)")); + + // Negative cases - different operators + assert!(!cmp("a && b", "a || b")); + assert!(!cmp("a == b", "a != b")); 
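        // --- Editorial aside (not part of the patch): `&&` and `||` parse
        // left-associated, so "(a && b) && c" and "a && (b && c)" are different
        // trees; normalized_ctx_eq only treats them as equal because flatten_and /
        // flatten_or collapse nested chains before comparing operand sets.
        // Two extra illustrative checks (not in the original test):
        assert!(cmp("(a && b) && c", "a && (b && c)"));
        assert!(cmp("(a || b) || c", "a || (b || c)"));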
+ assert!(!cmp("a && b", "a > b")); + assert!(!cmp("a || b", "a > b")); + assert!(!cmp("a == b", "a && b")); + assert!(!cmp("a != b", "a || b")); + + // Negative cases - different operands + assert!(!cmp("a && b", "a && c")); + assert!(!cmp("a && b", "c && d")); + assert!(!cmp("a || b", "a || c")); + assert!(!cmp("a || b", "c || d")); + assert!(!cmp("a == b", "a == c")); + assert!(!cmp("a != b", "a != c")); + assert!(!cmp("a > b", "a > c")); + assert!(!cmp("a > b", "c > b")); + + // Negative cases - with negation + assert!(!cmp("!a", "a")); + assert!(!cmp("!a && b", "a && b")); + assert!(!cmp("!(a && b)", "a && b")); + assert!(!cmp("!a || b", "a || b")); + assert!(!cmp("!(a || b)", "a || b")); + + // Negative cases - complex expressions + assert!(!cmp("(a && b) || c", "(a || b) && c")); + assert!(!cmp("a && (b || c)", "a || (b && c)")); + assert!(!cmp("(a && b) || (c && d)", "(a || b) && (c || d)")); + assert!(!cmp("a > b && c", "a && b > c")); + + // Edge cases - multiple same operands + assert!(cmp("a && a", "a && a")); + assert!(cmp("a || a", "a || a")); + assert!(cmp("a && a && b", "b && a && a")); + assert!(cmp("a || a || b", "b || a || a")); + + // Edge cases - deeply nested + assert!(cmp( + "((a && b) || (c && d)) && ((e || f) && g)", + "((e || f) && g) && ((c && d) || (a && b))" + )); + assert!(cmp( + "((a && b) || (c && d)) && ((e || f) && g)", + "(g && (f || e)) && ((d && c) || (b && a))" + )); + + // Edge cases - repeated patterns + assert!(cmp("(a && b) || (a && b)", "(b && a) || (b && a)")); + assert!(cmp("(a || b) && (a || b)", "(b || a) && (b || a)")); + + // Negative cases - subtle differences + assert!(!cmp("a && b && c", "a && b")); + assert!(!cmp("a || b || c", "a || b")); + assert!(!cmp("(a && b) || c", "a && (b || c)")); + + // a > b > c is not the same as a > c, should not be equal + assert!(!cmp("a > b > c", "a > c")); + + // Double negation with complex expressions + assert!(cmp("!(!(a && b))", "a && b")); + assert!(cmp("!(!(a || b))", "a || b")); + assert!(cmp("!(!(a > b))", "a > b")); + assert!(cmp("!(!a) && b", "a && b")); + assert!(cmp("!(!a) || b", "a || b")); + assert!(cmp("!(!(a && b)) || c", "(a && b) || c")); + assert!(cmp("!(!(a && b)) || c", "(b && a) || c")); + assert!(cmp("!(!a)", "a")); + assert!(cmp("a", "!(!a)")); + assert!(cmp("!(!(!a))", "!a")); + assert!(cmp("!(!(!(!a)))", "a")); + } +} diff --git a/crates/sqlez/src/domain.rs b/crates/sqlez/src/domain.rs index a83f4e18d6..5744a67da2 100644 --- a/crates/sqlez/src/domain.rs +++ b/crates/sqlez/src/domain.rs @@ -1,8 +1,12 @@ use crate::connection::Connection; pub trait Domain: 'static { - fn name() -> &'static str; - fn migrations() -> &'static [&'static str]; + const NAME: &str; + const MIGRATIONS: &[&str]; + + fn should_allow_migration_change(_index: usize, _old: &str, _new: &str) -> bool { + false + } } pub trait Migrator: 'static { @@ -17,7 +21,11 @@ impl Migrator for () { impl Migrator for D { fn migrate(connection: &Connection) -> anyhow::Result<()> { - connection.migrate(Self::name(), Self::migrations()) + connection.migrate( + Self::NAME, + Self::MIGRATIONS, + Self::should_allow_migration_change, + ) } } diff --git a/crates/sqlez/src/migrations.rs b/crates/sqlez/src/migrations.rs index 7c59ffe658..2429ddeb41 100644 --- a/crates/sqlez/src/migrations.rs +++ b/crates/sqlez/src/migrations.rs @@ -34,7 +34,12 @@ impl Connection { /// Note: Unlike everything else in SQLez, migrations are run eagerly, without first /// preparing the SQL statements. 
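// --- Editorial aside (not part of the patch) ---
// domain.rs above replaces Domain's name()/migrations() methods with associated
// consts and adds an overridable should_allow_migration_change hook; the
// define_connection! call sites elsewhere in this patch become a Domain impl plus
// db::static_connection!. A minimal sketch of the new shape with a hypothetical
// NotesDb domain (the table and static name are illustrative only):
use db::sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection};
use db::sqlez_macros::sql;
use workspace::WorkspaceDb;

pub struct NotesDb(ThreadSafeConnection);

impl Domain for NotesDb {
    const NAME: &str = stringify!(NotesDb);

    const MIGRATIONS: &[&str] = &[sql!(
        CREATE TABLE notes(id INTEGER PRIMARY KEY, body TEXT) STRICT;
    )];

    // should_allow_migration_change defaults to false; a domain can override it to
    // accept a known, harmless rewording of an already-applied step instead of
    // failing with "Migration changed for ... at step ...".
}

db::static_connection!(NOTES_DB, NotesDb, [WorkspaceDb]);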
This makes it possible to do multi-statement schema /// updates in a single string without running into prepare errors. - pub fn migrate(&self, domain: &'static str, migrations: &[&'static str]) -> Result<()> { + pub fn migrate( + &self, + domain: &'static str, + migrations: &[&'static str], + mut should_allow_migration_change: impl FnMut(usize, &str, &str) -> bool, + ) -> Result<()> { self.with_savepoint("migrating", || { // Setup the migrations table unconditionally self.exec(indoc! {" @@ -65,9 +70,14 @@ impl Connection { &sqlformat::QueryParams::None, Default::default(), ); - if completed_migration == migration { + if completed_migration == migration + || migration.trim().starts_with("-- ALLOW_MIGRATION_CHANGE") + { // Migration already run. Continue continue; + } else if should_allow_migration_change(index, &completed_migration, &migration) + { + continue; } else { anyhow::bail!(formatdoc! {" Migration changed for {domain} at step {index} @@ -108,6 +118,7 @@ mod test { a TEXT, b TEXT )"}], + disallow_migration_change, ) .unwrap(); @@ -136,6 +147,7 @@ mod test { d TEXT )"}, ], + disallow_migration_change, ) .unwrap(); @@ -214,7 +226,11 @@ mod test { // Run the migration verifying that the row got dropped connection - .migrate("test", &["DELETE FROM test_table"]) + .migrate( + "test", + &["DELETE FROM test_table"], + disallow_migration_change, + ) .unwrap(); assert_eq!( connection @@ -232,7 +248,11 @@ mod test { // Run the same migration again and verify that the table was left unchanged connection - .migrate("test", &["DELETE FROM test_table"]) + .migrate( + "test", + &["DELETE FROM test_table"], + disallow_migration_change, + ) .unwrap(); assert_eq!( connection @@ -252,27 +272,28 @@ mod test { .migrate( "test migration", &[ - indoc! {" - CREATE TABLE test ( - col INTEGER - )"}, - indoc! {" - INSERT INTO test (col) VALUES (1)"}, + "CREATE TABLE test (col INTEGER)", + "INSERT INTO test (col) VALUES (1)", ], + disallow_migration_change, ) .unwrap(); + let mut migration_changed = false; + // Create another migration with the same domain but different steps let second_migration_result = connection.migrate( "test migration", &[ - indoc! {" - CREATE TABLE test ( - color INTEGER - )"}, - indoc! 
{" - INSERT INTO test (color) VALUES (1)"}, + "CREATE TABLE test (color INTEGER )", + "INSERT INTO test (color) VALUES (1)", ], + |_, old, new| { + assert_eq!(old, "CREATE TABLE test (col INTEGER)"); + assert_eq!(new, "CREATE TABLE test (color INTEGER)"); + migration_changed = true; + false + }, ); // Verify new migration returns error when run @@ -284,7 +305,11 @@ mod test { let connection = Connection::open_memory(Some("test_create_alter_drop")); connection - .migrate("first_migration", &["CREATE TABLE table1(a TEXT) STRICT;"]) + .migrate( + "first_migration", + &["CREATE TABLE table1(a TEXT) STRICT;"], + disallow_migration_change, + ) .unwrap(); connection @@ -305,6 +330,7 @@ mod test { ALTER TABLE table2 RENAME TO table1; "}], + disallow_migration_change, ) .unwrap(); @@ -312,4 +338,8 @@ mod test { assert_eq!(res, "test text"); } + + fn disallow_migration_change(_: usize, _: &str, _: &str) -> bool { + false + } } diff --git a/crates/sqlez/src/thread_safe_connection.rs b/crates/sqlez/src/thread_safe_connection.rs index afdc96586e..58d3afe78f 100644 --- a/crates/sqlez/src/thread_safe_connection.rs +++ b/crates/sqlez/src/thread_safe_connection.rs @@ -278,12 +278,8 @@ mod test { enum TestDomain {} impl Domain for TestDomain { - fn name() -> &'static str { - "test" - } - fn migrations() -> &'static [&'static str] { - &["CREATE TABLE test(col1 TEXT, col2 TEXT) STRICT;"] - } + const NAME: &str = "test"; + const MIGRATIONS: &[&str] = &["CREATE TABLE test(col1 TEXT, col2 TEXT) STRICT;"]; } for _ in 0..100 { @@ -312,12 +308,9 @@ mod test { fn wild_zed_lost_failure() { enum TestWorkspace {} impl Domain for TestWorkspace { - fn name() -> &'static str { - "workspace" - } + const NAME: &str = "workspace"; - fn migrations() -> &'static [&'static str] { - &[" + const MIGRATIONS: &[&str] = &[" CREATE TABLE workspaces( workspace_id INTEGER PRIMARY KEY, dock_visible INTEGER, -- Boolean @@ -336,8 +329,7 @@ mod test { ON DELETE CASCADE ON UPDATE CASCADE ) STRICT; - "] - } + "]; } let builder = diff --git a/crates/system_specs/Cargo.toml b/crates/system_specs/Cargo.toml new file mode 100644 index 0000000000..8ef1b581ae --- /dev/null +++ b/crates/system_specs/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "system_specs" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/system_specs.rs" + +[features] +default = [] + +[dependencies] +anyhow.workspace = true +client.workspace = true +gpui.workspace = true +human_bytes.workspace = true +release_channel.workspace = true +serde.workspace = true +sysinfo.workspace = true +workspace-hack.workspace = true + +[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies] +pciid-parser.workspace = true diff --git a/crates/system_specs/LICENSE-GPL b/crates/system_specs/LICENSE-GPL new file mode 120000 index 0000000000..89e542f750 --- /dev/null +++ b/crates/system_specs/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/feedback/src/system_specs.rs b/crates/system_specs/src/system_specs.rs similarity index 59% rename from crates/feedback/src/system_specs.rs rename to crates/system_specs/src/system_specs.rs index 87642ab929..731d335232 100644 --- a/crates/feedback/src/system_specs.rs +++ b/crates/system_specs/src/system_specs.rs @@ -1,11 +1,22 @@ +//! 
# system_specs + use client::telemetry; -use gpui::{App, AppContext as _, SemanticVersion, Task, Window}; +pub use gpui::GpuSpecs; +use gpui::{App, AppContext as _, SemanticVersion, Task, Window, actions}; use human_bytes::human_bytes; use release_channel::{AppCommitSha, AppVersion, ReleaseChannel}; use serde::Serialize; use std::{env, fmt::Display}; use sysinfo::{MemoryRefreshKind, RefreshKind, System}; +actions!( + zed, + [ + /// Copies system specifications to the clipboard for bug reports. + CopySystemSpecsIntoClipboard, + ] +); + #[derive(Clone, Debug, Serialize)] pub struct SystemSpecs { app_version: String, @@ -158,6 +169,115 @@ fn try_determine_available_gpus() -> Option { } } +#[derive(Debug, PartialEq, Eq, serde::Deserialize, serde::Serialize, Clone)] +pub struct GpuInfo { + pub device_name: Option, + pub device_pci_id: u16, + pub vendor_name: Option, + pub vendor_pci_id: u16, + pub driver_version: Option, + pub driver_name: Option, +} + +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +pub fn read_gpu_info_from_sys_class_drm() -> anyhow::Result> { + use anyhow::Context as _; + use pciid_parser; + let dir_iter = std::fs::read_dir("/sys/class/drm").context("Failed to read /sys/class/drm")?; + let mut pci_addresses = vec![]; + let mut gpus = Vec::::new(); + let pci_db = pciid_parser::Database::read().ok(); + for entry in dir_iter { + let Ok(entry) = entry else { + continue; + }; + + let device_path = entry.path().join("device"); + let Some(pci_address) = device_path.read_link().ok().and_then(|pci_address| { + pci_address + .file_name() + .and_then(std::ffi::OsStr::to_str) + .map(str::trim) + .map(str::to_string) + }) else { + continue; + }; + let Ok(device_pci_id) = read_pci_id_from_path(device_path.join("device")) else { + continue; + }; + let Ok(vendor_pci_id) = read_pci_id_from_path(device_path.join("vendor")) else { + continue; + }; + let driver_name = std::fs::read_link(device_path.join("driver")) + .ok() + .and_then(|driver_link| { + driver_link + .file_name() + .and_then(std::ffi::OsStr::to_str) + .map(str::trim) + .map(str::to_string) + }); + let driver_version = driver_name + .as_ref() + .and_then(|driver_name| { + std::fs::read_to_string(format!("/sys/module/{driver_name}/version")).ok() + }) + .as_deref() + .map(str::trim) + .map(str::to_string); + + let already_found = gpus + .iter() + .zip(&pci_addresses) + .any(|(gpu, gpu_pci_address)| { + gpu_pci_address == &pci_address + && gpu.driver_version == driver_version + && gpu.driver_name == driver_name + }); + + if already_found { + continue; + } + + let vendor = pci_db + .as_ref() + .and_then(|db| db.vendors.get(&vendor_pci_id)); + let vendor_name = vendor.map(|vendor| vendor.name.clone()); + let device_name = vendor + .and_then(|vendor| vendor.devices.get(&device_pci_id)) + .map(|device| device.name.clone()); + + gpus.push(GpuInfo { + device_name, + device_pci_id, + vendor_name, + vendor_pci_id, + driver_version, + driver_name, + }); + pci_addresses.push(pci_address); + } + + Ok(gpus) +} + +#[cfg(any(target_os = "linux", target_os = "freebsd"))] +fn read_pci_id_from_path(path: impl AsRef) -> anyhow::Result { + use anyhow::Context as _; + let id = std::fs::read_to_string(path)?; + let id = id + .trim() + .strip_prefix("0x") + .context("Not a device ID") + .context(id.clone())?; + anyhow::ensure!( + id.len() == 4, + "Not a device id, expected 4 digits, found {}", + id.len() + ); + u16::from_str_radix(id, 16).context("Failed to parse device ID") +} + /// Returns value of `ZED_BUNDLE_TYPE` set at compiletime or else 
at runtime. /// /// The compiletime value is used by flatpak since it doesn't seem to have a way to provide a diff --git a/crates/tab_switcher/src/tab_switcher.rs b/crates/tab_switcher/src/tab_switcher.rs index 12af124ec7..bf3ce7b568 100644 --- a/crates/tab_switcher/src/tab_switcher.rs +++ b/crates/tab_switcher/src/tab_switcher.rs @@ -2,12 +2,14 @@ mod tab_switcher_tests; use collections::HashMap; -use editor::items::entry_git_aware_label_color; +use editor::items::{ + entry_diagnostic_aware_icon_decoration_and_color, entry_git_aware_label_color, +}; use fuzzy::StringMatchCandidate; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EntityId, EventEmitter, FocusHandle, - Focusable, Modifiers, ModifiersChangedEvent, MouseButton, MouseUpEvent, ParentElement, Render, - Styled, Task, WeakEntity, Window, actions, rems, + Focusable, Modifiers, ModifiersChangedEvent, MouseButton, MouseUpEvent, ParentElement, Point, + Render, Styled, Task, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate}; use project::Project; @@ -15,11 +17,14 @@ use schemars::JsonSchema; use serde::Deserialize; use settings::Settings; use std::{cmp::Reverse, sync::Arc}; -use ui::{ListItem, ListItemSpacing, Tooltip, prelude::*}; +use ui::{ + DecoratedIcon, IconDecoration, IconDecorationKind, ListItem, ListItemSpacing, Tooltip, + prelude::*, +}; use util::ResultExt; use workspace::{ ModalView, Pane, SaveIntent, Workspace, - item::{ItemHandle, ItemSettings, TabContentParams}, + item::{ItemHandle, ItemSettings, ShowDiagnostics, TabContentParams}, pane::{Event as PaneEvent, render_item_indicator, tab_details}, }; @@ -113,7 +118,13 @@ impl TabSwitcher { } let weak_workspace = workspace.weak_handle(); + let project = workspace.project().clone(); + let original_items: Vec<_> = workspace + .panes() + .iter() + .map(|p| (p.clone(), p.read(cx).active_item_index())) + .collect(); workspace.toggle_modal(window, cx, |window, cx| { let delegate = TabSwitcherDelegate::new( project, @@ -124,6 +135,7 @@ impl TabSwitcher { is_global, window, cx, + original_items, ); TabSwitcher::new(delegate, window, is_global, cx) }); @@ -221,7 +233,80 @@ pub struct TabSwitcherDelegate { workspace: WeakEntity, project: Entity, matches: Vec, + original_items: Vec<(Entity, usize)>, is_all_panes: bool, + restored_items: bool, +} + +impl TabMatch { + fn icon( + &self, + project: &Entity, + selected: bool, + window: &Window, + cx: &App, + ) -> Option { + let icon = self.item.tab_icon(window, cx)?; + let item_settings = ItemSettings::get_global(cx); + let show_diagnostics = item_settings.show_diagnostics; + let git_status_color = item_settings + .git_status + .then(|| { + let path = self.item.project_path(cx)?; + let project = project.read(cx); + let entry = project.entry_for_path(&path, cx)?; + let git_status = project + .project_path_git_status(&path, cx) + .map(|status| status.summary()) + .unwrap_or_default(); + Some(entry_git_aware_label_color( + git_status, + entry.is_ignored, + selected, + )) + }) + .flatten(); + let colored_icon = icon.color(git_status_color.unwrap_or_default()); + + let most_sever_diagostic_level = if show_diagnostics == ShowDiagnostics::Off { + None + } else { + let buffer_store = project.read(cx).buffer_store().read(cx); + let buffer = self + .item + .project_path(cx) + .and_then(|path| buffer_store.get_by_path(&path)) + .map(|buffer| buffer.read(cx)); + buffer.and_then(|buffer| { + buffer + .buffer_diagnostics(None) + .iter() + .map(|diagnostic_entry| diagnostic_entry.diagnostic.severity) + .min() + 
}) + }; + + let decorations = + entry_diagnostic_aware_icon_decoration_and_color(most_sever_diagostic_level) + .filter(|(d, _)| { + *d != IconDecorationKind::Triangle + || show_diagnostics != ShowDiagnostics::Errors + }) + .map(|(icon, color)| { + let knockout_item_color = if selected { + cx.theme().colors().element_selected + } else { + cx.theme().colors().element_background + }; + IconDecoration::new(icon, knockout_item_color, cx) + .color(color.color(cx)) + .position(Point { + x: px(-2.), + y: px(-2.), + }) + }); + Some(DecoratedIcon::new(colored_icon, decorations)) + } } impl TabSwitcherDelegate { @@ -235,6 +320,7 @@ impl TabSwitcherDelegate { is_all_panes: bool, window: &mut Window, cx: &mut Context, + original_items: Vec<(Entity, usize)>, ) -> Self { Self::subscribe_to_updates(&pane, window, cx); Self { @@ -246,6 +332,8 @@ impl TabSwitcherDelegate { project, matches: Vec::new(), is_all_panes, + original_items, + restored_items: false, } } @@ -300,14 +388,6 @@ impl TabSwitcherDelegate { let matches = if query.is_empty() { let history = workspace.read(cx).recently_activated_items(cx); - for item in &all_items { - eprintln!( - "{:?} {:?}", - item.item.tab_content_text(0, cx), - (Reverse(history.get(&item.item.item_id())), item.item_index) - ) - } - eprintln!(""); all_items .sort_by_key(|tab| (Reverse(history.get(&tab.item.item_id())), tab.item_index)); all_items @@ -474,8 +554,25 @@ impl PickerDelegate for TabSwitcherDelegate { self.selected_index } - fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context>) { + fn set_selected_index( + &mut self, + ix: usize, + window: &mut Window, + cx: &mut Context>, + ) { self.selected_index = ix; + + let Some(selected_match) = self.matches.get(self.selected_index()) else { + return; + }; + selected_match + .pane + .update(cx, |pane, cx| { + if let Some(index) = pane.index_for_item(selected_match.item.as_ref()) { + pane.activate_item(index, false, false, window, cx); + } + }) + .ok(); cx.notify(); } @@ -502,6 +599,13 @@ impl PickerDelegate for TabSwitcherDelegate { let Some(selected_match) = self.matches.get(self.selected_index()) else { return; }; + + self.restored_items = true; + for (pane, index) in self.original_items.iter() { + pane.update(cx, |this, cx| { + this.activate_item(*index, false, false, window, cx); + }) + } selected_match .pane .update(cx, |pane, cx| { @@ -512,7 +616,15 @@ impl PickerDelegate for TabSwitcherDelegate { .ok(); } - fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { + fn dismissed(&mut self, window: &mut Window, cx: &mut Context>) { + if !self.restored_items { + for (pane, index) in self.original_items.iter() { + pane.update(cx, |this, cx| { + this.activate_item(*index, false, false, window, cx); + }) + } + } + self.tab_switcher .update(cx, |_, cx| cx.emit(DismissEvent)) .log_err(); @@ -538,31 +650,7 @@ impl PickerDelegate for TabSwitcherDelegate { }; let label = tab_match.item.tab_content(params, window, cx); - let icon = tab_match.item.tab_icon(window, cx).map(|icon| { - let git_status_color = ItemSettings::get_global(cx) - .git_status - .then(|| { - tab_match - .item - .project_path(cx) - .as_ref() - .and_then(|path| { - let project = self.project.read(cx); - let entry = project.entry_for_path(path, cx)?; - let git_status = project - .project_path_git_status(path, cx) - .map(|status| status.summary()) - .unwrap_or_default(); - Some((entry, git_status)) - }) - .map(|(entry, git_status)| { - entry_git_aware_label_color(git_status, entry.is_ignored, selected) - }) - }) - .flatten(); - - 
icon.color(git_status_color.unwrap_or_default()) - }); + let icon = tab_match.icon(&self.project, selected, window, cx); let indicator = render_item_indicator(tab_match.item.boxed_clone(), cx); let indicator_color = if let Some(ref indicator) = indicator { @@ -604,7 +692,7 @@ impl PickerDelegate for TabSwitcherDelegate { .inset(true) .toggle_state(selected) .child(h_flex().w_full().child(label)) - .start_slot::(icon) + .start_slot::(icon) .map(|el| { if self.selected_index == ix { el.end_slot::(close_button) diff --git a/crates/task/src/shell_builder.rs b/crates/task/src/shell_builder.rs index 770312bafc..de4ddc00f4 100644 --- a/crates/task/src/shell_builder.rs +++ b/crates/task/src/shell_builder.rs @@ -1,26 +1,40 @@ use crate::Shell; -#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] -enum ShellKind { +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ShellKind { #[default] Posix, + Csh, + Fish, Powershell, Nushell, Cmd, } impl ShellKind { - fn new(program: &str) -> Self { + pub fn system() -> Self { + Self::new(&system_shell()) + } + + pub fn new(program: &str) -> Self { + #[cfg(windows)] + let (_, program) = program.rsplit_once('\\').unwrap_or(("", program)); + #[cfg(not(windows))] + let (_, program) = program.rsplit_once('/').unwrap_or(("", program)); if program == "powershell" - || program.ends_with("powershell.exe") + || program == "powershell.exe" || program == "pwsh" - || program.ends_with("pwsh.exe") + || program == "pwsh.exe" { ShellKind::Powershell - } else if program == "cmd" || program.ends_with("cmd.exe") { + } else if program == "cmd" || program == "cmd.exe" { ShellKind::Cmd } else if program == "nu" { ShellKind::Nushell + } else if program == "fish" { + ShellKind::Fish + } else if program == "csh" { + ShellKind::Csh } else { // Someother shell detected, the user might install and use a // unix-like shell. @@ -33,6 +47,8 @@ impl ShellKind { Self::Powershell => Self::to_powershell_variable(input), Self::Cmd => Self::to_cmd_variable(input), Self::Posix => input.to_owned(), + Self::Fish => input.to_owned(), + Self::Csh => input.to_owned(), Self::Nushell => Self::to_nushell_variable(input), } } @@ -153,7 +169,7 @@ impl ShellKind { match self { ShellKind::Powershell => vec!["-C".to_owned(), combined_command], ShellKind::Cmd => vec!["/C".to_owned(), combined_command], - ShellKind::Posix | ShellKind::Nushell => interactive + ShellKind::Posix | ShellKind::Nushell | ShellKind::Fish | ShellKind::Csh => interactive .then(|| "-i".to_owned()) .into_iter() .chain(["-c".to_owned(), combined_command]) @@ -184,19 +200,14 @@ pub struct ShellBuilder { kind: ShellKind, } -pub static DEFAULT_REMOTE_SHELL: &str = "\"${SHELL:-sh}\""; - impl ShellBuilder { /// Create a new ShellBuilder as configured. - pub fn new(is_local: bool, shell: &Shell) -> Self { + pub fn new(remote_system_shell: Option<&str>, shell: &Shell) -> Self { let (program, args) = match shell { - Shell::System => { - if is_local { - (system_shell(), Vec::new()) - } else { - (DEFAULT_REMOTE_SHELL.to_string(), Vec::new()) - } - } + Shell::System => match remote_system_shell { + Some(remote_shell) => (remote_shell.to_string(), Vec::new()), + None => (system_shell(), Vec::new()), + }, Shell::Program(shell) => (shell.clone(), Vec::new()), Shell::WithArguments { program, args, .. 
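// --- Editorial aside (not part of the patch) ---
// ShellKind::new above now strips the directory part of the program before matching,
// so full paths classify like bare names, and Fish/Csh are recognised explicitly.
// A small usage sketch, relying on the `pub use shell_builder::{ShellBuilder,
// ShellKind}` re-export added in task.rs below:
use task::ShellKind;

fn classify_shells() {
    assert_eq!(ShellKind::new("fish"), ShellKind::Fish);
    assert_eq!(ShellKind::new("csh"), ShellKind::Csh);
    assert_eq!(ShellKind::new("nu"), ShellKind::Nushell);
    // The directory prefix is stripped first ('/' on unix, '\\' on windows), so a
    // full path classifies like its basename:
    #[cfg(unix)]
    assert_eq!(ShellKind::new("/usr/bin/fish"), ShellKind::Fish);
    // Anything unrecognised falls back to Posix semantics.
    assert_eq!(ShellKind::new("zsh"), ShellKind::Posix);
    // ShellKind::system() classifies the platform's default shell the same way.
    let _kind = ShellKind::system();
}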
} => (program.clone(), args.clone()), }; @@ -212,6 +223,7 @@ impl ShellBuilder { self.interactive = false; self } + /// Returns the label to show in the terminal tab pub fn command_label(&self, command_label: &str) -> String { match self.kind { @@ -221,7 +233,7 @@ impl ShellBuilder { ShellKind::Cmd => { format!("{} /C '{}'", self.program, command_label) } - ShellKind::Posix | ShellKind::Nushell => { + ShellKind::Posix | ShellKind::Nushell | ShellKind::Fish | ShellKind::Csh => { let interactivity = self.interactive.then_some("-i ").unwrap_or_default(); format!( "{} {interactivity}-c '$\"{}\"'", @@ -234,7 +246,7 @@ impl ShellBuilder { pub fn build( mut self, task_command: Option, - task_args: &Vec, + task_args: &[String], ) -> (String, Vec) { if let Some(task_command) = task_command { let combined_command = task_args.iter().fold(task_command, |mut command, arg| { @@ -258,11 +270,11 @@ mod test { #[test] fn test_nu_shell_variable_substitution() { let shell = Shell::Program("nu".to_owned()); - let shell_builder = ShellBuilder::new(true, &shell); + let shell_builder = ShellBuilder::new(None, &shell); let (program, args) = shell_builder.build( Some("echo".into()), - &vec![ + &[ "${hello}".to_string(), "$world".to_string(), "nothing".to_string(), diff --git a/crates/task/src/task.rs b/crates/task/src/task.rs index 85e654eff4..eb9e59f087 100644 --- a/crates/task/src/task.rs +++ b/crates/task/src/task.rs @@ -22,7 +22,7 @@ pub use debug_format::{ AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest, Request, TcpArgumentsTemplate, ZedDebugConfig, }; -pub use shell_builder::{DEFAULT_REMOTE_SHELL, ShellBuilder}; +pub use shell_builder::{ShellBuilder, ShellKind}; pub use task_template::{ DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates, substitute_variables_in_map, substitute_variables_in_str, diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs index b93b267f58..c7ebd314e4 100644 --- a/crates/terminal_view/src/persistence.rs +++ b/crates/terminal_view/src/persistence.rs @@ -9,7 +9,11 @@ use std::path::{Path, PathBuf}; use ui::{App, Context, Pixels, Window}; use util::ResultExt as _; -use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql}; +use db::{ + query, + sqlez::{domain::Domain, statement::Statement, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, +}; use workspace::{ ItemHandle, ItemId, Member, Pane, PaneAxis, PaneGroup, SerializableItem as _, Workspace, WorkspaceDb, WorkspaceId, @@ -375,9 +379,13 @@ impl<'de> Deserialize<'de> for SerializedAxis { } } -define_connection! { - pub static ref TERMINAL_DB: TerminalDb = - &[sql!( +pub struct TerminalDb(ThreadSafeConnection); + +impl Domain for TerminalDb { + const NAME: &str = stringify!(TerminalDb); + + const MIGRATIONS: &[&str] = &[ + sql!( CREATE TABLE terminals ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -414,6 +422,8 @@ define_connection! { ]; } +db::static_connection!(TERMINAL_DB, TerminalDb, [WorkspaceDb]); + impl TerminalDb { query! 
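
As a point of reference for the shell_builder.rs hunk above, the sketch below (standalone, with stand-in names rather than the crate's ShellKind) shows the effect of the new basename-based detection: the directory prefix is stripped off and the program name is compared exactly instead of with ends_with, so "/usr/local/bin/fish" and "fish" classify the same way. For brevity the sketch strips both separator styles unconditionally, whereas the patch picks one per platform with cfg(windows).

    #[derive(Debug, PartialEq)]
    enum Kind {
        Posix,
        Csh,
        Fish,
        Powershell,
        Nushell,
        Cmd,
    }

    fn detect(program: &str) -> Kind {
        // Keep only the final path component, accepting either separator here.
        let name = program
            .rsplit(|c: char| c == '/' || c == '\\')
            .next()
            .unwrap_or(program);
        match name {
            "powershell" | "powershell.exe" | "pwsh" | "pwsh.exe" => Kind::Powershell,
            "cmd" | "cmd.exe" => Kind::Cmd,
            "nu" => Kind::Nushell,
            "fish" => Kind::Fish,
            "csh" => Kind::Csh,
            // Anything else is treated as a POSIX-compatible shell.
            _ => Kind::Posix,
        }
    }

    fn main() {
        assert_eq!(detect("/usr/local/bin/fish"), Kind::Fish);
        assert_eq!(detect(r"C:\Windows\System32\cmd.exe"), Kind::Cmd);
        assert_eq!(detect("pwsh"), Kind::Powershell);
        assert_eq!(detect("/bin/zsh"), Kind::Posix);
    }
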
{ pub async fn update_workspace_id( diff --git a/crates/terminal_view/src/terminal_element.rs b/crates/terminal_view/src/terminal_element.rs index c2fbeb7ee6..fe3301fb89 100644 --- a/crates/terminal_view/src/terminal_element.rs +++ b/crates/terminal_view/src/terminal_element.rs @@ -1,4 +1,3 @@ -use crate::color_contrast; use editor::{CursorLayout, HighlightedRange, HighlightedRangeLine}; use gpui::{ AbsoluteLength, AnyElement, App, AvailableSpace, Bounds, ContentMask, Context, DispatchPhase, @@ -27,6 +26,7 @@ use terminal::{ terminal_settings::TerminalSettings, }; use theme::{ActiveTheme, Theme, ThemeSettings}; +use ui::utils::ensure_minimum_contrast; use ui::{ParentElement, Tooltip}; use util::ResultExt; use workspace::Workspace; @@ -534,7 +534,7 @@ impl TerminalElement { // Only apply contrast adjustment to non-decorative characters if !Self::is_decorative_character(indexed.c) { - fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast); + fg = ensure_minimum_contrast(fg, bg, minimum_contrast); } // Ghostty uses (175/255) as the multiplier (~0.69), Alacritty uses 0.66, Kitty @@ -1598,6 +1598,7 @@ pub fn convert_color(fg: &terminal::alacritty_terminal::vte::ansi::Color, theme: mod tests { use super::*; use gpui::{AbsoluteLength, Hsla, font}; + use ui::utils::apca_contrast; #[test] fn test_is_decorative_character() { @@ -1713,7 +1714,7 @@ mod tests { }; // Should have poor contrast - let actual_contrast = color_contrast::apca_contrast(white_fg, light_gray_bg).abs(); + let actual_contrast = apca_contrast(white_fg, light_gray_bg).abs(); assert!( actual_contrast < 30.0, "White on light gray should have poor APCA contrast: {}", @@ -1721,12 +1722,12 @@ mod tests { ); // After adjustment with minimum APCA contrast of 45, should be darker - let adjusted = color_contrast::ensure_minimum_contrast(white_fg, light_gray_bg, 45.0); + let adjusted = ensure_minimum_contrast(white_fg, light_gray_bg, 45.0); assert!( adjusted.l < white_fg.l, "Adjusted color should be darker than original" ); - let adjusted_contrast = color_contrast::apca_contrast(adjusted, light_gray_bg).abs(); + let adjusted_contrast = apca_contrast(adjusted, light_gray_bg).abs(); assert!(adjusted_contrast >= 45.0, "Should meet minimum contrast"); // Test case 2: Dark colors (poor contrast) @@ -1744,7 +1745,7 @@ mod tests { }; // Should have poor contrast - let actual_contrast = color_contrast::apca_contrast(black_fg, dark_gray_bg).abs(); + let actual_contrast = apca_contrast(black_fg, dark_gray_bg).abs(); assert!( actual_contrast < 30.0, "Black on dark gray should have poor APCA contrast: {}", @@ -1752,16 +1753,16 @@ mod tests { ); // After adjustment with minimum APCA contrast of 45, should be lighter - let adjusted = color_contrast::ensure_minimum_contrast(black_fg, dark_gray_bg, 45.0); + let adjusted = ensure_minimum_contrast(black_fg, dark_gray_bg, 45.0); assert!( adjusted.l > black_fg.l, "Adjusted color should be lighter than original" ); - let adjusted_contrast = color_contrast::apca_contrast(adjusted, dark_gray_bg).abs(); + let adjusted_contrast = apca_contrast(adjusted, dark_gray_bg).abs(); assert!(adjusted_contrast >= 45.0, "Should meet minimum contrast"); // Test case 3: Already good contrast - let good_contrast = color_contrast::ensure_minimum_contrast(black_fg, white_fg, 45.0); + let good_contrast = ensure_minimum_contrast(black_fg, white_fg, 45.0); assert_eq!( good_contrast, black_fg, "Good contrast should not be adjusted" @@ -1788,11 +1789,11 @@ mod tests { }; // With minimum contrast of 0.0, no adjustment 
should happen - let no_adjust = color_contrast::ensure_minimum_contrast(white_fg, white_bg, 0.0); + let no_adjust = ensure_minimum_contrast(white_fg, white_bg, 0.0); assert_eq!(no_adjust, white_fg, "No adjustment with min_contrast 0.0"); // With minimum APCA contrast of 15, it should adjust to a darker color - let adjusted = color_contrast::ensure_minimum_contrast(white_fg, white_bg, 15.0); + let adjusted = ensure_minimum_contrast(white_fg, white_bg, 15.0); assert!( adjusted.l < white_fg.l, "White on white should become darker, got l={}", @@ -1800,7 +1801,7 @@ mod tests { ); // Verify the contrast is now acceptable - let new_contrast = color_contrast::apca_contrast(adjusted, white_bg).abs(); + let new_contrast = apca_contrast(adjusted, white_bg).abs(); assert!( new_contrast >= 15.0, "Adjusted APCA contrast {} should be >= 15.0", diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index f40c4870f1..6b17911487 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -481,14 +481,17 @@ impl TerminalPanel { window: &mut Window, cx: &mut Context, ) -> Task>> { - let Ok(is_local) = self - .workspace - .update(cx, |workspace, cx| workspace.project().read(cx).is_local()) - else { + let Ok((ssh_client, false)) = self.workspace.update(cx, |workspace, cx| { + let project = workspace.project().read(cx); + ( + project.ssh_client().and_then(|it| it.read(cx).ssh_info()), + project.is_via_collab(), + ) + }) else { return Task::ready(Err(anyhow!("Project is not local"))); }; - let builder = ShellBuilder::new(is_local, &task.shell); + let builder = ShellBuilder::new(ssh_client.as_ref().map(|info| &*info.shell), &task.shell); let command_label = builder.command_label(&task.command_label); let (command, args) = builder.build(task.command.clone(), &task.args); diff --git a/crates/terminal_view/src/terminal_path_like_target.rs b/crates/terminal_view/src/terminal_path_like_target.rs new file mode 100644 index 0000000000..e20df7f001 --- /dev/null +++ b/crates/terminal_view/src/terminal_path_like_target.rs @@ -0,0 +1,825 @@ +use super::{HoverTarget, HoveredWord, TerminalView}; +use anyhow::{Context as _, Result}; +use editor::Editor; +use gpui::{App, AppContext, Context, Task, WeakEntity, Window}; +use itertools::Itertools; +use project::{Entry, Metadata}; +use std::path::PathBuf; +use terminal::PathLikeTarget; +use util::{ResultExt, debug_panic, paths::PathWithPosition}; +use workspace::{OpenOptions, OpenVisible, Workspace}; + +#[derive(Debug, Clone)] +enum OpenTarget { + Worktree(PathWithPosition, Entry), + File(PathWithPosition, Metadata), +} + +impl OpenTarget { + fn is_file(&self) -> bool { + match self { + OpenTarget::Worktree(_, entry) => entry.is_file(), + OpenTarget::File(_, metadata) => !metadata.is_dir, + } + } + + fn is_dir(&self) -> bool { + match self { + OpenTarget::Worktree(_, entry) => entry.is_dir(), + OpenTarget::File(_, metadata) => metadata.is_dir, + } + } + + fn path(&self) -> &PathWithPosition { + match self { + OpenTarget::Worktree(path, _) => path, + OpenTarget::File(path, _) => path, + } + } +} + +pub(super) fn hover_path_like_target( + workspace: &WeakEntity, + hovered_word: HoveredWord, + path_like_target: &PathLikeTarget, + cx: &mut Context, +) -> Task<()> { + let file_to_open_task = possible_open_target(workspace, path_like_target, cx); + cx.spawn(async move |terminal_view, cx| { + let file_to_open = file_to_open_task.await; + terminal_view + .update(cx, |terminal_view, _| match 
file_to_open { + Some(OpenTarget::File(path, _) | OpenTarget::Worktree(path, _)) => { + terminal_view.hover = Some(HoverTarget { + tooltip: path.to_string(|path| path.to_string_lossy().to_string()), + hovered_word, + }); + } + None => { + terminal_view.hover = None; + } + }) + .ok(); + }) +} + +fn possible_open_target( + workspace: &WeakEntity, + path_like_target: &PathLikeTarget, + cx: &App, +) -> Task> { + let Some(workspace) = workspace.upgrade() else { + return Task::ready(None); + }; + // We have to check for both paths, as on Unix, certain paths with positions are valid file paths too. + // We can be on FS remote part, without real FS, so cannot canonicalize or check for existence the path right away. + let mut potential_paths = Vec::new(); + let cwd = path_like_target.terminal_dir.as_ref(); + let maybe_path = &path_like_target.maybe_path; + let original_path = PathWithPosition::from_path(PathBuf::from(maybe_path)); + let path_with_position = PathWithPosition::parse_str(maybe_path); + let worktree_candidates = workspace + .read(cx) + .worktrees(cx) + .sorted_by_key(|worktree| { + let worktree_root = worktree.read(cx).abs_path(); + match cwd.and_then(|cwd| worktree_root.strip_prefix(cwd).ok()) { + Some(cwd_child) => cwd_child.components().count(), + None => usize::MAX, + } + }) + .collect::>(); + // Since we do not check paths via FS and joining, we need to strip off potential `./`, `a/`, `b/` prefixes out of it. + const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"]; + for prefix_str in GIT_DIFF_PATH_PREFIXES.iter().chain(std::iter::once(&".")) { + if let Some(stripped) = original_path.path.strip_prefix(prefix_str).ok() { + potential_paths.push(PathWithPosition { + path: stripped.to_owned(), + row: original_path.row, + column: original_path.column, + }); + } + if let Some(stripped) = path_with_position.path.strip_prefix(prefix_str).ok() { + potential_paths.push(PathWithPosition { + path: stripped.to_owned(), + row: path_with_position.row, + column: path_with_position.column, + }); + } + } + + let insert_both_paths = original_path != path_with_position; + potential_paths.insert(0, original_path); + if insert_both_paths { + potential_paths.insert(1, path_with_position); + } + + // If we won't find paths "easily", we can traverse the entire worktree to look what ends with the potential path suffix. + // That will be slow, though, so do the fast checks first. 
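
To make the prefix handling above concrete, here is a minimal standalone sketch of the same idea: terminal output often prints paths in git-diff form ("a/...", "b/...") or with a leading "./", so each hovered candidate is also queued with that first component stripped before any worktree lookup. The constant and helper below are local to the sketch.

    use std::path::{Path, PathBuf};

    const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"];

    fn candidates(maybe_path: &Path) -> Vec<PathBuf> {
        // The unmodified path is always tried first, as in the hunk above.
        let mut out = vec![maybe_path.to_path_buf()];
        for prefix in GIT_DIFF_PATH_PREFIXES.iter().chain(std::iter::once(&".")) {
            // Path::strip_prefix matches whole components, so "ab/x" is not
            // affected by the "a" prefix.
            if let Ok(stripped) = maybe_path.strip_prefix(prefix) {
                out.push(stripped.to_path_buf());
            }
        }
        out
    }

    fn main() {
        assert_eq!(
            candidates(Path::new("a/src/main.rs")),
            vec![PathBuf::from("a/src/main.rs"), PathBuf::from("src/main.rs")]
        );
        assert_eq!(
            candidates(Path::new("./Cargo.toml")),
            vec![PathBuf::from("./Cargo.toml"), PathBuf::from("Cargo.toml")]
        );
    }
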
+ let mut worktree_paths_to_check = Vec::new(); + for worktree in &worktree_candidates { + let worktree_root = worktree.read(cx).abs_path(); + let mut paths_to_check = Vec::with_capacity(potential_paths.len()); + + for path_with_position in &potential_paths { + let path_to_check = if worktree_root.ends_with(&path_with_position.path) { + let root_path_with_position = PathWithPosition { + path: worktree_root.to_path_buf(), + row: path_with_position.row, + column: path_with_position.column, + }; + match worktree.read(cx).root_entry() { + Some(root_entry) => { + return Task::ready(Some(OpenTarget::Worktree( + root_path_with_position, + root_entry.clone(), + ))); + } + None => root_path_with_position, + } + } else { + PathWithPosition { + path: path_with_position + .path + .strip_prefix(&worktree_root) + .unwrap_or(&path_with_position.path) + .to_owned(), + row: path_with_position.row, + column: path_with_position.column, + } + }; + + if path_to_check.path.is_relative() + && let Some(entry) = worktree.read(cx).entry_for_path(&path_to_check.path) + { + return Task::ready(Some(OpenTarget::Worktree( + PathWithPosition { + path: worktree_root.join(&entry.path), + row: path_to_check.row, + column: path_to_check.column, + }, + entry.clone(), + ))); + } + + paths_to_check.push(path_to_check); + } + + if !paths_to_check.is_empty() { + worktree_paths_to_check.push((worktree.clone(), paths_to_check)); + } + } + + // Before entire worktree traversal(s), make an attempt to do FS checks if available. + let fs_paths_to_check = if workspace.read(cx).project().read(cx).is_local() { + potential_paths + .into_iter() + .flat_map(|path_to_check| { + let mut paths_to_check = Vec::new(); + let maybe_path = &path_to_check.path; + if maybe_path.starts_with("~") { + if let Some(home_path) = + maybe_path + .strip_prefix("~") + .ok() + .and_then(|stripped_maybe_path| { + Some(dirs::home_dir()?.join(stripped_maybe_path)) + }) + { + paths_to_check.push(PathWithPosition { + path: home_path, + row: path_to_check.row, + column: path_to_check.column, + }); + } + } else { + paths_to_check.push(PathWithPosition { + path: maybe_path.clone(), + row: path_to_check.row, + column: path_to_check.column, + }); + if maybe_path.is_relative() { + if let Some(cwd) = &cwd { + paths_to_check.push(PathWithPosition { + path: cwd.join(maybe_path), + row: path_to_check.row, + column: path_to_check.column, + }); + } + for worktree in &worktree_candidates { + paths_to_check.push(PathWithPosition { + path: worktree.read(cx).abs_path().join(maybe_path), + row: path_to_check.row, + column: path_to_check.column, + }); + } + } + } + paths_to_check + }) + .collect() + } else { + Vec::new() + }; + + let worktree_check_task = cx.spawn(async move |cx| { + for (worktree, worktree_paths_to_check) in worktree_paths_to_check { + let found_entry = worktree + .update(cx, |worktree, _| { + let worktree_root = worktree.abs_path(); + let traversal = worktree.traverse_from_path(true, true, false, "".as_ref()); + for entry in traversal { + if let Some(path_in_worktree) = worktree_paths_to_check + .iter() + .find(|path_to_check| entry.path.ends_with(&path_to_check.path)) + { + return Some(OpenTarget::Worktree( + PathWithPosition { + path: worktree_root.join(&entry.path), + row: path_in_worktree.row, + column: path_in_worktree.column, + }, + entry.clone(), + )); + } + } + None + }) + .ok()?; + if let Some(found_entry) = found_entry { + return Some(found_entry); + } + } + None + }); + + let fs = workspace.read(cx).project().read(cx).fs().clone(); + 
cx.background_spawn(async move { + for mut path_to_check in fs_paths_to_check { + if let Some(fs_path_to_check) = fs.canonicalize(&path_to_check.path).await.ok() + && let Some(metadata) = fs.metadata(&fs_path_to_check).await.ok().flatten() + { + path_to_check.path = fs_path_to_check; + return Some(OpenTarget::File(path_to_check, metadata)); + } + } + + worktree_check_task.await + }) +} + +pub(super) fn open_path_like_target( + workspace: &WeakEntity, + terminal_view: &mut TerminalView, + path_like_target: &PathLikeTarget, + window: &mut Window, + cx: &mut Context, +) { + possibly_open_target(workspace, terminal_view, path_like_target, window, cx) + .detach_and_log_err(cx) +} + +fn possibly_open_target( + workspace: &WeakEntity, + terminal_view: &mut TerminalView, + path_like_target: &PathLikeTarget, + window: &mut Window, + cx: &mut Context, +) -> Task>> { + if terminal_view.hover.is_none() { + return Task::ready(Ok(None)); + } + let workspace = workspace.clone(); + let path_like_target = path_like_target.clone(); + cx.spawn_in(window, async move |terminal_view, cx| { + let Some(open_target) = terminal_view + .update(cx, |_, cx| { + possible_open_target(&workspace, &path_like_target, cx) + })? + .await + else { + return Ok(None); + }; + + let path_to_open = open_target.path(); + let opened_items = workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_paths( + vec![path_to_open.path.clone()], + OpenOptions { + visible: Some(OpenVisible::OnlyDirectories), + ..Default::default() + }, + None, + window, + cx, + ) + }) + .context("workspace update")? + .await; + if opened_items.len() != 1 { + debug_panic!( + "Received {} items for one path {path_to_open:?}", + opened_items.len(), + ); + } + + if let Some(opened_item) = opened_items.first() { + if open_target.is_file() { + if let Some(Ok(opened_item)) = opened_item { + if let Some(row) = path_to_open.row { + let col = path_to_open.column.unwrap_or(0); + if let Some(active_editor) = opened_item.downcast::() { + active_editor + .downgrade() + .update_in(cx, |editor, window, cx| { + editor.go_to_singleton_buffer_point( + language::Point::new( + row.saturating_sub(1), + col.saturating_sub(1), + ), + window, + cx, + ) + }) + .log_err(); + } + } + return Ok(Some(open_target)); + } + } else if open_target.is_dir() { + workspace.update(cx, |workspace, cx| { + workspace.project().update(cx, |_, cx| { + cx.emit(project::Event::ActivateProjectPanel); + }) + })?; + return Ok(Some(open_target)); + } + } + Ok(None) + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use project::{Project, terminals::TerminalKind}; + use serde_json::json; + use std::path::{Path, PathBuf}; + use terminal::{HoveredWord, alacritty_terminal::index::Point as AlacPoint}; + use util::path; + use workspace::AppState; + + async fn init_test( + app_cx: &mut TestAppContext, + trees: impl IntoIterator, + worktree_roots: impl IntoIterator, + ) -> impl AsyncFnMut(HoveredWord, PathLikeTarget) -> (Option, Option) + { + let fs = app_cx.update(AppState::test).fs.as_fake().clone(); + + app_cx.update(|cx| { + terminal::init(cx); + theme::init(theme::LoadThemes::JustBase, cx); + Project::init_settings(cx); + language::init(cx); + editor::init(cx); + }); + + for (path, tree) in trees { + fs.insert_tree(path, tree).await; + } + + let project = Project::test( + fs.clone(), + worktree_roots + .into_iter() + .map(Path::new) + .collect::>(), + app_cx, + ) + .await; + + let (workspace, cx) = + app_cx.add_window_view(|window, cx| 
Workspace::test_new(project.clone(), window, cx)); + + let terminal = project + .update(cx, |project, cx| { + project.create_terminal(TerminalKind::Shell(None), cx) + }) + .await + .expect("Failed to create a terminal"); + + let workspace_a = workspace.clone(); + let (terminal_view, cx) = app_cx.add_window_view(|window, cx| { + TerminalView::new( + terminal, + workspace_a.downgrade(), + None, + project.downgrade(), + window, + cx, + ) + }); + + async move |hovered_word: HoveredWord, + path_like_target: PathLikeTarget| + -> (Option, Option) { + let workspace_a = workspace.clone(); + terminal_view + .update(cx, |_, cx| { + hover_path_like_target( + &workspace_a.downgrade(), + hovered_word, + &path_like_target, + cx, + ) + }) + .await; + + let hover_target = + terminal_view.read_with(cx, |terminal_view, _| terminal_view.hover.clone()); + + let open_target = terminal_view + .update_in(cx, |terminal_view, window, cx| { + possibly_open_target( + &workspace.downgrade(), + terminal_view, + &path_like_target, + window, + cx, + ) + }) + .await + .expect("Failed to possibly open target"); + + (hover_target, open_target) + } + } + + async fn test_path_like_simple( + test_path_like: &mut impl AsyncFnMut( + HoveredWord, + PathLikeTarget, + ) -> (Option, Option), + maybe_path: &str, + tooltip: &str, + terminal_dir: Option, + file: &str, + line: u32, + ) { + let (hover_target, open_target) = test_path_like( + HoveredWord { + word: maybe_path.to_string(), + word_match: AlacPoint::default()..=AlacPoint::default(), + id: 0, + }, + PathLikeTarget { + maybe_path: maybe_path.to_string(), + terminal_dir, + }, + ) + .await; + + let Some(hover_target) = hover_target else { + assert!( + hover_target.is_some(), + "Hover target should not be `None` at {file}:{line}:" + ); + return; + }; + + assert_eq!( + hover_target.tooltip, tooltip, + "Tooltip mismatch at {file}:{line}:" + ); + assert_eq!( + hover_target.hovered_word.word, maybe_path, + "Hovered word mismatch at {file}:{line}:" + ); + + let Some(open_target) = open_target else { + assert!( + open_target.is_some(), + "Open target should not be `None` at {file}:{line}:" + ); + return; + }; + + assert_eq!( + open_target.path().path, + Path::new(tooltip), + "Open target path mismatch at {file}:{line}:" + ); + } + + macro_rules! none_or_some { + () => { + None + }; + ($some:expr) => { + Some($some) + }; + } + + macro_rules! test_path_like { + ($test_path_like:expr, $maybe_path:literal, $tooltip:literal $(, $cwd:literal)?) => { + test_path_like_simple( + &mut $test_path_like, + path!($maybe_path), + path!($tooltip), + none_or_some!($($crate::PathBuf::from(path!($cwd)))?), + std::file!(), + std::line!(), + ) + .await + }; + } + + #[doc = "test_path_likes!(, , , { $(;)+ })"] + macro_rules! test_path_likes { + ($cx:expr, $trees:expr, $worktrees:expr, { $($tests:expr;)+ }) => { { + let mut test_path_like = init_test($cx, $trees, $worktrees).await; + #[doc ="test!(, , )"] + macro_rules! 
test { + ($maybe_path:literal, $tooltip:literal) => { + test_path_like!(test_path_like, $maybe_path, $tooltip) + }; + ($maybe_path:literal, $tooltip:literal, $cwd:literal) => { + test_path_like!(test_path_like, $maybe_path, $tooltip, $cwd) + } + } + $($tests);+ + } } + } + + #[gpui::test] + async fn one_folder_worktree(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![( + path!("/test"), + json!({ + "lib.rs": "", + "test.rs": "", + }), + )], + vec![path!("/test")], + { + test!("lib.rs", "/test/lib.rs"); + test!("test.rs", "/test/test.rs"); + } + ) + } + + #[gpui::test] + async fn mixed_worktrees(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![ + ( + path!("/"), + json!({ + "file.txt": "", + }), + ), + ( + path!("/test"), + json!({ + "lib.rs": "", + "test.rs": "", + "file.txt": "", + }), + ), + ], + vec![path!("/file.txt"), path!("/test")], + { + test!("file.txt", "/file.txt", "/"); + test!("lib.rs", "/test/lib.rs", "/test"); + test!("test.rs", "/test/test.rs", "/test"); + test!("file.txt", "/test/file.txt", "/test"); + } + ) + } + + #[gpui::test] + async fn worktree_file_preferred(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![ + ( + path!("/"), + json!({ + "file.txt": "", + }), + ), + ( + path!("/test"), + json!({ + "file.txt": "", + }), + ), + ], + vec![path!("/test")], + { + test!("file.txt", "/test/file.txt", "/test"); + } + ) + } + + mod issues { + use super::*; + + // https://github.com/zed-industries/zed/issues/28407 + #[gpui::test] + async fn issue_28407_siblings(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![( + path!("/dir1"), + json!({ + "dir 2": { + "C.py": "" + }, + "dir 3": { + "C.py": "" + }, + }), + )], + vec![path!("/dir1")], + { + test!("C.py", "/dir1/dir 2/C.py", "/dir1"); + test!("C.py", "/dir1/dir 2/C.py", "/dir1/dir 2"); + test!("C.py", "/dir1/dir 3/C.py", "/dir1/dir 3"); + } + ) + } + + // https://github.com/zed-industries/zed/issues/28407 + // See https://github.com/zed-industries/zed/issues/34027 + // See https://github.com/zed-industries/zed/issues/33498 + #[gpui::test] + #[should_panic(expected = "Tooltip mismatch")] + async fn issue_28407_nesting(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![( + path!("/project"), + json!({ + "lib": { + "src": { + "main.rs": "" + }, + }, + "src": { + "main.rs": "" + }, + }), + )], + vec![path!("/project")], + { + // Failing currently + test!("main.rs", "/project/src/main.rs", "/project"); + test!("main.rs", "/project/src/main.rs", "/project/src"); + test!("main.rs", "/project/lib/src/main.rs", "/project/lib"); + test!("main.rs", "/project/lib/src/main.rs", "/project/lib/src"); + + test!("src/main.rs", "/project/src/main.rs", "/project"); + test!("src/main.rs", "/project/src/main.rs", "/project/src"); + // Failing currently + test!("src/main.rs", "/project/lib/src/main.rs", "/project/lib"); + // Failing currently + test!( + "src/main.rs", + "/project/lib/src/main.rs", + "/project/lib/src" + ); + + test!("lib/src/main.rs", "/project/lib/src/main.rs", "/project"); + test!( + "lib/src/main.rs", + "/project/lib/src/main.rs", + "/project/src" + ); + test!( + "lib/src/main.rs", + "/project/lib/src/main.rs", + "/project/lib" + ); + test!( + "lib/src/main.rs", + "/project/lib/src/main.rs", + "/project/lib/src" + ); + } + ) + } + + // https://github.com/zed-industries/zed/issues/28339 + #[gpui::test] + async fn issue_28339(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![( + path!("/tmp"), + json!({ + "issue28339": { + "foo": { + "bar.txt": "" + }, + }, + }), + )], + 
vec![path!("/tmp")], + { + test!( + "foo/./bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339" + ); + test!( + "foo/../foo/bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339" + ); + test!( + "foo/..///foo/bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339" + ); + test!( + "issue28339/../issue28339/foo/../foo/bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339" + ); + test!( + "./bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339/foo" + ); + test!( + "../foo/bar.txt", + "/tmp/issue28339/foo/bar.txt", + "/tmp/issue28339/foo" + ); + } + ) + } + + // https://github.com/zed-industries/zed/issues/34027 + #[gpui::test] + #[should_panic(expected = "Tooltip mismatch")] + async fn issue_34027(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![( + path!("/tmp/issue34027"), + json!({ + "test.txt": "", + "foo": { + "test.txt": "", + } + }), + ),], + vec![path!("/tmp/issue34027")], + { + test!("test.txt", "/tmp/issue34027/test.txt", "/tmp/issue34027"); + test!( + "test.txt", + "/tmp/issue34027/foo/test.txt", + "/tmp/issue34027/foo" + ); + } + ) + } + + // https://github.com/zed-industries/zed/issues/34027 + #[gpui::test] + #[should_panic(expected = "Tooltip mismatch")] + async fn issue_34027_non_worktree_file(cx: &mut TestAppContext) { + test_path_likes!( + cx, + vec![ + ( + path!("/"), + json!({ + "file.txt": "", + }), + ), + ( + path!("/test"), + json!({ + "file.txt": "", + }), + ), + ], + vec![path!("/test")], + { + test!("file.txt", "/file.txt", "/"); + test!("file.txt", "/test/file.txt", "/test"); + } + ) + } + } +} diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 0c16e3fb9d..9aa855acb7 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1,22 +1,21 @@ -mod color_contrast; mod persistence; pub mod terminal_element; pub mod terminal_panel; +mod terminal_path_like_target; pub mod terminal_scrollbar; mod terminal_slash_command; pub mod terminal_tab_tooltip; use assistant_slash_command::SlashCommandRegistry; -use editor::{Editor, EditorSettings, actions::SelectAll, scroll::ScrollbarAutoHide}; +use editor::{EditorSettings, actions::SelectAll, scroll::ScrollbarAutoHide}; use gpui::{ Action, AnyElement, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, KeyContext, KeyDownEvent, Keystroke, MouseButton, MouseDownEvent, Pixels, Render, ScrollWheelEvent, Stateful, Styled, Subscription, Task, WeakEntity, actions, anchored, deferred, div, }; -use itertools::Itertools; use persistence::TERMINAL_DB; -use project::{Entry, Metadata, Project, search::SearchQuery, terminals::TerminalKind}; +use project::{Project, search::SearchQuery, terminals::TerminalKind}; use schemars::JsonSchema; use task::TaskId; use terminal::{ @@ -31,16 +30,17 @@ use terminal::{ }; use terminal_element::TerminalElement; use terminal_panel::TerminalPanel; +use terminal_path_like_target::{hover_path_like_target, open_path_like_target}; use terminal_scrollbar::TerminalScrollHandle; use terminal_slash_command::TerminalSlashCommand; use terminal_tab_tooltip::TerminalTooltip; use ui::{ ContextMenu, Icon, IconName, Label, Scrollbar, ScrollbarState, Tooltip, h_flex, prelude::*, }; -use util::{ResultExt, debug_panic, paths::PathWithPosition}; +use util::ResultExt; use workspace::{ - CloseActiveItem, NewCenterTerminal, NewTerminal, OpenOptions, OpenVisible, ToolbarItemLocation, - Workspace, WorkspaceId, delete_unloaded_items, + CloseActiveItem, NewCenterTerminal, 
NewTerminal, ToolbarItemLocation, Workspace, WorkspaceId, + delete_unloaded_items, item::{ BreadcrumbText, Item, ItemEvent, SerializableItem, TabContentParams, TabTooltipContent, }, @@ -48,7 +48,6 @@ use workspace::{ searchable::{Direction, SearchEvent, SearchOptions, SearchableItem, SearchableItemHandle}, }; -use anyhow::Context as _; use serde::Deserialize; use settings::{Settings, SettingsStore}; use smol::Timer; @@ -64,7 +63,6 @@ use std::{ }; const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); -const GIT_DIFF_PATH_PREFIXES: &[&str] = &["a", "b"]; const TERMINAL_SCROLLBAR_WIDTH: Pixels = px(12.); /// Event to transmit the scroll from the element to the view @@ -181,6 +179,7 @@ impl ContentMode { } #[derive(Debug)] +#[cfg_attr(test, derive(Clone, Eq, PartialEq))] struct HoverTarget { tooltip: String, hovered_word: HoveredWord, @@ -1066,37 +1065,13 @@ fn subscribe_for_terminal_events( .as_ref() .map(|hover| &hover.hovered_word) { - let valid_files_to_open_task = possible_open_target( + terminal_view.hover = None; + terminal_view.hover_tooltip_update = hover_path_like_target( &workspace, - &path_like_target.terminal_dir, - &path_like_target.maybe_path, + hovered_word.clone(), + path_like_target, cx, ); - let hovered_word = hovered_word.clone(); - - terminal_view.hover = None; - terminal_view.hover_tooltip_update = - cx.spawn(async move |terminal_view, cx| { - let file_to_open = valid_files_to_open_task.await; - terminal_view - .update(cx, |terminal_view, _| match file_to_open { - Some( - OpenTarget::File(path, _) - | OpenTarget::Worktree(path, _), - ) => { - terminal_view.hover = Some(HoverTarget { - tooltip: path.to_string(|path| { - path.to_string_lossy().to_string() - }), - hovered_word, - }); - } - None => { - terminal_view.hover = None; - } - }) - .ok(); - }); cx.notify(); } } @@ -1110,86 +1085,13 @@ fn subscribe_for_terminal_events( Event::Open(maybe_navigation_target) => match maybe_navigation_target { MaybeNavigationTarget::Url(url) => cx.open_url(url), - - MaybeNavigationTarget::PathLike(path_like_target) => { - if terminal_view.hover.is_none() { - return; - } - let task_workspace = workspace.clone(); - let path_like_target = path_like_target.clone(); - cx.spawn_in(window, async move |terminal_view, cx| { - let open_target = terminal_view - .update(cx, |_, cx| { - possible_open_target( - &task_workspace, - &path_like_target.terminal_dir, - &path_like_target.maybe_path, - cx, - ) - })? - .await; - if let Some(open_target) = open_target { - let path_to_open = open_target.path(); - let opened_items = task_workspace - .update_in(cx, |workspace, window, cx| { - workspace.open_paths( - vec![path_to_open.path.clone()], - OpenOptions { - visible: Some(OpenVisible::OnlyDirectories), - ..Default::default() - }, - None, - window, - cx, - ) - }) - .context("workspace update")? 
- .await; - if opened_items.len() != 1 { - debug_panic!( - "Received {} items for one path {path_to_open:?}", - opened_items.len(), - ); - } - - if let Some(opened_item) = opened_items.first() { - if open_target.is_file() { - if let Some(Ok(opened_item)) = opened_item - && let Some(row) = path_to_open.row - { - let col = path_to_open.column.unwrap_or(0); - if let Some(active_editor) = - opened_item.downcast::() - { - active_editor - .downgrade() - .update_in(cx, |editor, window, cx| { - editor.go_to_singleton_buffer_point( - language::Point::new( - row.saturating_sub(1), - col.saturating_sub(1), - ), - window, - cx, - ) - }) - .log_err(); - } - } - } else if open_target.is_dir() { - task_workspace.update(cx, |workspace, cx| { - workspace.project().update(cx, |_, cx| { - cx.emit(project::Event::ActivateProjectPanel); - }) - })?; - } - } - } - - anyhow::Ok(()) - }) - .detach_and_log_err(cx) - } + MaybeNavigationTarget::PathLike(path_like_target) => open_path_like_target( + &workspace, + terminal_view, + path_like_target, + window, + cx, + ), }, Event::BreadcrumbsChanged => cx.emit(ItemEvent::UpdateBreadcrumbs), Event::CloseTerminal => cx.emit(ItemEvent::CloseItem), @@ -1203,241 +1105,6 @@ fn subscribe_for_terminal_events( vec![terminal_subscription, terminal_events_subscription] } -#[derive(Debug, Clone)] -enum OpenTarget { - Worktree(PathWithPosition, Entry), - File(PathWithPosition, Metadata), -} - -impl OpenTarget { - fn is_file(&self) -> bool { - match self { - OpenTarget::Worktree(_, entry) => entry.is_file(), - OpenTarget::File(_, metadata) => !metadata.is_dir, - } - } - - fn is_dir(&self) -> bool { - match self { - OpenTarget::Worktree(_, entry) => entry.is_dir(), - OpenTarget::File(_, metadata) => metadata.is_dir, - } - } - - fn path(&self) -> &PathWithPosition { - match self { - OpenTarget::Worktree(path, _) => path, - OpenTarget::File(path, _) => path, - } - } -} - -fn possible_open_target( - workspace: &WeakEntity, - cwd: &Option, - maybe_path: &str, - cx: &App, -) -> Task> { - let Some(workspace) = workspace.upgrade() else { - return Task::ready(None); - }; - // We have to check for both paths, as on Unix, certain paths with positions are valid file paths too. - // We can be on FS remote part, without real FS, so cannot canonicalize or check for existence the path right away. - let mut potential_paths = Vec::new(); - let original_path = PathWithPosition::from_path(PathBuf::from(maybe_path)); - let path_with_position = PathWithPosition::parse_str(maybe_path); - let worktree_candidates = workspace - .read(cx) - .worktrees(cx) - .sorted_by_key(|worktree| { - let worktree_root = worktree.read(cx).abs_path(); - match cwd - .as_ref() - .and_then(|cwd| worktree_root.strip_prefix(cwd).ok()) - { - Some(cwd_child) => cwd_child.components().count(), - None => usize::MAX, - } - }) - .collect::>(); - // Since we do not check paths via FS and joining, we need to strip off potential `./`, `a/`, `b/` prefixes out of it. 
- for prefix_str in GIT_DIFF_PATH_PREFIXES.iter().chain(std::iter::once(&".")) { - if let Some(stripped) = original_path.path.strip_prefix(prefix_str).ok() { - potential_paths.push(PathWithPosition { - path: stripped.to_owned(), - row: original_path.row, - column: original_path.column, - }); - } - if let Some(stripped) = path_with_position.path.strip_prefix(prefix_str).ok() { - potential_paths.push(PathWithPosition { - path: stripped.to_owned(), - row: path_with_position.row, - column: path_with_position.column, - }); - } - } - - let insert_both_paths = original_path != path_with_position; - potential_paths.insert(0, original_path); - if insert_both_paths { - potential_paths.insert(1, path_with_position); - } - - // If we won't find paths "easily", we can traverse the entire worktree to look what ends with the potential path suffix. - // That will be slow, though, so do the fast checks first. - let mut worktree_paths_to_check = Vec::new(); - for worktree in &worktree_candidates { - let worktree_root = worktree.read(cx).abs_path(); - let mut paths_to_check = Vec::with_capacity(potential_paths.len()); - - for path_with_position in &potential_paths { - let path_to_check = if worktree_root.ends_with(&path_with_position.path) { - let root_path_with_position = PathWithPosition { - path: worktree_root.to_path_buf(), - row: path_with_position.row, - column: path_with_position.column, - }; - match worktree.read(cx).root_entry() { - Some(root_entry) => { - return Task::ready(Some(OpenTarget::Worktree( - root_path_with_position, - root_entry.clone(), - ))); - } - None => root_path_with_position, - } - } else { - PathWithPosition { - path: path_with_position - .path - .strip_prefix(&worktree_root) - .unwrap_or(&path_with_position.path) - .to_owned(), - row: path_with_position.row, - column: path_with_position.column, - } - }; - - if path_to_check.path.is_relative() - && let Some(entry) = worktree.read(cx).entry_for_path(&path_to_check.path) - { - return Task::ready(Some(OpenTarget::Worktree( - PathWithPosition { - path: worktree_root.join(&entry.path), - row: path_to_check.row, - column: path_to_check.column, - }, - entry.clone(), - ))); - } - - paths_to_check.push(path_to_check); - } - - if !paths_to_check.is_empty() { - worktree_paths_to_check.push((worktree.clone(), paths_to_check)); - } - } - - // Before entire worktree traversal(s), make an attempt to do FS checks if available. 
- let fs_paths_to_check = if workspace.read(cx).project().read(cx).is_local() { - potential_paths - .into_iter() - .flat_map(|path_to_check| { - let mut paths_to_check = Vec::new(); - let maybe_path = &path_to_check.path; - if maybe_path.starts_with("~") { - if let Some(home_path) = - maybe_path - .strip_prefix("~") - .ok() - .and_then(|stripped_maybe_path| { - Some(dirs::home_dir()?.join(stripped_maybe_path)) - }) - { - paths_to_check.push(PathWithPosition { - path: home_path, - row: path_to_check.row, - column: path_to_check.column, - }); - } - } else { - paths_to_check.push(PathWithPosition { - path: maybe_path.clone(), - row: path_to_check.row, - column: path_to_check.column, - }); - if maybe_path.is_relative() { - if let Some(cwd) = &cwd { - paths_to_check.push(PathWithPosition { - path: cwd.join(maybe_path), - row: path_to_check.row, - column: path_to_check.column, - }); - } - for worktree in &worktree_candidates { - paths_to_check.push(PathWithPosition { - path: worktree.read(cx).abs_path().join(maybe_path), - row: path_to_check.row, - column: path_to_check.column, - }); - } - } - } - paths_to_check - }) - .collect() - } else { - Vec::new() - }; - - let worktree_check_task = cx.spawn(async move |cx| { - for (worktree, worktree_paths_to_check) in worktree_paths_to_check { - let found_entry = worktree - .update(cx, |worktree, _| { - let worktree_root = worktree.abs_path(); - let mut traversal = worktree.traverse_from_path(true, true, false, "".as_ref()); - while let Some(entry) = traversal.next() { - if let Some(path_in_worktree) = worktree_paths_to_check - .iter() - .find(|path_to_check| entry.path.ends_with(&path_to_check.path)) - { - return Some(OpenTarget::Worktree( - PathWithPosition { - path: worktree_root.join(&entry.path), - row: path_in_worktree.row, - column: path_in_worktree.column, - }, - entry.clone(), - )); - } - } - None - }) - .ok()?; - if let Some(found_entry) = found_entry { - return Some(found_entry); - } - } - None - }); - - let fs = workspace.read(cx).project().read(cx).fs().clone(); - cx.background_spawn(async move { - for mut path_to_check in fs_paths_to_check { - if let Some(fs_path_to_check) = fs.canonicalize(&path_to_check.path).await.ok() - && let Some(metadata) = fs.metadata(&fs_path_to_check).await.ok().flatten() - { - path_to_check.path = fs_path_to_check; - return Some(OpenTarget::File(path_to_check, metadata)); - } - } - - worktree_check_task.await - }) -} - fn regex_search_for_query(query: &project::search::SearchQuery) -> Option { let str = query.as_str(); if query.is_regex() { diff --git a/crates/toolchain_selector/src/active_toolchain.rs b/crates/toolchain_selector/src/active_toolchain.rs index ea5dcc2a19..bf45bffea3 100644 --- a/crates/toolchain_selector/src/active_toolchain.rs +++ b/crates/toolchain_selector/src/active_toolchain.rs @@ -38,7 +38,6 @@ impl ActiveToolchain { .ok() .flatten(); if let Some(editor) = editor { - this.active_toolchain.take(); this.update_lister(editor, window, cx); } }, @@ -124,16 +123,6 @@ impl ActiveToolchain { if let Some((_, buffer, _)) = editor.active_excerpt(cx) && let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx)) { - if self - .active_buffer - .as_ref() - .is_some_and(|(old_worktree_id, old_buffer, _)| { - (old_worktree_id, old_buffer.entity_id()) == (&worktree_id, buffer.entity_id()) - }) - { - return; - } - let subscription = cx.subscribe_in( &buffer, window, diff --git a/crates/ui/src/components/callout.rs b/crates/ui/src/components/callout.rs index 7ffeda881c..b1ead18ee7 100644 --- 
a/crates/ui/src/components/callout.rs +++ b/crates/ui/src/components/callout.rs @@ -132,6 +132,7 @@ impl RenderOnce for Callout { h_flex() .min_w_0() + .w_full() .p_2() .gap_2() .items_start() diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index 25575c4f1e..21ab283d88 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -561,7 +561,7 @@ impl ContextMenu { action: Some(action.boxed_clone()), handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)), icon: Some(IconName::ArrowUpRight), - icon_size: IconSize::Small, + icon_size: IconSize::XSmall, icon_position: IconPosition::End, icon_color: None, disabled: false, diff --git a/crates/ui/src/components/disclosure.rs b/crates/ui/src/components/disclosure.rs index 98406cd1e2..4bb3419176 100644 --- a/crates/ui/src/components/disclosure.rs +++ b/crates/ui/src/components/disclosure.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use gpui::{ClickEvent, CursorStyle}; +use gpui::{ClickEvent, CursorStyle, SharedString}; use crate::{Color, IconButton, IconButtonShape, IconName, IconSize, prelude::*}; @@ -14,6 +14,7 @@ pub struct Disclosure { cursor_style: CursorStyle, opened_icon: IconName, closed_icon: IconName, + visible_on_hover: Option, } impl Disclosure { @@ -27,6 +28,7 @@ impl Disclosure { cursor_style: CursorStyle::PointingHand, opened_icon: IconName::ChevronDown, closed_icon: IconName::ChevronRight, + visible_on_hover: None, } } @@ -73,6 +75,13 @@ impl Clickable for Disclosure { } } +impl VisibleOnHover for Disclosure { + fn visible_on_hover(mut self, group_name: impl Into) -> Self { + self.visible_on_hover = Some(group_name.into()); + self + } +} + impl RenderOnce for Disclosure { fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { IconButton::new( @@ -87,6 +96,9 @@ impl RenderOnce for Disclosure { .icon_size(IconSize::Small) .disabled(self.disabled) .toggle_state(self.selected) + .when_some(self.visible_on_hover.clone(), |this, group_name| { + this.visible_on_hover(group_name) + }) .when_some(self.on_toggle, move |this, on_toggle| { this.on_click(move |event, window, cx| on_toggle(event, window, cx)) }) diff --git a/crates/ui/src/components/label.rs b/crates/ui/src/components/label.rs index 8c9ea62424..dc830559ca 100644 --- a/crates/ui/src/components/label.rs +++ b/crates/ui/src/components/label.rs @@ -2,8 +2,10 @@ mod highlighted_label; mod label; mod label_like; mod loading_label; +mod spinner_label; pub use highlighted_label::*; pub use label::*; pub use label_like::*; pub use loading_label::*; +pub use spinner_label::*; diff --git a/crates/ui/src/components/label/spinner_label.rs b/crates/ui/src/components/label/spinner_label.rs new file mode 100644 index 0000000000..b7b65fbcc9 --- /dev/null +++ b/crates/ui/src/components/label/spinner_label.rs @@ -0,0 +1,192 @@ +use crate::prelude::*; +use gpui::{Animation, AnimationExt, FontWeight}; +use std::time::Duration; + +/// Different types of spinner animations +#[derive(Debug, Default, Clone, Copy, PartialEq)] +pub enum SpinnerVariant { + #[default] + Dots, + DotsVariant, +} + +/// A spinner indication, based on the label component, that loops through +/// frames of the specified animation. It implements `LabelCommon` as well. 
+/// +/// # Default Example +/// +/// ``` +/// use ui::{SpinnerLabel}; +/// +/// SpinnerLabel::new(); +/// ``` +/// +/// # Variant Example +/// +/// ``` +/// use ui::{SpinnerLabel}; +/// +/// SpinnerLabel::dots_variant(); +/// ``` +#[derive(IntoElement, RegisterComponent)] +pub struct SpinnerLabel { + base: Label, + variant: SpinnerVariant, + frames: Vec<&'static str>, + duration: Duration, +} + +impl SpinnerVariant { + fn frames(&self) -> Vec<&'static str> { + match self { + SpinnerVariant::Dots => vec!["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"], + SpinnerVariant::DotsVariant => vec!["⣼", "⣹", "⢻", "⠿", "⡟", "⣏", "⣧", "⣶"], + } + } + + fn duration(&self) -> Duration { + match self { + SpinnerVariant::Dots => Duration::from_millis(1000), + SpinnerVariant::DotsVariant => Duration::from_millis(1000), + } + } + + fn animation_id(&self) -> &'static str { + match self { + SpinnerVariant::Dots => "spinner_label_dots", + SpinnerVariant::DotsVariant => "spinner_label_dots_variant", + } + } +} + +impl SpinnerLabel { + pub fn new() -> Self { + Self::with_variant(SpinnerVariant::default()) + } + + pub fn with_variant(variant: SpinnerVariant) -> Self { + let frames = variant.frames(); + let duration = variant.duration(); + + SpinnerLabel { + base: Label::new(frames[0]), + variant, + frames, + duration, + } + } + + pub fn dots() -> Self { + Self::with_variant(SpinnerVariant::Dots) + } + + pub fn dots_variant() -> Self { + Self::with_variant(SpinnerVariant::DotsVariant) + } +} + +impl LabelCommon for SpinnerLabel { + fn size(mut self, size: LabelSize) -> Self { + self.base = self.base.size(size); + self + } + + fn weight(mut self, weight: FontWeight) -> Self { + self.base = self.base.weight(weight); + self + } + + fn line_height_style(mut self, line_height_style: LineHeightStyle) -> Self { + self.base = self.base.line_height_style(line_height_style); + self + } + + fn color(mut self, color: Color) -> Self { + self.base = self.base.color(color); + self + } + + fn strikethrough(mut self) -> Self { + self.base = self.base.strikethrough(); + self + } + + fn italic(mut self) -> Self { + self.base = self.base.italic(); + self + } + + fn alpha(mut self, alpha: f32) -> Self { + self.base = self.base.alpha(alpha); + self + } + + fn underline(mut self) -> Self { + self.base = self.base.underline(); + self + } + + fn truncate(mut self) -> Self { + self.base = self.base.truncate(); + self + } + + fn single_line(mut self) -> Self { + self.base = self.base.single_line(); + self + } + + fn buffer_font(mut self, cx: &App) -> Self { + self.base = self.base.buffer_font(cx); + self + } + + fn inline_code(mut self, cx: &App) -> Self { + self.base = self.base.inline_code(cx); + self + } +} + +impl RenderOnce for SpinnerLabel { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let frames = self.frames.clone(); + let duration = self.duration; + + self.base.color(Color::Muted).with_animation( + self.variant.animation_id(), + Animation::new(duration).repeat(), + move |mut label, delta| { + let frame_index = (delta * frames.len() as f32) as usize % frames.len(); + + label.set_text(frames[frame_index]); + label + }, + ) + } +} + +impl Component for SpinnerLabel { + fn scope() -> ComponentScope { + ComponentScope::Loading + } + + fn name() -> &'static str { + "Spinner Label" + } + + fn sort_name() -> &'static str { + "Spinner Label" + } + + fn preview(_window: &mut Window, _cx: &mut App) -> Option { + let examples = vec![ + single_example("Default", SpinnerLabel::new().into_any_element()), + 
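
A small standalone sketch of how the SpinnerLabel above picks a frame: the repeating animation hands render a progress value `delta` in [0, 1], which is scaled by the frame count, truncated, and wrapped. The helper and frame table below are local to the sketch, not part of the component.

    const DOTS: [&str; 10] = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];

    fn frame_for(delta: f32) -> &'static str {
        // Same mapping as the render impl: scale progress by the frame count,
        // truncate, and wrap so a delta of 1.0 lands back on frame 0.
        DOTS[(delta * DOTS.len() as f32) as usize % DOTS.len()]
    }

    fn main() {
        assert_eq!(frame_for(0.0), "⠋"); // start of the cycle
        assert_eq!(frame_for(0.55), "⠴"); // a bit past halfway
        assert_eq!(frame_for(1.0), "⠋"); // wraps around
    }
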
single_example( + "Dots Variant", + SpinnerLabel::dots_variant().into_any_element(), + ), + ]; + + Some(example_group(examples).vertical().into_any_element()) + } +} diff --git a/crates/ui/src/utils.rs b/crates/ui/src/utils.rs index 26a59001f6..cd7d8eb497 100644 --- a/crates/ui/src/utils.rs +++ b/crates/ui/src/utils.rs @@ -3,12 +3,14 @@ use gpui::App; use theme::ActiveTheme; +mod apca_contrast; mod color_contrast; mod corner_solver; mod format_distance; mod search_input; mod with_rem_size; +pub use apca_contrast::*; pub use color_contrast::*; pub use corner_solver::{CornerSolver, inner_corner_radius}; pub use format_distance::*; diff --git a/crates/terminal_view/src/color_contrast.rs b/crates/ui/src/utils/apca_contrast.rs similarity index 100% rename from crates/terminal_view/src/color_contrast.rs rename to crates/ui/src/utils/apca_contrast.rs diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index b430120314..1192b14812 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -166,7 +166,7 @@ impl> From for SanitizedPath { } } -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum PathStyle { Posix, Windows, diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index 79d18a85e9..b57c916db9 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -1492,7 +1492,7 @@ impl OnMatchingLines { let mut search = String::new(); let mut escaped = false; - while let Some(c) = chars.next() { + for c in chars.by_ref() { if escaped { escaped = false; // unescape escaped parens diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 2bc531268d..726022021d 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -23,6 +23,8 @@ actions!( HelixInsert, /// Appends at the end of the selection. HelixAppend, + /// Goes to the location of the last modification. + HelixGotoLastModification, ] ); @@ -31,6 +33,7 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::helix_insert); Vim::action(editor, cx, Vim::helix_append); Vim::action(editor, cx, Vim::helix_yank); + Vim::action(editor, cx, Vim::helix_goto_last_modification); } impl Vim { @@ -430,6 +433,15 @@ impl Vim { }); self.switch_mode(Mode::HelixNormal, true, window, cx); } + + pub fn helix_goto_last_modification( + &mut self, + _: &HelixGotoLastModification, + window: &mut Window, + cx: &mut Context, + ) { + self.jump(".".into(), false, false, window, cx); + } } #[cfg(test)] @@ -441,6 +453,7 @@ mod test { #[gpui::test] async fn test_word_motions(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); // « // ˇ // » @@ -502,6 +515,7 @@ mod test { #[gpui::test] async fn test_delete(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); // test delete a selection cx.set_state( @@ -582,6 +596,7 @@ mod test { #[gpui::test] async fn test_f_and_t(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); cx.set_state( indoc! {" @@ -635,6 +650,7 @@ mod test { #[gpui::test] async fn test_newline_char(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); cx.set_state("aa«\nˇ»bb cc", Mode::HelixNormal); @@ -652,6 +668,7 @@ mod test { #[gpui::test] async fn test_insert_selected(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); cx.set_state( indoc! 
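
On the small `chars.by_ref()` change in crates/vim/src/command.rs above: a for-loop takes its iterator by value, so rewriting a while-let loop as `for c in chars.by_ref()` borrows the iterator for the duration of the loop and leaves it usable afterwards. A minimal standalone illustration of that standard-library behavior:

    fn main() {
        let mut chars = "abc/def".chars();
        let mut prefix = String::new();
        // Borrow the iterator for the loop instead of moving it into the loop.
        for c in chars.by_ref() {
            if c == '/' {
                break;
            }
            prefix.push(c);
        }
        // Only a borrow was consumed, so iteration can continue here,
        // picking up right after the '/'.
        let rest: String = chars.collect();
        assert_eq!(prefix, "abc");
        assert_eq!(rest, "def");
    }
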
{" «The ˇ»quick brown @@ -674,6 +691,7 @@ mod test { #[gpui::test] async fn test_append(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); // test from the end of the selection cx.set_state( indoc! {" @@ -716,6 +734,7 @@ mod test { #[gpui::test] async fn test_replace(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); // No selection (single character) cx.set_state("ˇaa", Mode::HelixNormal); @@ -763,4 +782,72 @@ mod test { cx.shared_clipboard().assert_eq("worl"); cx.assert_state("hello «worlˇ»d", Mode::HelixNormal); } + #[gpui::test] + async fn test_shift_r_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // First copy some text to clipboard + cx.set_state("«hello worldˇ»", Mode::HelixNormal); + cx.simulate_keystrokes("y"); + + // Test paste with shift-r on single cursor + cx.set_state("foo ˇbar", Mode::HelixNormal); + cx.simulate_keystrokes("shift-r"); + + cx.assert_state("foo hello worldˇbar", Mode::HelixNormal); + + // Test paste with shift-r on selection + cx.set_state("foo «barˇ» baz", Mode::HelixNormal); + cx.simulate_keystrokes("shift-r"); + + cx.assert_state("foo hello worldˇ baz", Mode::HelixNormal); + } + + #[gpui::test] + async fn test_insert_mode_stickiness(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Make a modification at a specific location + cx.set_state("ˇhello", Mode::HelixNormal); + assert_eq!(cx.mode(), Mode::HelixNormal); + cx.simulate_keystrokes("i"); + assert_eq!(cx.mode(), Mode::Insert); + cx.simulate_keystrokes("escape"); + assert_eq!(cx.mode(), Mode::HelixNormal); + } + + #[gpui::test] + async fn test_goto_last_modification(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Make a modification at a specific location + cx.set_state("line one\nline ˇtwo\nline three", Mode::HelixNormal); + cx.assert_state("line one\nline ˇtwo\nline three", Mode::HelixNormal); + cx.simulate_keystrokes("i"); + cx.simulate_keystrokes("escape"); + cx.simulate_keystrokes("i"); + cx.simulate_keystrokes("m o d i f i e d space"); + cx.simulate_keystrokes("escape"); + + // TODO: this fails, because state is no longer helix + cx.assert_state( + "line one\nline modified ˇtwo\nline three", + Mode::HelixNormal, + ); + + // Move cursor away from the modification + cx.simulate_keystrokes("up"); + + // Use "g ." to go back to last modification + cx.simulate_keystrokes("g ."); + + // Verify we're back at the modification location and still in HelixNormal mode + cx.assert_state( + "line one\nline modifiedˇ two\nline three", + Mode::HelixNormal, + ); + } } diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index a2f165e9fe..a54d3caa60 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -1610,10 +1610,20 @@ fn up_down_buffer_rows( map.line_len(begin_folded_line.row()) }; - ( - map.clip_point(DisplayPoint::new(begin_folded_line.row(), new_col), bias), - goal, - ) + let point = DisplayPoint::new(begin_folded_line.row(), new_col); + let mut clipped_point = map.clip_point(point, bias); + + // When navigating vertically in vim mode with inlay hints present, + // we need to handle the case where clipping moves us to a different row. + // This can happen when moving down (Bias::Right) and hitting an inlay hint. + // Re-clip with opposite bias to stay on the intended line. 
+ // + // See: https://github.com/zed-industries/zed/issues/29134 + if clipped_point.row() > point.row() { + clipped_point = map.clip_point(point, Bias::Left); + } + + (clipped_point, goal) } fn down_display( @@ -3842,6 +3852,84 @@ mod test { ); } + #[gpui::test] + async fn test_visual_mode_with_inlay_hints_on_empty_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Test the exact scenario from issue #29134 + cx.set_state( + indoc! {" + fn main() { + let this_is_a_long_name = Vec::::new(); + let new_oneˇ = this_is_a_long_name + .iter() + .map(|i| i + 1) + .map(|i| i * 2) + .collect::>(); + } + "}, + Mode::Normal, + ); + + // Add type hint inlay on the empty line (line 3, after "this_is_a_long_name") + cx.update_editor(|editor, _window, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + // The empty line is at line 3 (0-indexed) + let line_start = snapshot.anchor_after(Point::new(3, 0)); + let inlay_text = ": Vec"; + let inlay = Inlay::edit_prediction(1, line_start, inlay_text); + editor.splice_inlays(&[], vec![inlay], cx); + }); + + // Enter visual mode + cx.simulate_keystrokes("v"); + cx.assert_state( + indoc! {" + fn main() { + let this_is_a_long_name = Vec::::new(); + let new_one« ˇ»= this_is_a_long_name + .iter() + .map(|i| i + 1) + .map(|i| i * 2) + .collect::>(); + } + "}, + Mode::Visual, + ); + + // Move down - should go to the beginning of line 4, not skip to line 5 + cx.simulate_keystrokes("j"); + cx.assert_state( + indoc! {" + fn main() { + let this_is_a_long_name = Vec::::new(); + let new_one« = this_is_a_long_name + ˇ» .iter() + .map(|i| i + 1) + .map(|i| i * 2) + .collect::>(); + } + "}, + Mode::Visual, + ); + + // Test with multiple movements + cx.set_state("let aˇ = 1;\nlet b = 2;\n\nlet c = 3;", Mode::Normal); + + // Add type hint on the empty line + cx.update_editor(|editor, _window, cx| { + let snapshot = editor.buffer().read(cx).snapshot(cx); + let empty_line_start = snapshot.anchor_after(Point::new(2, 0)); + let inlay_text = ": i32"; + let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text); + editor.splice_inlays(&[], vec![inlay], cx); + }); + + // Enter visual mode and move down twice + cx.simulate_keystrokes("v j j"); + cx.assert_state("let a« = 1;\nlet b = 2;\n\nˇ»let c = 3;", Mode::Visual); + } + #[gpui::test] async fn test_go_to_percentage(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 115aef1dab..34ac4aab1f 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -70,8 +70,19 @@ impl Vim { } else { Point::new(row, 0) }; + let end = if row == selection.end.row { + selection.end + } else { + Point::new(row, snapshot.line_len(multi_buffer::MultiBufferRow(row))) + }; - if let Some((range, num, radix)) = find_number(&snapshot, start) { + let number_result = if !selection.is_empty() { + find_number_in_range(&snapshot, start, end) + } else { + find_number(&snapshot, start) + }; + + if let Some((range, num, radix)) = number_result { let replace = match radix { 10 => increment_decimal_string(&num, delta), 16 => increment_hex_string(&num, delta), @@ -189,6 +200,90 @@ fn increment_binary_string(num: &str, delta: i64) -> String { format!("{:0width$b}", result, width = num.len()) } +fn find_number_in_range( + snapshot: &MultiBufferSnapshot, + start: Point, + end: Point, +) -> Option<(Range, String, u32)> { + let start_offset = 
start.to_offset(snapshot); + let end_offset = end.to_offset(snapshot); + + let mut offset = start_offset; + + // Backward scan to find the start of the number, but stop at start_offset + for ch in snapshot.reversed_chars_at(offset) { + if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { + if offset == 0 { + break; + } + offset -= ch.len_utf8(); + if offset < start_offset { + offset = start_offset; + break; + } + } else { + break; + } + } + + let mut begin = None; + let mut end_num = None; + let mut num = String::new(); + let mut radix = 10; + + let mut chars = snapshot.chars_at(offset).peekable(); + + while let Some(ch) = chars.next() { + if offset >= end_offset { + break; // stop at end of selection + } + + if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) { + radix = 2; + begin = None; + num = String::new(); + } else if num == "0" + && ch == 'x' + && chars.peek().is_some() + && chars.peek().unwrap().is_ascii_hexdigit() + { + radix = 16; + begin = None; + num = String::new(); + } + + if ch.is_digit(radix) + || (begin.is_none() + && ch == '-' + && chars.peek().is_some() + && chars.peek().unwrap().is_digit(radix)) + { + if begin.is_none() { + begin = Some(offset); + } + num.push(ch); + } else if begin.is_some() { + end_num = Some(offset); + break; + } else if ch == '\n' { + break; + } + + offset += ch.len_utf8(); + } + + if let Some(begin) = begin { + let end_num = end_num.unwrap_or(offset); + Some(( + begin.to_point(snapshot)..end_num.to_point(snapshot), + num, + radix, + )) + } else { + None + } +} + fn find_number( snapshot: &MultiBufferSnapshot, start: Point, @@ -274,9 +369,9 @@ fn find_boolean(snapshot: &MultiBufferSnapshot, start: Point) -> Option<(Range

= &[ +pub struct VimDb(ThreadSafeConnection); + +impl Domain for VimDb { + const NAME: &str = stringify!(VimDb); + + const MIGRATIONS: &[&str] = &[ sql! ( CREATE TABLE vim_marks ( workspace_id INTEGER, @@ -1689,7 +1695,9 @@ define_connection! ( ON vim_global_marks_paths(workspace_id, mark_name); ), ]; -); +} + +db::static_connection!(DB, VimDb, [WorkspaceDb]); struct SerializedMark { path: Arc, diff --git a/crates/vim/test_data/test_increment_visual_partial_number.json b/crates/vim/test_data/test_increment_visual_partial_number.json new file mode 100644 index 0000000000..ebb4eece78 --- /dev/null +++ b/crates/vim/test_data/test_increment_visual_partial_number.json @@ -0,0 +1,20 @@ +{"Put":{"state":"ˇ123"}} +{"Key":"v"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"ˇ133","mode":"Normal"}} +{"Key":"l"} +{"Key":"v"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"1ˇ34","mode":"Normal"}} +{"Key":"shift-v"} +{"Key":"y"} +{"Key":"p"} +{"Key":"p"} +{"Key":"ctrl-v"} +{"Key":"k"} +{"Key":"k"} +{"Key":"l"} +{"Key":"ctrl-a"} +{"Get":{"state":"ˇ144\n144\n144","mode":"Normal"}} diff --git a/crates/watch/src/watch.rs b/crates/watch/src/watch.rs index f0ed5b4a18..71dab74820 100644 --- a/crates/watch/src/watch.rs +++ b/crates/watch/src/watch.rs @@ -162,6 +162,19 @@ impl Receiver { pending_waker_id: None, } } + + /// Creates a new [`Receiver`] holding an initial value that will never change. + pub fn constant(value: T) -> Self { + let state = Arc::new(RwLock::new(State { + value, + wakers: BTreeMap::new(), + next_waker_id: WakerId::default(), + version: 0, + closed: false, + })); + + Self { state, version: 0 } + } } impl Receiver { diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index e1bda7ad36..869aa5322e 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -29,7 +29,6 @@ test-support = [ any_vec.workspace = true anyhow.workspace = true async-recursion.workspace = true -bincode = "1.2.1" call.workspace = true client.workspace = true clock.workspace = true @@ -80,5 +79,6 @@ project = { workspace = true, features = ["test-support"] } session = { workspace = true, features = ["test-support"] } settings = { workspace = true, features = ["test-support"] } http_client = { workspace = true, features = ["test-support"] } +pretty_assertions.workspace = true tempfile.workspace = true zlog.workspace = true diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 7a8de6e910..149a122c0c 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -915,6 +915,11 @@ impl Render for PanelButtons { .on_click({ let action = action.boxed_clone(); move |_, window, cx| { + telemetry::event!( + "Panel Button Clicked", + name = name, + toggle_state = !is_open + ); window.focus(&focus_handle); window.dispatch_action(action.boxed_clone(), cx) } diff --git a/crates/workspace/src/history_manager.rs b/crates/workspace/src/history_manager.rs index a8387369f4..f68b58ff82 100644 --- a/crates/workspace/src/history_manager.rs +++ b/crates/workspace/src/history_manager.rs @@ -5,7 +5,9 @@ use smallvec::SmallVec; use ui::App; use util::{ResultExt, paths::PathExt}; -use crate::{NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId}; +use crate::{ + NewWindow, SerializedWorkspaceLocation, WORKSPACE_DB, WorkspaceId, path_list::PathList, +}; pub fn init(cx: &mut App) { let manager = cx.new(|_| HistoryManager::new()); @@ -44,7 +46,13 @@ impl HistoryManager { .unwrap_or_default() .into_iter() .rev() - .map(|(id, location)| 
HistoryManagerEntry::new(id, &location))
+            .filter_map(|(id, location, paths)| {
+                if matches!(location, SerializedWorkspaceLocation::Local) {
+                    Some(HistoryManagerEntry::new(id, &paths))
+                } else {
+                    None
+                }
+            })
             .collect::>();
         this.update(cx, |this, cx| {
             this.history = recent_folders;
@@ -118,9 +126,9 @@ impl HistoryManager {
 }
 
 impl HistoryManagerEntry {
-    pub fn new(id: WorkspaceId, location: &SerializedWorkspaceLocation) -> Self {
-        let path = location
-            .sorted_paths()
+    pub fn new(id: WorkspaceId, paths: &PathList) -> Self {
+        let path = paths
+            .paths()
             .iter()
             .map(|path| path.compact())
             .collect::>();
diff --git a/crates/workspace/src/invalid_buffer_view.rs b/crates/workspace/src/invalid_buffer_view.rs
new file mode 100644
index 0000000000..b8c0db29d3
--- /dev/null
+++ b/crates/workspace/src/invalid_buffer_view.rs
@@ -0,0 +1,111 @@
+use std::{path::Path, sync::Arc};
+
+use gpui::{EventEmitter, FocusHandle, Focusable};
+use ui::{
+    App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement,
+    KeyBinding, ParentElement, Render, SharedString, Styled as _, Window, h_flex, v_flex,
+};
+use zed_actions::workspace::OpenWithSystem;
+
+use crate::Item;
+
+/// A view to display when a buffer fails to open.
+pub struct InvalidBufferView {
+    /// The path that we attempted to open.
+    pub abs_path: Arc,
+    /// The error message produced when the buffer failed to open.
+    pub error: SharedString,
+    is_local: bool,
+    focus_handle: FocusHandle,
+}
+
+impl InvalidBufferView {
+    pub fn new(
+        abs_path: &Path,
+        is_local: bool,
+        e: &anyhow::Error,
+        _: &mut Window,
+        cx: &mut App,
+    ) -> Self {
+        Self {
+            is_local,
+            abs_path: Arc::from(abs_path),
+            error: format!("{e}").into(),
+            focus_handle: cx.focus_handle(),
+        }
+    }
+}
+
+impl Item for InvalidBufferView {
+    type Event = ();
+
+    fn tab_content_text(&self, mut detail: usize, _: &App) -> SharedString {
+        // Ensure we always render at least the filename.
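+        // With `detail == 0` that means just the file name; each extra level of
+        // `detail` keeps one more parent directory, and once there are no more
+        // parents left to strip, the full path is shown.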
+ detail += 1; + + let path = self.abs_path.as_ref(); + + let mut prefix = path; + while detail > 0 { + if let Some(parent) = prefix.parent() { + prefix = parent; + detail -= 1; + } else { + break; + } + } + + let path = if detail > 0 { + path + } else { + path.strip_prefix(prefix).unwrap_or(path) + }; + + SharedString::new(path.to_string_lossy()) + } +} + +impl EventEmitter<()> for InvalidBufferView {} + +impl Focusable for InvalidBufferView { + fn focus_handle(&self, _: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for InvalidBufferView { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl gpui::IntoElement { + let abs_path = self.abs_path.clone(); + v_flex() + .size_full() + .track_focus(&self.focus_handle(cx)) + .flex_none() + .justify_center() + .overflow_hidden() + .key_context("InvalidBuffer") + .child( + h_flex().size_full().justify_center().child( + v_flex() + .justify_center() + .gap_2() + .child(h_flex().justify_center().child("Unsupported file type")) + .when(self.is_local, |contents| { + contents.child( + h_flex().justify_center().child( + Button::new("open-with-system", "Open in Default App") + .on_click(move |_, _, cx| { + cx.open_with_system(&abs_path); + }) + .style(ButtonStyle::Outlined) + .key_binding(KeyBinding::for_action( + &OpenWithSystem, + window, + cx, + )), + ), + ) + }), + ), + ) + } +} diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 5a497398f9..db91bd82b9 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -1,6 +1,7 @@ use crate::{ CollaboratorId, DelayedDebouncedEditAction, FollowableViewRegistry, ItemNavHistory, SerializableItemRegistry, ToolbarItemLocation, ViewId, Workspace, WorkspaceId, + invalid_buffer_view::InvalidBufferView, pane::{self, Pane}, persistence::model::ItemId, searchable::SearchableItemHandle, @@ -22,6 +23,7 @@ use std::{ any::{Any, TypeId}, cell::RefCell, ops::Range, + path::Path, rc::Rc, sync::Arc, time::Duration, @@ -1161,6 +1163,22 @@ pub trait ProjectItem: Item { ) -> Self where Self: Sized; + + /// A fallback handler, which will be called after [`project::ProjectItem::try_open`] fails, + /// with the error from that failure as an argument. + /// Allows to open an item that can gracefully display and handle errors. 
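+    ///
+    /// For example, an implementation might return a view like [`InvalidBufferView`],
+    /// built from the failed path and the error, so the pane can still show a tab
+    /// with a readable explanation of the failure.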
+ fn for_broken_project_item( + _abs_path: &Path, + _is_local: bool, + _e: &anyhow::Error, + _window: &mut Window, + _cx: &mut App, + ) -> Option + where + Self: Sized, + { + None + } } #[derive(Debug)] diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 23c8c0b185..fe8014d9f7 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2,6 +2,7 @@ use crate::{ CloseWindow, NewFile, NewTerminal, OpenInTerminal, OpenOptions, OpenTerminal, OpenVisible, SplitDirection, ToggleFileFinder, ToggleProjectSymbols, ToggleZoom, Workspace, WorkspaceItemBuilder, + invalid_buffer_view::InvalidBufferView, item::{ ActivateOnClose, ClosePosition, Item, ItemHandle, ItemSettings, PreviewTabsSettings, ProjectItemKind, SaveOptions, ShowCloseButton, ShowDiagnostics, TabContentParams, @@ -513,7 +514,7 @@ impl Pane { } } - fn alternate_file(&mut self, window: &mut Window, cx: &mut Context) { + fn alternate_file(&mut self, _: &AlternateFile, window: &mut Window, cx: &mut Context) { let (_, alternative) = &self.alternate_file_items; if let Some(alternative) = alternative { let existing = self @@ -787,7 +788,7 @@ impl Pane { !self.nav_history.0.lock().forward_stack.is_empty() } - pub fn navigate_backward(&mut self, window: &mut Window, cx: &mut Context) { + pub fn navigate_backward(&mut self, _: &GoBack, window: &mut Window, cx: &mut Context) { if let Some(workspace) = self.workspace.upgrade() { let pane = cx.entity().downgrade(); window.defer(cx, move |window, cx| { @@ -798,7 +799,7 @@ impl Pane { } } - fn navigate_forward(&mut self, window: &mut Window, cx: &mut Context) { + fn navigate_forward(&mut self, _: &GoForward, window: &mut Window, cx: &mut Context) { if let Some(workspace) = self.workspace.upgrade() { let pane = cx.entity().downgrade(); window.defer(cx, move |window, cx| { @@ -897,19 +898,43 @@ impl Pane { } } } + + let set_up_existing_item = + |index: usize, pane: &mut Self, window: &mut Window, cx: &mut Context| { + // If the item is already open, and the item is a preview item + // and we are not allowing items to open as preview, mark the item as persistent. + if let Some(preview_item_id) = pane.preview_item_id + && let Some(tab) = pane.items.get(index) + && tab.item_id() == preview_item_id + && !allow_preview + { + pane.set_preview_item_id(None, cx); + } + if activate { + pane.activate_item(index, focus_item, focus_item, window, cx); + } + }; + let set_up_new_item = |new_item: Box, + destination_index: Option, + pane: &mut Self, + window: &mut Window, + cx: &mut Context| { + if allow_preview { + pane.set_preview_item_id(Some(new_item.item_id()), cx); + } + pane.add_item_inner( + new_item, + true, + focus_item, + activate, + destination_index, + window, + cx, + ); + }; + if let Some((index, existing_item)) = existing_item { - // If the item is already open, and the item is a preview item - // and we are not allowing items to open as preview, mark the item as persistent. 
- if let Some(preview_item_id) = self.preview_item_id - && let Some(tab) = self.items.get(index) - && tab.item_id() == preview_item_id - && !allow_preview - { - self.set_preview_item_id(None, cx); - } - if activate { - self.activate_item(index, focus_item, focus_item, window, cx); - } + set_up_existing_item(index, self, window, cx); existing_item } else { // If the item is being opened as preview and we have an existing preview tab, @@ -921,21 +946,46 @@ impl Pane { }; let new_item = build_item(self, window, cx); + // A special case that won't ever get a `project_entry_id` but has to be deduplicated nonetheless. + if let Some(invalid_buffer_view) = new_item.downcast::() { + let mut already_open_view = None; + let mut views_to_close = HashSet::default(); + for existing_error_view in self + .items_of_type::() + .filter(|item| item.read(cx).abs_path == invalid_buffer_view.read(cx).abs_path) + { + if already_open_view.is_none() + && existing_error_view.read(cx).error == invalid_buffer_view.read(cx).error + { + already_open_view = Some(existing_error_view); + } else { + views_to_close.insert(existing_error_view.item_id()); + } + } - if allow_preview { - self.set_preview_item_id(Some(new_item.item_id()), cx); + let resulting_item = match already_open_view { + Some(already_open_view) => { + if let Some(index) = self.index_for_item_id(already_open_view.item_id()) { + set_up_existing_item(index, self, window, cx); + } + Box::new(already_open_view) as Box<_> + } + None => { + set_up_new_item(new_item.clone(), destination_index, self, window, cx); + new_item + } + }; + + self.close_items(window, cx, SaveIntent::Skip, |existing_item| { + views_to_close.contains(&existing_item) + }) + .detach(); + + resulting_item + } else { + set_up_new_item(new_item.clone(), destination_index, self, window, cx); + new_item } - self.add_item_inner( - new_item.clone(), - true, - focus_item, - activate, - destination_index, - window, - cx, - ); - - new_item } } @@ -1233,9 +1283,9 @@ impl Pane { } } - pub fn activate_prev_item( + pub fn activate_previous_item( &mut self, - activate_pane: bool, + _: &ActivatePreviousItem, window: &mut Window, cx: &mut Context, ) { @@ -1245,12 +1295,12 @@ impl Pane { } else if !self.items.is_empty() { index = self.items.len() - 1; } - self.activate_item(index, activate_pane, activate_pane, window, cx); + self.activate_item(index, true, true, window, cx); } pub fn activate_next_item( &mut self, - activate_pane: bool, + _: &ActivateNextItem, window: &mut Window, cx: &mut Context, ) { @@ -1260,10 +1310,15 @@ impl Pane { } else { index = 0; } - self.activate_item(index, activate_pane, activate_pane, window, cx); + self.activate_item(index, true, true, window, cx); } - pub fn swap_item_left(&mut self, window: &mut Window, cx: &mut Context) { + pub fn swap_item_left( + &mut self, + _: &SwapItemLeft, + window: &mut Window, + cx: &mut Context, + ) { let index = self.active_item_index; if index == 0 { return; @@ -1273,9 +1328,14 @@ impl Pane { self.activate_item(index - 1, true, true, window, cx); } - pub fn swap_item_right(&mut self, window: &mut Window, cx: &mut Context) { + pub fn swap_item_right( + &mut self, + _: &SwapItemRight, + window: &mut Window, + cx: &mut Context, + ) { let index = self.active_item_index; - if index + 1 == self.items.len() { + if index + 1 >= self.items.len() { return; } @@ -1283,6 +1343,16 @@ impl Pane { self.activate_item(index + 1, true, true, window, cx); } + pub fn activate_last_item( + &mut self, + _: &ActivateLastItem, + window: &mut Window, + cx: &mut Context, + 
) { + let index = self.items.len().saturating_sub(1); + self.activate_item(index, true, true, window, cx); + } + pub fn close_active_item( &mut self, action: &CloseActiveItem, @@ -2831,7 +2901,9 @@ impl Pane { .on_click({ let entity = cx.entity(); move |_, window, cx| { - entity.update(cx, |pane, cx| pane.navigate_backward(window, cx)) + entity.update(cx, |pane, cx| { + pane.navigate_backward(&Default::default(), window, cx) + }) } }) .disabled(!self.can_navigate_backward()) @@ -2846,7 +2918,11 @@ impl Pane { .icon_size(IconSize::Small) .on_click({ let entity = cx.entity(); - move |_, window, cx| entity.update(cx, |pane, cx| pane.navigate_forward(window, cx)) + move |_, window, cx| { + entity.update(cx, |pane, cx| { + pane.navigate_forward(&Default::default(), window, cx) + }) + } }) .disabled(!self.can_navigate_forward()) .tooltip({ @@ -3478,9 +3554,6 @@ impl Render for Pane { .size_full() .flex_none() .overflow_hidden() - .on_action(cx.listener(|pane, _: &AlternateFile, window, cx| { - pane.alternate_file(window, cx); - })) .on_action( cx.listener(|pane, _: &SplitLeft, _, cx| pane.split(SplitDirection::Left, cx)), ) @@ -3497,12 +3570,6 @@ impl Render for Pane { .on_action( cx.listener(|pane, _: &SplitDown, _, cx| pane.split(SplitDirection::Down, cx)), ) - .on_action( - cx.listener(|pane, _: &GoBack, window, cx| pane.navigate_backward(window, cx)), - ) - .on_action( - cx.listener(|pane, _: &GoForward, window, cx| pane.navigate_forward(window, cx)), - ) .on_action(cx.listener(|_, _: &JoinIntoNext, _, cx| { cx.emit(Event::JoinIntoNext); })) @@ -3510,6 +3577,8 @@ impl Render for Pane { cx.emit(Event::JoinAll); })) .on_action(cx.listener(Pane::toggle_zoom)) + .on_action(cx.listener(Self::navigate_backward)) + .on_action(cx.listener(Self::navigate_forward)) .on_action( cx.listener(|pane: &mut Pane, action: &ActivateItem, window, cx| { pane.activate_item( @@ -3521,33 +3590,14 @@ impl Render for Pane { ); }), ) - .on_action( - cx.listener(|pane: &mut Pane, _: &ActivateLastItem, window, cx| { - pane.activate_item(pane.items.len().saturating_sub(1), true, true, window, cx); - }), - ) - .on_action( - cx.listener(|pane: &mut Pane, _: &ActivatePreviousItem, window, cx| { - pane.activate_prev_item(true, window, cx); - }), - ) - .on_action( - cx.listener(|pane: &mut Pane, _: &ActivateNextItem, window, cx| { - pane.activate_next_item(true, window, cx); - }), - ) - .on_action( - cx.listener(|pane, _: &SwapItemLeft, window, cx| pane.swap_item_left(window, cx)), - ) - .on_action( - cx.listener(|pane, _: &SwapItemRight, window, cx| pane.swap_item_right(window, cx)), - ) - .on_action(cx.listener(|pane, action, window, cx| { - pane.toggle_pin_tab(action, window, cx); - })) - .on_action(cx.listener(|pane, action, window, cx| { - pane.unpin_all_tabs(action, window, cx); - })) + .on_action(cx.listener(Self::alternate_file)) + .on_action(cx.listener(Self::activate_last_item)) + .on_action(cx.listener(Self::activate_previous_item)) + .on_action(cx.listener(Self::activate_next_item)) + .on_action(cx.listener(Self::swap_item_left)) + .on_action(cx.listener(Self::swap_item_right)) + .on_action(cx.listener(Self::toggle_pin_tab)) + .on_action(cx.listener(Self::unpin_all_tabs)) .when(PreviewTabsSettings::get_global(cx).enabled, |this| { this.on_action(cx.listener(|pane: &mut Pane, _: &TogglePreviewTab, _, cx| { if let Some(active_item_id) = pane.active_item().map(|i| i.item_id()) { @@ -6402,6 +6452,57 @@ mod tests { .unwrap(); } + #[gpui::test] + async fn test_item_swapping_actions(cx: &mut TestAppContext) { + 
init_test(cx); + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + assert_item_labels(&pane, [], cx); + + // Test that these actions do not panic + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_right(&Default::default(), window, cx); + }); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_left(&Default::default(), window, cx); + }); + + add_labeled_item(&pane, "A", false, cx); + add_labeled_item(&pane, "B", false, cx); + add_labeled_item(&pane, "C", false, cx); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_right(&Default::default(), window, cx); + }); + assert_item_labels(&pane, ["A", "B", "C*"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_left(&Default::default(), window, cx); + }); + assert_item_labels(&pane, ["A", "C*", "B"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_left(&Default::default(), window, cx); + }); + assert_item_labels(&pane, ["C*", "A", "B"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_left(&Default::default(), window, cx); + }); + assert_item_labels(&pane, ["C*", "A", "B"], cx); + + pane.update_in(cx, |pane, window, cx| { + pane.swap_item_right(&Default::default(), window, cx); + }); + assert_item_labels(&pane, ["A", "C*", "B"], cx); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); diff --git a/crates/workspace/src/path_list.rs b/crates/workspace/src/path_list.rs new file mode 100644 index 0000000000..cf463e6b22 --- /dev/null +++ b/crates/workspace/src/path_list.rs @@ -0,0 +1,123 @@ +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; + +use util::paths::SanitizedPath; + +/// A list of absolute paths, in a specific order. +/// +/// The paths are stored in lexicographic order, so that they can be compared to +/// other path lists without regard to the order of the paths. 
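+///
+/// For example, `PathList::new(&["a/d", "a/c"])` stores its `paths()` as
+/// `["a/c", "a/d"]` and records the original ordering in `order()` as `[1, 0]`,
+/// so it yields the same `paths()` as `PathList::new(&["a/c", "a/d"])` while
+/// still remembering the order in which the paths were given.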
+#[derive(Default, PartialEq, Eq, Debug, Clone)] +pub struct PathList { + paths: Arc<[PathBuf]>, + order: Arc<[usize]>, +} + +#[derive(Debug)] +pub struct SerializedPathList { + pub paths: String, + pub order: String, +} + +impl PathList { + pub fn new>(paths: &[P]) -> Self { + let mut indexed_paths: Vec<(usize, PathBuf)> = paths + .iter() + .enumerate() + .map(|(ix, path)| (ix, SanitizedPath::from(path).into())) + .collect(); + indexed_paths.sort_by(|(_, a), (_, b)| a.cmp(b)); + let order = indexed_paths.iter().map(|e| e.0).collect::>().into(); + let paths = indexed_paths + .into_iter() + .map(|e| e.1) + .collect::>() + .into(); + Self { order, paths } + } + + pub fn is_empty(&self) -> bool { + self.paths.is_empty() + } + + pub fn paths(&self) -> &[PathBuf] { + self.paths.as_ref() + } + + pub fn order(&self) -> &[usize] { + self.order.as_ref() + } + + pub fn is_lexicographically_ordered(&self) -> bool { + self.order.iter().enumerate().all(|(i, &j)| i == j) + } + + pub fn deserialize(serialized: &SerializedPathList) -> Self { + let mut paths: Vec = if serialized.paths.is_empty() { + Vec::new() + } else { + serialized.paths.split('\n').map(PathBuf::from).collect() + }; + + let mut order: Vec = serialized + .order + .split(',') + .filter_map(|s| s.parse().ok()) + .collect(); + + if !paths.is_sorted() || order.len() != paths.len() { + order = (0..paths.len()).collect(); + paths.sort(); + } + + Self { + paths: paths.into(), + order: order.into(), + } + } + + pub fn serialize(&self) -> SerializedPathList { + use std::fmt::Write as _; + + let mut paths = String::new(); + for path in self.paths.iter() { + if !paths.is_empty() { + paths.push('\n'); + } + paths.push_str(&path.to_string_lossy()); + } + + let mut order = String::new(); + for ix in self.order.iter() { + if !order.is_empty() { + order.push(','); + } + write!(&mut order, "{}", *ix).unwrap(); + } + SerializedPathList { paths, order } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_path_list() { + let list1 = PathList::new(&["a/d", "a/c"]); + let list2 = PathList::new(&["a/c", "a/d"]); + + assert_eq!(list1.paths(), list2.paths()); + assert_ne!(list1, list2); + assert_eq!(list1.order(), &[1, 0]); + assert_eq!(list2.order(), &[0, 1]); + + let list1_deserialized = PathList::deserialize(&list1.serialize()); + assert_eq!(list1_deserialized, list1); + + let list2_deserialized = PathList::deserialize(&list2.serialize()); + assert_eq!(list2_deserialized, list2); + } +} diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index b2d1340a7b..12e719cfd9 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -9,15 +9,17 @@ use std::{ }; use anyhow::{Context as _, Result, bail}; -use client::DevServerProjectId; -use db::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; +use collections::HashMap; +use db::{ + query, + sqlez::{connection::Connection, domain::Domain}, + sqlez_macros::sql, +}; use gpui::{Axis, Bounds, Task, WindowBounds, WindowId, point, size}; -use itertools::Itertools; use project::debugger::breakpoint_store::{BreakpointState, SourceBreakpoint}; use language::{LanguageName, Toolchain}; use project::WorktreeId; -use remote::ssh_session::SshProjectId; use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::{SqlType, Statement}, @@ -28,14 +30,17 @@ use ui::{App, px}; use util::{ResultExt, maybe}; use uuid::Uuid; -use crate::WorkspaceId; - -use model::{ - GroupId, ItemId, LocalPaths, PaneId, 
SerializedItem, SerializedPane, SerializedPaneGroup, - SerializedSshProject, SerializedWorkspace, +use crate::{ + WorkspaceId, + path_list::{PathList, SerializedPathList}, }; -use self::model::{DockStructure, LocalPathsOrder, SerializedWorkspaceLocation}; +use model::{ + GroupId, ItemId, PaneId, SerializedItem, SerializedPane, SerializedPaneGroup, + SerializedSshConnection, SerializedWorkspace, SshConnectionId, +}; + +use self::model::{DockStructure, SerializedWorkspaceLocation}; #[derive(Copy, Clone, Debug, PartialEq)] pub(crate) struct SerializedAxis(pub(crate) gpui::Axis); @@ -274,247 +279,189 @@ impl sqlez::bindable::Bind for SerializedPixels { } } -define_connection! { - // Current schema shape using pseudo-rust syntax: - // - // workspaces( - // workspace_id: usize, // Primary key for workspaces - // local_paths: Bincode>, - // local_paths_order: Bincode>, - // dock_visible: bool, // Deprecated - // dock_anchor: DockAnchor, // Deprecated - // dock_pane: Option, // Deprecated - // left_sidebar_open: boolean, - // timestamp: String, // UTC YYYY-MM-DD HH:MM:SS - // window_state: String, // WindowBounds Discriminant - // window_x: Option, // WindowBounds::Fixed RectF x - // window_y: Option, // WindowBounds::Fixed RectF y - // window_width: Option, // WindowBounds::Fixed RectF width - // window_height: Option, // WindowBounds::Fixed RectF height - // display: Option, // Display id - // fullscreen: Option, // Is the window fullscreen? - // centered_layout: Option, // Is the Centered Layout mode activated? - // session_id: Option, // Session id - // window_id: Option, // Window Id - // ) - // - // pane_groups( - // group_id: usize, // Primary key for pane_groups - // workspace_id: usize, // References workspaces table - // parent_group_id: Option, // None indicates that this is the root node - // position: Option, // None indicates that this is the root node - // axis: Option, // 'Vertical', 'Horizontal' - // flexes: Option>, // A JSON array of floats - // ) - // - // panes( - // pane_id: usize, // Primary key for panes - // workspace_id: usize, // References workspaces table - // active: bool, - // ) - // - // center_panes( - // pane_id: usize, // Primary key for center_panes - // parent_group_id: Option, // References pane_groups. If none, this is the root - // position: Option, // None indicates this is the root - // ) - // - // CREATE TABLE items( - // item_id: usize, // This is the item's view id, so this is not unique - // workspace_id: usize, // References workspaces table - // pane_id: usize, // References panes table - // kind: String, // Indicates which view this connects to. This is the key in the item_deserializers global - // position: usize, // Position of the item in the parent pane. 
This is equivalent to panes' position column - // active: bool, // Indicates if this item is the active one in the pane - // preview: bool // Indicates if this item is a preview item - // ) - // - // CREATE TABLE breakpoints( - // workspace_id: usize Foreign Key, // References workspace table - // path: PathBuf, // The absolute path of the file that this breakpoint belongs to - // breakpoint_location: Vec, // A list of the locations of breakpoints - // kind: int, // The kind of breakpoint (standard, log) - // log_message: String, // log message for log breakpoints, otherwise it's Null - // ) - pub static ref DB: WorkspaceDb<()> = - &[ +pub struct WorkspaceDb(ThreadSafeConnection); + +impl Domain for WorkspaceDb { + const NAME: &str = stringify!(WorkspaceDb); + + const MIGRATIONS: &[&str] = &[ sql!( - CREATE TABLE workspaces( - workspace_id INTEGER PRIMARY KEY, - workspace_location BLOB UNIQUE, - dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed. - dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed. - dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed. - left_sidebar_open INTEGER, // Boolean - timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, - FOREIGN KEY(dock_pane) REFERENCES panes(pane_id) - ) STRICT; + CREATE TABLE workspaces( + workspace_id INTEGER PRIMARY KEY, + workspace_location BLOB UNIQUE, + dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed. + dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed. + dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed. + left_sidebar_open INTEGER, // Boolean + timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, + FOREIGN KEY(dock_pane) REFERENCES panes(pane_id) + ) STRICT; - CREATE TABLE pane_groups( - group_id INTEGER PRIMARY KEY, - workspace_id INTEGER NOT NULL, - parent_group_id INTEGER, // NULL indicates that this is a root node - position INTEGER, // NULL indicates that this is a root node - axis TEXT NOT NULL, // Enum: 'Vertical' / 'Horizontal' - FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) - ON DELETE CASCADE - ON UPDATE CASCADE, - FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE - ) STRICT; + CREATE TABLE pane_groups( + group_id INTEGER PRIMARY KEY, + workspace_id INTEGER NOT NULL, + parent_group_id INTEGER, // NULL indicates that this is a root node + position INTEGER, // NULL indicates that this is a root node + axis TEXT NOT NULL, // Enum: 'Vertical' / 'Horizontal' + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ON UPDATE CASCADE, + FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE + ) STRICT; - CREATE TABLE panes( - pane_id INTEGER PRIMARY KEY, - workspace_id INTEGER NOT NULL, - active INTEGER NOT NULL, // Boolean - FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) - ON DELETE CASCADE - ON UPDATE CASCADE - ) STRICT; + CREATE TABLE panes( + pane_id INTEGER PRIMARY KEY, + workspace_id INTEGER NOT NULL, + active INTEGER NOT NULL, // Boolean + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ON UPDATE CASCADE + ) STRICT; - CREATE TABLE center_panes( - pane_id INTEGER PRIMARY KEY, - parent_group_id INTEGER, // NULL means that this is a root pane - position INTEGER, // NULL means that this is a root pane - FOREIGN KEY(pane_id) REFERENCES panes(pane_id) - ON DELETE CASCADE, - FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE - ) 
STRICT; + CREATE TABLE center_panes( + pane_id INTEGER PRIMARY KEY, + parent_group_id INTEGER, // NULL means that this is a root pane + position INTEGER, // NULL means that this is a root pane + FOREIGN KEY(pane_id) REFERENCES panes(pane_id) + ON DELETE CASCADE, + FOREIGN KEY(parent_group_id) REFERENCES pane_groups(group_id) ON DELETE CASCADE + ) STRICT; - CREATE TABLE items( - item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique - workspace_id INTEGER NOT NULL, - pane_id INTEGER NOT NULL, - kind TEXT NOT NULL, - position INTEGER NOT NULL, - active INTEGER NOT NULL, - FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) - ON DELETE CASCADE - ON UPDATE CASCADE, - FOREIGN KEY(pane_id) REFERENCES panes(pane_id) - ON DELETE CASCADE, - PRIMARY KEY(item_id, workspace_id) - ) STRICT; - ), - sql!( - ALTER TABLE workspaces ADD COLUMN window_state TEXT; - ALTER TABLE workspaces ADD COLUMN window_x REAL; - ALTER TABLE workspaces ADD COLUMN window_y REAL; - ALTER TABLE workspaces ADD COLUMN window_width REAL; - ALTER TABLE workspaces ADD COLUMN window_height REAL; - ALTER TABLE workspaces ADD COLUMN display BLOB; - ), - // Drop foreign key constraint from workspaces.dock_pane to panes table. - sql!( - CREATE TABLE workspaces_2( - workspace_id INTEGER PRIMARY KEY, - workspace_location BLOB UNIQUE, - dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed. - dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed. - dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed. - left_sidebar_open INTEGER, // Boolean - timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, - window_state TEXT, - window_x REAL, - window_y REAL, - window_width REAL, - window_height REAL, - display BLOB - ) STRICT; - INSERT INTO workspaces_2 SELECT * FROM workspaces; - DROP TABLE workspaces; - ALTER TABLE workspaces_2 RENAME TO workspaces; - ), - // Add panels related information - sql!( - ALTER TABLE workspaces ADD COLUMN left_dock_visible INTEGER; //bool - ALTER TABLE workspaces ADD COLUMN left_dock_active_panel TEXT; - ALTER TABLE workspaces ADD COLUMN right_dock_visible INTEGER; //bool - ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT; - ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool - ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT; - ), - // Add panel zoom persistence - sql!( - ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool - ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool - ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool - ), - // Add pane group flex data - sql!( - ALTER TABLE pane_groups ADD COLUMN flexes TEXT; - ), - // Add fullscreen field to workspace - // Deprecated, `WindowBounds` holds the fullscreen state now. - // Preserving so users can downgrade Zed. 
- sql!( - ALTER TABLE workspaces ADD COLUMN fullscreen INTEGER; //bool - ), - // Add preview field to items - sql!( - ALTER TABLE items ADD COLUMN preview INTEGER; //bool - ), - // Add centered_layout field to workspace - sql!( - ALTER TABLE workspaces ADD COLUMN centered_layout INTEGER; //bool - ), - sql!( - CREATE TABLE remote_projects ( - remote_project_id INTEGER NOT NULL UNIQUE, - path TEXT, - dev_server_name TEXT - ); - ALTER TABLE workspaces ADD COLUMN remote_project_id INTEGER; - ALTER TABLE workspaces RENAME COLUMN workspace_location TO local_paths; - ), - sql!( - DROP TABLE remote_projects; - CREATE TABLE dev_server_projects ( - id INTEGER NOT NULL UNIQUE, - path TEXT, - dev_server_name TEXT - ); - ALTER TABLE workspaces DROP COLUMN remote_project_id; - ALTER TABLE workspaces ADD COLUMN dev_server_project_id INTEGER; - ), - sql!( - ALTER TABLE workspaces ADD COLUMN local_paths_order BLOB; - ), - sql!( - ALTER TABLE workspaces ADD COLUMN session_id TEXT DEFAULT NULL; - ), - sql!( - ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL; - ), - sql!( - ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; - ), - sql!( - CREATE TABLE ssh_projects ( - id INTEGER PRIMARY KEY, - host TEXT NOT NULL, - port INTEGER, - path TEXT NOT NULL, - user TEXT - ); - ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; - ), - sql!( - ALTER TABLE ssh_projects RENAME COLUMN path TO paths; - ), - sql!( - CREATE TABLE toolchains ( - workspace_id INTEGER, - worktree_id INTEGER, - language_name TEXT NOT NULL, - name TEXT NOT NULL, - path TEXT NOT NULL, - PRIMARY KEY (workspace_id, worktree_id, language_name) - ); - ), - sql!( - ALTER TABLE toolchains ADD COLUMN raw_json TEXT DEFAULT "{}"; - ), - sql!( + CREATE TABLE items( + item_id INTEGER NOT NULL, // This is the item's view id, so this is not unique + workspace_id INTEGER NOT NULL, + pane_id INTEGER NOT NULL, + kind TEXT NOT NULL, + position INTEGER NOT NULL, + active INTEGER NOT NULL, + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ON UPDATE CASCADE, + FOREIGN KEY(pane_id) REFERENCES panes(pane_id) + ON DELETE CASCADE, + PRIMARY KEY(item_id, workspace_id) + ) STRICT; + ), + sql!( + ALTER TABLE workspaces ADD COLUMN window_state TEXT; + ALTER TABLE workspaces ADD COLUMN window_x REAL; + ALTER TABLE workspaces ADD COLUMN window_y REAL; + ALTER TABLE workspaces ADD COLUMN window_width REAL; + ALTER TABLE workspaces ADD COLUMN window_height REAL; + ALTER TABLE workspaces ADD COLUMN display BLOB; + ), + // Drop foreign key constraint from workspaces.dock_pane to panes table. + sql!( + CREATE TABLE workspaces_2( + workspace_id INTEGER PRIMARY KEY, + workspace_location BLOB UNIQUE, + dock_visible INTEGER, // Deprecated. Preserving so users can downgrade Zed. + dock_anchor TEXT, // Deprecated. Preserving so users can downgrade Zed. + dock_pane INTEGER, // Deprecated. Preserving so users can downgrade Zed. 
+ left_sidebar_open INTEGER, // Boolean + timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, + window_state TEXT, + window_x REAL, + window_y REAL, + window_width REAL, + window_height REAL, + display BLOB + ) STRICT; + INSERT INTO workspaces_2 SELECT * FROM workspaces; + DROP TABLE workspaces; + ALTER TABLE workspaces_2 RENAME TO workspaces; + ), + // Add panels related information + sql!( + ALTER TABLE workspaces ADD COLUMN left_dock_visible INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN left_dock_active_panel TEXT; + ALTER TABLE workspaces ADD COLUMN right_dock_visible INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT; + ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT; + ), + // Add panel zoom persistence + sql!( + ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool + ), + // Add pane group flex data + sql!( + ALTER TABLE pane_groups ADD COLUMN flexes TEXT; + ), + // Add fullscreen field to workspace + // Deprecated, `WindowBounds` holds the fullscreen state now. + // Preserving so users can downgrade Zed. + sql!( + ALTER TABLE workspaces ADD COLUMN fullscreen INTEGER; //bool + ), + // Add preview field to items + sql!( + ALTER TABLE items ADD COLUMN preview INTEGER; //bool + ), + // Add centered_layout field to workspace + sql!( + ALTER TABLE workspaces ADD COLUMN centered_layout INTEGER; //bool + ), + sql!( + CREATE TABLE remote_projects ( + remote_project_id INTEGER NOT NULL UNIQUE, + path TEXT, + dev_server_name TEXT + ); + ALTER TABLE workspaces ADD COLUMN remote_project_id INTEGER; + ALTER TABLE workspaces RENAME COLUMN workspace_location TO local_paths; + ), + sql!( + DROP TABLE remote_projects; + CREATE TABLE dev_server_projects ( + id INTEGER NOT NULL UNIQUE, + path TEXT, + dev_server_name TEXT + ); + ALTER TABLE workspaces DROP COLUMN remote_project_id; + ALTER TABLE workspaces ADD COLUMN dev_server_project_id INTEGER; + ), + sql!( + ALTER TABLE workspaces ADD COLUMN local_paths_order BLOB; + ), + sql!( + ALTER TABLE workspaces ADD COLUMN session_id TEXT DEFAULT NULL; + ), + sql!( + ALTER TABLE workspaces ADD COLUMN window_id INTEGER DEFAULT NULL; + ), + sql!( + ALTER TABLE panes ADD COLUMN pinned_count INTEGER DEFAULT 0; + ), + sql!( + CREATE TABLE ssh_projects ( + id INTEGER PRIMARY KEY, + host TEXT NOT NULL, + port INTEGER, + path TEXT NOT NULL, + user TEXT + ); + ALTER TABLE workspaces ADD COLUMN ssh_project_id INTEGER REFERENCES ssh_projects(id) ON DELETE CASCADE; + ), + sql!( + ALTER TABLE ssh_projects RENAME COLUMN path TO paths; + ), + sql!( + CREATE TABLE toolchains ( + workspace_id INTEGER, + worktree_id INTEGER, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_id, language_name) + ); + ), + sql!( + ALTER TABLE toolchains ADD COLUMN raw_json TEXT DEFAULT "{}"; + ), + sql!( CREATE TABLE breakpoints ( workspace_id INTEGER NOT NULL, path TEXT NOT NULL, @@ -526,39 +473,165 @@ define_connection! 
{ ON UPDATE CASCADE ); ), - sql!( - ALTER TABLE workspaces ADD COLUMN local_paths_array TEXT; - CREATE UNIQUE INDEX local_paths_array_uq ON workspaces(local_paths_array); - ALTER TABLE workspaces ADD COLUMN local_paths_order_array TEXT; - ), - sql!( - ALTER TABLE breakpoints ADD COLUMN state INTEGER DEFAULT(0) NOT NULL - ), - sql!( - ALTER TABLE breakpoints DROP COLUMN kind - ), - sql!(ALTER TABLE toolchains ADD COLUMN relative_worktree_path TEXT DEFAULT "" NOT NULL), - sql!( - ALTER TABLE breakpoints ADD COLUMN condition TEXT; - ALTER TABLE breakpoints ADD COLUMN hit_condition TEXT; - ), - sql!(CREATE TABLE toolchains2 ( - workspace_id INTEGER, - worktree_id INTEGER, - language_name TEXT NOT NULL, - name TEXT NOT NULL, - path TEXT NOT NULL, - raw_json TEXT NOT NULL, - relative_worktree_path TEXT NOT NULL, - PRIMARY KEY (workspace_id, worktree_id, language_name, relative_worktree_path)) STRICT; - INSERT INTO toolchains2 - SELECT * FROM toolchains; - DROP TABLE toolchains; - ALTER TABLE toolchains2 RENAME TO toolchains; - ) + sql!( + ALTER TABLE workspaces ADD COLUMN local_paths_array TEXT; + CREATE UNIQUE INDEX local_paths_array_uq ON workspaces(local_paths_array); + ALTER TABLE workspaces ADD COLUMN local_paths_order_array TEXT; + ), + sql!( + ALTER TABLE breakpoints ADD COLUMN state INTEGER DEFAULT(0) NOT NULL + ), + sql!( + ALTER TABLE breakpoints DROP COLUMN kind + ), + sql!(ALTER TABLE toolchains ADD COLUMN relative_worktree_path TEXT DEFAULT "" NOT NULL), + sql!( + ALTER TABLE breakpoints ADD COLUMN condition TEXT; + ALTER TABLE breakpoints ADD COLUMN hit_condition TEXT; + ), + sql!(CREATE TABLE toolchains2 ( + workspace_id INTEGER, + worktree_id INTEGER, + language_name TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + raw_json TEXT NOT NULL, + relative_worktree_path TEXT NOT NULL, + PRIMARY KEY (workspace_id, worktree_id, language_name, relative_worktree_path)) STRICT; + INSERT INTO toolchains2 + SELECT * FROM toolchains; + DROP TABLE toolchains; + ALTER TABLE toolchains2 RENAME TO toolchains; + ), + sql!( + CREATE TABLE ssh_connections ( + id INTEGER PRIMARY KEY, + host TEXT NOT NULL, + port INTEGER, + user TEXT + ); + + INSERT INTO ssh_connections (host, port, user) + SELECT DISTINCT host, port, user + FROM ssh_projects; + + CREATE TABLE workspaces_2( + workspace_id INTEGER PRIMARY KEY, + paths TEXT, + paths_order TEXT, + ssh_connection_id INTEGER REFERENCES ssh_connections(id), + timestamp TEXT DEFAULT CURRENT_TIMESTAMP NOT NULL, + window_state TEXT, + window_x REAL, + window_y REAL, + window_width REAL, + window_height REAL, + display BLOB, + left_dock_visible INTEGER, + left_dock_active_panel TEXT, + right_dock_visible INTEGER, + right_dock_active_panel TEXT, + bottom_dock_visible INTEGER, + bottom_dock_active_panel TEXT, + left_dock_zoom INTEGER, + right_dock_zoom INTEGER, + bottom_dock_zoom INTEGER, + fullscreen INTEGER, + centered_layout INTEGER, + session_id TEXT, + window_id INTEGER + ) STRICT; + + INSERT + INTO workspaces_2 + SELECT + workspaces.workspace_id, + CASE + WHEN ssh_projects.id IS NOT NULL THEN ssh_projects.paths + ELSE + CASE + WHEN workspaces.local_paths_array IS NULL OR workspaces.local_paths_array = "" THEN + NULL + ELSE + replace(workspaces.local_paths_array, ',', CHAR(10)) + END + END as paths, + + CASE + WHEN ssh_projects.id IS NOT NULL THEN "" + ELSE workspaces.local_paths_order_array + END as paths_order, + + CASE + WHEN ssh_projects.id IS NOT NULL THEN ( + SELECT ssh_connections.id + FROM ssh_connections + WHERE + ssh_connections.host IS 
ssh_projects.host AND + ssh_connections.port IS ssh_projects.port AND + ssh_connections.user IS ssh_projects.user + ) + ELSE NULL + END as ssh_connection_id, + + workspaces.timestamp, + workspaces.window_state, + workspaces.window_x, + workspaces.window_y, + workspaces.window_width, + workspaces.window_height, + workspaces.display, + workspaces.left_dock_visible, + workspaces.left_dock_active_panel, + workspaces.right_dock_visible, + workspaces.right_dock_active_panel, + workspaces.bottom_dock_visible, + workspaces.bottom_dock_active_panel, + workspaces.left_dock_zoom, + workspaces.right_dock_zoom, + workspaces.bottom_dock_zoom, + workspaces.fullscreen, + workspaces.centered_layout, + workspaces.session_id, + workspaces.window_id + FROM + workspaces LEFT JOIN + ssh_projects ON + workspaces.ssh_project_id = ssh_projects.id; + + DROP TABLE ssh_projects; + DROP TABLE workspaces; + ALTER TABLE workspaces_2 RENAME TO workspaces; + + CREATE UNIQUE INDEX ix_workspaces_location ON workspaces(ssh_connection_id, paths); + ), + // Fix any data from when workspaces.paths were briefly encoded as JSON arrays + sql!( + UPDATE workspaces + SET paths = CASE + WHEN substr(paths, 1, 2) = '[' || '"' AND substr(paths, -2, 2) = '"' || ']' THEN + replace( + substr(paths, 3, length(paths) - 4), + '"' || ',' || '"', + CHAR(10) + ) + ELSE + replace(paths, ',', CHAR(10)) + END + WHERE paths IS NOT NULL + ), ]; + + // Allow recovering from bad migration that was initially shipped to nightly + // when introducing the ssh_connections table. + fn should_allow_migration_change(_index: usize, old: &str, new: &str) -> bool { + old.starts_with("CREATE TABLE ssh_connections") + && new.starts_with("CREATE TABLE ssh_connections") + } } +db::static_connection!(DB, WorkspaceDb, []); + impl WorkspaceDb { /// Returns a serialized workspace for the given worktree_roots. If the passed array /// is empty, the most recent workspace is returned instead. If no workspace for the @@ -566,17 +639,33 @@ impl WorkspaceDb { pub(crate) fn workspace_for_roots>( &self, worktree_roots: &[P], + ) -> Option { + self.workspace_for_roots_internal(worktree_roots, None) + } + + pub(crate) fn ssh_workspace_for_roots>( + &self, + worktree_roots: &[P], + ssh_project_id: SshConnectionId, + ) -> Option { + self.workspace_for_roots_internal(worktree_roots, Some(ssh_project_id)) + } + + pub(crate) fn workspace_for_roots_internal>( + &self, + worktree_roots: &[P], + ssh_connection_id: Option, ) -> Option { // paths are sorted before db interactions to ensure that the order of the paths // doesn't affect the workspace selection for existing workspaces - let local_paths = LocalPaths::new(worktree_roots); + let root_paths = PathList::new(worktree_roots); // Note that we re-assign the workspace_id here in case it's empty // and we've grabbed the most recent workspace let ( workspace_id, - local_paths, - local_paths_order, + paths, + paths_order, window_bounds, display, centered_layout, @@ -584,8 +673,8 @@ impl WorkspaceDb { window_id, ): ( WorkspaceId, - Option, - Option, + String, + String, Option, Option, Option, @@ -595,8 +684,8 @@ impl WorkspaceDb { .select_row_bound(sql! { SELECT workspace_id, - local_paths, - local_paths_order, + paths, + paths_order, window_state, window_x, window_y, @@ -615,25 +704,31 @@ impl WorkspaceDb { bottom_dock_zoom, window_id FROM workspaces - WHERE local_paths = ? + WHERE + paths IS ? AND + ssh_connection_id IS ? 
+ LIMIT 1 + }) + .map(|mut prepared_statement| { + (prepared_statement)(( + root_paths.serialize().paths, + ssh_connection_id.map(|id| id.0 as i32), + )) + .unwrap() }) - .and_then(|mut prepared_statement| (prepared_statement)(&local_paths)) .context("No workspaces found") .warn_on_err() .flatten()?; - let local_paths = local_paths?; - let location = match local_paths_order { - Some(order) => SerializedWorkspaceLocation::Local(local_paths, order), - None => { - let order = LocalPathsOrder::default_for_paths(&local_paths); - SerializedWorkspaceLocation::Local(local_paths, order) - } - }; + let paths = PathList::deserialize(&SerializedPathList { + paths, + order: paths_order, + }); Some(SerializedWorkspace { id: workspace_id, - location, + location: SerializedWorkspaceLocation::Local, + paths, center_group: self .get_center_pane_group(workspace_id) .context("Getting center group") @@ -648,63 +743,6 @@ impl WorkspaceDb { }) } - pub(crate) fn workspace_for_ssh_project( - &self, - ssh_project: &SerializedSshProject, - ) -> Option { - let (workspace_id, window_bounds, display, centered_layout, docks, window_id): ( - WorkspaceId, - Option, - Option, - Option, - DockStructure, - Option, - ) = self - .select_row_bound(sql! { - SELECT - workspace_id, - window_state, - window_x, - window_y, - window_width, - window_height, - display, - centered_layout, - left_dock_visible, - left_dock_active_panel, - left_dock_zoom, - right_dock_visible, - right_dock_active_panel, - right_dock_zoom, - bottom_dock_visible, - bottom_dock_active_panel, - bottom_dock_zoom, - window_id - FROM workspaces - WHERE ssh_project_id = ? - }) - .and_then(|mut prepared_statement| (prepared_statement)(ssh_project.id.0)) - .context("No workspaces found") - .warn_on_err() - .flatten()?; - - Some(SerializedWorkspace { - id: workspace_id, - location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()), - center_group: self - .get_center_pane_group(workspace_id) - .context("Getting center group") - .log_err()?, - window_bounds, - centered_layout: centered_layout.unwrap_or(false), - breakpoints: self.breakpoints(workspace_id), - display, - docks, - session_id: None, - window_id, - }) - } - fn breakpoints(&self, workspace_id: WorkspaceId) -> BTreeMap, Vec> { let breakpoints: Result> = self .select_bound(sql! { @@ -754,16 +792,34 @@ impl WorkspaceDb { /// Saves a workspace using the worktree roots. 
Will garbage collect any workspaces /// that used this workspace previously pub(crate) async fn save_workspace(&self, workspace: SerializedWorkspace) { + let paths = workspace.paths.serialize(); log::debug!("Saving workspace at location: {:?}", workspace.location); self.write(move |conn| { conn.with_savepoint("update_worktrees", || { + let ssh_connection_id = match &workspace.location { + SerializedWorkspaceLocation::Local => None, + SerializedWorkspaceLocation::Ssh(connection) => { + Some(Self::get_or_create_ssh_connection_query( + conn, + connection.host.clone(), + connection.port, + connection.user.clone(), + )?.0) + } + }; + // Clear out panes and pane_groups conn.exec_bound(sql!( DELETE FROM pane_groups WHERE workspace_id = ?1; DELETE FROM panes WHERE workspace_id = ?1;))?(workspace.id) .context("Clearing old panes")?; - conn.exec_bound(sql!(DELETE FROM breakpoints WHERE workspace_id = ?1))?(workspace.id).context("Clearing old breakpoints")?; + conn.exec_bound( + sql!( + DELETE FROM breakpoints WHERE workspace_id = ?1; + DELETE FROM toolchains WHERE workspace_id = ?1; + ) + )?(workspace.id).context("Clearing old breakpoints")?; for (path, breakpoints) in workspace.breakpoints { for bp in breakpoints { @@ -790,115 +846,73 @@ impl WorkspaceDb { } } } - } + conn.exec_bound(sql!( + DELETE + FROM workspaces + WHERE + workspace_id != ?1 AND + paths IS ?2 AND + ssh_connection_id IS ?3 + ))?(( + workspace.id, + paths.paths.clone(), + ssh_connection_id, + )) + .context("clearing out old locations")?; - match workspace.location { - SerializedWorkspaceLocation::Local(local_paths, local_paths_order) => { - conn.exec_bound(sql!( - DELETE FROM toolchains WHERE workspace_id = ?1; - DELETE FROM workspaces WHERE local_paths = ? AND workspace_id != ? - ))?((&local_paths, workspace.id)) - .context("clearing out old locations")?; + // Upsert + let query = sql!( + INSERT INTO workspaces( + workspace_id, + paths, + paths_order, + ssh_connection_id, + left_dock_visible, + left_dock_active_panel, + left_dock_zoom, + right_dock_visible, + right_dock_active_panel, + right_dock_zoom, + bottom_dock_visible, + bottom_dock_active_panel, + bottom_dock_zoom, + session_id, + window_id, + timestamp + ) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, CURRENT_TIMESTAMP) + ON CONFLICT DO + UPDATE SET + paths = ?2, + paths_order = ?3, + ssh_connection_id = ?4, + left_dock_visible = ?5, + left_dock_active_panel = ?6, + left_dock_zoom = ?7, + right_dock_visible = ?8, + right_dock_active_panel = ?9, + right_dock_zoom = ?10, + bottom_dock_visible = ?11, + bottom_dock_active_panel = ?12, + bottom_dock_zoom = ?13, + session_id = ?14, + window_id = ?15, + timestamp = CURRENT_TIMESTAMP + ); + let mut prepared_query = conn.exec_bound(query)?; + let args = ( + workspace.id, + paths.paths.clone(), + paths.order.clone(), + ssh_connection_id, + workspace.docks, + workspace.session_id, + workspace.window_id, + ); - // Upsert - let query = sql!( - INSERT INTO workspaces( - workspace_id, - local_paths, - local_paths_order, - left_dock_visible, - left_dock_active_panel, - left_dock_zoom, - right_dock_visible, - right_dock_active_panel, - right_dock_zoom, - bottom_dock_visible, - bottom_dock_active_panel, - bottom_dock_zoom, - session_id, - window_id, - timestamp, - local_paths_array, - local_paths_order_array - ) - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, CURRENT_TIMESTAMP, ?15, ?16) - ON CONFLICT DO - UPDATE SET - local_paths = ?2, - local_paths_order = ?3, - left_dock_visible = ?4, 
- left_dock_active_panel = ?5, - left_dock_zoom = ?6, - right_dock_visible = ?7, - right_dock_active_panel = ?8, - right_dock_zoom = ?9, - bottom_dock_visible = ?10, - bottom_dock_active_panel = ?11, - bottom_dock_zoom = ?12, - session_id = ?13, - window_id = ?14, - timestamp = CURRENT_TIMESTAMP, - local_paths_array = ?15, - local_paths_order_array = ?16 - ); - let mut prepared_query = conn.exec_bound(query)?; - let args = (workspace.id, &local_paths, &local_paths_order, workspace.docks, workspace.session_id, workspace.window_id, local_paths.paths().iter().map(|path| path.to_string_lossy().to_string()).join(","), local_paths_order.order().iter().map(|order| order.to_string()).join(",")); - - prepared_query(args).context("Updating workspace")?; - } - SerializedWorkspaceLocation::Ssh(ssh_project) => { - conn.exec_bound(sql!( - DELETE FROM toolchains WHERE workspace_id = ?1; - DELETE FROM workspaces WHERE ssh_project_id = ? AND workspace_id != ? - ))?((ssh_project.id.0, workspace.id)) - .context("clearing out old locations")?; - - // Upsert - conn.exec_bound(sql!( - INSERT INTO workspaces( - workspace_id, - ssh_project_id, - left_dock_visible, - left_dock_active_panel, - left_dock_zoom, - right_dock_visible, - right_dock_active_panel, - right_dock_zoom, - bottom_dock_visible, - bottom_dock_active_panel, - bottom_dock_zoom, - session_id, - window_id, - timestamp - ) - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, CURRENT_TIMESTAMP) - ON CONFLICT DO - UPDATE SET - ssh_project_id = ?2, - left_dock_visible = ?3, - left_dock_active_panel = ?4, - left_dock_zoom = ?5, - right_dock_visible = ?6, - right_dock_active_panel = ?7, - right_dock_zoom = ?8, - bottom_dock_visible = ?9, - bottom_dock_active_panel = ?10, - bottom_dock_zoom = ?11, - session_id = ?12, - window_id = ?13, - timestamp = CURRENT_TIMESTAMP - ))?(( - workspace.id, - ssh_project.id.0, - workspace.docks, - workspace.session_id, - workspace.window_id - )) - .context("Updating workspace")?; - } - } + prepared_query(args).context("Updating workspace")?; // Save center pane group Self::save_pane_group(conn, workspace.id, &workspace.center_group, None) @@ -911,89 +925,95 @@ impl WorkspaceDb { .await; } - pub(crate) async fn get_or_create_ssh_project( + pub(crate) async fn get_or_create_ssh_connection( &self, host: String, port: Option, - paths: Vec, user: Option, - ) -> Result { - let paths = serde_json::to_string(&paths)?; - if let Some(project) = self - .get_ssh_project(host.clone(), port, paths.clone(), user.clone()) - .await? + ) -> Result { + self.write(move |conn| Self::get_or_create_ssh_connection_query(conn, host, port, user)) + .await + } + + fn get_or_create_ssh_connection_query( + this: &Connection, + host: String, + port: Option, + user: Option, + ) -> Result { + if let Some(id) = this.select_row_bound(sql!( + SELECT id FROM ssh_connections WHERE host IS ? AND port IS ? AND user IS ? LIMIT 1 + ))?((host.clone(), port, user.clone()))? { - Ok(project) + Ok(SshConnectionId(id)) } else { log::debug!("Inserting SSH project at host {host}"); - self.insert_ssh_project(host, port, paths, user) - .await? - .context("failed to insert ssh project") + let id = this.select_row_bound(sql!( + INSERT INTO ssh_connections ( + host, + port, + user + ) VALUES (?1, ?2, ?3) + RETURNING id + ))?((host, port, user))? + .context("failed to insert ssh project")?; + Ok(SshConnectionId(id)) } } - query! 
{ - async fn get_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { - SELECT id, host, port, paths, user - FROM ssh_projects - WHERE host IS ? AND port IS ? AND paths IS ? AND user IS ? - LIMIT 1 - } - } - - query! { - async fn insert_ssh_project(host: String, port: Option, paths: String, user: Option) -> Result> { - INSERT INTO ssh_projects( - host, - port, - paths, - user - ) VALUES (?1, ?2, ?3, ?4) - RETURNING id, host, port, paths, user - } - } - - query! { - pub async fn update_ssh_project_paths_query(ssh_project_id: u64, paths: String) -> Result> { - UPDATE ssh_projects - SET paths = ?2 - WHERE id = ?1 - RETURNING id, host, port, paths, user - } - } - - pub(crate) async fn update_ssh_project_paths( - &self, - ssh_project_id: SshProjectId, - new_paths: Vec, - ) -> Result { - let paths = serde_json::to_string(&new_paths)?; - self.update_ssh_project_paths_query(ssh_project_id.0, paths) - .await? - .context("failed to update ssh project paths") - } - query! { pub async fn next_id() -> Result { INSERT INTO workspaces DEFAULT VALUES RETURNING workspace_id } } + fn recent_workspaces(&self) -> Result)>> { + Ok(self + .recent_workspaces_query()? + .into_iter() + .map(|(id, paths, order, ssh_connection_id)| { + ( + id, + PathList::deserialize(&SerializedPathList { paths, order }), + ssh_connection_id, + ) + }) + .collect()) + } + query! { - fn recent_workspaces() -> Result)>> { - SELECT workspace_id, local_paths, local_paths_order, ssh_project_id + fn recent_workspaces_query() -> Result)>> { + SELECT workspace_id, paths, paths_order, ssh_connection_id FROM workspaces - WHERE local_paths IS NOT NULL - OR ssh_project_id IS NOT NULL + WHERE + paths IS NOT NULL OR + ssh_connection_id IS NOT NULL ORDER BY timestamp DESC } } + fn session_workspaces( + &self, + session_id: String, + ) -> Result, Option)>> { + Ok(self + .session_workspaces_query(session_id)? + .into_iter() + .map(|(paths, order, window_id, ssh_connection_id)| { + ( + PathList::deserialize(&SerializedPathList { paths, order }), + window_id, + ssh_connection_id.map(SshConnectionId), + ) + }) + .collect()) + } + query! { - fn session_workspaces(session_id: String) -> Result, Option)>> { - SELECT local_paths, local_paths_order, window_id, ssh_project_id + fn session_workspaces_query(session_id: String) -> Result, Option)>> { + SELECT paths, paths_order, window_id, ssh_connection_id FROM workspaces - WHERE session_id = ?1 AND dev_server_project_id IS NULL + WHERE session_id = ?1 ORDER BY timestamp DESC } } @@ -1013,17 +1033,39 @@ impl WorkspaceDb { } } - query! { - fn ssh_projects() -> Result> { - SELECT id, host, port, paths, user - FROM ssh_projects - } + fn ssh_connections(&self) -> Result> { + Ok(self + .ssh_connections_query()? + .into_iter() + .map(|(id, host, port, user)| { + ( + SshConnectionId(id), + SerializedSshConnection { host, port, user }, + ) + }) + .collect()) } query! { - fn ssh_project(id: u64) -> Result { - SELECT id, host, port, paths, user - FROM ssh_projects + pub fn ssh_connections_query() -> Result, Option)>> { + SELECT id, host, port, user + FROM ssh_connections + } + } + + pub(crate) fn ssh_connection(&self, id: SshConnectionId) -> Result { + let row = self.ssh_connection_query(id.0)?; + Ok(SerializedSshConnection { + host: row.0, + port: row.1, + user: row.2, + }) + } + + query! { + fn ssh_connection_query(id: u64) -> Result<(String, Option, Option)> { + SELECT host, port, user + FROM ssh_connections WHERE id = ? 
} } @@ -1037,7 +1079,7 @@ impl WorkspaceDb { display, window_state, window_x, window_y, window_width, window_height FROM workspaces - WHERE local_paths + WHERE paths IS NOT NULL ORDER BY timestamp DESC LIMIT 1 @@ -1054,46 +1096,33 @@ impl WorkspaceDb { } } - pub async fn delete_workspace_by_dev_server_project_id( - &self, - id: DevServerProjectId, - ) -> Result<()> { - self.write(move |conn| { - conn.exec_bound(sql!( - DELETE FROM dev_server_projects WHERE id = ? - ))?(id.0)?; - conn.exec_bound(sql!( - DELETE FROM toolchains WHERE workspace_id = ?1; - DELETE FROM workspaces - WHERE dev_server_project_id IS ? - ))?(id.0) - }) - .await - } - // Returns the recent locations which are still valid on disk and deletes ones which no longer // exist. pub async fn recent_workspaces_on_disk( &self, - ) -> Result> { + ) -> Result> { let mut result = Vec::new(); let mut delete_tasks = Vec::new(); - let ssh_projects = self.ssh_projects()?; + let ssh_connections = self.ssh_connections()?; - for (id, location, order, ssh_project_id) in self.recent_workspaces()? { - if let Some(ssh_project_id) = ssh_project_id.map(SshProjectId) { - if let Some(ssh_project) = ssh_projects.iter().find(|rp| rp.id == ssh_project_id) { - result.push((id, SerializedWorkspaceLocation::Ssh(ssh_project.clone()))); + for (id, paths, ssh_connection_id) in self.recent_workspaces()? { + if let Some(ssh_connection_id) = ssh_connection_id.map(SshConnectionId) { + if let Some(ssh_connection) = ssh_connections.get(&ssh_connection_id) { + result.push(( + id, + SerializedWorkspaceLocation::Ssh(ssh_connection.clone()), + paths, + )); } else { delete_tasks.push(self.delete_workspace_by_id(id)); } continue; } - if location.paths().iter().all(|path| path.exists()) - && location.paths().iter().any(|path| path.is_dir()) + if paths.paths().iter().all(|path| path.exists()) + && paths.paths().iter().any(|path| path.is_dir()) { - result.push((id, SerializedWorkspaceLocation::Local(location, order))); + result.push((id, SerializedWorkspaceLocation::Local, paths)); } else { delete_tasks.push(self.delete_workspace_by_id(id)); } @@ -1103,13 +1132,13 @@ impl WorkspaceDb { Ok(result) } - pub async fn last_workspace(&self) -> Result> { + pub async fn last_workspace(&self) -> Result> { Ok(self .recent_workspaces_on_disk() .await? .into_iter() .next() - .map(|(_, location)| location)) + .map(|(_, location, paths)| (location, paths))) } // Returns the locations of the workspaces that were still opened when the last @@ -1120,25 +1149,31 @@ impl WorkspaceDb { &self, last_session_id: &str, last_session_window_stack: Option>, - ) -> Result> { + ) -> Result> { let mut workspaces = Vec::new(); - for (location, order, window_id, ssh_project_id) in + for (paths, window_id, ssh_connection_id) in self.session_workspaces(last_session_id.to_owned())? 
{ - if let Some(ssh_project_id) = ssh_project_id { - let location = SerializedWorkspaceLocation::Ssh(self.ssh_project(ssh_project_id)?); - workspaces.push((location, window_id.map(WindowId::from))); - } else if location.paths().iter().all(|path| path.exists()) - && location.paths().iter().any(|path| path.is_dir()) + if let Some(ssh_connection_id) = ssh_connection_id { + workspaces.push(( + SerializedWorkspaceLocation::Ssh(self.ssh_connection(ssh_connection_id)?), + paths, + window_id.map(WindowId::from), + )); + } else if paths.paths().iter().all(|path| path.exists()) + && paths.paths().iter().any(|path| path.is_dir()) { - let location = SerializedWorkspaceLocation::Local(location, order); - workspaces.push((location, window_id.map(WindowId::from))); + workspaces.push(( + SerializedWorkspaceLocation::Local, + paths, + window_id.map(WindowId::from), + )); } } if let Some(stack) = last_session_window_stack { - workspaces.sort_by_key(|(_, window_id)| { + workspaces.sort_by_key(|(_, _, window_id)| { window_id .and_then(|id| stack.iter().position(|&order_id| order_id == id)) .unwrap_or(usize::MAX) @@ -1147,7 +1182,7 @@ impl WorkspaceDb { Ok(workspaces .into_iter() - .map(|(paths, _)| paths) + .map(|(location, paths, _)| (location, paths)) .collect::>()) } @@ -1499,13 +1534,13 @@ pub fn delete_unloaded_items( #[cfg(test)] mod tests { - use std::thread; - use std::time::Duration; - use super::*; - use crate::persistence::model::SerializedWorkspace; - use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup}; + use crate::persistence::model::{ + SerializedItem, SerializedPane, SerializedPaneGroup, SerializedWorkspace, + }; use gpui; + use pretty_assertions::assert_eq; + use std::{thread, time::Duration}; #[gpui::test] async fn test_breakpoints() { @@ -1558,7 +1593,8 @@ mod tests { let workspace = SerializedWorkspace { id, - location: SerializedWorkspaceLocation::from_local_paths(["/tmp"]), + paths: PathList::new(&["/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -1711,7 +1747,8 @@ mod tests { let workspace = SerializedWorkspace { id, - location: SerializedWorkspaceLocation::from_local_paths(["/tmp"]), + paths: PathList::new(&["/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -1757,7 +1794,8 @@ mod tests { let workspace_without_breakpoint = SerializedWorkspace { id, - location: SerializedWorkspaceLocation::from_local_paths(["/tmp"]), + paths: PathList::new(&["/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -1796,6 +1834,7 @@ mod tests { ON DELETE CASCADE ) STRICT; )], + |_, _, _| false, ) .unwrap(); }) @@ -1844,6 +1883,7 @@ mod tests { REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT;)], + |_, _, _| false, ) }) .await @@ -1851,7 +1891,8 @@ mod tests { let mut workspace_1 = SerializedWorkspace { id: WorkspaceId(1), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp", "/tmp2"]), + paths: PathList::new(&["/tmp", "/tmp2"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -1864,7 +1905,8 @@ mod tests { let workspace_2 = SerializedWorkspace { id: WorkspaceId(2), - location: 
SerializedWorkspaceLocation::from_local_paths(["/tmp"]), + paths: PathList::new(&["/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -1893,7 +1935,7 @@ mod tests { }) .await; - workspace_1.location = SerializedWorkspaceLocation::from_local_paths(["/tmp", "/tmp3"]); + workspace_1.paths = PathList::new(&["/tmp", "/tmp3"]); db.save_workspace(workspace_1.clone()).await; db.save_workspace(workspace_1).await; db.save_workspace(workspace_2).await; @@ -1969,10 +2011,8 @@ mod tests { let workspace = SerializedWorkspace { id: WorkspaceId(5), - location: SerializedWorkspaceLocation::Local( - LocalPaths::new(["/tmp", "/tmp2"]), - LocalPathsOrder::new([1, 0]), - ), + paths: PathList::new(&["/tmp", "/tmp2"]), + location: SerializedWorkspaceLocation::Local, center_group, window_bounds: Default::default(), breakpoints: Default::default(), @@ -2004,10 +2044,8 @@ mod tests { let workspace_1 = SerializedWorkspace { id: WorkspaceId(1), - location: SerializedWorkspaceLocation::Local( - LocalPaths::new(["/tmp", "/tmp2"]), - LocalPathsOrder::new([0, 1]), - ), + paths: PathList::new(&["/tmp", "/tmp2"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), breakpoints: Default::default(), @@ -2020,7 +2058,8 @@ mod tests { let mut workspace_2 = SerializedWorkspace { id: WorkspaceId(2), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp"]), + paths: PathList::new(&["/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2049,7 +2088,7 @@ mod tests { assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None); // Test 'mutate' case of updating a pre-existing id - workspace_2.location = SerializedWorkspaceLocation::from_local_paths(["/tmp", "/tmp2"]); + workspace_2.paths = PathList::new(&["/tmp", "/tmp2"]); db.save_workspace(workspace_2.clone()).await; assert_eq!( @@ -2060,10 +2099,8 @@ mod tests { // Test other mechanism for mutating let mut workspace_3 = SerializedWorkspace { id: WorkspaceId(3), - location: SerializedWorkspaceLocation::Local( - LocalPaths::new(["/tmp", "/tmp2"]), - LocalPathsOrder::new([1, 0]), - ), + paths: PathList::new(&["/tmp2", "/tmp"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), breakpoints: Default::default(), @@ -2081,8 +2118,7 @@ mod tests { ); // Make sure that updating paths differently also works - workspace_3.location = - SerializedWorkspaceLocation::from_local_paths(["/tmp3", "/tmp4", "/tmp2"]); + workspace_3.paths = PathList::new(&["/tmp3", "/tmp4", "/tmp2"]); db.save_workspace(workspace_3.clone()).await; assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None); assert_eq!( @@ -2100,7 +2136,8 @@ mod tests { let workspace_1 = SerializedWorkspace { id: WorkspaceId(1), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp1"]), + paths: PathList::new(&["/tmp1"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2113,7 +2150,8 @@ mod tests { let workspace_2 = SerializedWorkspace { id: WorkspaceId(2), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp2"]), + paths: PathList::new(&["/tmp2"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: 
Default::default(), display: Default::default(), @@ -2126,7 +2164,8 @@ mod tests { let workspace_3 = SerializedWorkspace { id: WorkspaceId(3), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp3"]), + paths: PathList::new(&["/tmp3"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2139,7 +2178,8 @@ mod tests { let workspace_4 = SerializedWorkspace { id: WorkspaceId(4), - location: SerializedWorkspaceLocation::from_local_paths(["/tmp4"]), + paths: PathList::new(&["/tmp4"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2150,14 +2190,15 @@ mod tests { window_id: None, }; - let ssh_project = db - .get_or_create_ssh_project("my-host".to_string(), Some(1234), vec![], None) + let connection_id = db + .get_or_create_ssh_connection("my-host".to_string(), Some(1234), None) .await .unwrap(); let workspace_5 = SerializedWorkspace { id: WorkspaceId(5), - location: SerializedWorkspaceLocation::Ssh(ssh_project.clone()), + paths: PathList::default(), + location: SerializedWorkspaceLocation::Ssh(db.ssh_connection(connection_id).unwrap()), center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2170,10 +2211,8 @@ mod tests { let workspace_6 = SerializedWorkspace { id: WorkspaceId(6), - location: SerializedWorkspaceLocation::Local( - LocalPaths::new(["/tmp6a", "/tmp6b", "/tmp6c"]), - LocalPathsOrder::new([2, 1, 0]), - ), + paths: PathList::new(&["/tmp6a", "/tmp6b", "/tmp6c"]), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), breakpoints: Default::default(), @@ -2195,41 +2234,36 @@ mod tests { let locations = db.session_workspaces("session-id-1".to_owned()).unwrap(); assert_eq!(locations.len(), 2); - assert_eq!(locations[0].0, LocalPaths::new(["/tmp2"])); - assert_eq!(locations[0].1, LocalPathsOrder::new([0])); - assert_eq!(locations[0].2, Some(20)); - assert_eq!(locations[1].0, LocalPaths::new(["/tmp1"])); - assert_eq!(locations[1].1, LocalPathsOrder::new([0])); - assert_eq!(locations[1].2, Some(10)); + assert_eq!(locations[0].0, PathList::new(&["/tmp2"])); + assert_eq!(locations[0].1, Some(20)); + assert_eq!(locations[1].0, PathList::new(&["/tmp1"])); + assert_eq!(locations[1].1, Some(10)); let locations = db.session_workspaces("session-id-2".to_owned()).unwrap(); assert_eq!(locations.len(), 2); - let empty_paths: Vec<&str> = Vec::new(); - assert_eq!(locations[0].0, LocalPaths::new(empty_paths.iter())); - assert_eq!(locations[0].1, LocalPathsOrder::new([])); - assert_eq!(locations[0].2, Some(50)); - assert_eq!(locations[0].3, Some(ssh_project.id.0)); - assert_eq!(locations[1].0, LocalPaths::new(["/tmp3"])); - assert_eq!(locations[1].1, LocalPathsOrder::new([0])); - assert_eq!(locations[1].2, Some(30)); + assert_eq!(locations[0].0, PathList::default()); + assert_eq!(locations[0].1, Some(50)); + assert_eq!(locations[0].2, Some(connection_id)); + assert_eq!(locations[1].0, PathList::new(&["/tmp3"])); + assert_eq!(locations[1].1, Some(30)); let locations = db.session_workspaces("session-id-3".to_owned()).unwrap(); assert_eq!(locations.len(), 1); assert_eq!( locations[0].0, - LocalPaths::new(["/tmp6a", "/tmp6b", "/tmp6c"]), + PathList::new(&["/tmp6a", "/tmp6b", "/tmp6c"]), ); - assert_eq!(locations[0].1, LocalPathsOrder::new([2, 1, 0])); - assert_eq!(locations[0].2, Some(60)); + 
assert_eq!(locations[0].1, Some(60)); } fn default_workspace>( - workspace_id: &[P], + paths: &[P], center_group: &SerializedPaneGroup, ) -> SerializedWorkspace { SerializedWorkspace { id: WorkspaceId(4), - location: SerializedWorkspaceLocation::from_local_paths(workspace_id), + paths: PathList::new(paths), + location: SerializedWorkspaceLocation::Local, center_group: center_group.clone(), window_bounds: Default::default(), display: Default::default(), @@ -2252,30 +2286,18 @@ mod tests { WorkspaceDb::open_test_db("test_serializing_workspaces_last_session_workspaces").await; let workspaces = [ - (1, vec![dir1.path()], vec![0], 9), - (2, vec![dir2.path()], vec![0], 5), - (3, vec![dir3.path()], vec![0], 8), - (4, vec![dir4.path()], vec![0], 2), - ( - 5, - vec![dir1.path(), dir2.path(), dir3.path()], - vec![0, 1, 2], - 3, - ), - ( - 6, - vec![dir2.path(), dir3.path(), dir4.path()], - vec![2, 1, 0], - 4, - ), + (1, vec![dir1.path()], 9), + (2, vec![dir2.path()], 5), + (3, vec![dir3.path()], 8), + (4, vec![dir4.path()], 2), + (5, vec![dir1.path(), dir2.path(), dir3.path()], 3), + (6, vec![dir4.path(), dir3.path(), dir2.path()], 4), ] .into_iter() - .map(|(id, locations, order, window_id)| SerializedWorkspace { + .map(|(id, paths, window_id)| SerializedWorkspace { id: WorkspaceId(id), - location: SerializedWorkspaceLocation::Local( - LocalPaths::new(locations), - LocalPathsOrder::new(order), - ), + paths: PathList::new(paths.as_slice()), + location: SerializedWorkspaceLocation::Local, center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2300,39 +2322,37 @@ mod tests { WindowId::from(4), // Bottom ])); - let have = db + let locations = db .last_session_workspace_locations("one-session", stack) .unwrap(); - assert_eq!(have.len(), 6); assert_eq!( - have[0], - SerializedWorkspaceLocation::from_local_paths(&[dir4.path()]) - ); - assert_eq!( - have[1], - SerializedWorkspaceLocation::from_local_paths([dir3.path()]) - ); - assert_eq!( - have[2], - SerializedWorkspaceLocation::from_local_paths([dir2.path()]) - ); - assert_eq!( - have[3], - SerializedWorkspaceLocation::from_local_paths([dir1.path()]) - ); - assert_eq!( - have[4], - SerializedWorkspaceLocation::Local( - LocalPaths::new([dir1.path(), dir2.path(), dir3.path()]), - LocalPathsOrder::new([0, 1, 2]), - ), - ); - assert_eq!( - have[5], - SerializedWorkspaceLocation::Local( - LocalPaths::new([dir2.path(), dir3.path(), dir4.path()]), - LocalPathsOrder::new([2, 1, 0]), - ), + locations, + [ + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir4.path()]) + ), + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir3.path()]) + ), + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir2.path()]) + ), + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir1.path()]) + ), + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir1.path(), dir2.path(), dir3.path()]) + ), + ( + SerializedWorkspaceLocation::Local, + PathList::new(&[dir4.path(), dir3.path(), dir2.path()]) + ), + ] ); } @@ -2343,7 +2363,7 @@ mod tests { ) .await; - let ssh_projects = [ + let ssh_connections = [ ("host-1", "my-user-1"), ("host-2", "my-user-2"), ("host-3", "my-user-3"), @@ -2351,24 +2371,30 @@ mod tests { ] .into_iter() .map(|(host, user)| async { - db.get_or_create_ssh_project(host.to_string(), None, vec![], Some(user.to_string())) + db.get_or_create_ssh_connection(host.to_string(), None, Some(user.to_string())) .await - .unwrap() + .unwrap(); + SerializedSshConnection { + host: host.into(), 
+ port: None, + user: Some(user.into()), + } }) .collect::>(); - let ssh_projects = futures::future::join_all(ssh_projects).await; + let ssh_connections = futures::future::join_all(ssh_connections).await; let workspaces = [ - (1, ssh_projects[0].clone(), 9), - (2, ssh_projects[1].clone(), 5), - (3, ssh_projects[2].clone(), 8), - (4, ssh_projects[3].clone(), 2), + (1, ssh_connections[0].clone(), 9), + (2, ssh_connections[1].clone(), 5), + (3, ssh_connections[2].clone(), 8), + (4, ssh_connections[3].clone(), 2), ] .into_iter() - .map(|(id, ssh_project, window_id)| SerializedWorkspace { + .map(|(id, ssh_connection, window_id)| SerializedWorkspace { id: WorkspaceId(id), - location: SerializedWorkspaceLocation::Ssh(ssh_project), + paths: PathList::default(), + location: SerializedWorkspaceLocation::Ssh(ssh_connection), center_group: Default::default(), window_bounds: Default::default(), display: Default::default(), @@ -2397,19 +2423,31 @@ mod tests { assert_eq!(have.len(), 4); assert_eq!( have[0], - SerializedWorkspaceLocation::Ssh(ssh_projects[3].clone()) + ( + SerializedWorkspaceLocation::Ssh(ssh_connections[3].clone()), + PathList::default() + ) ); assert_eq!( have[1], - SerializedWorkspaceLocation::Ssh(ssh_projects[2].clone()) + ( + SerializedWorkspaceLocation::Ssh(ssh_connections[2].clone()), + PathList::default() + ) ); assert_eq!( have[2], - SerializedWorkspaceLocation::Ssh(ssh_projects[1].clone()) + ( + SerializedWorkspaceLocation::Ssh(ssh_connections[1].clone()), + PathList::default() + ) ); assert_eq!( have[3], - SerializedWorkspaceLocation::Ssh(ssh_projects[0].clone()) + ( + SerializedWorkspaceLocation::Ssh(ssh_connections[0].clone()), + PathList::default() + ) ); } @@ -2417,116 +2455,110 @@ mod tests { async fn test_get_or_create_ssh_project() { let db = WorkspaceDb::open_test_db("test_get_or_create_ssh_project").await; - let (host, port, paths, user) = ( - "example.com".to_string(), - Some(22_u16), - vec!["/home/user".to_string(), "/etc/nginx".to_string()], - Some("user".to_string()), - ); + let host = "example.com".to_string(); + let port = Some(22_u16); + let user = Some("user".to_string()); - let project = db - .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) + let connection_id = db + .get_or_create_ssh_connection(host.clone(), port, user.clone()) .await .unwrap(); - assert_eq!(project.host, host); - assert_eq!(project.paths, paths); - assert_eq!(project.user, user); - // Test that calling the function again with the same parameters returns the same project - let same_project = db - .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) + let same_connection = db + .get_or_create_ssh_connection(host.clone(), port, user.clone()) .await .unwrap(); - assert_eq!(project.id, same_project.id); + assert_eq!(connection_id, same_connection); // Test with different parameters - let (host2, paths2, user2) = ( - "otherexample.com".to_string(), - vec!["/home/otheruser".to_string()], - Some("otheruser".to_string()), - ); + let host2 = "otherexample.com".to_string(); + let port2 = None; + let user2 = Some("otheruser".to_string()); - let different_project = db - .get_or_create_ssh_project(host2.clone(), None, paths2.clone(), user2.clone()) + let different_connection = db + .get_or_create_ssh_connection(host2.clone(), port2, user2.clone()) .await .unwrap(); - assert_ne!(project.id, different_project.id); - assert_eq!(different_project.host, host2); - assert_eq!(different_project.paths, paths2); - assert_eq!(different_project.user, user2); + 
assert_ne!(connection_id, different_connection); } #[gpui::test] async fn test_get_or_create_ssh_project_with_null_user() { let db = WorkspaceDb::open_test_db("test_get_or_create_ssh_project_with_null_user").await; - let (host, port, paths, user) = ( - "example.com".to_string(), - None, - vec!["/home/user".to_string()], - None, - ); + let (host, port, user) = ("example.com".to_string(), None, None); - let project = db - .get_or_create_ssh_project(host.clone(), port, paths.clone(), None) + let connection_id = db + .get_or_create_ssh_connection(host.clone(), port, None) .await .unwrap(); - assert_eq!(project.host, host); - assert_eq!(project.paths, paths); - assert_eq!(project.user, None); - - // Test that calling the function again with the same parameters returns the same project - let same_project = db - .get_or_create_ssh_project(host.clone(), port, paths.clone(), user.clone()) + let same_connection_id = db + .get_or_create_ssh_connection(host.clone(), port, user.clone()) .await .unwrap(); - assert_eq!(project.id, same_project.id); + assert_eq!(connection_id, same_connection_id); } #[gpui::test] - async fn test_get_ssh_projects() { - let db = WorkspaceDb::open_test_db("test_get_ssh_projects").await; + async fn test_get_ssh_connections() { + let db = WorkspaceDb::open_test_db("test_get_ssh_connections").await; - let projects = vec![ - ( - "example.com".to_string(), - None, - vec!["/home/user".to_string()], - None, - ), + let connections = [ + ("example.com".to_string(), None, None), ( "anotherexample.com".to_string(), Some(123_u16), - vec!["/home/user2".to_string()], Some("user2".to_string()), ), - ( - "yetanother.com".to_string(), - Some(345_u16), - vec!["/home/user3".to_string(), "/proc/1234/exe".to_string()], - None, - ), + ("yetanother.com".to_string(), Some(345_u16), None), ]; - for (host, port, paths, user) in projects.iter() { - let project = db - .get_or_create_ssh_project(host.clone(), *port, paths.clone(), user.clone()) - .await - .unwrap(); - - assert_eq!(&project.host, host); - assert_eq!(&project.port, port); - assert_eq!(&project.paths, paths); - assert_eq!(&project.user, user); + let mut ids = Vec::new(); + for (host, port, user) in connections.iter() { + ids.push( + db.get_or_create_ssh_connection(host.clone(), *port, user.clone()) + .await + .unwrap(), + ); } - let stored_projects = db.ssh_projects().unwrap(); - assert_eq!(stored_projects.len(), projects.len()); + let stored_projects = db.ssh_connections().unwrap(); + assert_eq!( + stored_projects, + [ + ( + ids[0], + SerializedSshConnection { + host: "example.com".into(), + port: None, + user: None, + } + ), + ( + ids[1], + SerializedSshConnection { + host: "anotherexample.com".into(), + port: Some(123), + user: Some("user2".into()), + } + ), + ( + ids[2], + SerializedSshConnection { + host: "yetanother.com".into(), + port: Some(345), + user: None, + } + ), + ] + .into_iter() + .collect::>(), + ); } #[gpui::test] @@ -2659,56 +2691,4 @@ mod tests { assert_eq!(workspace.center_group, new_workspace.center_group); } - - #[gpui::test] - async fn test_update_ssh_project_paths() { - zlog::init_test(); - - let db = WorkspaceDb::open_test_db("test_update_ssh_project_paths").await; - - let (host, port, initial_paths, user) = ( - "example.com".to_string(), - Some(22_u16), - vec!["/home/user".to_string(), "/etc/nginx".to_string()], - Some("user".to_string()), - ); - - let project = db - .get_or_create_ssh_project(host.clone(), port, initial_paths.clone(), user.clone()) - .await - .unwrap(); - - assert_eq!(project.host, host); - 
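// Editor's sketch, not part of the patch: test_update_ssh_project_paths is being
// deleted here because paths no longer live on the SSH connection row; they are
// persisted per workspace via SerializedWorkspace::paths. Assuming the APIs
// introduced earlier in this diff, the replacement flow is roughly:
//
//     let id = db
//         .get_or_create_ssh_connection("example.com".to_string(), Some(22), Some("user".to_string()))
//         .await?;
//     workspace.location = SerializedWorkspaceLocation::Ssh(db.ssh_connection(id)?);
//     workspace.paths = PathList::new(&["/home/user", "/etc/nginx", "/var/log"]);
//     db.save_workspace(workspace).await; // upserts paths and ssh_connection_id together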
assert_eq!(project.paths, initial_paths); - assert_eq!(project.user, user); - - let new_paths = vec![ - "/home/user".to_string(), - "/etc/nginx".to_string(), - "/var/log".to_string(), - "/opt/app".to_string(), - ]; - - let updated_project = db - .update_ssh_project_paths(project.id, new_paths.clone()) - .await - .unwrap(); - - assert_eq!(updated_project.id, project.id); - assert_eq!(updated_project.paths, new_paths); - - let retrieved_project = db - .get_ssh_project( - host.clone(), - port, - serde_json::to_string(&new_paths).unwrap(), - user.clone(), - ) - .await - .unwrap() - .unwrap(); - - assert_eq!(retrieved_project.id, project.id); - assert_eq!(retrieved_project.paths, new_paths); - } } diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 15a54ac62f..04757d0495 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -1,256 +1,48 @@ use super::{SerializedAxis, SerializedWindowBounds}; use crate::{ Member, Pane, PaneAxis, SerializableItemRegistry, Workspace, WorkspaceId, item::ItemHandle, + path_list::PathList, }; -use anyhow::{Context as _, Result}; +use anyhow::Result; use async_recursion::async_recursion; use db::sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, }; use gpui::{AsyncWindowContext, Entity, WeakEntity}; -use itertools::Itertools as _; + use project::{Project, debugger::breakpoint_store::SourceBreakpoint}; -use remote::ssh_session::SshProjectId; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, path::{Path, PathBuf}, sync::Arc, }; -use util::{ResultExt, paths::SanitizedPath}; +use util::ResultExt; use uuid::Uuid; +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize, +)] +pub(crate) struct SshConnectionId(pub u64); + #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] -pub struct SerializedSshProject { - pub id: SshProjectId, +pub struct SerializedSshConnection { pub host: String, pub port: Option, - pub paths: Vec, pub user: Option, } -impl SerializedSshProject { - pub fn ssh_urls(&self) -> Vec { - self.paths - .iter() - .map(|path| { - let mut result = String::new(); - if let Some(user) = &self.user { - result.push_str(user); - result.push('@'); - } - result.push_str(&self.host); - if let Some(port) = &self.port { - result.push(':'); - result.push_str(&port.to_string()); - } - result.push_str(path); - PathBuf::from(result) - }) - .collect() - } -} - -impl StaticColumnCount for SerializedSshProject { - fn column_count() -> usize { - 5 - } -} - -impl Bind for &SerializedSshProject { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - let next_index = statement.bind(&self.id.0, start_index)?; - let next_index = statement.bind(&self.host, next_index)?; - let next_index = statement.bind(&self.port, next_index)?; - let raw_paths = serde_json::to_string(&self.paths)?; - let next_index = statement.bind(&raw_paths, next_index)?; - statement.bind(&self.user, next_index) - } -} - -impl Column for SerializedSshProject { - fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let id = statement.column_int64(start_index)?; - let host = statement.column_text(start_index + 1)?.to_string(); - let (port, _) = Option::::column(statement, start_index + 2)?; - let raw_paths = statement.column_text(start_index + 3)?.to_string(); - let paths: Vec = serde_json::from_str(&raw_paths)?; - - let (user, _) = Option::::column(statement, 
start_index + 4)?; - - Ok(( - Self { - id: SshProjectId(id as u64), - host, - port, - paths, - user, - }, - start_index + 5, - )) - } -} - -#[derive(Debug, PartialEq, Clone)] -pub struct LocalPaths(Arc>); - -impl LocalPaths { - pub fn new>(paths: impl IntoIterator) -> Self { - let mut paths: Vec = paths - .into_iter() - .map(|p| SanitizedPath::from(p).into()) - .collect(); - // Ensure all future `zed workspace1 workspace2` and `zed workspace2 workspace1` calls are using the same workspace. - // The actual workspace order is stored in the `LocalPathsOrder` struct. - paths.sort(); - Self(Arc::new(paths)) - } - - pub fn paths(&self) -> &Arc> { - &self.0 - } -} - -impl StaticColumnCount for LocalPaths {} -impl Bind for &LocalPaths { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - statement.bind(&bincode::serialize(&self.0)?, start_index) - } -} - -impl Column for LocalPaths { - fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let path_blob = statement.column_blob(start_index)?; - let paths: Arc> = if path_blob.is_empty() { - Default::default() - } else { - bincode::deserialize(path_blob).context("Bincode deserialization of paths failed")? - }; - - Ok((Self(paths), start_index + 1)) - } -} - -#[derive(Debug, PartialEq, Clone)] -pub struct LocalPathsOrder(Vec); - -impl LocalPathsOrder { - pub fn new(order: impl IntoIterator) -> Self { - Self(order.into_iter().collect()) - } - - pub fn order(&self) -> &[usize] { - self.0.as_slice() - } - - pub fn default_for_paths(paths: &LocalPaths) -> Self { - Self::new(0..paths.0.len()) - } -} - -impl StaticColumnCount for LocalPathsOrder {} -impl Bind for &LocalPathsOrder { - fn bind(&self, statement: &Statement, start_index: i32) -> Result { - statement.bind(&bincode::serialize(&self.0)?, start_index) - } -} - -impl Column for LocalPathsOrder { - fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { - let order_blob = statement.column_blob(start_index)?; - let order = if order_blob.is_empty() { - Vec::new() - } else { - bincode::deserialize(order_blob).context("deserializing workspace root order")? - }; - - Ok((Self(order), start_index + 1)) - } -} - #[derive(Debug, PartialEq, Clone)] pub enum SerializedWorkspaceLocation { - Local(LocalPaths, LocalPathsOrder), - Ssh(SerializedSshProject), + Local, + Ssh(SerializedSshConnection), } impl SerializedWorkspaceLocation { - /// Create a new `SerializedWorkspaceLocation` from a list of local paths. - /// - /// The paths will be sorted and the order will be stored in the `LocalPathsOrder` struct. 
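// Editor's sketch, not part of the patch: PathList (introduced by this PR in
// workspace::path_list) replaces the LocalPaths + LocalPathsOrder pair whose doc
// comment is removed here. Judging only from its uses elsewhere in this diff, it
// exposes at least:
//     let list = PathList::new(&["/tmp2", "/tmp"]);
//     list.paths();                        // canonical slice of paths
//     list.is_lexicographically_ordered(); // drives set_worktrees_reordered
//     list.serialize();                    // -> SerializedPathList { paths, order }
//     PathList::deserialize(&SerializedPathList { paths, order });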
- /// - /// # Examples - /// - /// ``` - /// use std::path::Path; - /// use zed_workspace::SerializedWorkspaceLocation; - /// - /// let location = SerializedWorkspaceLocation::from_local_paths(vec![ - /// Path::new("path/to/workspace1"), - /// Path::new("path/to/workspace2"), - /// ]); - /// assert_eq!(location, SerializedWorkspaceLocation::Local( - /// LocalPaths::new(vec![ - /// Path::new("path/to/workspace1"), - /// Path::new("path/to/workspace2"), - /// ]), - /// LocalPathsOrder::new(vec![0, 1]), - /// )); - /// ``` - /// - /// ``` - /// use std::path::Path; - /// use zed_workspace::SerializedWorkspaceLocation; - /// - /// let location = SerializedWorkspaceLocation::from_local_paths(vec![ - /// Path::new("path/to/workspace2"), - /// Path::new("path/to/workspace1"), - /// ]); - /// - /// assert_eq!(location, SerializedWorkspaceLocation::Local( - /// LocalPaths::new(vec![ - /// Path::new("path/to/workspace1"), - /// Path::new("path/to/workspace2"), - /// ]), - /// LocalPathsOrder::new(vec![1, 0]), - /// )); - /// ``` - pub fn from_local_paths>(paths: impl IntoIterator) -> Self { - let mut indexed_paths: Vec<_> = paths - .into_iter() - .map(|p| p.as_ref().to_path_buf()) - .enumerate() - .collect(); - - indexed_paths.sort_by(|(_, a), (_, b)| a.cmp(b)); - - let sorted_paths: Vec<_> = indexed_paths.iter().map(|(_, path)| path.clone()).collect(); - let order: Vec<_> = indexed_paths.iter().map(|(index, _)| *index).collect(); - - Self::Local(LocalPaths::new(sorted_paths), LocalPathsOrder::new(order)) - } - /// Get sorted paths pub fn sorted_paths(&self) -> Arc> { - match self { - SerializedWorkspaceLocation::Local(paths, order) => { - if order.order().is_empty() { - paths.paths().clone() - } else { - Arc::new( - order - .order() - .iter() - .zip(paths.paths().iter()) - .sorted_by_key(|(i, _)| **i) - .map(|(_, p)| p.clone()) - .collect(), - ) - } - } - SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()), - } + unimplemented!() } } @@ -258,6 +50,7 @@ impl SerializedWorkspaceLocation { pub(crate) struct SerializedWorkspace { pub(crate) id: WorkspaceId, pub(crate) location: SerializedWorkspaceLocation, + pub(crate) paths: PathList, pub(crate) center_group: SerializedPaneGroup, pub(crate) window_bounds: Option, pub(crate) centered_layout: bool, @@ -581,80 +374,3 @@ impl Column for SerializedItem { )) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_serialize_local_paths() { - let paths = vec!["b", "a", "c"]; - let serialized = SerializedWorkspaceLocation::from_local_paths(paths); - - assert_eq!( - serialized, - SerializedWorkspaceLocation::Local( - LocalPaths::new(vec!["a", "b", "c"]), - LocalPathsOrder::new(vec![1, 0, 2]) - ) - ); - } - - #[test] - fn test_sorted_paths() { - let paths = vec!["b", "a", "c"]; - let serialized = SerializedWorkspaceLocation::from_local_paths(paths); - assert_eq!( - serialized.sorted_paths(), - Arc::new(vec![ - PathBuf::from("b"), - PathBuf::from("a"), - PathBuf::from("c"), - ]) - ); - - let paths = Arc::new(vec![ - PathBuf::from("a"), - PathBuf::from("b"), - PathBuf::from("c"), - ]); - let order = vec![2, 0, 1]; - let serialized = - SerializedWorkspaceLocation::Local(LocalPaths(paths), LocalPathsOrder(order)); - assert_eq!( - serialized.sorted_paths(), - Arc::new(vec![ - PathBuf::from("b"), - PathBuf::from("c"), - PathBuf::from("a"), - ]) - ); - - let paths = Arc::new(vec![ - PathBuf::from("a"), - PathBuf::from("b"), - PathBuf::from("c"), - ]); - let order = vec![]; - let serialized = - 
SerializedWorkspaceLocation::Local(LocalPaths(paths.clone()), LocalPathsOrder(order)); - assert_eq!(serialized.sorted_paths(), paths); - - let urls = ["/a", "/b", "/c"]; - let serialized = SerializedWorkspaceLocation::Ssh(SerializedSshProject { - id: SshProjectId(0), - host: "host".to_string(), - port: Some(22), - paths: urls.iter().map(|s| s.to_string()).collect(), - user: Some("user".to_string()), - }); - assert_eq!( - serialized.sorted_paths(), - Arc::new( - urls.iter() - .map(|p| PathBuf::from(format!("user@host:22{}", p))) - .collect() - ) - ); - } -} diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 499e4f4619..044601df97 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1,10 +1,12 @@ pub mod dock; pub mod history_manager; +pub mod invalid_buffer_view; pub mod item; mod modal_layer; pub mod notifications; pub mod pane; pub mod pane_group; +mod path_list; mod persistence; pub mod searchable; pub mod shared_screen; @@ -17,6 +19,7 @@ mod workspace_settings; pub use crate::notifications::NotificationFrame; pub use dock::Panel; +pub use path_list::PathList; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; use anyhow::{Context as _, Result, anyhow}; @@ -61,13 +64,10 @@ use notifications::{ }; pub use pane::*; pub use pane_group::*; -use persistence::{ - DB, SerializedWindowBounds, - model::{SerializedSshProject, SerializedWorkspace}, -}; +use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items, - model::{ItemId, LocalPaths, SerializedWorkspaceLocation}, + model::{ItemId, SerializedSshConnection, SerializedWorkspaceLocation}, }; use postage::stream::Stream; use project::{ @@ -612,21 +612,60 @@ impl ProjectItemRegistry { ); self.build_project_item_for_path_fns .push(|project, project_path, window, cx| { + let project_path = project_path.clone(); + let is_file = project + .read(cx) + .entry_for_path(&project_path, cx) + .is_some_and(|entry| entry.is_file()); + let entry_abs_path = project.read(cx).absolute_path(&project_path, cx); + let is_local = project.read(cx).is_local(); let project_item = - ::try_open(project, project_path, cx)?; + ::try_open(project, &project_path, cx)?; let project = project.clone(); Some(window.spawn(cx, async move |cx| { - let project_item = project_item.await?; - let project_entry_id: Option = - project_item.read_with(cx, project::ProjectItem::entry_id)?; - let build_workspace_item = Box::new( - |pane: &mut Pane, window: &mut Window, cx: &mut Context| { - Box::new(cx.new(|cx| { - T::for_project_item(project, Some(pane), project_item, window, cx) - })) as Box + match project_item.await.with_context(|| { + format!( + "opening project path {:?}", + entry_abs_path.as_deref().unwrap_or(&project_path.path) + ) + }) { + Ok(project_item) => { + let project_item = project_item; + let project_entry_id: Option = + project_item.read_with(cx, project::ProjectItem::entry_id)?; + let build_workspace_item = Box::new( + |pane: &mut Pane, window: &mut Window, cx: &mut Context| { + Box::new(cx.new(|cx| { + T::for_project_item( + project, + Some(pane), + project_item, + window, + cx, + ) + })) as Box + }, + ) as Box<_>; + Ok((project_entry_id, build_workspace_item)) + } + Err(e) => match entry_abs_path.as_deref().filter(|_| is_file) { + Some(abs_path) => match cx.update(|window, cx| { + T::for_broken_project_item(abs_path, is_local, &e, window, cx) + })? 
{ + Some(broken_project_item_view) => { + let build_workspace_item = Box::new( + move |_: &mut Pane, _: &mut Window, cx: &mut Context| { + cx.new(|_| broken_project_item_view).boxed_clone() + }, + ) + as Box<_>; + Ok((None, build_workspace_item)) + } + None => Err(e)?, + }, + None => Err(e)?, }, - ) as Box<_>; - Ok((project_entry_id, build_workspace_item)) + } })) }); } @@ -1013,7 +1052,7 @@ pub enum OpenVisible { enum WorkspaceLocation { // Valid local paths or SSH project to serialize - Location(SerializedWorkspaceLocation), + Location(SerializedWorkspaceLocation, PathList), // No valid location found hence clear session id DetachFromSession, // No valid location found to serialize @@ -1097,7 +1136,6 @@ pub struct Workspace { terminal_provider: Option>, debugger_provider: Option>, serializable_items_tx: UnboundedSender>, - serialized_ssh_project: Option, _items_serializer: Task>, session_id: Option, scheduled_tasks: Vec>, @@ -1146,8 +1184,6 @@ impl Workspace { project::Event::WorktreeRemoved(_) | project::Event::WorktreeAdded(_) => { this.update_window_title(window, cx); - this.update_ssh_paths(cx); - this.serialize_ssh_paths(window, cx); this.serialize_workspace(window, cx); // This event could be triggered by `AddFolderToProject` or `RemoveFromProject`. this.update_history(cx); @@ -1432,7 +1468,7 @@ impl Workspace { serializable_items_tx, _items_serializer, session_id: Some(session_id), - serialized_ssh_project: None, + scheduled_tasks: Vec::new(), } } @@ -1472,20 +1508,9 @@ impl Workspace { let serialized_workspace = persistence::DB.workspace_for_roots(paths_to_open.as_slice()); - let workspace_location = serialized_workspace - .as_ref() - .map(|ws| &ws.location) - .and_then(|loc| match loc { - SerializedWorkspaceLocation::Local(_, order) => { - Some((loc.sorted_paths(), order.order())) - } - _ => None, - }); - - if let Some((paths, order)) = workspace_location { - paths_to_open = paths.iter().cloned().collect(); - - if order.iter().enumerate().any(|(i, &j)| i != j) { + if let Some(paths) = serialized_workspace.as_ref().map(|ws| &ws.paths) { + paths_to_open = paths.paths().to_vec(); + if !paths.is_lexicographically_ordered() { project_handle .update(cx, |project, cx| { project.set_worktrees_reordered(true, cx); @@ -2005,14 +2030,6 @@ impl Workspace { self.debugger_provider.clone() } - pub fn serialized_ssh_project(&self) -> Option { - self.serialized_ssh_project.clone() - } - - pub fn set_serialized_ssh_project(&mut self, serialized_ssh_project: SerializedSshProject) { - self.serialized_ssh_project = Some(serialized_ssh_project); - } - pub fn prompt_for_open_path( &mut self, path_prompt_options: PathPromptOptions, @@ -2249,27 +2266,43 @@ impl Workspace { })?; if let Some(active_call) = active_call - && close_intent != CloseIntent::Quit && workspace_count == 1 && active_call.read_with(cx, |call, _| call.room().is_some())? { - let answer = cx.update(|window, cx| { - window.prompt( - PromptLevel::Warning, - "Do you want to leave the current call?", - None, - &["Close window and hang up", "Cancel"], - cx, - ) - })?; + if close_intent == CloseIntent::CloseWindow { + let answer = cx.update(|window, cx| { + window.prompt( + PromptLevel::Warning, + "Do you want to leave the current call?", + None, + &["Close window and hang up", "Cancel"], + cx, + ) + })?; - if answer.await.log_err() == Some(1) { - return anyhow::Ok(false); - } else { - active_call - .update(cx, |call, cx| call.hang_up(cx))? 
- .await - .log_err(); + if answer.await.log_err() == Some(1) { + return anyhow::Ok(false); + } else { + active_call + .update(cx, |call, cx| call.hang_up(cx))? + .await + .log_err(); + } + } + if close_intent == CloseIntent::ReplaceWindow { + _ = active_call.update(cx, |this, cx| { + let workspace = cx + .windows() + .iter() + .filter_map(|window| window.downcast::()) + .next() + .unwrap(); + let project = workspace.read(cx)?.project.clone(); + if project.read(cx).is_shared() { + this.unshare_project(project, cx)?; + } + Ok::<_, anyhow::Error>(()) + })?; } } @@ -3363,9 +3396,8 @@ impl Workspace { window: &mut Window, cx: &mut App, ) -> Task, WorkspaceItemBuilder)>> { - let project = self.project().clone(); let registry = cx.default_global::().clone(); - registry.open_path(&project, &path, window, cx) + registry.open_path(self.project(), &path, window, cx) } pub fn find_project_item( @@ -3990,52 +4022,6 @@ impl Workspace { maybe_pane_handle } - pub fn split_pane_with_item( - &mut self, - pane_to_split: WeakEntity, - split_direction: SplitDirection, - from: WeakEntity, - item_id_to_move: EntityId, - window: &mut Window, - cx: &mut Context, - ) { - let Some(pane_to_split) = pane_to_split.upgrade() else { - return; - }; - let Some(from) = from.upgrade() else { - return; - }; - - let new_pane = self.add_pane(window, cx); - move_item(&from, &new_pane, item_id_to_move, 0, true, window, cx); - self.center - .split(&pane_to_split, &new_pane, split_direction) - .unwrap(); - cx.notify(); - } - - pub fn split_pane_with_project_entry( - &mut self, - pane_to_split: WeakEntity, - split_direction: SplitDirection, - project_entry: ProjectEntryId, - window: &mut Window, - cx: &mut Context, - ) -> Option>> { - let pane_to_split = pane_to_split.upgrade()?; - let new_pane = self.add_pane(window, cx); - self.center - .split(&pane_to_split, &new_pane, split_direction) - .unwrap(); - - let path = self.project.read(cx).path_for_entry(project_entry, cx)?; - let task = self.open_path(path, Some(new_pane.downgrade()), true, window, cx); - Some(cx.foreground_executor().spawn(async move { - task.await?; - Ok(()) - })) - } - pub fn join_all_panes(&mut self, window: &mut Window, cx: &mut Context) { let active_item = self.active_pane.read(cx).active_item(); for pane in &self.panes { @@ -5044,59 +5030,12 @@ impl Workspace { self.session_id.clone() } - fn local_paths(&self, cx: &App) -> Option>> { + pub fn root_paths(&self, cx: &App) -> Vec> { let project = self.project().read(cx); - - if project.is_local() { - Some( - project - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).abs_path()) - .collect::>(), - ) - } else { - None - } - } - - fn update_ssh_paths(&mut self, cx: &App) { - let project = self.project().read(cx); - if !project.is_local() { - let paths: Vec = project - .visible_worktrees(cx) - .map(|worktree| worktree.read(cx).abs_path().to_string_lossy().to_string()) - .collect(); - if let Some(ssh_project) = &mut self.serialized_ssh_project { - ssh_project.paths = paths; - } - } - } - - fn serialize_ssh_paths(&mut self, window: &mut Window, cx: &mut Context) { - if self._schedule_serialize_ssh_paths.is_none() { - self._schedule_serialize_ssh_paths = - Some(cx.spawn_in(window, async move |this, cx| { - cx.background_executor() - .timer(SERIALIZATION_THROTTLE_TIME) - .await; - this.update_in(cx, |this, window, cx| { - let task = if let Some(ssh_project) = &this.serialized_ssh_project { - let ssh_project_id = ssh_project.id; - let ssh_project_paths = ssh_project.paths.clone(); - window.spawn(cx, async move 
|_| { - persistence::DB - .update_ssh_project_paths(ssh_project_id, ssh_project_paths) - .await - }) - } else { - Task::ready(Err(anyhow::anyhow!("No SSH project to serialize"))) - }; - task.detach(); - this._schedule_serialize_ssh_paths.take(); - }) - .log_err(); - })); - } + project + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path()) + .collect::>() } fn remove_panes(&mut self, member: Member, window: &mut Window, cx: &mut Context) { @@ -5269,7 +5208,7 @@ impl Workspace { } match self.serialize_workspace_location(cx) { - WorkspaceLocation::Location(location) => { + WorkspaceLocation::Location(location, paths) => { let breakpoints = self.project.update(cx, |project, cx| { project .breakpoint_store() @@ -5283,6 +5222,7 @@ impl Workspace { let serialized_workspace = SerializedWorkspace { id: database_id, location, + paths, center_group, window_bounds, display: Default::default(), @@ -5308,13 +5248,19 @@ impl Workspace { } fn serialize_workspace_location(&self, cx: &App) -> WorkspaceLocation { - if let Some(ssh_project) = &self.serialized_ssh_project { - WorkspaceLocation::Location(SerializedWorkspaceLocation::Ssh(ssh_project.clone())) - } else if let Some(local_paths) = self.local_paths(cx) { - if !local_paths.is_empty() { - WorkspaceLocation::Location(SerializedWorkspaceLocation::from_local_paths( - local_paths, - )) + let paths = PathList::new(&self.root_paths(cx)); + if let Some(connection) = self.project.read(cx).ssh_connection_options(cx) { + WorkspaceLocation::Location( + SerializedWorkspaceLocation::Ssh(SerializedSshConnection { + host: connection.host, + port: connection.port, + user: connection.username, + }), + paths, + ) + } else if self.project.read(cx).is_local() { + if !paths.is_empty() { + WorkspaceLocation::Location(SerializedWorkspaceLocation::Local, paths) } else { WorkspaceLocation::DetachFromSession } @@ -5327,13 +5273,13 @@ impl Workspace { let Some(id) = self.database_id() else { return; }; - let location = match self.serialize_workspace_location(cx) { - WorkspaceLocation::Location(location) => location, - _ => return, - }; + if !self.project.read(cx).is_local() { + return; + } if let Some(manager) = HistoryManager::global(cx) { + let paths = PathList::new(&self.root_paths(cx)); manager.update(cx, |this, cx| { - this.update_history(id, HistoryManagerEntry::new(id, &location), cx); + this.update_history(id, HistoryManagerEntry::new(id, &paths), cx); }); } } @@ -6641,15 +6587,29 @@ impl Render for Workspace { } }) .children(self.zoomed.as_ref().and_then(|view| { - Some(div() + let zoomed_view = view.upgrade()?; + let div = div() .occlude() .absolute() .overflow_hidden() .border_color(colors.border) .bg(colors.background) - .child(view.upgrade()?) 
+ .child(zoomed_view) .inset_0() - .shadow_lg()) + .shadow_lg(); + + if !WorkspaceSettings::get_global(cx).zoomed_padding { + return Some(div); + } + + Some(match self.zoomed_position { + Some(DockPosition::Left) => div.right_2().border_r_1(), + Some(DockPosition::Right) => div.left_2().border_l_1(), + Some(DockPosition::Bottom) => div.top_2().border_t_1(), + None => { + div.top_2().bottom_2().left_2().right_2().border_1() + } + }) })) .children(self.render_notifications(window, cx)), ) @@ -6799,14 +6759,14 @@ impl WorkspaceHandle for Entity { } } -pub async fn last_opened_workspace_location() -> Option { +pub async fn last_opened_workspace_location() -> Option<(SerializedWorkspaceLocation, PathList)> { DB.last_workspace().await.log_err().flatten() } pub fn last_session_workspace_locations( last_session_id: &str, last_session_window_stack: Option>, -) -> Option> { +) -> Option> { DB.last_session_workspace_locations(last_session_id, last_session_window_stack) .log_err() } @@ -7309,7 +7269,7 @@ pub fn open_ssh_project_with_new_connection( cx: &mut App, ) -> Task> { cx.spawn(async move |cx| { - let (serialized_ssh_project, workspace_id, serialized_workspace) = + let (workspace_id, serialized_workspace) = serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?; let session = match cx @@ -7343,7 +7303,6 @@ pub fn open_ssh_project_with_new_connection( open_ssh_project_inner( project, paths, - serialized_ssh_project, workspace_id, serialized_workspace, app_state, @@ -7363,13 +7322,12 @@ pub fn open_ssh_project_with_existing_connection( cx: &mut AsyncApp, ) -> Task> { cx.spawn(async move |cx| { - let (serialized_ssh_project, workspace_id, serialized_workspace) = + let (workspace_id, serialized_workspace) = serialize_ssh_project(connection_options.clone(), paths.clone(), cx).await?; open_ssh_project_inner( project, paths, - serialized_ssh_project, workspace_id, serialized_workspace, app_state, @@ -7383,7 +7341,6 @@ pub fn open_ssh_project_with_existing_connection( async fn open_ssh_project_inner( project: Entity, paths: Vec, - serialized_ssh_project: SerializedSshProject, workspace_id: WorkspaceId, serialized_workspace: Option, app_state: Arc, @@ -7436,7 +7393,6 @@ async fn open_ssh_project_inner( let mut workspace = Workspace::new(Some(workspace_id), project, app_state.clone(), window, cx); - workspace.set_serialized_ssh_project(serialized_ssh_project); workspace.update_history(cx); if let Some(ref serialized) = serialized_workspace { @@ -7473,28 +7429,18 @@ fn serialize_ssh_project( connection_options: SshConnectionOptions, paths: Vec, cx: &AsyncApp, -) -> Task< - Result<( - SerializedSshProject, - WorkspaceId, - Option, - )>, -> { +) -> Task)>> { cx.background_spawn(async move { - let serialized_ssh_project = persistence::DB - .get_or_create_ssh_project( + let ssh_connection_id = persistence::DB + .get_or_create_ssh_connection( connection_options.host.clone(), connection_options.port, - paths - .iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>(), connection_options.username.clone(), ) .await?; let serialized_workspace = - persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); + persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id); let workspace_id = if let Some(workspace_id) = serialized_workspace.as_ref().map(|workspace| workspace.id) @@ -7504,7 +7450,7 @@ fn serialize_ssh_project( persistence::DB.next_id().await? 
}; - Ok((serialized_ssh_project, workspace_id, serialized_workspace)) + Ok((workspace_id, serialized_workspace)) }) } @@ -8051,18 +7997,15 @@ pub fn ssh_workspace_position_from_db( paths_to_open: &[PathBuf], cx: &App, ) -> Task> { - let paths = paths_to_open - .iter() - .map(|path| path.to_string_lossy().to_string()) - .collect::>(); + let paths = paths_to_open.to_vec(); cx.background_spawn(async move { - let serialized_ssh_project = persistence::DB - .get_or_create_ssh_project(host, port, paths, user) + let ssh_connection_id = persistence::DB + .get_or_create_ssh_connection(host, port, user) .await .context("fetching serialized ssh project")?; let serialized_workspace = - persistence::DB.workspace_for_ssh_project(&serialized_ssh_project); + persistence::DB.ssh_workspace_for_roots(&paths, ssh_connection_id); let (window_bounds, display) = if let Some(bounds) = window_bounds_env_override() { (Some(WindowBounds::Windowed(bounds)), None) diff --git a/crates/workspace/src/workspace_settings.rs b/crates/workspace/src/workspace_settings.rs index 5635347514..3b6bc1ea97 100644 --- a/crates/workspace/src/workspace_settings.rs +++ b/crates/workspace/src/workspace_settings.rs @@ -29,6 +29,7 @@ pub struct WorkspaceSettings { pub on_last_window_closed: OnLastWindowClosed, pub resize_all_panels_in_dock: Vec, pub close_on_file_delete: bool, + pub zoomed_padding: bool, } #[derive(Copy, Clone, Default, Serialize, Deserialize, JsonSchema)] @@ -202,6 +203,12 @@ pub struct WorkspaceSettingsContent { /// /// Default: false pub close_on_file_delete: Option, + /// Whether to show padding for zoomed panels. + /// When enabled, zoomed bottom panels will have some top padding, + /// while zoomed left/right panels will have padding to the right/left (respectively). + /// + /// Default: true + pub zoomed_padding: Option, } #[derive(Deserialize)] diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index d69efaf6c0..6f4ead9ebb 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.201.0" +version = "0.202.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] @@ -20,6 +20,7 @@ path = "src/main.rs" [dependencies] activity_indicator.workspace = true +acp_tools.workspace = true agent.workspace = true agent_ui.workspace = true agent_settings.workspace = true @@ -33,6 +34,7 @@ audio.workspace = true auto_update.workspace = true auto_update_ui.workspace = true backtrace = "0.3" +bincode.workspace = true breadcrumbs.workspace = true call.workspace = true channel.workspace = true @@ -60,6 +62,7 @@ extensions_ui.workspace = true feature_flags.workspace = true feedback.workspace = true file_finder.workspace = true +system_specs.workspace = true fs.workspace = true futures.workspace = true git.workspace = true diff --git a/crates/zed/resources/info/SupportedPlatforms.plist b/crates/zed/resources/info/SupportedPlatforms.plist new file mode 100644 index 0000000000..fd2a4101d8 --- /dev/null +++ b/crates/zed/resources/info/SupportedPlatforms.plist @@ -0,0 +1,4 @@ +CFBundleSupportedPlatforms + + MacOSX + diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 45c67153eb..e99c8b564b 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -16,7 +16,7 @@ use extension_host::ExtensionStore; use fs::{Fs, RealFs}; use futures::{StreamExt, channel::oneshot, future}; use git::GitHostingProviderRegistry; -use gpui::{App, AppContext as _, Application, AsyncApp, Focusable as _, UpdateGlobal as _}; +use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, UpdateGlobal as _}; use gpui_tokio::Tokio; use http_client::{Url, read_proxy_from_env}; @@ -47,8 +47,8 @@ use theme::{ use util::{ResultExt, TryFutureExt, maybe}; use uuid::Uuid; use workspace::{ - AppState, SerializedWorkspaceLocation, Toast, Workspace, WorkspaceSettings, WorkspaceStore, - notifications::NotificationId, + AppState, PathList, SerializedWorkspaceLocation, Toast, Workspace, WorkspaceSettings, + WorkspaceStore, notifications::NotificationId, }; use zed::{ OpenListener, OpenRequest, RawOpenRequest, app_menus, build_window_options, @@ -240,7 +240,7 @@ pub fn main() { option_env!("ZED_COMMIT_SHA").map(|commit_sha| AppCommitSha::new(commit_sha.to_string())); if args.system_specs { - let system_specs = feedback::system_specs::SystemSpecs::new_stateless( + let system_specs = system_specs::SystemSpecs::new_stateless( app_version, app_commit_sha, *release_channel::RELEASE_CHANNEL, @@ -566,6 +566,7 @@ pub fn main() { language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); agent_settings::init(cx); agent_servers::init(cx); + acp_tools::init(cx); web_search::init(cx); web_search_providers::init(app_state.client.clone(), cx); snippet_provider::init(cx); @@ -598,7 +599,7 @@ pub fn main() { repl::notebook::init(cx); diagnostics::init(cx); - audio::init(Assets, cx); + audio::init(cx); workspace::init(app_state.clone(), cx); ui_prompt::init(cx); @@ -948,15 +949,14 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp if let Some(locations) = restorable_workspace_locations(cx, &app_state).await { let mut tasks = Vec::new(); - for location in locations { + for (location, paths) in locations { match location { - SerializedWorkspaceLocation::Local(location, _) => { + SerializedWorkspaceLocation::Local => { let app_state = app_state.clone(); - let paths = location.paths().to_vec(); let task = cx.spawn(async move |cx| { let open_task = cx.update(|cx| { workspace::open_paths( - &paths, + 
&paths.paths(), app_state, workspace::OpenOptions::default(), cx, @@ -978,7 +978,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp match connection_options { Ok(connection_options) => recent_projects::open_ssh_project( connection_options, - ssh.paths.into_iter().map(PathBuf::from).collect(), + paths.paths().into_iter().map(PathBuf::from).collect(), app_state, workspace::OpenOptions::default(), cx, @@ -1069,7 +1069,7 @@ async fn restore_or_create_workspace(app_state: Arc, cx: &mut AsyncApp pub(crate) async fn restorable_workspace_locations( cx: &mut AsyncApp, app_state: &Arc, -) -> Option> { +) -> Option> { let mut restore_behavior = cx .update(|cx| WorkspaceSettings::get(None, cx).restore_on_startup) .ok()?; diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index f55468280c..ac06f1fd9f 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -60,7 +60,9 @@ pub fn init_panic_hook( .or_else(|| info.payload().downcast_ref::().cloned()) .unwrap_or_else(|| "Box".to_string()); - crashes::handle_panic(payload.clone(), info.location()); + if *release_channel::RELEASE_CHANNEL != ReleaseChannel::Dev { + crashes::handle_panic(payload.clone(), info.location()); + } let thread = thread::current(); let thread_name = thread.name().unwrap_or(""); @@ -87,7 +89,9 @@ pub fn init_panic_hook( }, backtrace, ); - std::process::exit(-1); + if MINIDUMP_ENDPOINT.is_none() { + std::process::exit(-1); + } } let main_module_base_address = get_main_module_base_address(); @@ -146,7 +150,9 @@ pub fn init_panic_hook( } zlog::flush(); - if !is_pty && let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() { + if (!is_pty || MINIDUMP_ENDPOINT.is_some()) + && let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() + { let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string(); let panic_file_path = paths::logs_dir().join(format!("zed-{timestamp}.panic")); let panic_file = fs::OpenOptions::new() @@ -251,6 +257,7 @@ pub fn init( endpoint, minidump_contents, &metadata, + installation_id.clone(), ) .await .log_err(); @@ -478,7 +485,9 @@ fn upload_panics_and_crashes( return; } cx.background_spawn(async move { - upload_previous_minidumps(http.clone()).await.warn_on_err(); + upload_previous_minidumps(http.clone(), installation_id.clone()) + .await + .warn_on_err(); let most_recent_panic = upload_previous_panics(http.clone(), &panic_report_url) .await .log_err() @@ -546,7 +555,10 @@ async fn upload_previous_panics( Ok(most_recent_panic) } -pub async fn upload_previous_minidumps(http: Arc) -> anyhow::Result<()> { +pub async fn upload_previous_minidumps( + http: Arc, + installation_id: Option, +) -> anyhow::Result<()> { let Some(minidump_endpoint) = MINIDUMP_ENDPOINT.as_ref() else { log::warn!("Minidump endpoint not set"); return Ok(()); @@ -569,6 +581,7 @@ pub async fn upload_previous_minidumps(http: Arc) -> anyhow:: .await .context("Failed to read minidump")?, &metadata, + installation_id.clone(), ) .await .log_err() @@ -586,6 +599,7 @@ async fn upload_minidump( endpoint: &str, minidump: Vec, metadata: &crashes::CrashInfo, + installation_id: Option, ) -> Result<()> { let mut form = Form::new() .part( @@ -601,15 +615,83 @@ async fn upload_minidump( .text("sentry[tags][version]", metadata.init.zed_version.clone()) .text("sentry[release]", metadata.init.commit_sha.clone()) .text("platform", "rust"); + let mut panic_message = "".to_owned(); if let Some(panic_info) = metadata.panic.as_ref() { - form = 
form.text("sentry[logentry][formatted]", panic_info.message.clone()); - form = form.text("span", panic_info.span.clone()); - // TODO: add gpu-context, feature-flag-context, and more of device-context like gpu - // name, screen resolution, available ram, device model, etc + panic_message = panic_info.message.clone(); + form = form + .text("sentry[logentry][formatted]", panic_info.message.clone()) + .text("span", panic_info.span.clone()); } if let Some(minidump_error) = metadata.minidump_error.clone() { form = form.text("minidump_error", minidump_error); } + if let Some(id) = installation_id.clone() { + form = form.text("sentry[user][id]", id) + } + + ::telemetry::event!( + "Minidump Uploaded", + panic_message = panic_message, + crashed_version = metadata.init.zed_version.clone(), + commit_sha = metadata.init.commit_sha.clone(), + ); + + let gpu_count = metadata.gpus.len(); + for (index, gpu) in metadata.gpus.iter().cloned().enumerate() { + let system_specs::GpuInfo { + device_name, + device_pci_id, + vendor_name, + vendor_pci_id, + driver_version, + driver_name, + } = gpu; + let num = if gpu_count == 1 && metadata.active_gpu.is_none() { + String::new() + } else { + index.to_string() + }; + let name = format!("gpu{num}"); + let root = format!("sentry[contexts][{name}]"); + form = form + .text( + format!("{root}[Description]"), + "A GPU found on the users system. May or may not be the GPU Zed is running on", + ) + .text(format!("{root}[type]"), "gpu") + .text(format!("{root}[name]"), device_name.unwrap_or(name)) + .text(format!("{root}[id]"), format!("{:#06x}", device_pci_id)) + .text( + format!("{root}[vendor_id]"), + format!("{:#06x}", vendor_pci_id), + ) + .text_if_some(format!("{root}[vendor_name]"), vendor_name) + .text_if_some(format!("{root}[driver_version]"), driver_version) + .text_if_some(format!("{root}[driver_name]"), driver_name); + } + if let Some(active_gpu) = metadata.active_gpu.clone() { + form = form + .text( + "sentry[contexts][Active_GPU][Description]", + "The GPU Zed is running on", + ) + .text("sentry[contexts][Active_GPU][type]", "gpu") + .text("sentry[contexts][Active_GPU][name]", active_gpu.device_name) + .text( + "sentry[contexts][Active_GPU][driver_version]", + active_gpu.driver_info, + ) + .text( + "sentry[contexts][Active_GPU][driver_name]", + active_gpu.driver_name, + ) + .text( + "sentry[contexts][Active_GPU][is_software_emulated]", + active_gpu.is_software_emulated.to_string(), + ); + } + + // TODO: feature-flag-context, and more of device-context like screen resolution, available ram, device model, etc let mut response_text = String::new(); let mut response = http.send_multipart_form(endpoint, form).await?; @@ -624,6 +706,27 @@ async fn upload_minidump( Ok(()) } +trait FormExt { + fn text_if_some( + self, + label: impl Into>, + value: Option>>, + ) -> Self; +} + +impl FormExt for Form { + fn text_if_some( + self, + label: impl Into>, + value: Option>>, + ) -> Self { + match value { + Some(value) => self.text(label.into(), value.into()), + None => self, + } + } +} + async fn upload_panic( http: &Arc, panic_report_url: &Url, diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 958149825a..638e1dca0e 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -344,7 +344,17 @@ pub fn initialize_workspace( if let Some(specs) = window.gpu_specs() { log::info!("Using GPU: {:?}", specs); - show_software_emulation_warning_if_needed(specs, window, cx); + show_software_emulation_warning_if_needed(specs.clone(), window, cx); + if let Some((crash_server, 
message)) = crashes::CRASH_HANDLER + .get() + .zip(bincode::serialize(&specs).ok()) + && let Err(err) = crash_server.send_message(3, message) + { + log::warn!( + "Failed to store active gpu info for crash reporting: {}", + err + ); + } } let edit_prediction_menu_handle = PopoverMenuHandle::default(); @@ -4614,7 +4624,7 @@ mod tests { gpui_tokio::init(cx); vim_mode_setting::init(cx); theme::init(theme::LoadThemes::JustBase, cx); - audio::init((), cx); + audio::init(cx); channel::init(&app_state.client, app_state.user_store.clone(), cx); call::init(app_state.client.clone(), app_state.user_store.clone(), cx); notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx); diff --git a/crates/zed/src/zed/component_preview/persistence.rs b/crates/zed/src/zed/component_preview/persistence.rs index 780f7f7626..c37a4cc389 100644 --- a/crates/zed/src/zed/component_preview/persistence.rs +++ b/crates/zed/src/zed/component_preview/persistence.rs @@ -1,10 +1,17 @@ use anyhow::Result; -use db::{define_connection, query, sqlez::statement::Statement, sqlez_macros::sql}; +use db::{ + query, + sqlez::{domain::Domain, statement::Statement, thread_safe_connection::ThreadSafeConnection}, + sqlez_macros::sql, +}; use workspace::{ItemId, WorkspaceDb, WorkspaceId}; -define_connection! { - pub static ref COMPONENT_PREVIEW_DB: ComponentPreviewDb = - &[sql!( +pub struct ComponentPreviewDb(ThreadSafeConnection); + +impl Domain for ComponentPreviewDb { + const NAME: &str = stringify!(ComponentPreviewDb); + + const MIGRATIONS: &[&str] = &[sql!( CREATE TABLE component_previews ( workspace_id INTEGER, item_id INTEGER UNIQUE, @@ -13,9 +20,11 @@ define_connection! { FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) ON DELETE CASCADE ) STRICT; - )]; + )]; } +db::static_connection!(COMPONENT_PREVIEW_DB, ComponentPreviewDb, [WorkspaceDb]); + impl ComponentPreviewDb { pub async fn save_active_page( &self, diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index a9abd9bc74..bc2d757fd1 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -75,13 +75,10 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let new_provider = all_language_settings(None, cx).edit_predictions.provider; if new_provider != provider { - let tos_accepted = user_store.read(cx).has_accepted_terms_of_service(); - telemetry::event!( "Edit Prediction Provider Changed", from = provider, to = new_provider, - zed_ai_tos_accepted = tos_accepted, ); provider = new_provider; @@ -92,28 +89,6 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { user_store.clone(), cx, ); - - if !tos_accepted { - match provider { - EditPredictionProvider::Zed => { - let Some(window) = cx.active_window() else { - return; - }; - - window - .update(cx, |_, window, cx| { - window.dispatch_action( - Box::new(zed_actions::OpenZedPredictOnboarding), - cx, - ); - }) - .ok(); - } - EditPredictionProvider::None - | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven => {} - } - } } } }) diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 827c7754fa..2194fb7af5 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -26,6 +26,7 @@ use std::thread; use std::time::Duration; use util::ResultExt; use util::paths::PathWithPosition; +use workspace::PathList; use workspace::item::ItemHandle; use workspace::{AppState, OpenOptions, 
SerializedWorkspaceLocation, Workspace}; @@ -361,12 +362,14 @@ async fn open_workspaces( if open_new_workspace == Some(true) { Vec::new() } else { - let locations = restorable_workspace_locations(cx, &app_state).await; - locations.unwrap_or_default() + restorable_workspace_locations(cx, &app_state) + .await + .unwrap_or_default() } } else { - vec![SerializedWorkspaceLocation::from_local_paths( - paths.into_iter().map(PathBuf::from), + vec![( + SerializedWorkspaceLocation::Local, + PathList::new(&paths.into_iter().map(PathBuf::from).collect::>()), )] }; @@ -394,9 +397,9 @@ async fn open_workspaces( // If there are paths to open, open a workspace for each grouping of paths let mut errored = false; - for location in grouped_locations { + for (location, workspace_paths) in grouped_locations { match location { - SerializedWorkspaceLocation::Local(workspace_paths, _) => { + SerializedWorkspaceLocation::Local => { let workspace_paths = workspace_paths .paths() .iter() @@ -429,7 +432,7 @@ async fn open_workspaces( cx.spawn(async move |cx| { open_ssh_project( connection_options, - ssh.paths.into_iter().map(PathBuf::from).collect(), + workspace_paths.paths().to_vec(), app_state, OpenOptions::default(), cx, diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 9455369e9a..a5223a2cdf 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -156,7 +156,10 @@ pub mod workspace { #[action(deprecated_aliases = ["editor::CopyPath", "outline_panel::CopyPath", "project_panel::CopyPath"])] CopyPath, #[action(deprecated_aliases = ["editor::CopyRelativePath", "outline_panel::CopyRelativePath", "project_panel::CopyRelativePath"])] - CopyRelativePath + CopyRelativePath, + /// Opens the selected file with the system's default application. + #[action(deprecated_aliases = ["project_panel::OpenWithSystem"])] + OpenWithSystem, ] ); } @@ -287,7 +290,9 @@ pub mod agent { Chat, /// Toggles the language model selector dropdown. 
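For context on the `(SerializedWorkspaceLocation, PathList)` pairs threaded through `restorable_workspace_locations` and `open_workspaces` above, a minimal hedged sketch of that shape. Only names from the diff are used; the helper function itself is hypothetical, and `PathList::new`'s exact signature is inferred from its usage in the hunk.

```rust
use std::path::PathBuf;
use workspace::{PathList, SerializedWorkspaceLocation};

// Hypothetical helper: package raw CLI paths the way open_workspaces now expects,
// with the unit `Local` variant and the paths carried separately in a PathList.
fn local_workspace_entry(paths: Vec<PathBuf>) -> (SerializedWorkspaceLocation, PathList) {
    (SerializedWorkspaceLocation::Local, PathList::new(&paths))
}
```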
#[action(deprecated_aliases = ["assistant::ToggleModelSelector", "assistant2::ToggleModelSelector"])] - ToggleModelSelector + ToggleModelSelector, + /// Triggers re-authentication on Gemini + ReauthenticateAgent ] ); } diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 916699d29b..7b14d12796 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -118,12 +118,8 @@ impl Dismissable for ZedPredictUpsell { } } -pub fn should_show_upsell_modal(user_store: &Entity, cx: &App) -> bool { - if user_store.read(cx).has_accepted_terms_of_service() { - !ZedPredictUpsell::dismissed() - } else { - true - } +pub fn should_show_upsell_modal() -> bool { + !ZedPredictUpsell::dismissed() } #[derive(Clone)] @@ -1547,16 +1543,6 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { ) -> bool { true } - - fn needs_terms_acceptance(&self, cx: &App) -> bool { - !self - .zeta - .read(cx) - .user_store - .read(cx) - .has_accepted_terms_of_service() - } - fn is_refreshing(&self) -> bool { !self.pending_completions.is_empty() } @@ -1569,10 +1555,6 @@ impl edit_prediction::EditPredictionProvider for ZetaEditPredictionProvider { _debounce: bool, cx: &mut Context, ) { - if self.needs_terms_acceptance(cx) { - return; - } - if self.zeta.read(cx).update_required { return; } diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index c7af36f431..251cad6234 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -16,6 +16,7 @@ - [Configuring Zed](./configuring-zed.md) - [Configuring Languages](./configuring-languages.md) - [Key bindings](./key-bindings.md) + - [All Actions](./all-actions.md) - [Snippets](./snippets.md) - [Themes](./themes.md) - [Icon Themes](./icon-themes.md) diff --git a/docs/src/ai/text-threads.md b/docs/src/ai/text-threads.md index 65a5dcba03..ed439252b4 100644 --- a/docs/src/ai/text-threads.md +++ b/docs/src/ai/text-threads.md @@ -16,7 +16,7 @@ To begin, type a message in a `You` block. As you type, the remaining tokens count for the selected model is updated. -Inserting text from an editor is as simple as highlighting the text and running `assistant: quote selection` ({#kb assistant::QuoteSelection}); Zed will wrap it in a fenced code block if it is code. +Inserting text from an editor is as simple as highlighting the text and running `agent: quote selection` ({#kb agent::QuoteSelection}); Zed will wrap it in a fenced code block if it is code. ![Quoting a selection](https://zed.dev/img/assistant/quoting-a-selection.png) @@ -148,7 +148,7 @@ Usage: `/terminal []` The `/selection` command inserts the selected text in the editor into the context. This is useful for referencing specific parts of your code. -This is equivalent to the `assistant: quote selection` command ({#kb assistant::QuoteSelection}). +This is equivalent to the `agent: quote selection` command ({#kb agent::QuoteSelection}). Usage: `/selection` diff --git a/docs/src/all-actions.md b/docs/src/all-actions.md new file mode 100644 index 0000000000..d20f7cfd63 --- /dev/null +++ b/docs/src/all-actions.md @@ -0,0 +1,3 @@ +## All Actions + +{#ACTIONS_TABLE#} diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 39d172ea5f..fb139db6e4 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2425,6 +2425,7 @@ Examples: { "completions": { "words": "fallback", + "words_min_length": 3, "lsp": true, "lsp_fetch_timeout_ms": 0, "lsp_insert_mode": "replace_suffix" @@ -2444,6 +2445,17 @@ Examples: 2. 
`fallback` - Only if LSP response errors or times out, use document's words to show completions 3. `disabled` - Never fetch or complete document's words for completions (word-based completions can still be queried via a separate action) +### Min Words Query Length + +- Description: Minimum number of characters required to automatically trigger word-based completions. + Before that value, it's still possible to trigger the words-based completion manually with the corresponding editor command. +- Setting: `words_min_length` +- Default: `3` + +**Options** + +Positive integer values + ### LSP - Description: Whether to fetch LSP completions or not. @@ -3234,6 +3246,7 @@ Run the `theme selector: toggle` action in the command palette to see a current "scrollbar": { "show": null }, + "sticky_scroll": true, "show_diagnostics": "all", "indent_guides": { "show": "always" diff --git a/docs/src/diagnostics.md b/docs/src/diagnostics.md index a015fbebf8..9603c8197c 100644 --- a/docs/src/diagnostics.md +++ b/docs/src/diagnostics.md @@ -51,7 +51,7 @@ To configure, use ```json5 "project_panel": { - "diagnostics": "all", + "show_diagnostics": "all", } ``` diff --git a/docs/src/languages/ruby.md b/docs/src/languages/ruby.md index 6f530433bd..ef4b026db1 100644 --- a/docs/src/languages/ruby.md +++ b/docs/src/languages/ruby.md @@ -299,6 +299,7 @@ To run tests in your Ruby project, you can set up custom tasks in your local `.z "-n", "\"$ZED_CUSTOM_RUBY_TEST_NAME\"" ], + "cwd": "$ZED_WORKTREE_ROOT", "tags": ["ruby-test"] } ] @@ -321,6 +322,7 @@ Plain minitest does not support running tests by line number, only by name, so w "-n", "\"$ZED_CUSTOM_RUBY_TEST_NAME\"" ], + "cwd": "$ZED_WORKTREE_ROOT", "tags": ["ruby-test"] } ] @@ -334,6 +336,7 @@ Plain minitest does not support running tests by line number, only by name, so w "label": "test $ZED_RELATIVE_FILE:$ZED_ROW", "command": "bundle", "args": ["exec", "rspec", "\"$ZED_RELATIVE_FILE:$ZED_ROW\""], + "cwd": "$ZED_WORKTREE_ROOT", "tags": ["ruby-test"] } ] @@ -369,7 +372,7 @@ The Ruby extension provides a debug adapter for debugging Ruby code. Zed's name "label": "Debug Rails server", "adapter": "rdbg", "request": "launch", - "command": "$ZED_WORKTREE_ROOT/bin/rails", + "command": "./bin/rails", "args": ["server"], "cwd": "$ZED_WORKTREE_ROOT", "env": { diff --git a/docs/src/languages/rust.md b/docs/src/languages/rust.md index 7695280275..0bfa3ecac7 100644 --- a/docs/src/languages/rust.md +++ b/docs/src/languages/rust.md @@ -136,22 +136,7 @@ This is enabled by default and can be configured as ## Manual Cargo Diagnostics fetch By default, rust-analyzer has `checkOnSave: true` enabled, which causes every buffer save to trigger a `cargo check --workspace --all-targets` command. -For lager projects this might introduce excessive wait times, so a more fine-grained triggering could be enabled by altering the - -```json -"diagnostics": { - "cargo": { - // When enabled, Zed disables rust-analyzer's check on save and starts to query - // Cargo diagnostics separately. - "fetch_cargo_diagnostics": false - } -} -``` - -default settings. - -This will stop rust-analyzer from running `cargo check ...` on save, yet still allow to run -`editor: run/clear/cancel flycheck` commands in Rust files to refresh cargo diagnostics; the project diagnostics editor will also refresh cargo diagnostics with `editor: run flycheck` command when the setting is enabled. 
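For the `words_min_length` completion setting documented above, a small standalone sketch of the rule the docs describe: word-based completions trigger automatically only once the typed query reaches the configured minimum length, while a manual request bypasses the check. The function below is an illustration, not the editor's actual implementation.

```rust
// Hedged sketch of the documented gating rule for word-based completions.
fn should_show_word_completions(query: &str, words_min_length: usize, manual: bool) -> bool {
    manual || query.chars().count() >= words_min_length
}

fn main() {
    assert!(!should_show_word_completions("ab", 3, false)); // too short to auto-trigger
    assert!(should_show_word_completions("ab", 3, true)); // manual trigger still works
    assert!(should_show_word_completions("abc", 3, false)); // meets the default of 3
}
```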
+If disabled with `checkOnSave: false` (see the example of the server configuration json above), it's still possible to fetch the diagnostics manually, with the `editor: run/clear/cancel flycheck` commands in Rust files to refresh cargo diagnostics; the project diagnostics editor will also refresh cargo diagnostics with `editor: run flycheck` command when the setting is enabled. ## More server configuration diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 3ad1e381d9..24b2a9d769 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -430,6 +430,7 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k "indent_size": 20, // Pixels for each successive indent "auto_reveal_entries": true, // Show file in panel when activating its buffer "auto_fold_dirs": true, // Fold dirs with single subdir + "sticky_scroll": true, // Stick parent directories at top of the project panel. "scrollbar": { // Project panel scrollbar settings "show": null // Show/hide: (auto, system, always, never) }, diff --git a/script/squawk b/script/squawk index 8489206f14..497fcff089 100755 --- a/script/squawk +++ b/script/squawk @@ -15,13 +15,11 @@ SQUAWK_VERSION=0.26.0 SQUAWK_BIN="./target/squawk-$SQUAWK_VERSION" SQUAWK_ARGS="--assume-in-transaction --config script/lib/squawk.toml" -if [ ! -f "$SQUAWK_BIN" ]; then - pkgutil --pkg-info com.apple.pkg.RosettaUpdateAuto || /usr/sbin/softwareupdate --install-rosetta --agree-to-license - # When bootstrapping a brand new CI machine, the `target` directory may not exist yet. - mkdir -p "./target" - curl -L -o "$SQUAWK_BIN" "https://github.com/sbdchd/squawk/releases/download/v$SQUAWK_VERSION/squawk-darwin-x86_64" - chmod +x "$SQUAWK_BIN" -fi +pkgutil --pkg-info com.apple.pkg.RosettaUpdateAuto || /usr/sbin/softwareupdate --install-rosetta --agree-to-license +# When bootstrapping a brand new CI machine, the `target` directory may not exist yet. 
+mkdir -p "./target" +curl -L -o "$SQUAWK_BIN" "https://github.com/sbdchd/squawk/releases/download/v$SQUAWK_VERSION/squawk-darwin-x86_64" +chmod +x "$SQUAWK_BIN" if [ -n "$SQUAWK_GITHUB_TOKEN" ]; then export SQUAWK_GITHUB_REPO_OWNER=$(echo $GITHUB_REPOSITORY | awk -F/ '{print $1}') diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index 054e757056..2f9a963abc 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -54,6 +54,7 @@ digest = { version = "0.10", features = ["mac", "oid", "std"] } either = { version = "1", features = ["serde", "use_std"] } euclid = { version = "0.22" } event-listener = { version = "5" } +event-listener-strategy = { version = "0.5" } flate2 = { version = "1", features = ["zlib-rs"] } form_urlencoded = { version = "1" } futures = { version = "0.3", features = ["io-compat"] } @@ -108,7 +109,6 @@ rustc-hash = { version = "1" } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", default-features = false, features = ["fs", "net", "std"] } rustls = { version = "0.23", features = ["ring"] } rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] } -schemars = { version = "1", features = ["chrono04", "indexmap2", "semver1"] } sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] } sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] } semver = { version = "1", features = ["serde"] } @@ -183,6 +183,7 @@ digest = { version = "0.10", features = ["mac", "oid", "std"] } either = { version = "1", features = ["serde", "use_std"] } euclid = { version = "0.22" } event-listener = { version = "5" } +event-listener-strategy = { version = "0.5" } flate2 = { version = "1", features = ["zlib-rs"] } form_urlencoded = { version = "1" } futures = { version = "0.3", features = ["io-compat"] } @@ -242,7 +243,6 @@ rustc-hash = { version = "1" } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", default-features = false, features = ["fs", "net", "std"] } rustls = { version = "0.23", features = ["ring"] } rustls-webpki = { version = "0.103", default-features = false, features = ["aws-lc-rs", "ring", "std"] } -schemars = { version = "1", features = ["chrono04", "indexmap2", "semver1"] } sea-orm = { version = "1", features = ["runtime-tokio-rustls", "sqlx-postgres", "sqlx-sqlite"] } sea-query-binder = { version = "0.7", default-features = false, features = ["postgres-array", "sqlx-postgres", "sqlx-sqlite", "with-bigdecimal", "with-chrono", "with-json", "with-rust_decimal", "with-time", "with-uuid"] } semver = { version = "1", features = ["serde"] } @@ -403,7 +403,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } @@ -444,7 +443,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen 
cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } @@ -483,7 +481,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } @@ -524,7 +521,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } @@ -610,7 +606,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] } @@ -651,7 +646,6 @@ bytemuck = { version = "1", default-features = false, features = ["min_const_gen cipher = { version = "0.4", default-features = false, features = ["block-padding", "rand_core", "zeroize"] } codespan-reporting = { version = "0.12" } crypto-common = { version = "0.1", default-features = false, features = ["rand_core", "std"] } -event-listener-strategy = { version = "0.5" } flume = { version = "0.11" } foldhash = { version = "0.1", default-features = false, features = ["std"] } getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-features = false, features = ["std"] }
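Returning to the GPU-specs handoff added in `crates/zed/src/zed.rs` earlier in this diff, a self-contained hedged sketch of the bincode round-trip the crash handler relies on. The struct below is a stand-in with field names borrowed from the Sentry context code in `reliability.rs`; the real type lives in gpui and carries more fields, and the example assumes `serde` and `bincode` 1.x as dependencies.

```rust
use serde::{Deserialize, Serialize};

// Stand-in for the GPU specs serialized in initialize_workspace (illustrative only).
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct GpuSpecs {
    device_name: String,
    driver_name: String,
    driver_info: String,
    is_software_emulated: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let specs = GpuSpecs {
        device_name: "ExampleGPU".into(),
        driver_name: "example".into(),
        driver_info: "1.0".into(),
        is_software_emulated: false,
    };
    // Serialize with bincode before handing the bytes to the crash server,
    // then deserialize on the other side to recover the same specs.
    let bytes = bincode::serialize(&specs)?;
    let restored: GpuSpecs = bincode::deserialize(&bytes)?;
    assert_eq!(specs, restored);
    Ok(())
}
```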