Merge branch 'main' into add-contributing-guide

Joseph T. Lyons · 2024-01-09 12:16:16 -05:00 · commit aab464a243
1609 changed files with 73709 additions and 359034 deletions


@@ -1,6 +1,3 @@
[alias]
xtask = "run --package xtask --"
[build]
# v0 mangling scheme provides more detailed backtraces around closures
rustflags = ["-C", "symbol-mangling-version=v0"]


@@ -2,8 +2,8 @@ name: Release Nightly
on:
schedule:
# Fire every day at 1:00pm and 1:00am
- cron: "0 1,13 * * *"
# Fire every day at 7:00am UTC (Roughly before EU workday and after US workday)
- cron: "0 7 * * *"
push:
tags:
- "nightly"
@@ -92,7 +92,7 @@ jobs:
run: script/generate-licenses
- name: Create app bundle
run: script/bundle -2
run: script/bundle
- name: Upload Zed Nightly
run: script/upload-nightly

Cargo.lock (generated, 2904 changed lines): file diff suppressed because it is too large.


@@ -1,137 +1,90 @@
[workspace]
members = [
"crates/assets",
"crates/activity_indicator",
"crates/activity_indicator2",
"crates/ai",
"crates/assistant",
"crates/assistant2",
"crates/audio",
"crates/audio2",
"crates/auto_update",
"crates/auto_update2",
"crates/breadcrumbs",
"crates/breadcrumbs2",
"crates/call",
"crates/call2",
"crates/channel",
"crates/channel2",
"crates/cli",
"crates/client",
"crates/client2",
"crates/clock",
"crates/collab",
"crates/collab2",
"crates/collab_ui",
"crates/collab_ui2",
"crates/collections",
"crates/command_palette",
"crates/command_palette2",
"crates/component_test",
"crates/context_menu",
"crates/copilot",
"crates/copilot2",
"crates/copilot_button",
"crates/copilot_ui",
"crates/db",
"crates/db2",
"crates/refineable",
"crates/refineable/derive_refineable",
"crates/diagnostics",
"crates/diagnostics2",
"crates/drag_and_drop",
"crates/editor",
"crates/feature_flags",
"crates/feature_flags2",
"crates/feedback",
"crates/file_finder",
"crates/fs",
"crates/fs2",
"crates/fsevent",
"crates/fuzzy",
"crates/fuzzy2",
"crates/git",
"crates/go_to_line",
"crates/go_to_line2",
"crates/gpui",
"crates/gpui_macros",
"crates/gpui2",
"crates/gpui2_macros",
"crates/gpui",
"crates/gpui_macros",
"crates/install_cli",
"crates/install_cli2",
"crates/journal",
"crates/journal2",
"crates/journal",
"crates/language",
"crates/language2",
"crates/language_selector",
"crates/language_selector2",
"crates/language_tools",
"crates/live_kit_client",
"crates/live_kit_server",
"crates/lsp",
"crates/lsp2",
"crates/media",
"crates/menu",
"crates/menu2",
"crates/multi_buffer",
"crates/multi_buffer2",
"crates/node_runtime",
"crates/notifications",
"crates/notifications2",
"crates/outline",
"crates/outline2",
"crates/picker",
"crates/picker2",
"crates/plugin",
"crates/plugin_macros",
"crates/plugin_runtime",
"crates/prettier",
"crates/prettier2",
"crates/project",
"crates/project2",
"crates/project_panel",
"crates/project_panel2",
"crates/project_symbols",
"crates/project_symbols2",
"crates/quick_action_bar2",
"crates/quick_action_bar",
"crates/recent_projects",
"crates/recent_projects2",
"crates/rope",
"crates/rpc",
"crates/rpc2",
"crates/search",
"crates/search2",
"crates/semantic_index",
"crates/semantic_index2",
"crates/settings",
"crates/settings2",
"crates/snippet",
"crates/sqlez",
"crates/sqlez_macros",
"crates/rich_text",
"crates/storybook2",
"crates/storybook",
"crates/sum_tree",
"crates/terminal",
"crates/terminal2",
"crates/terminal_view2",
"crates/terminal_view",
"crates/text",
"crates/theme",
"crates/theme2",
"crates/theme_importer",
"crates/theme_selector",
"crates/theme_selector2",
"crates/ui2",
"crates/ui",
"crates/util",
"crates/story",
"crates/vim",
"crates/vcs_menu",
"crates/vcs_menu2",
"crates/workspace2",
"crates/workspace",
"crates/welcome",
"crates/welcome2",
"crates/xtask",
"crates/zed",
"crates/zed2",
"crates/zed-actions",
"crates/zed_actions2"
"crates/zed_actions",
]
default-members = ["crates/zed"]
resolver = "2"
@@ -140,10 +93,7 @@ resolver = "2"
anyhow = { version = "1.0.57" }
async-trait = { version = "0.1" }
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
# TODO: Switch back to the published version of `ctor` once:
# 1. A new version of `ctor` is published with this change: https://github.com/mmastrac/rust-ctor/pull/295
# 2. We've confirmed it's fine to update to the latest version of `ctor` (we're currently on v0.1.20).
ctor = { git = "https://github.com/zed-industries/rust-ctor", rev = "7f824cf6a7943885a649b579f33f9ac53f0d1db6" }
ctor = "0.2.6"
derive_more = { version = "0.99.17" }
env_logger = { version = "0.9" }
futures = { version = "0.3" }
@@ -175,7 +125,7 @@ thiserror = { version = "1.0.29" }
time = { version = "0.3", features = ["serde", "serde-well-known"] }
toml = { version = "0.5" }
tiktoken-rs = "0.5.7"
tree-sitter = "0.20"
tree-sitter = { version = "0.20" }
unindent = { version = "0.1.7" }
pretty_assertions = "1.3.0"
git2 = { version = "0.15", default-features = false}
@@ -206,13 +156,13 @@ tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-rack
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "786689b0562b9799ce53e824cb45a1a2a04dc673"}
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "26bbaecda0039df4067861ab38ea8ea169f7f5aa"}
tree-sitter-vue = {git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42"}
tree-sitter-uiua = {git = "https://github.com/shnarazk/tree-sitter-uiua", rev = "9260f11be5900beda4ee6d1a24ab8ddfaf5a19b2"}
[patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "b5f461a69bf3df7298b1903574d506179e6390b0" }
async-task = { git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e" }
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "31c40449749c4263a91a43593831b82229049a4c" }
# wasmtime = { git = "https://github.com/bytecodealliance/wasmtime", rev = "v16.0.0" }
# TODO - Remove when a version is released with this PR: https://github.com/servo/core-foundation-rs/pull/457
cocoa = { git = "https://github.com/servo/core-foundation-rs", rev = "079665882507dd5e2ff77db3de5070c1f6c0fb85" }
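(Aside on the `ctor` swap above — a minimal sketch of what the crate is used for, assuming the attribute API is unchanged between the pinned fork and the published 0.2.6:)

```rust
use ctor::ctor;

// The annotated function runs before `main` (and before a test binary's
// harness starts), which is why one-time global setup such as installing
// a logger typically lives in a #[ctor] function.
#[ctor]
fn init() {
    println!("runs before main");
}

fn main() {
    println!("main runs after all #[ctor] functions");
}
```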


@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
FROM rust:1.73-bullseye as builder
FROM rust:1.75-bullseye as builder
WORKDIR app
COPY . .


@@ -1,4 +0,0 @@
web: cd ../zed.dev && PORT=3000 npm run dev
collab: cd crates/collab2 && RUST_LOG=${RUST_LOG:-warn,collab=info} cargo run serve
livekit: livekit-server --dev
postgrest: postgrest crates/collab2/admin_api.conf


@@ -0,0 +1,6 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3 8C3 6.67392 3.52678 5.40215 4.46446 4.46447C5.40214 3.52679 6.67391 3.00001 7.99999 3.00001C9.39779 3.00527 10.7394 3.55069 11.7444 4.52223L13 5.77778" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13 3.00001V5.77778H10.2222" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13 8C13 9.32608 12.4732 10.5978 11.5355 11.5355C10.5978 12.4732 9.32607 13 7.99999 13C6.60219 12.9947 5.26054 12.4493 4.25555 11.4778L3 10.2222" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.77777 10.2222H3V13" stroke="black" stroke-linecap="round" stroke-linejoin="round"/>
</svg>



@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-at-sign"><circle cx="12" cy="12" r="4"/><path d="M16 8v5a3 3 0 0 0 6 0v-1a10 10 0 1 0-4 8"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-at-sign"><circle cx="12" cy="12" r="4"/><path d="M16 8v5a3 3 0 0 0 6 0v-1a10 10 0 1 0-4 8"/></svg>

(file size: 300 B → 301 B)


@@ -0,0 +1,3 @@
<svg width="15" height="11" viewBox="0 0 15 11" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.24432 11L0.183239 5.90909L5.24432 0.818182H14.75V11H5.24432ZM5.68679 9.90625H13.6761V1.91193H5.68679L1.70952 5.90909L5.68679 9.90625ZM11.7223 8.15625L10.9964 8.89205L5.75639 3.66193L6.48224 2.92614L11.7223 8.15625ZM6.48224 8.89205L5.75639 8.15625L10.9964 2.92614L11.7223 3.66193L6.48224 8.89205Z" fill="black"/>
</svg>




assets/icons/delete.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-delete"><path d="M20 5H9l-7 7 7 7h11a2 2 0 0 0 2-2V7a2 2 0 0 0-2-2Z"/><line x1="18" x2="12" y1="9" y2="15"/><line x1="12" x2="18" y1="9" y2="15"/></svg>


assets/icons/escape.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-left-from-circle"><path d="M2 8V2h6"/><path d="m2 2 10 10"/><path d="M12 2A10 10 0 1 1 2 12"/></svg>


assets/icons/github.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-github"><path d="M15 22v-4a4.8 4.8 0 0 0-1-3.5c3 0 6-2 6-5.5.08-1.25-.27-2.48-1-3.5.28-1.15.28-2.35 0-3.5 0 0-1 0-3 1.5-2.64-.5-5.36-.5-8 0C6 2 5 2 5 2c-.3 1.15-.3 2.35 0 3.5A5.403 5.403 0 0 0 4 9c0 3.5 3 5.5 6 5.5-.39.49-.68 1.05-.85 1.65-.17.6-.22 1.23-.15 1.85v4"/><path d="M9 18c-4.51 2-5-2-7-2"/></svg>





@@ -1,4 +1 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.5 1.5H13.5M13.5 1.5V5.5M13.5 1.5C12.1332 2.86683 10.3668 4.63317 9 6" stroke="white" stroke-linecap="round"/>
<path d="M1.5 9.5V13.5M1.5 13.5L6 9M1.5 13.5H5.5" stroke="white" stroke-linecap="round"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-maximize-2"><polyline points="15 3 21 3 21 9"/><polyline points="9 21 3 21 3 15"/><line x1="21" x2="14" y1="3" y2="10"/><line x1="3" x2="10" y1="21" y2="14"/></svg>



@@ -1,3 +1 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M1.5 3C1.22386 3 1 3.22386 1 3.5C1 3.77614 1.22386 4 1.5 4H13.5C13.7761 4 14 3.77614 14 3.5C14 3.22386 13.7761 3 13.5 3H1.5ZM1 7.5C1 7.22386 1.22386 7 1.5 7H13.5C13.7761 7 14 7.22386 14 7.5C14 7.77614 13.7761 8 13.5 8H1.5C1.22386 8 1 7.77614 1 7.5ZM1 11.5C1 11.2239 1.22386 11 1.5 11H13.5C13.7761 11 14 11.2239 14 11.5C14 11.7761 13.7761 12 13.5 12H1.5C1.22386 12 1 11.7761 1 11.5Z" fill="#CCCAC2"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-menu"><line x1="4" x2="20" y1="12" y2="12"/><line x1="4" x2="20" y1="6" y2="6"/><line x1="4" x2="20" y1="18" y2="18"/></svg>




@@ -1,4 +1 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13 6L9 6M9 6L9 2M9 6C10.3668 4.63316 12.1332 2.86683 13.5 1.5" stroke="white" stroke-linecap="round"/>
<path d="M6 13L6 9M6 9L1.5 13.5M6 9L2 9" stroke="white" stroke-linecap="round"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-minimize-2"><polyline points="4 14 10 14 10 20"/><polyline points="20 10 14 10 14 4"/><line x1="14" x2="21" y1="10" y2="3"/><line x1="3" x2="10" y1="21" y2="14"/></svg>



@@ -0,0 +1,6 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.19131 16V8.53964H10.6313L13.2953 14.3455C13.2553 13.8719 13.2153 13.4016 13.1753 12.9347C13.1353 12.4677 13.1153 11.9975 13.1153 11.5238V8.53964H14.3993V16H12.9593L10.2953 10.1941C10.3353 10.6678 10.3753 11.1381 10.4153 11.605C10.4553 12.0719 10.4753 12.5422 10.4753 13.0159V16H9.19131Z" fill="black"/>
<path d="M2 16V8.53964H4.208C4.408 8.53964 4.608 8.55318 4.808 8.58024C5.008 8.60731 5.2 8.65806 5.384 8.73249C5.568 8.80016 5.74 8.88813 5.9 8.9964C6.068 9.10467 6.216 9.22647 6.344 9.3618C6.472 9.49037 6.584 9.62909 6.68 9.77796C6.776 9.92683 6.86 10.0825 6.932 10.2449C7.004 10.4073 7.064 10.5731 7.112 10.7422C7.16 10.9046 7.196 11.0704 7.22 11.2396C7.252 11.4087 7.272 11.5813 7.28 11.7572C7.296 11.9264 7.304 12.0956 7.304 12.2647C7.304 12.4407 7.296 12.6166 7.28 12.7926C7.272 12.9617 7.252 13.1309 7.22 13.3001C7.196 13.4692 7.16 13.6384 7.112 13.8076C7.064 13.97 7.004 14.1324 6.932 14.2948C6.86 14.4572 6.776 14.6128 6.68 14.7617C6.584 14.9105 6.472 15.0526 6.344 15.188C6.216 15.3166 6.068 15.435 5.9 15.5432C5.74 15.6515 5.568 15.7429 5.384 15.8173C5.2 15.885 5.008 15.9323 4.808 15.9594C4.608 15.9865 4.408 16 4.208 16H2ZM3.284 15.0662H4.208C4.344 15.0662 4.48 15.0526 4.616 15.0256C4.752 14.9917 4.876 14.941 4.988 14.8733C5.108 14.7989 5.212 14.7177 5.3 14.6297C5.388 14.535 5.464 14.4369 5.528 14.3354C5.6 14.2339 5.66 14.129 5.708 14.0207C5.764 13.9057 5.808 13.7906 5.84 13.6756C5.872 13.5606 5.9 13.4455 5.924 13.3305C5.948 13.2155 5.968 13.1004 5.984 12.9854C6 12.8636 6.008 12.7452 6.008 12.6301C6.016 12.5083 6.02 12.3865 6.02 12.2647C6.02 12.1497 6.016 12.0347 6.008 11.9196C6.008 11.7978 6 11.6794 5.984 11.5644C5.968 11.4426 5.948 11.3242 5.924 11.2091C5.9 11.0941 5.872 10.9791 5.84 10.864C5.808 10.749 5.764 10.6373 5.708 10.5291C5.66 10.414 5.6 10.3058 5.528 10.2043C5.464 10.1028 5.388 10.008 5.3 9.92006C5.212 9.82533 5.108 9.74413 4.988 9.67646C4.876 9.60202 4.752 9.55127 4.616 9.52421C4.48 9.49037 4.344 9.47346 4.208 9.47346H3.284V15.0662Z" fill="black"/>
<path d="M11.7473 7.62276C11.5233 7.62276 11.2953 7.60584 11.0633 7.57201C10.8393 7.53817 10.6193 7.48404 10.4033 7.4096C10.1953 7.3284 10.0033 7.22352 9.82731 7.09495C9.65931 6.95961 9.51931 6.80736 9.40731 6.63819C9.29531 6.46226 9.21531 6.27955 9.16731 6.09009C9.11931 5.90062 9.09531 5.70776 9.09531 5.51153V2.11123C9.09531 1.91499 9.11531 1.72214 9.15531 1.53267C9.20331 1.33644 9.28331 1.15373 9.39531 0.984564C9.51531 0.815395 9.65931 0.666526 9.82731 0.537957C10.0033 0.402622 10.1953 0.294354 10.4033 0.213153C10.6193 0.131952 10.8393 0.0778178 11.0633 0.0507507C11.2953 0.0169169 11.5273 0 11.7593 0C11.9833 0 12.2033 0.0135335 12.4193 0.0406006C12.6353 0.0676677 12.8433 0.115035 13.0433 0.182703C13.2513 0.25037 13.4433 0.345105 13.6193 0.466907C13.7953 0.581942 13.9433 0.717277 14.0633 0.872912C14.1833 1.02855 14.2713 1.19772 14.3273 1.38042C14.3913 1.55635 14.4233 1.73906 14.4233 1.92853C14.4233 1.92853 14.4233 1.93191 14.4233 1.93868C14.4233 1.93868 14.4233 1.94206 14.4233 1.94883H13.1393C13.1393 1.94206 13.1393 1.93868 13.1393 1.93868C13.1393 1.78981 13.1033 1.64771 13.0313 1.51237C12.9593 1.37704 12.8553 1.26538 12.7193 1.17742C12.5913 1.08945 12.4433 1.02855 12.2753 0.994714C12.1073 0.954113 11.9353 0.933813 11.7593 0.933813C11.5753 0.933813 11.3913 0.957497 11.2073 1.00486C11.0313 1.05223 10.8753 1.13343 10.7393 1.24847C10.6113 1.35674 10.5193 1.48869 10.4633 1.64432C10.4073 1.79996 10.3793 1.95559 10.3793 2.11123V5.51153C10.3793 5.66716 10.4073 5.8228 10.4633 5.97843C10.5193 6.1273 10.6113 6.25925 10.7393 6.37429C10.8673 6.48256 11.0193 6.56376 11.1953 6.61789C11.3713 6.66526 11.5513 6.68894 11.7353 6.68894C11.8633 6.68894 11.9873 6.67879 12.1073 6.65849C12.2353 6.63819 12.3553 6.60436 12.4673 6.55699C12.5793 6.50962 12.6793 6.45211 12.7673 6.38444C12.8633 6.31 12.9353 6.22542 12.9833 6.13069C13.0393 6.03595 13.0753 5.93445 13.0913 5.82618C13.1073 5.71791 13.1153 5.61303 13.1153 5.51153V4.36456H11.7953V3.43075H14.3993V5.51153C14.3993 5.70776 14.3833 5.904 14.3513 6.10023C14.3273 6.29647 14.2593 6.48256 14.1473 6.65849C14.0353 6.82766 13.8873 6.97991 13.7033 7.11525C13.5273 7.24382 13.3313 7.34532 13.1153 7.41975C12.8993 7.49419 12.6753 7.54494 12.4433 7.57201C12.2193 7.60584 11.9873 7.62276 11.7473 7.62276Z" fill="black"/>
<path d="M2 7.54155V0.081192H4.58C4.812 0.081192 5.044 0.101492 5.276 0.142093C5.508 0.175926 5.728 0.240211 5.936 0.334945C6.144 0.42968 6.328 0.551482 6.488 0.700351C6.648 0.842452 6.776 1.00485 6.872 1.18756C6.976 1.36349 7.048 1.54958 7.088 1.74582C7.128 1.94205 7.148 2.14167 7.148 2.34467C7.148 2.54091 7.128 2.73715 7.088 2.93338C7.048 3.12962 6.976 3.31909 6.872 3.50179C6.776 3.67773 6.648 3.84013 6.488 3.989C6.328 4.13787 6.144 4.25967 5.936 4.3544C5.728 4.44914 5.508 4.5168 5.276 4.5574C5.044 4.59124 4.812 4.60816 4.58 4.60816H3.284V7.54155H2ZM4.58 3.66419C4.772 3.66419 4.956 3.63036 5.132 3.56269C5.316 3.49502 5.464 3.3969 5.576 3.26834C5.688 3.133 5.764 2.98752 5.804 2.83188C5.844 2.66948 5.864 2.50708 5.864 2.34467C5.864 2.18227 5.844 2.01987 5.804 1.85747C5.764 1.69506 5.688 1.54958 5.576 1.42101C5.464 1.28568 5.316 1.18417 5.132 1.11651C4.956 1.04884 4.772 1.01501 4.58 1.01501H3.284V3.66419H4.58Z" fill="black"/>
</svg>


assets/icons/page_up.svg (new file)

@@ -0,0 +1,13 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_1650_1657)">
<path d="M9.4433 15.9192V8.49652H12.0233C12.2553 8.49652 12.4873 8.51672 12.7193 8.55711C12.9513 8.59078 13.1713 8.65474 13.3793 8.74899C13.5873 8.84325 13.7713 8.96444 13.9313 9.11255C14.0913 9.25394 14.2193 9.41552 14.3153 9.5973C14.4193 9.77235 14.4913 9.95749 14.5313 10.1527C14.5713 10.348 14.5913 10.5466 14.5913 10.7486C14.5913 10.9438 14.5713 11.1391 14.5313 11.3343C14.4913 11.5296 14.4193 11.7181 14.3153 11.8998C14.2193 12.0749 14.0913 12.2365 13.9313 12.3846C13.7713 12.5327 13.5873 12.6539 13.3793 12.7482C13.1713 12.8424 12.9513 12.9097 12.7193 12.9501C12.4873 12.9838 12.2553 13.0006 12.0233 13.0006H10.7273V15.9192H9.4433ZM12.0233 12.0614C12.2153 12.0614 12.3993 12.0278 12.5753 11.9604C12.7593 11.8931 12.9073 11.7955 13.0193 11.6676C13.1313 11.5329 13.2073 11.3882 13.2473 11.2333C13.2873 11.0717 13.3073 10.9102 13.3073 10.7486C13.3073 10.587 13.2873 10.4254 13.2473 10.2638C13.2073 10.1022 13.1313 9.95749 13.0193 9.82957C12.9073 9.69492 12.7593 9.59393 12.5753 9.52661C12.3993 9.45928 12.2153 9.42562 12.0233 9.42562H10.7273V12.0614H12.0233Z" fill="black"/>
<path d="M4.604 16C4.372 16 4.144 15.9832 3.92 15.9495C3.696 15.9226 3.48 15.8687 3.272 15.7879C3.064 15.7071 2.876 15.5994 2.708 15.4648C2.54 15.3301 2.4 15.1786 2.288 15.0103C2.184 14.842 2.108 14.6636 2.06 14.4751C2.02 14.2798 2 14.0879 2 13.8994V8.49652H3.284V13.8994C3.284 14.0543 3.308 14.2058 3.356 14.3539C3.404 14.502 3.488 14.6333 3.608 14.7477C3.736 14.8622 3.888 14.9463 4.064 15.0002C4.24 15.0473 4.42 15.0709 4.604 15.0709C4.788 15.0709 4.968 15.0473 5.144 15.0002C5.32 14.9463 5.468 14.8622 5.588 14.7477C5.716 14.6333 5.804 14.502 5.852 14.3539C5.9 14.2058 5.924 14.0543 5.924 13.8994V8.49652H7.208V13.8994C7.208 14.0879 7.184 14.2798 7.136 14.4751C7.096 14.6636 7.02 14.842 6.908 15.0103C6.804 15.1786 6.668 15.3301 6.5 15.4648C6.332 15.5994 6.144 15.7071 5.936 15.7879C5.728 15.8687 5.512 15.9226 5.288 15.9495C5.064 15.9832 4.836 16 4.604 16Z" fill="black"/>
<path d="M11.7473 7.58426C11.5233 7.58426 11.2953 7.56743 11.0633 7.53377C10.8393 7.50011 10.6193 7.44625 10.4033 7.37219C10.1953 7.2914 10.0033 7.18704 9.82731 7.05912C9.65931 6.92447 9.51931 6.77299 9.40731 6.60467C9.29531 6.42963 9.21531 6.24785 9.16731 6.05933C9.11931 5.87082 9.09531 5.67894 9.09531 5.4837V2.10057C9.09531 1.90532 9.11531 1.71344 9.15531 1.52493C9.20331 1.32969 9.28331 1.14791 9.39531 0.979592C9.51531 0.811277 9.65931 0.66316 9.82731 0.535241C10.0033 0.400589 10.1953 0.292868 10.4033 0.212076C10.6193 0.131285 10.8393 0.0774248 11.0633 0.0504944C11.2953 0.0168315 11.5273 0 11.7593 0C11.9833 0 12.2033 0.0134652 12.4193 0.0403956C12.6353 0.0673261 12.8433 0.114454 13.0433 0.18178C13.2513 0.249106 13.4433 0.343362 13.6193 0.464549C13.7953 0.579003 13.9433 0.713655 14.0633 0.868505C14.1833 1.02335 14.2713 1.19167 14.3273 1.37345C14.3913 1.5485 14.4233 1.73028 14.4233 1.91879C14.4233 1.91879 14.4233 1.92215 14.4233 1.92889C14.4233 1.92889 14.4233 1.93225 14.4233 1.93899H13.1393C13.1393 1.93225 13.1393 1.92889 13.1393 1.92889C13.1393 1.78077 13.1033 1.63939 13.0313 1.50473C12.9593 1.37008 12.8553 1.25899 12.7193 1.17147C12.5913 1.08395 12.4433 1.02335 12.2753 0.989691C12.1073 0.949295 11.9353 0.929098 11.7593 0.929098C11.5753 0.929098 11.3913 0.952662 11.2073 0.99979C11.0313 1.04692 10.8753 1.12771 10.7393 1.24216C10.6113 1.34988 10.5193 1.48117 10.4633 1.63602C10.4073 1.79087 10.3793 1.94572 10.3793 2.10057V5.4837C10.3793 5.63855 10.4073 5.7934 10.4633 5.94824C10.5193 6.09636 10.6113 6.22765 10.7393 6.3421C10.8673 6.44982 11.0193 6.53061 11.1953 6.58447C11.3713 6.6316 11.5513 6.65517 11.7353 6.65517C11.8633 6.65517 11.9873 6.64507 12.1073 6.62487C12.2353 6.60467 12.3553 6.57101 12.4673 6.52388C12.5793 6.47675 12.6793 6.41953 12.7673 6.3522C12.8633 6.27814 12.9353 6.19398 12.9833 6.09973C13.0393 6.00547 13.0753 5.90448 13.0913 5.79676C13.1073 5.68904 13.1153 5.58468 13.1153 5.4837V4.34252H11.7953V3.41342H14.3993V5.4837C14.3993 5.67894 14.3833 5.87419 14.3513 6.06943C14.3273 6.26468 14.2593 6.44982 14.1473 6.62487C14.0353 6.79319 13.8873 6.94467 13.7033 7.07932C13.5273 7.20724 13.3313 7.30823 13.1153 7.38229C12.8993 7.45635 12.6753 7.50684 12.4433 7.53377C12.2193 7.56743 11.9873 7.58426 11.7473 7.58426Z" fill="black"/>
<path d="M2 7.50346V0.08078H4.58C4.812 0.08078 5.044 0.100978 5.276 0.141373C5.508 0.175036 5.728 0.238996 5.936 0.333252C6.144 0.427508 6.328 0.548695 6.488 0.696812C6.648 0.838196 6.776 0.999778 6.872 1.18156C6.976 1.35661 7.048 1.54175 7.088 1.737C7.128 1.93224 7.148 2.13085 7.148 2.33283C7.148 2.52808 7.128 2.72332 7.088 2.91857C7.048 3.11381 6.976 3.30233 6.872 3.48411C6.776 3.65915 6.648 3.82073 6.488 3.96885C6.328 4.11697 6.144 4.23816 5.936 4.33241C5.728 4.42667 5.508 4.49399 5.276 4.53439C5.044 4.56805 4.812 4.58488 4.58 4.58488H3.284V7.50346H2ZM4.58 3.64569C4.772 3.64569 4.956 3.61202 5.132 3.5447C5.316 3.47737 5.464 3.37975 5.576 3.25183C5.688 3.11718 5.764 2.97243 5.804 2.81758C5.844 2.656 5.864 2.49441 5.864 2.33283C5.864 2.17125 5.844 2.00967 5.804 1.84809C5.764 1.6865 5.688 1.54175 5.576 1.41383C5.464 1.27918 5.316 1.17819 5.132 1.11087C4.956 1.04354 4.772 1.00988 4.58 1.00988H3.284V3.64569H4.58Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_1650_1657">
<rect width="16" height="16" fill="white"/>
</clipPath>
</defs>
</svg>



@@ -1,8 +1 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M9.42503 3.44136C10.0561 3.23654 10.7837 3.2402 11.3792 3.54623C12.7532 4.25224 13.3477 6.07191 12.7946 8C12.5465 8.8649 12.1102 9.70472 11.1861 10.5524C10.262 11.4 8.98034 11.9 8.38571 11.9C8.17269 11.9 8 11.7321 8 11.525C8 11.3179 8.17644 11.15 8.38571 11.15C9.06497 11.15 9.67189 10.7804 10.3906 10.236C10.9406 9.8193 11.3701 9.28633 11.608 8.82191C12.0628 7.93367 12.0782 6.68174 11.3433 6.34901C10.9904 6.73455 10.5295 6.95946 9.97725 6.95946C8.7773 6.95946 8.0701 5.99412 8.10051 5.12009C8.12957 4.28474 8.66032 3.68954 9.42503 3.44136ZM3.42503 3.44136C4.05614 3.23654 4.78366 3.2402 5.37923 3.54623C6.7532 4.25224 7.34766 6.07191 6.79462 8C6.54654 8.8649 6.11019 9.70472 5.1861 10.5524C4.26201 11.4 2.98034 11.9 2.38571 11.9C2.17269 11.9 2 11.7321 2 11.525C2 11.3179 2.17644 11.15 2.38571 11.15C3.06497 11.15 3.67189 10.7804 4.39058 10.236C4.94065 9.8193 5.37014 9.28633 5.60797 8.82191C6.06282 7.93367 6.07821 6.68174 5.3433 6.34901C4.99037 6.73455 4.52948 6.95946 3.97725 6.95946C2.7773 6.95946 2.0701 5.99412 2.10051 5.12009C2.12957 4.28474 2.66032 3.68954 3.42503 3.44136Z"
fill="currentColor"
/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-quote"><path d="M17 6H3"/><path d="M21 12H8"/><path d="M21 18H8"/><path d="M3 12v6"/></svg>



@@ -1,3 +1 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.375 1.63C8.375 1.28482 8.65482 1.005 9 1.005H12.375C13.8247 1.005 15 2.18025 15 3.63V7.625C15 9.07474 13.8247 10.25 12.375 10.25H3.13388L6.07194 13.1881C6.31602 13.4321 6.31602 13.8279 6.07194 14.0719C5.82786 14.316 5.43214 14.316 5.18806 14.0719L1.18306 10.0669C0.938981 9.82286 0.938981 9.42714 1.18306 9.18306L5.18306 5.18306C5.42714 4.93898 5.82286 4.93898 6.06694 5.18306C6.31102 5.42714 6.31102 5.82286 6.06694 6.06694L3.13388 9H12.375C13.1344 9 13.75 8.38439 13.75 7.625V3.63C13.75 2.87061 13.1344 2.255 12.375 2.255H9C8.65482 2.255 8.375 1.97518 8.375 1.63Z" fill="black"/>
</svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-corner-down-left"><polyline points="9 10 4 15 9 20"/><path d="M20 4v7a4 4 0 0 1-4 4H4"/></svg>



assets/icons/snip.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-scissors"><circle cx="6" cy="6" r="3"/><path d="M8.12 8.12 12 12"/><path d="M20 4 8.12 15.88"/><circle cx="6" cy="18" r="3"/><path d="M14.8 14.8 20 20"/></svg>


assets/icons/space.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-space"><path d="M22 17v1c0 .5-.5 1-1 1H3c-.5 0-1-.5-1-1v-1"/></svg>





@@ -1 +0,0 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M7.81832 0.68179C7.64258 0.506054 7.35766 0.506054 7.18192 0.68179L5.18192 2.68179C5.00619 2.85753 5.00619 3.14245 5.18192 3.31819C5.35766 3.49392 5.64258 3.49392 5.81832 3.31819L7.05012 2.08638L7.05012 5.50023C7.05012 5.74876 7.25159 5.95023 7.50012 5.95023C7.74865 5.95023 7.95012 5.74876 7.95012 5.50023L7.95012 2.08638L9.18192 3.31819C9.35766 3.49392 9.64258 3.49392 9.81832 3.31819C9.99406 3.14245 9.99406 2.85753 9.81832 2.68179L7.81832 0.68179ZM7.95012 12.9136V9.50023C7.95012 9.2517 7.74865 9.05023 7.50012 9.05023C7.25159 9.05023 7.05012 9.2517 7.05012 9.50023V12.9136L5.81832 11.6818C5.64258 11.5061 5.35766 11.5061 5.18192 11.6818C5.00619 11.8575 5.00619 12.1424 5.18192 12.3182L7.18192 14.3182C7.26632 14.4026 7.38077 14.45 7.50012 14.45C7.61947 14.45 7.73393 14.4026 7.81832 14.3182L9.81832 12.3182C9.99406 12.1424 9.99406 11.8575 9.81832 11.6818C9.64258 11.5061 9.35766 11.5061 9.18192 11.6818L7.95012 12.9136ZM1.49994 7.00017C1.2238 7.00017 0.999939 7.22403 0.999939 7.50017C0.999939 7.77631 1.2238 8.00017 1.49994 8.00017L13.4999 8.00017C13.7761 8.00017 13.9999 7.77631 13.9999 7.50017C13.9999 7.22403 13.7761 7.00017 13.4999 7.00017L1.49994 7.00017Z" fill="currentColor" fill-rule="evenodd" clip-rule="evenodd"></path></svg>


assets/icons/tab.svg (new file)

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-right-to-line"><path d="M17 12H3"/><path d="m11 18 6-6-6-6"/><path d="M21 5v14"/></svg>



@@ -402,7 +402,7 @@
"cmd-r": "workspace::ToggleRightDock",
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "workspace::NewSearch",
"cmd-shift-f": "workspace::DeploySearch",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-t": "project_symbols::Toggle",
@@ -502,7 +502,7 @@
}
},
{
"context": "Editor",
"context": "Editor && mode == full",
"bindings": {
"alt-enter": "editor::OpenExcerpts",
"cmd-f8": "editor::GoToHunk",


@@ -76,7 +76,7 @@
// or waits for a `copilot::Toggle`
"show_copilot_suggestions": true,
// Whether to show tabs and spaces in the editor.
// This setting can take two values:
// This setting can take three values:
//
// 1. Draw tabs and spaces only for the selected text (default):
// "selection"
@@ -183,7 +183,7 @@
// Default height when the assistant is docked to the bottom.
"default_height": 320,
// The default OpenAI model to use when starting new conversations. This
// setting can take two values:
// setting can take three values:
//
// 1. "gpt-3.5-turbo-0613""
// 2. "gpt-4-0613""
@@ -351,7 +351,7 @@
// }
"working_directory": "current_project_directory",
// Set the cursor blinking behavior in the terminal.
// May take 4 values:
// May take 3 values:
// 1. Never blink the cursor, ignoring the terminal mode
// "blinking": "off",
// 2. Default the cursor blink to off, but allow the terminal to
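(The corrected comments above enumerate three-valued settings. A hedged sketch of how such a value is typically modeled on the Rust side — the type name and derive setup are illustrative, not the actual Zed settings types:)

```rust
use serde::Deserialize;

// Accepts "selection" | "all" | "none", mirroring the three values the
// whitespace comment enumerates.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ShowWhitespaces {
    Selection, // draw tabs and spaces only in selected text (the default)
    All,       // always draw them
    None,      // never draw them
}
```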


@@ -13,7 +13,7 @@
"appearance": "dark"
},
{
"name": "Noctus Hibernus",
"name": "Noctis Hibernus",
"file_name": "hibernus.json",
"appearance": "light"
},


@@ -1,19 +1,19 @@
{
"name": "Rose Pine",
"name": "Rosé Pine",
"author": "Rosé Pine",
"themes": [
{
"name": "Rose Pine",
"name": "Rosé Pine",
"file_name": "rose-pine.json",
"appearance": "dark"
},
{
"name": "Rose Pine Moon",
"name": "Rosé Pine Moon",
"file_name": "rose-pine-moon.json",
"appearance": "dark"
},
{
"name": "Rose Pine Dawn",
"name": "Rosé Pine Dawn",
"file_name": "rose-pine-dawn.json",
"appearance": "light"
}


@@ -15,10 +15,12 @@ language = { path = "../language" }
gpui = { path = "../gpui" }
project = { path = "../project" }
settings = { path = "../settings" }
ui = { path = "../ui" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }
workspace = { path = "../workspace", package = "workspace" }
anyhow.workspace = true
futures.workspace = true
smallvec.workspace = true


@@ -2,19 +2,19 @@ use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage};
use editor::Editor;
use futures::StreamExt;
use gpui::{
actions, anyhow,
elements::*,
platform::{CursorStyle, MouseButton},
AppContext, Entity, ModelHandle, View, ViewContext, ViewHandle,
actions, svg, AppContext, CursorStyle, EventEmitter, InteractiveElement as _, Model,
ParentElement as _, Render, SharedString, StatefulInteractiveElement, Styled, View,
ViewContext, VisualContext as _,
};
use language::{LanguageRegistry, LanguageServerBinaryStatus};
use project::{LanguageServerProgress, Project};
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc};
use ui::prelude::*;
use util::ResultExt;
use workspace::{item::ItemHandle, StatusItemView, Workspace};
actions!(lsp_status, [ShowErrorMessage]);
actions!(activity_indicator, [ShowErrorMessage]);
const DOWNLOAD_ICON: &str = "icons/download.svg";
const WARNING_ICON: &str = "icons/warning.svg";
@@ -25,8 +25,8 @@ pub enum Event {
pub struct ActivityIndicator {
statuses: Vec<LspStatus>,
project: ModelHandle<Project>,
auto_updater: Option<ModelHandle<AutoUpdater>>,
project: Model<Project>,
auto_updater: Option<Model<AutoUpdater>>,
}
struct LspStatus {
@@ -47,20 +47,15 @@ struct Content {
on_click: Option<Arc<dyn Fn(&mut ActivityIndicator, &mut ViewContext<ActivityIndicator>)>>,
}
pub fn init(cx: &mut AppContext) {
cx.add_action(ActivityIndicator::show_error_message);
cx.add_action(ActivityIndicator::dismiss_error_message);
}
impl ActivityIndicator {
pub fn new(
workspace: &mut Workspace,
languages: Arc<LanguageRegistry>,
cx: &mut ViewContext<Workspace>,
) -> ViewHandle<ActivityIndicator> {
) -> View<ActivityIndicator> {
let project = workspace.project().clone();
let auto_updater = AutoUpdater::get(cx);
let this = cx.add_view(|cx: &mut ViewContext<Self>| {
let this = cx.new_view(|cx: &mut ViewContext<Self>| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(|this, mut cx| async move {
while let Some((language, event)) = status_events.next().await {
@@ -77,11 +72,13 @@ impl ActivityIndicator {
})
.detach();
cx.observe(&project, |_, _, cx| cx.notify()).detach();
if let Some(auto_updater) = auto_updater.as_ref() {
cx.observe(auto_updater, |_, _, cx| cx.notify()).detach();
}
cx.observe_active_labeled_tasks(|_, cx| cx.notify())
.detach();
// cx.observe_active_labeled_tasks(|_, cx| cx.notify())
// .detach();
Self {
statuses: Default::default(),
@@ -89,6 +86,7 @@ impl ActivityIndicator {
auto_updater,
}
});
cx.subscribe(&this, move |workspace, _, event, cx| match event {
Event::ShowError { lsp_name, error } => {
if let Some(buffer) = project
@@ -104,7 +102,7 @@ impl ActivityIndicator {
});
workspace.add_item(
Box::new(
cx.add_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)),
cx.new_view(|cx| Editor::for_buffer(buffer, Some(project.clone()), cx)),
),
cx,
);
@@ -290,71 +288,41 @@ impl ActivityIndicator {
};
}
if let Some(most_recent_active_task) = cx.active_labeled_tasks().last() {
return Content {
icon: None,
message: most_recent_active_task.to_string(),
on_click: None,
};
}
// todo!(show active tasks)
// if let Some(most_recent_active_task) = cx.active_labeled_tasks().last() {
// return Content {
// icon: None,
// message: most_recent_active_task.to_string(),
// on_click: None,
// };
// }
Default::default()
}
}
impl Entity for ActivityIndicator {
type Event = Event;
}
impl EventEmitter<Event> for ActivityIndicator {}
impl View for ActivityIndicator {
fn ui_name() -> &'static str {
"ActivityIndicator"
}
impl Render for ActivityIndicator {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let content = self.content_to_render(cx);
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let Content {
icon,
message,
on_click,
} = self.content_to_render(cx);
let mut result = h_stack()
.id("activity-indicator")
.on_action(cx.listener(Self::show_error_message))
.on_action(cx.listener(Self::dismiss_error_message));
let mut element = MouseEventHandler::new::<Self, _>(0, cx, |state, cx| {
let theme = &theme::current(cx).workspace.status_bar.lsp_status;
let style = if state.hovered() && on_click.is_some() {
theme.hovered.as_ref().unwrap_or(&theme.default)
} else {
&theme.default
};
Flex::row()
.with_children(icon.map(|path| {
Svg::new(path)
.with_color(style.icon_color)
.constrained()
.with_width(style.icon_width)
.contained()
.with_margin_right(style.icon_spacing)
.aligned()
.into_any_named("activity-icon")
if let Some(on_click) = content.on_click {
result = result
.cursor(CursorStyle::PointingHand)
.on_click(cx.listener(move |this, _, cx| {
on_click(this, cx);
}))
.with_child(
Text::new(message, style.message.clone())
.with_soft_wrap(false)
.aligned(),
)
.constrained()
.with_height(style.height)
.contained()
.with_style(style.container)
.aligned()
});
if let Some(on_click) = on_click.clone() {
element = element
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, this, cx| on_click(this, cx));
}
element.into_any()
result
.children(content.icon.map(|icon| svg().path(icon)))
.child(Label::new(SharedString::from(content.message)).size(LabelSize::Small))
}
}
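(A schematic sketch of the gpui view pattern this file migrates to — `StatusLabel` and `Event` are illustrative names, and the imports assume, as this diff's own use-lines suggest, that `ui::prelude::*` supplies the element traits and `Label`; this is a sketch of the shape, not a compilable excerpt of the real crate:)

```rust
use gpui::{EventEmitter, Render, SharedString, ViewContext};
use ui::prelude::*;

pub enum Event {
    Dismissed,
}

pub struct StatusLabel {
    message: SharedString,
}

// Views no longer implement `Entity` + `View`; they implement `Render`
// (plus `EventEmitter` for whatever they emit) and build their element
// tree with fluent combinators such as `h_stack()`.
impl EventEmitter<Event> for StatusLabel {}

impl Render for StatusLabel {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        h_stack()
            .id("status-label")
            .child(Label::new(self.message.clone()).size(LabelSize::Small))
    }
}
```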


@@ -1,28 +0,0 @@
[package]
name = "activity_indicator2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/activity_indicator.rs"
doctest = false
[dependencies]
auto_update = { path = "../auto_update2", package = "auto_update2" }
editor = { path = "../editor2", package = "editor2" }
language = { path = "../language2", package = "language2" }
gpui = { path = "../gpui2", package = "gpui2" }
project = { path = "../project2", package = "project2" }
settings = { path = "../settings2", package = "settings2" }
ui = { path = "../ui2", package = "ui2" }
util = { path = "../util" }
theme = { path = "../theme2", package = "theme2" }
workspace = { path = "../workspace2", package = "workspace2" }
anyhow.workspace = true
futures.workspace = true
smallvec.workspace = true
[dev-dependencies]
editor = { path = "../editor2", package = "editor2", features = ["test-support"] }


@@ -1,333 +0,0 @@
use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage};
use editor::Editor;
use futures::StreamExt;
use gpui::{
actions, svg, AppContext, CursorStyle, Div, EventEmitter, InteractiveElement as _, Model,
ParentElement as _, Render, SharedString, Stateful, StatefulInteractiveElement, Styled, View,
ViewContext, VisualContext as _,
};
use language::{LanguageRegistry, LanguageServerBinaryStatus};
use project::{LanguageServerProgress, Project};
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc};
use ui::h_stack;
use util::ResultExt;
use workspace::{item::ItemHandle, StatusItemView, Workspace};
actions!(activity_indicator, [ShowErrorMessage]);
const DOWNLOAD_ICON: &str = "icons/download.svg";
const WARNING_ICON: &str = "icons/warning.svg";
pub enum Event {
ShowError { lsp_name: Arc<str>, error: String },
}
pub struct ActivityIndicator {
statuses: Vec<LspStatus>,
project: Model<Project>,
auto_updater: Option<Model<AutoUpdater>>,
}
struct LspStatus {
name: Arc<str>,
status: LanguageServerBinaryStatus,
}
struct PendingWork<'a> {
language_server_name: &'a str,
progress_token: &'a str,
progress: &'a LanguageServerProgress,
}
#[derive(Default)]
struct Content {
icon: Option<&'static str>,
message: String,
on_click: Option<Arc<dyn Fn(&mut ActivityIndicator, &mut ViewContext<ActivityIndicator>)>>,
}
impl ActivityIndicator {
pub fn new(
workspace: &mut Workspace,
languages: Arc<LanguageRegistry>,
cx: &mut ViewContext<Workspace>,
) -> View<ActivityIndicator> {
let project = workspace.project().clone();
let auto_updater = AutoUpdater::get(cx);
let this = cx.build_view(|cx: &mut ViewContext<Self>| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(|this, mut cx| async move {
while let Some((language, event)) = status_events.next().await {
this.update(&mut cx, |this, cx| {
this.statuses.retain(|s| s.name != language.name());
this.statuses.push(LspStatus {
name: language.name(),
status: event,
});
cx.notify();
})?;
}
anyhow::Ok(())
})
.detach();
cx.observe(&project, |_, _, cx| cx.notify()).detach();
if let Some(auto_updater) = auto_updater.as_ref() {
cx.observe(auto_updater, |_, _, cx| cx.notify()).detach();
}
// cx.observe_active_labeled_tasks(|_, cx| cx.notify())
// .detach();
Self {
statuses: Default::default(),
project: project.clone(),
auto_updater,
}
});
cx.subscribe(&this, move |workspace, _, event, cx| match event {
Event::ShowError { lsp_name, error } => {
if let Some(buffer) = project
.update(cx, |project, cx| project.create_buffer(error, None, cx))
.log_err()
{
buffer.update(cx, |buffer, cx| {
buffer.edit(
[(0..0, format!("Language server error: {}\n\n", lsp_name))],
None,
cx,
);
});
workspace.add_item(
Box::new(cx.build_view(|cx| {
Editor::for_buffer(buffer, Some(project.clone()), cx)
})),
cx,
);
}
}
})
.detach();
this
}
fn show_error_message(&mut self, _: &ShowErrorMessage, cx: &mut ViewContext<Self>) {
self.statuses.retain(|status| {
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
cx.emit(Event::ShowError {
lsp_name: status.name.clone(),
error: error.clone(),
});
false
} else {
true
}
});
cx.notify();
}
fn dismiss_error_message(&mut self, _: &DismissErrorMessage, cx: &mut ViewContext<Self>) {
if let Some(updater) = &self.auto_updater {
updater.update(cx, |updater, cx| {
updater.dismiss_error(cx);
});
}
cx.notify();
}
fn pending_language_server_work<'a>(
&self,
cx: &'a AppContext,
) -> impl Iterator<Item = PendingWork<'a>> {
self.project
.read(cx)
.language_server_statuses()
.rev()
.filter_map(|status| {
if status.pending_work.is_empty() {
None
} else {
let mut pending_work = status
.pending_work
.iter()
.map(|(token, progress)| PendingWork {
language_server_name: status.name.as_str(),
progress_token: token.as_str(),
progress,
})
.collect::<SmallVec<[_; 4]>>();
pending_work.sort_by_key(|work| Reverse(work.progress.last_update_at));
Some(pending_work)
}
})
.flatten()
}
fn content_to_render(&mut self, cx: &mut ViewContext<Self>) -> Content {
// Show any language server has pending activity.
let mut pending_work = self.pending_language_server_work(cx);
if let Some(PendingWork {
language_server_name,
progress_token,
progress,
}) = pending_work.next()
{
let mut message = language_server_name.to_string();
message.push_str(": ");
if let Some(progress_message) = progress.message.as_ref() {
message.push_str(progress_message);
} else {
message.push_str(progress_token);
}
if let Some(percentage) = progress.percentage {
write!(&mut message, " ({}%)", percentage).unwrap();
}
let additional_work_count = pending_work.count();
if additional_work_count > 0 {
write!(&mut message, " + {} more", additional_work_count).unwrap();
}
return Content {
icon: None,
message,
on_click: None,
};
}
// Show any language server installation info.
let mut downloading = SmallVec::<[_; 3]>::new();
let mut checking_for_update = SmallVec::<[_; 3]>::new();
let mut failed = SmallVec::<[_; 3]>::new();
for status in &self.statuses {
let name = status.name.clone();
match status.status {
LanguageServerBinaryStatus::CheckingForUpdate => checking_for_update.push(name),
LanguageServerBinaryStatus::Downloading => downloading.push(name),
LanguageServerBinaryStatus::Failed { .. } => failed.push(name),
LanguageServerBinaryStatus::Downloaded | LanguageServerBinaryStatus::Cached => {}
}
}
if !downloading.is_empty() {
return Content {
icon: Some(DOWNLOAD_ICON),
message: format!(
"Downloading {} language server{}...",
downloading.join(", "),
if downloading.len() > 1 { "s" } else { "" }
),
on_click: None,
};
} else if !checking_for_update.is_empty() {
return Content {
icon: Some(DOWNLOAD_ICON),
message: format!(
"Checking for updates to {} language server{}...",
checking_for_update.join(", "),
if checking_for_update.len() > 1 {
"s"
} else {
""
}
),
on_click: None,
};
} else if !failed.is_empty() {
return Content {
icon: Some(WARNING_ICON),
message: format!(
"Failed to download {} language server{}. Click to show error.",
failed.join(", "),
if failed.len() > 1 { "s" } else { "" }
),
on_click: Some(Arc::new(|this, cx| {
this.show_error_message(&Default::default(), cx)
})),
};
}
// Show any application auto-update info.
if let Some(updater) = &self.auto_updater {
return match &updater.read(cx).status() {
AutoUpdateStatus::Checking => Content {
icon: Some(DOWNLOAD_ICON),
message: "Checking for Zed updates…".to_string(),
on_click: None,
},
AutoUpdateStatus::Downloading => Content {
icon: Some(DOWNLOAD_ICON),
message: "Downloading Zed update…".to_string(),
on_click: None,
},
AutoUpdateStatus::Installing => Content {
icon: Some(DOWNLOAD_ICON),
message: "Installing Zed update…".to_string(),
on_click: None,
},
AutoUpdateStatus::Updated => Content {
icon: None,
message: "Click to restart and update Zed".to_string(),
on_click: Some(Arc::new(|_, cx| {
workspace::restart(&Default::default(), cx)
})),
},
AutoUpdateStatus::Errored => Content {
icon: Some(WARNING_ICON),
message: "Auto update failed".to_string(),
on_click: Some(Arc::new(|this, cx| {
this.dismiss_error_message(&Default::default(), cx)
})),
},
AutoUpdateStatus::Idle => Default::default(),
};
}
// todo!(show active tasks)
// if let Some(most_recent_active_task) = cx.active_labeled_tasks().last() {
// return Content {
// icon: None,
// message: most_recent_active_task.to_string(),
// on_click: None,
// };
// }
Default::default()
}
}
impl EventEmitter<Event> for ActivityIndicator {}
impl Render for ActivityIndicator {
type Element = Stateful<Div>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
let content = self.content_to_render(cx);
let mut result = h_stack()
.id("activity-indicator")
.on_action(cx.listener(Self::show_error_message))
.on_action(cx.listener(Self::dismiss_error_message));
if let Some(on_click) = content.on_click {
result = result
.cursor(CursorStyle::PointingHand)
.on_click(cx.listener(move |this, _, cx| {
on_click(this, cx);
}))
}
result
.children(content.icon.map(|icon| svg().path(icon)))
.child(SharedString::from(content.message))
}
}
impl StatusItemView for ActivityIndicator {
fn set_active_pane_item(&mut self, _: Option<&dyn ItemHandle>, _: &mut ViewContext<Self>) {}
}


@@ -9,7 +9,7 @@ pub enum ProviderCredential {
pub trait CredentialProvider: Send + Sync {
fn has_credentials(&self) -> bool;
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential);
fn delete_credentials(&self, cx: &AppContext);
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
fn delete_credentials(&self, cx: &mut AppContext);
}
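(The signatures above now take `&mut AppContext` because credential reads and writes go through the context itself. A minimal sketch of an implementation against the updated trait — `EnvOnlyProvider` is a hypothetical type, and the env-var fallback mirrors the OpenAI providers later in this diff:)

```rust
use gpui::AppContext;

struct EnvOnlyProvider;

impl CredentialProvider for EnvOnlyProvider {
    fn has_credentials(&self) -> bool {
        std::env::var("OPENAI_API_KEY").is_ok()
    }

    fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
        match std::env::var("OPENAI_API_KEY") {
            Ok(api_key) => ProviderCredential::Credentials { api_key },
            Err(_) => ProviderCredential::NoCredentials,
        }
    }

    // This provider only reads the environment, so persistence is a no-op;
    // the real providers write to and delete from the keychain via `cx`.
    fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
    fn delete_credentials(&self, _cx: &mut AppContext) {}
}
```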


@@ -2,7 +2,7 @@ use crate::prompts::base::{PromptArguments, PromptTemplate};
use std::fmt::Write;
use std::{ops::Range, path::PathBuf};
use gpui::{AsyncAppContext, ModelHandle};
use gpui::{AsyncAppContext, Model};
use language::{Anchor, Buffer};
#[derive(Clone)]
@@ -13,8 +13,12 @@ pub struct PromptCodeSnippet {
}
impl PromptCodeSnippet {
pub fn new(buffer: ModelHandle<Buffer>, range: Range<Anchor>, cx: &AsyncAppContext) -> Self {
let (content, language_name, file_path) = buffer.read_with(cx, |buffer, _| {
pub fn new(
buffer: Model<Buffer>,
range: Range<Anchor>,
cx: &mut AsyncAppContext,
) -> anyhow::Result<Self> {
let (content, language_name, file_path) = buffer.update(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let content = snapshot.text_for_range(range.clone()).collect::<String>();
@@ -27,13 +31,13 @@ impl PromptCodeSnippet {
.and_then(|file| Some(file.path().to_path_buf()));
(content, language_name, file_path)
});
})?;
PromptCodeSnippet {
anyhow::Ok(PromptCodeSnippet {
path: file_path,
language_name,
content,
}
})
}
}
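(Worth noting the `anyhow::Ok` used above: it is `Ok` with the error type pinned to `anyhow::Error`, which lets a closure or block whose return type is never written out infer `anyhow::Result<T>`. A small self-contained sketch:)

```rust
use anyhow::Result;

fn main() -> Result<()> {
    // Inside a closure there is no written return type, so a plain `Ok`
    // would leave the error type ambiguous; `anyhow::Ok` pins it.
    let double = |s: &str| {
        let n: i32 = s.trim().parse()?; // ParseIntError converts via `?`
        anyhow::Ok(n * 2)
    };
    println!("{}", double(" 21 ")?);
    Ok(())
}
```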


@@ -3,7 +3,7 @@ use futures::{
future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
Stream, StreamExt,
};
use gpui::{executor::Background, AppContext};
use gpui::{AppContext, BackgroundExecutor};
use isahc::{http::StatusCode, Request, RequestExt};
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
@@ -104,7 +104,7 @@ pub struct OpenAIResponseStreamEvent {
pub async fn stream_completion(
credential: ProviderCredential,
executor: Arc<Background>,
executor: BackgroundExecutor,
request: Box<dyn CompletionRequest>,
) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
let api_key = match credential {
@@ -197,11 +197,11 @@ pub async fn stream_completion(
pub struct OpenAICompletionProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
executor: Arc<Background>,
executor: BackgroundExecutor,
}
impl OpenAICompletionProvider {
pub fn new(model_name: &str, executor: Arc<Background>) -> Self {
pub fn new(model_name: &str, executor: BackgroundExecutor) -> Self {
let model = OpenAILanguageModel::load(model_name);
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
Self {
@@ -219,46 +219,45 @@ impl CredentialProvider for OpenAICompletionProvider {
_ => false,
}
}
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential {
let mut credential = self.credential.write();
match *credential {
ProviderCredential::Credentials { .. } => {
return credential.clone();
}
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
let existing_credential = self.credential.read().clone();
let retrieved_credential = match existing_credential {
ProviderCredential::Credentials { .. } => existing_credential.clone(),
_ => {
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
*credential = ProviderCredential::Credentials { api_key };
} else if let Some((_, api_key)) = cx
.platform()
.read_credentials(OPENAI_API_URL)
.log_err()
.flatten()
if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
ProviderCredential::Credentials { api_key }
} else if let Some(Some((_, api_key))) =
cx.read_credentials(OPENAI_API_URL).log_err()
{
if let Some(api_key) = String::from_utf8(api_key).log_err() {
*credential = ProviderCredential::Credentials { api_key };
ProviderCredential::Credentials { api_key }
} else {
ProviderCredential::NoCredentials
}
} else {
};
ProviderCredential::NoCredentials
}
}
}
credential.clone()
};
*self.credential.write() = retrieved_credential.clone();
retrieved_credential
}
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential) {
match credential.clone() {
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
*self.credential.write() = credential.clone();
let credential = credential.clone();
match credential {
ProviderCredential::Credentials { api_key } => {
cx.platform()
.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
.log_err();
}
_ => {}
}
*self.credential.write() = credential;
}
fn delete_credentials(&self, cx: &AppContext) {
cx.platform().delete_credentials(OPENAI_API_URL).log_err();
fn delete_credentials(&self, cx: &mut AppContext) {
cx.delete_credentials(OPENAI_API_URL).log_err();
*self.credential.write() = ProviderCredential::NoCredentials;
}
}
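(The `retrieve_credentials` rewrite above also changes the locking discipline: rather than holding the write lock across the whole lookup, it clones under a short read lock, computes the replacement with no lock held, and writes once at the end. A minimal standalone sketch of that pattern:)

```rust
use parking_lot::RwLock;
use std::sync::Arc;

fn refresh(slot: &Arc<RwLock<Option<String>>>) -> Option<String> {
    // 1. Copy the current value under a short read lock.
    let existing = slot.read().clone();

    // 2. Compute the replacement lock-free (the real code consults the
    //    environment and, through `cx`, the platform keychain here).
    let refreshed = existing.or_else(|| std::env::var("OPENAI_API_KEY").ok());

    // 3. Store the result under a short write lock and hand it back.
    *slot.write() = refreshed.clone();
    refreshed
}
```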


@@ -1,8 +1,8 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use gpui::executor::Background;
use gpui::{serde_json, AppContext};
use gpui::AppContext;
use gpui::BackgroundExecutor;
use isahc::http::StatusCode;
use isahc::prelude::Configurable;
use isahc::{AsyncBody, Response};
@@ -11,6 +11,7 @@ use parking_lot::{Mutex, RwLock};
use parse_duration::parse;
use postage::watch;
use serde::{Deserialize, Serialize};
use serde_json;
use std::env;
use std::ops::Add;
use std::sync::Arc;
@@ -35,7 +36,7 @@ pub struct OpenAIEmbeddingProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
pub client: Arc<dyn HttpClient>,
pub executor: Arc<Background>,
pub executor: BackgroundExecutor,
rate_limit_count_rx: watch::Receiver<Option<Instant>>,
rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
}
@@ -66,7 +67,7 @@ struct OpenAIEmbeddingUsage {
}
impl OpenAIEmbeddingProvider {
pub fn new(client: Arc<dyn HttpClient>, executor: Arc<Background>) -> Self {
pub fn new(client: Arc<dyn HttpClient>, executor: BackgroundExecutor) -> Self {
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
@@ -153,46 +154,45 @@ impl CredentialProvider for OpenAIEmbeddingProvider {
_ => false,
}
}
fn retrieve_credentials(&self, cx: &AppContext) -> ProviderCredential {
let mut credential = self.credential.write();
match *credential {
ProviderCredential::Credentials { .. } => {
return credential.clone();
}
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
let existing_credential = self.credential.read().clone();
let retrieved_credential = match existing_credential {
ProviderCredential::Credentials { .. } => existing_credential.clone(),
_ => {
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
*credential = ProviderCredential::Credentials { api_key };
} else if let Some((_, api_key)) = cx
.platform()
.read_credentials(OPENAI_API_URL)
.log_err()
.flatten()
if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
ProviderCredential::Credentials { api_key }
} else if let Some(Some((_, api_key))) =
cx.read_credentials(OPENAI_API_URL).log_err()
{
if let Some(api_key) = String::from_utf8(api_key).log_err() {
*credential = ProviderCredential::Credentials { api_key };
ProviderCredential::Credentials { api_key }
} else {
ProviderCredential::NoCredentials
}
} else {
};
ProviderCredential::NoCredentials
}
}
}
};
credential.clone()
*self.credential.write() = retrieved_credential.clone();
retrieved_credential
}
fn save_credentials(&self, cx: &AppContext, credential: ProviderCredential) {
match credential.clone() {
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
*self.credential.write() = credential.clone();
match credential {
ProviderCredential::Credentials { api_key } => {
cx.platform()
.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
.log_err();
}
_ => {}
}
*self.credential.write() = credential;
}
fn delete_credentials(&self, cx: &AppContext) {
cx.platform().delete_credentials(OPENAI_API_URL).log_err();
fn delete_credentials(&self, cx: &mut AppContext) {
cx.delete_credentials(OPENAI_API_URL).log_err();
*self.credential.write() = ProviderCredential::NoCredentials;
}
}


@@ -104,11 +104,11 @@ impl CredentialProvider for FakeEmbeddingProvider {
fn has_credentials(&self) -> bool {
true
}
fn retrieve_credentials(&self, _cx: &AppContext) -> ProviderCredential {
fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
ProviderCredential::NotNeeded
}
fn save_credentials(&self, _cx: &AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &AppContext) {}
fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &mut AppContext) {}
}
#[async_trait]
@@ -153,17 +153,10 @@ impl FakeCompletionProvider {
pub fn send_completion(&self, completion: impl Into<String>) {
let mut tx = self.last_completion_tx.lock();
println!("COMPLETION TX: {:?}", &tx);
let a = tx.as_mut().unwrap();
a.try_send(completion.into()).unwrap();
// tx.as_mut().unwrap().try_send(completion.into()).unwrap();
tx.as_mut().unwrap().try_send(completion.into()).unwrap();
}
pub fn finish_completion(&self) {
println!("FINISHING COMPLETION");
self.last_completion_tx.lock().take().unwrap();
}
}
@@ -172,11 +165,11 @@ impl CredentialProvider for FakeCompletionProvider {
fn has_credentials(&self) -> bool {
true
}
fn retrieve_credentials(&self, _cx: &AppContext) -> ProviderCredential {
fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
ProviderCredential::NotNeeded
}
fn save_credentials(&self, _cx: &AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &AppContext) {}
fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &mut AppContext) {}
}
impl CompletionProvider for FakeCompletionProvider {
@ -188,10 +181,8 @@ impl CompletionProvider for FakeCompletionProvider {
&self,
_prompt: Box<dyn CompletionRequest>,
) -> BoxFuture<'static, anyhow::Result<BoxStream<'static, anyhow::Result<String>>>> {
println!("COMPLETING");
let (tx, rx) = mpsc::channel(1);
*self.last_completion_tx.lock() = Some(tx);
println!("TX: {:?}", *self.last_completion_tx.lock());
async move { Ok(rx.map(|rx| Ok(rx)).boxed()) }.boxed()
}
fn box_clone(&self) -> Box<dyn CompletionProvider> {

View file

@ -1,38 +0,0 @@
[package]
name = "ai2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/ai2.rs"
doctest = false
[features]
test-support = []
[dependencies]
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
language = { package = "language2", path = "../language2" }
async-trait.workspace = true
anyhow.workspace = true
futures.workspace = true
lazy_static.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
isahc.workspace = true
regex.workspace = true
serde.workspace = true
serde_json.workspace = true
postage.workspace = true
rand.workspace = true
log.workspace = true
parse_duration = "2.1.1"
tiktoken-rs.workspace = true
matrixmultiply = "0.3.7"
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
bincode = "1.3.3"
[dev-dependencies]
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }

View file

@ -1,8 +0,0 @@
pub mod auth;
pub mod completion;
pub mod embedding;
pub mod models;
pub mod prompts;
pub mod providers;
#[cfg(any(test, feature = "test-support"))]
pub mod test;

View file

@ -1,15 +0,0 @@
use gpui::AppContext;
#[derive(Clone, Debug)]
pub enum ProviderCredential {
Credentials { api_key: String },
NoCredentials,
NotNeeded,
}
pub trait CredentialProvider: Send + Sync {
fn has_credentials(&self) -> bool;
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
fn delete_credentials(&self, cx: &mut AppContext);
}
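
For illustration, a minimal implementor of this trait that only consults an environment variable and never persists anything might look like the sketch below (the EnvOnlyCredentialProvider name and the use of OPENAI_API_KEY are illustrative, not part of this change):

use std::env;

struct EnvOnlyCredentialProvider;

impl CredentialProvider for EnvOnlyCredentialProvider {
    fn has_credentials(&self) -> bool {
        env::var("OPENAI_API_KEY").is_ok()
    }
    fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
        match env::var("OPENAI_API_KEY") {
            Ok(api_key) => ProviderCredential::Credentials { api_key },
            Err(_) => ProviderCredential::NoCredentials,
        }
    }
    // Nothing to persist: credentials only ever come from the environment.
    fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
    fn delete_credentials(&self, _cx: &mut AppContext) {}
}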

View file

@ -1,23 +0,0 @@
use anyhow::Result;
use futures::{future::BoxFuture, stream::BoxStream};
use crate::{auth::CredentialProvider, models::LanguageModel};
pub trait CompletionRequest: Send + Sync {
fn data(&self) -> serde_json::Result<String>;
}
pub trait CompletionProvider: CredentialProvider {
fn base_model(&self) -> Box<dyn LanguageModel>;
fn complete(
&self,
prompt: Box<dyn CompletionRequest>,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>>;
fn box_clone(&self) -> Box<dyn CompletionProvider>;
}
impl Clone for Box<dyn CompletionProvider> {
fn clone(&self) -> Box<dyn CompletionProvider> {
self.box_clone()
}
}
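
A caller drives the stream returned by `complete` chunk by chunk. A minimal sketch of such a consumer, assuming a provider and request have already been constructed elsewhere:

use futures::StreamExt;

async fn collect_completion(
    provider: &dyn CompletionProvider,
    request: Box<dyn CompletionRequest>,
) -> anyhow::Result<String> {
    // `complete` resolves to a stream of text chunks; concatenate them as they arrive.
    let mut chunks = provider.complete(request).await?;
    let mut output = String::new();
    while let Some(chunk) = chunks.next().await {
        output.push_str(&chunk?);
    }
    Ok(output)
}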

View file

@ -1,123 +0,0 @@
use std::time::Instant;
use anyhow::Result;
use async_trait::async_trait;
use ordered_float::OrderedFloat;
use rusqlite::types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef};
use rusqlite::ToSql;
use crate::auth::CredentialProvider;
use crate::models::LanguageModel;
#[derive(Debug, PartialEq, Clone)]
pub struct Embedding(pub Vec<f32>);
// This is needed for semantic index functionality.
// Unfortunately, it has to live wherever the "Embedding" struct is created.
// Keeping this in here, though, introduces a 'rusqlite' dependency into AI,
// which is less than ideal.
impl FromSql for Embedding {
fn column_result(value: ValueRef) -> FromSqlResult<Self> {
let bytes = value.as_blob()?;
let embedding: Result<Vec<f32>, Box<bincode::ErrorKind>> = bincode::deserialize(bytes);
if embedding.is_err() {
return Err(rusqlite::types::FromSqlError::Other(embedding.unwrap_err()));
}
Ok(Embedding(embedding.unwrap()))
}
}
impl ToSql for Embedding {
fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
let bytes = bincode::serialize(&self.0)
.map_err(|err| rusqlite::Error::ToSqlConversionFailure(Box::new(err)))?;
Ok(ToSqlOutput::Owned(rusqlite::types::Value::Blob(bytes)))
}
}
impl From<Vec<f32>> for Embedding {
fn from(value: Vec<f32>) -> Self {
Embedding(value)
}
}
impl Embedding {
pub fn similarity(&self, other: &Self) -> OrderedFloat<f32> {
let len = self.0.len();
assert_eq!(len, other.0.len());
let mut result = 0.0;
unsafe {
matrixmultiply::sgemm(
1,
len,
1,
1.0,
self.0.as_ptr(),
len as isize,
1,
other.0.as_ptr(),
1,
len as isize,
0.0,
&mut result as *mut f32,
1,
1,
);
}
OrderedFloat(result)
}
}
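
The `sgemm` call above multiplies a 1×len row vector by a len×1 column vector, so `similarity` is simply the dot product of the two embeddings; this is what `reference_dot` in the tests below checks against. A plain-Rust equivalent, shown only for clarity (the BLAS-style call is kept for speed):

fn dot(a: &[f32], b: &[f32]) -> f32 {
    assert_eq!(a.len(), b.len());
    a.iter().zip(b.iter()).map(|(x, y)| x * y).sum()
}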
#[async_trait]
pub trait EmbeddingProvider: CredentialProvider {
fn base_model(&self) -> Box<dyn LanguageModel>;
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>>;
fn max_tokens_per_batch(&self) -> usize;
fn rate_limit_expiration(&self) -> Option<Instant>;
}
#[cfg(test)]
mod tests {
use super::*;
use rand::prelude::*;
#[gpui::test]
fn test_similarity(mut rng: StdRng) {
assert_eq!(
Embedding::from(vec![1., 0., 0., 0., 0.])
.similarity(&Embedding::from(vec![0., 1., 0., 0., 0.])),
0.
);
assert_eq!(
Embedding::from(vec![2., 0., 0., 0., 0.])
.similarity(&Embedding::from(vec![3., 1., 0., 0., 0.])),
6.
);
for _ in 0..100 {
let size = 1536;
let mut a = vec![0.; size];
let mut b = vec![0.; size];
for (a, b) in a.iter_mut().zip(b.iter_mut()) {
*a = rng.gen();
*b = rng.gen();
}
let a = Embedding::from(a);
let b = Embedding::from(b);
assert_eq!(
round_to_decimals(a.similarity(&b), 1),
round_to_decimals(reference_dot(&a.0, &b.0), 1)
);
}
fn round_to_decimals(n: OrderedFloat<f32>, decimal_places: i32) -> f32 {
let factor = (10.0 as f32).powi(decimal_places);
(n * factor).round() / factor
}
fn reference_dot(a: &[f32], b: &[f32]) -> OrderedFloat<f32> {
OrderedFloat(a.iter().zip(b.iter()).map(|(a, b)| a * b).sum())
}
}
}

View file

@ -1,16 +0,0 @@
pub enum TruncationDirection {
Start,
End,
}
pub trait LanguageModel {
fn name(&self) -> String;
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
fn truncate(
&self,
content: &str,
length: usize,
direction: TruncationDirection,
) -> anyhow::Result<String>;
fn capacity(&self) -> anyhow::Result<usize>;
}
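
To make the `truncate` contract concrete, here is a small sketch using the character-per-token FakeLanguageModel test helper from this crate (assuming it and this trait are in scope):

let model = FakeLanguageModel { capacity: 10 };
assert_eq!(model.count_tokens("abcdef").unwrap(), 6);
// End-truncation keeps the start of the content...
assert_eq!(model.truncate("abcdef", 3, TruncationDirection::End).unwrap(), "abc");
// ...while Start-truncation keeps the end.
assert_eq!(model.truncate("abcdef", 3, TruncationDirection::Start).unwrap(), "def");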

View file

@ -1,330 +0,0 @@
use std::cmp::Reverse;
use std::ops::Range;
use std::sync::Arc;
use language::BufferSnapshot;
use util::ResultExt;
use crate::models::LanguageModel;
use crate::prompts::repository_context::PromptCodeSnippet;
pub(crate) enum PromptFileType {
Text,
Code,
}
// TODO: Set this up to manage defaults well
pub struct PromptArguments {
pub model: Arc<dyn LanguageModel>,
pub user_prompt: Option<String>,
pub language_name: Option<String>,
pub project_name: Option<String>,
pub snippets: Vec<PromptCodeSnippet>,
pub reserved_tokens: usize,
pub buffer: Option<BufferSnapshot>,
pub selected_range: Option<Range<usize>>,
}
impl PromptArguments {
pub(crate) fn get_file_type(&self) -> PromptFileType {
if self
.language_name
.as_ref()
.and_then(|name| Some(!["Markdown", "Plain Text"].contains(&name.as_str())))
.unwrap_or(true)
{
PromptFileType::Code
} else {
PromptFileType::Text
}
}
}
pub trait PromptTemplate {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)>;
}
#[repr(i8)]
#[derive(PartialEq, Eq, Ord)]
pub enum PromptPriority {
Mandatory, // Ignores truncation
Ordered { order: usize }, // Truncates based on priority
}
impl PartialOrd for PromptPriority {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
match (self, other) {
(Self::Mandatory, Self::Mandatory) => Some(std::cmp::Ordering::Equal),
(Self::Mandatory, Self::Ordered { .. }) => Some(std::cmp::Ordering::Greater),
(Self::Ordered { .. }, Self::Mandatory) => Some(std::cmp::Ordering::Less),
(Self::Ordered { order: a }, Self::Ordered { order: b }) => b.partial_cmp(a),
}
}
}
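
In other words, `Mandatory` compares above every `Ordered` priority, and between two `Ordered` values the lower `order` wins (note the reversed `b.partial_cmp(a)`). A small sketch of the resulting order, assuming the type above is in scope:

let mut priorities = vec![
    PromptPriority::Ordered { order: 1 },
    PromptPriority::Mandatory,
    PromptPriority::Ordered { order: 0 },
];
// Sort so that the highest priority comes first.
priorities.sort_by(|a, b| b.partial_cmp(a).unwrap());
// Resulting order: Mandatory, Ordered { order: 0 }, Ordered { order: 1 }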
pub struct PromptChain {
args: PromptArguments,
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
}
impl PromptChain {
pub fn new(
args: PromptArguments,
templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)>,
) -> Self {
PromptChain { args, templates }
}
pub fn generate(&self, truncate: bool) -> anyhow::Result<(String, usize)> {
// Argsort based on Prompt Priority
let seperator = "\n";
let seperator_tokens = self.args.model.count_tokens(seperator)?;
let mut sorted_indices = (0..self.templates.len()).collect::<Vec<_>>();
sorted_indices.sort_by_key(|&i| Reverse(&self.templates[i].0));
// If Truncate
let mut tokens_outstanding = if truncate {
Some(self.args.model.capacity()? - self.args.reserved_tokens)
} else {
None
};
let mut prompts = vec!["".to_string(); sorted_indices.len()];
for idx in sorted_indices {
let (_, template) = &self.templates[idx];
if let Some((template_prompt, prompt_token_count)) =
template.generate(&self.args, tokens_outstanding).log_err()
{
if template_prompt != "" {
prompts[idx] = template_prompt;
if let Some(remaining_tokens) = tokens_outstanding {
let new_tokens = prompt_token_count + seperator_tokens;
tokens_outstanding = if remaining_tokens > new_tokens {
Some(remaining_tokens - new_tokens)
} else {
Some(0)
};
}
}
}
}
prompts.retain(|x| x != "");
let full_prompt = prompts.join(seperator);
let total_token_count = self.args.model.count_tokens(&full_prompt)?;
anyhow::Ok((prompts.join(seperator), total_token_count))
}
}
#[cfg(test)]
pub(crate) mod tests {
use crate::models::TruncationDirection;
use crate::test::FakeLanguageModel;
use super::*;
#[test]
pub fn test_prompt_chain() {
struct TestPromptTemplate {}
impl PromptTemplate for TestPromptTemplate {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
let mut content = "This is a test prompt template".to_string();
let mut token_count = args.model.count_tokens(&content)?;
if let Some(max_token_length) = max_token_length {
if token_count > max_token_length {
content = args.model.truncate(
&content,
max_token_length,
TruncationDirection::End,
)?;
token_count = max_token_length;
}
}
anyhow::Ok((content, token_count))
}
}
struct TestLowPriorityTemplate {}
impl PromptTemplate for TestLowPriorityTemplate {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
let mut content = "This is a low priority test prompt template".to_string();
let mut token_count = args.model.count_tokens(&content)?;
if let Some(max_token_length) = max_token_length {
if token_count > max_token_length {
content = args.model.truncate(
&content,
max_token_length,
TruncationDirection::End,
)?;
token_count = max_token_length;
}
}
anyhow::Ok((content, token_count))
}
}
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 100 });
let args = PromptArguments {
model: model.clone(),
language_name: None,
project_name: None,
snippets: Vec::new(),
reserved_tokens: 0,
buffer: None,
selected_range: None,
user_prompt: None,
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(
PromptPriority::Ordered { order: 0 },
Box::new(TestPromptTemplate {}),
),
(
PromptPriority::Ordered { order: 1 },
Box::new(TestLowPriorityTemplate {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, token_count) = chain.generate(false).unwrap();
assert_eq!(
prompt,
"This is a test prompt template\nThis is a low priority test prompt template"
.to_string()
);
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
// Testing with Truncation Off
// Should ignore capacity and return all prompts
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity: 20 });
let args = PromptArguments {
model: model.clone(),
language_name: None,
project_name: None,
snippets: Vec::new(),
reserved_tokens: 0,
buffer: None,
selected_range: None,
user_prompt: None,
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(
PromptPriority::Ordered { order: 0 },
Box::new(TestPromptTemplate {}),
),
(
PromptPriority::Ordered { order: 1 },
Box::new(TestLowPriorityTemplate {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, token_count) = chain.generate(false).unwrap();
assert_eq!(
prompt,
"This is a test prompt template\nThis is a low priority test prompt template"
.to_string()
);
assert_eq!(model.count_tokens(&prompt).unwrap(), token_count);
// Testing with Truncation On
// Should truncate the prompts to fit within the model's capacity
let capacity = 20;
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
let args = PromptArguments {
model: model.clone(),
language_name: None,
project_name: None,
snippets: Vec::new(),
reserved_tokens: 0,
buffer: None,
selected_range: None,
user_prompt: None,
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(
PromptPriority::Ordered { order: 0 },
Box::new(TestPromptTemplate {}),
),
(
PromptPriority::Ordered { order: 1 },
Box::new(TestLowPriorityTemplate {}),
),
(
PromptPriority::Ordered { order: 2 },
Box::new(TestLowPriorityTemplate {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, token_count) = chain.generate(true).unwrap();
assert_eq!(prompt, "This is a test promp".to_string());
assert_eq!(token_count, capacity);
// Change Ordering of Prompts Based on Priority
let capacity = 120;
let reserved_tokens = 10;
let model: Arc<dyn LanguageModel> = Arc::new(FakeLanguageModel { capacity });
let args = PromptArguments {
model: model.clone(),
language_name: None,
project_name: None,
snippets: Vec::new(),
reserved_tokens,
buffer: None,
selected_range: None,
user_prompt: None,
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(
PromptPriority::Mandatory,
Box::new(TestLowPriorityTemplate {}),
),
(
PromptPriority::Ordered { order: 0 },
Box::new(TestPromptTemplate {}),
),
(
PromptPriority::Ordered { order: 1 },
Box::new(TestLowPriorityTemplate {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, token_count) = chain.generate(true).unwrap();
assert_eq!(
prompt,
"This is a low priority test prompt template\nThis is a test prompt template\nThis is a low priority test prompt "
.to_string()
);
assert_eq!(token_count, capacity - reserved_tokens);
}
}

View file

@ -1,164 +0,0 @@
use anyhow::anyhow;
use language::BufferSnapshot;
use language::ToOffset;
use crate::models::LanguageModel;
use crate::models::TruncationDirection;
use crate::prompts::base::PromptArguments;
use crate::prompts::base::PromptTemplate;
use std::fmt::Write;
use std::ops::Range;
use std::sync::Arc;
fn retrieve_context(
buffer: &BufferSnapshot,
selected_range: &Option<Range<usize>>,
model: Arc<dyn LanguageModel>,
max_token_count: Option<usize>,
) -> anyhow::Result<(String, usize, bool)> {
let mut prompt = String::new();
let mut truncated = false;
if let Some(selected_range) = selected_range {
let start = selected_range.start.to_offset(buffer);
let end = selected_range.end.to_offset(buffer);
let start_window = buffer.text_for_range(0..start).collect::<String>();
let mut selected_window = String::new();
if start == end {
write!(selected_window, "<|START|>").unwrap();
} else {
write!(selected_window, "<|START|").unwrap();
}
write!(
selected_window,
"{}",
buffer.text_for_range(start..end).collect::<String>()
)
.unwrap();
if start != end {
write!(selected_window, "|END|>").unwrap();
}
let end_window = buffer.text_for_range(end..buffer.len()).collect::<String>();
if let Some(max_token_count) = max_token_count {
let selected_tokens = model.count_tokens(&selected_window)?;
if selected_tokens > max_token_count {
return Err(anyhow!(
"selected range is greater than model context window, truncation not possible"
));
};
let mut remaining_tokens = max_token_count - selected_tokens;
let start_window_tokens = model.count_tokens(&start_window)?;
let end_window_tokens = model.count_tokens(&end_window)?;
let outside_tokens = start_window_tokens + end_window_tokens;
if outside_tokens > remaining_tokens {
let (start_goal_tokens, end_goal_tokens) =
if start_window_tokens < end_window_tokens {
let start_goal_tokens = (remaining_tokens / 2).min(start_window_tokens);
remaining_tokens -= start_goal_tokens;
let end_goal_tokens = remaining_tokens.min(end_window_tokens);
(start_goal_tokens, end_goal_tokens)
} else {
let end_goal_tokens = (remaining_tokens / 2).min(end_window_tokens);
remaining_tokens -= end_goal_tokens;
let start_goal_tokens = remaining_tokens.min(start_window_tokens);
(start_goal_tokens, end_goal_tokens)
};
let truncated_start_window =
model.truncate(&start_window, start_goal_tokens, TruncationDirection::Start)?;
let truncated_end_window =
model.truncate(&end_window, end_goal_tokens, TruncationDirection::End)?;
writeln!(
prompt,
"{truncated_start_window}{selected_window}{truncated_end_window}"
)
.unwrap();
truncated = true;
} else {
writeln!(prompt, "{start_window}{selected_window}{end_window}").unwrap();
}
} else {
// If we don't have a selected range, include the entire file.
writeln!(prompt, "{}", &buffer.text()).unwrap();
// Dumb truncation strategy
if let Some(max_token_count) = max_token_count {
if model.count_tokens(&prompt)? > max_token_count {
truncated = true;
prompt = model.truncate(&prompt, max_token_count, TruncationDirection::End)?;
}
}
}
}
let token_count = model.count_tokens(&prompt)?;
anyhow::Ok((prompt, token_count, truncated))
}
pub struct FileContext {}
impl PromptTemplate for FileContext {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
if let Some(buffer) = &args.buffer {
let mut prompt = String::new();
// Add Initial Preamble
// TODO: Do we want to add the path in here?
writeln!(
prompt,
"The file you are currently working on has the following content:"
)
.unwrap();
let language_name = args
.language_name
.clone()
.unwrap_or("".to_string())
.to_lowercase();
let (context, _, truncated) = retrieve_context(
buffer,
&args.selected_range,
args.model.clone(),
max_token_length,
)?;
writeln!(prompt, "```{language_name}\n{context}\n```").unwrap();
if truncated {
writeln!(prompt, "Note the content has been truncated and only represents a portion of the file.").unwrap();
}
if let Some(selected_range) = &args.selected_range {
let start = selected_range.start.to_offset(buffer);
let end = selected_range.end.to_offset(buffer);
if start == end {
writeln!(prompt, "In particular, the user's cursor is currently on the '<|START|>' span in the above content, with no text selected.").unwrap();
} else {
writeln!(prompt, "In particular, the user has selected a section of the text between the '<|START|' and '|END|>' spans.").unwrap();
}
}
// Really dumb truncation strategy
if let Some(max_tokens) = max_token_length {
prompt = args
.model
.truncate(&prompt, max_tokens, TruncationDirection::End)?;
}
let token_count = args.model.count_tokens(&prompt)?;
anyhow::Ok((prompt, token_count))
} else {
Err(anyhow!("no buffer provided to retrieve file context from"))
}
}
}

View file

@ -1,99 +0,0 @@
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
use anyhow::anyhow;
use std::fmt::Write;
pub fn capitalize(s: &str) -> String {
let mut c = s.chars();
match c.next() {
None => String::new(),
Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
}
}
pub struct GenerateInlineContent {}
impl PromptTemplate for GenerateInlineContent {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
let Some(user_prompt) = &args.user_prompt else {
return Err(anyhow!("user prompt not provided"));
};
let file_type = args.get_file_type();
let content_type = match &file_type {
PromptFileType::Code => "code",
PromptFileType::Text => "text",
};
let mut prompt = String::new();
if let Some(selected_range) = &args.selected_range {
if selected_range.start == selected_range.end {
writeln!(
prompt,
"Assume the cursor is located where the `<|START|>` span is."
)
.unwrap();
writeln!(
prompt,
"{} can't be replaced, so assume your answer will be inserted at the cursor.",
capitalize(content_type)
)
.unwrap();
writeln!(
prompt,
"Generate {content_type} based on the users prompt: {user_prompt}",
)
.unwrap();
} else {
writeln!(prompt, "Modify the user's selected {content_type} based upon the users prompt: '{user_prompt}'").unwrap();
writeln!(prompt, "You must reply with only the adjusted {content_type} (within the '<|START|' and '|END|>' spans) not the entire file.").unwrap();
writeln!(prompt, "Double check that you only return code and not the '<|START|' and '|END|'> spans").unwrap();
}
} else {
writeln!(
prompt,
"Generate {content_type} based on the users prompt: {user_prompt}"
)
.unwrap();
}
if let Some(language_name) = &args.language_name {
writeln!(
prompt,
"Your answer MUST always and only be valid {}.",
language_name
)
.unwrap();
}
writeln!(prompt, "Never make remarks about the output.").unwrap();
writeln!(
prompt,
"Do not return anything else, except the generated {content_type}."
)
.unwrap();
match file_type {
PromptFileType::Code => {
// writeln!(prompt, "Always wrap your code in a Markdown block.").unwrap();
}
_ => {}
}
// Really dumb truncation strategy
if let Some(max_tokens) = max_token_length {
prompt = args.model.truncate(
&prompt,
max_tokens,
crate::models::TruncationDirection::End,
)?;
}
let token_count = args.model.count_tokens(&prompt)?;
anyhow::Ok((prompt, token_count))
}
}

View file

@ -1,5 +0,0 @@
pub mod base;
pub mod file_context;
pub mod generate;
pub mod preamble;
pub mod repository_context;

View file

@ -1,52 +0,0 @@
use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
use std::fmt::Write;
pub struct EngineerPreamble {}
impl PromptTemplate for EngineerPreamble {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
let mut prompts = Vec::new();
match args.get_file_type() {
PromptFileType::Code => {
prompts.push(format!(
"You are an expert {}engineer.",
args.language_name.clone().unwrap_or("".to_string()) + " "
));
}
PromptFileType::Text => {
prompts.push("You are an expert engineer.".to_string());
}
}
if let Some(project_name) = args.project_name.clone() {
prompts.push(format!(
"You are currently working inside the '{project_name}' project in code editor Zed."
));
}
if let Some(mut remaining_tokens) = max_token_length {
let mut prompt = String::new();
let mut total_count = 0;
for prompt_piece in prompts {
let prompt_token_count =
args.model.count_tokens(&prompt_piece)? + args.model.count_tokens("\n")?;
if remaining_tokens > prompt_token_count {
writeln!(prompt, "{prompt_piece}").unwrap();
remaining_tokens -= prompt_token_count;
total_count += prompt_token_count;
}
}
anyhow::Ok((prompt, total_count))
} else {
let prompt = prompts.join("\n");
let token_count = args.model.count_tokens(&prompt)?;
anyhow::Ok((prompt, token_count))
}
}
}

View file

@ -1,98 +0,0 @@
use crate::prompts::base::{PromptArguments, PromptTemplate};
use std::fmt::Write;
use std::{ops::Range, path::PathBuf};
use gpui::{AsyncAppContext, Model};
use language::{Anchor, Buffer};
#[derive(Clone)]
pub struct PromptCodeSnippet {
path: Option<PathBuf>,
language_name: Option<String>,
content: String,
}
impl PromptCodeSnippet {
pub fn new(
buffer: Model<Buffer>,
range: Range<Anchor>,
cx: &mut AsyncAppContext,
) -> anyhow::Result<Self> {
let (content, language_name, file_path) = buffer.update(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let content = snapshot.text_for_range(range.clone()).collect::<String>();
let language_name = buffer
.language()
.and_then(|language| Some(language.name().to_string().to_lowercase()));
let file_path = buffer
.file()
.and_then(|file| Some(file.path().to_path_buf()));
(content, language_name, file_path)
})?;
anyhow::Ok(PromptCodeSnippet {
path: file_path,
language_name,
content,
})
}
}
impl ToString for PromptCodeSnippet {
fn to_string(&self) -> String {
let path = self
.path
.as_ref()
.and_then(|path| Some(path.to_string_lossy().to_string()))
.unwrap_or("".to_string());
let language_name = self.language_name.clone().unwrap_or("".to_string());
let content = self.content.clone();
format!("The below code snippet may be relevant from file: {path}\n```{language_name}\n{content}\n```")
}
}
pub struct RepositoryContext {}
impl PromptTemplate for RepositoryContext {
fn generate(
&self,
args: &PromptArguments,
max_token_length: Option<usize>,
) -> anyhow::Result<(String, usize)> {
const MAXIMUM_SNIPPET_TOKEN_COUNT: usize = 500;
let template = "You are working inside a large repository; here are a few code snippets that may be useful.";
let mut prompt = String::new();
let mut remaining_tokens = max_token_length.clone();
let seperator_token_length = args.model.count_tokens("\n")?;
for snippet in &args.snippets {
let mut snippet_prompt = template.to_string();
let content = snippet.to_string();
writeln!(snippet_prompt, "{content}").unwrap();
let token_count = args.model.count_tokens(&snippet_prompt)?;
if token_count <= MAXIMUM_SNIPPET_TOKEN_COUNT {
if let Some(tokens_left) = remaining_tokens {
if tokens_left >= token_count {
writeln!(prompt, "{snippet_prompt}").unwrap();
remaining_tokens = if tokens_left >= (token_count + seperator_token_length)
{
Some(tokens_left - token_count - seperator_token_length)
} else {
Some(0)
};
}
} else {
writeln!(prompt, "{snippet_prompt}").unwrap();
}
}
}
let total_token_count = args.model.count_tokens(&prompt)?;
anyhow::Ok((prompt, total_token_count))
}
}

View file

@ -1 +0,0 @@
pub mod open_ai;

View file

@ -1,297 +0,0 @@
use anyhow::{anyhow, Result};
use futures::{
future::BoxFuture, io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, FutureExt,
Stream, StreamExt,
};
use gpui::{AppContext, BackgroundExecutor};
use isahc::{http::StatusCode, Request, RequestExt};
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::{
env,
fmt::{self, Display},
io,
sync::Arc,
};
use util::ResultExt;
use crate::{
auth::{CredentialProvider, ProviderCredential},
completion::{CompletionProvider, CompletionRequest},
models::LanguageModel,
};
use crate::providers::open_ai::{OpenAILanguageModel, OPENAI_API_URL};
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
System,
}
impl Role {
pub fn cycle(&mut self) {
*self = match self {
Role::User => Role::Assistant,
Role::Assistant => Role::System,
Role::System => Role::User,
}
}
}
impl Display for Role {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Role::User => write!(f, "User"),
Role::Assistant => write!(f, "Assistant"),
Role::System => write!(f, "System"),
}
}
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
pub content: String,
}
#[derive(Debug, Default, Serialize)]
pub struct OpenAIRequest {
pub model: String,
pub messages: Vec<RequestMessage>,
pub stream: bool,
pub stop: Vec<String>,
pub temperature: f32,
}
impl CompletionRequest for OpenAIRequest {
fn data(&self) -> serde_json::Result<String> {
serde_json::to_string(self)
}
}
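
For illustration, building a streaming chat request could look like the sketch below (the model name and message text are placeholders, not part of this change):

let request = OpenAIRequest {
    model: "gpt-4".into(),
    messages: vec![RequestMessage {
        role: Role::User,
        content: "Write a haiku about Rust.".into(),
    }],
    stream: true,
    stop: Vec::new(),
    temperature: 1.0,
};
// `data()` produces the JSON body that `stream_completion` posts to /chat/completions.
let body = request.data().unwrap();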
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessage {
pub role: Option<Role>,
pub content: Option<String>,
}
#[derive(Deserialize, Debug)]
pub struct OpenAIUsage {
pub prompt_tokens: u32,
pub completion_tokens: u32,
pub total_tokens: u32,
}
#[derive(Deserialize, Debug)]
pub struct ChatChoiceDelta {
pub index: u32,
pub delta: ResponseMessage,
pub finish_reason: Option<String>,
}
#[derive(Deserialize, Debug)]
pub struct OpenAIResponseStreamEvent {
pub id: Option<String>,
pub object: String,
pub created: u32,
pub model: String,
pub choices: Vec<ChatChoiceDelta>,
pub usage: Option<OpenAIUsage>,
}
pub async fn stream_completion(
credential: ProviderCredential,
executor: BackgroundExecutor,
request: Box<dyn CompletionRequest>,
) -> Result<impl Stream<Item = Result<OpenAIResponseStreamEvent>>> {
let api_key = match credential {
ProviderCredential::Credentials { api_key } => api_key,
_ => {
return Err(anyhow!("no credentials provider for completion"));
}
};
let (tx, rx) = futures::channel::mpsc::unbounded::<Result<OpenAIResponseStreamEvent>>();
let json_data = request.data()?;
let mut response = Request::post(format!("{OPENAI_API_URL}/chat/completions"))
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_key))
.body(json_data)?
.send_async()
.await?;
let status = response.status();
if status == StatusCode::OK {
executor
.spawn(async move {
let mut lines = BufReader::new(response.body_mut()).lines();
fn parse_line(
line: Result<String, io::Error>,
) -> Result<Option<OpenAIResponseStreamEvent>> {
if let Some(data) = line?.strip_prefix("data: ") {
let event = serde_json::from_str(&data)?;
Ok(Some(event))
} else {
Ok(None)
}
}
while let Some(line) = lines.next().await {
if let Some(event) = parse_line(line).transpose() {
let done = event.as_ref().map_or(false, |event| {
event
.choices
.last()
.map_or(false, |choice| choice.finish_reason.is_some())
});
if tx.unbounded_send(event).is_err() {
break;
}
if done {
break;
}
}
}
anyhow::Ok(())
})
.detach();
Ok(rx)
} else {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
#[derive(Deserialize)]
struct OpenAIResponse {
error: OpenAIError,
}
#[derive(Deserialize)]
struct OpenAIError {
message: String,
}
match serde_json::from_str::<OpenAIResponse>(&body) {
Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
"Failed to connect to OpenAI API: {}",
response.error.message,
)),
_ => Err(anyhow!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
)),
}
}
}
#[derive(Clone)]
pub struct OpenAICompletionProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
executor: BackgroundExecutor,
}
impl OpenAICompletionProvider {
pub fn new(model_name: &str, executor: BackgroundExecutor) -> Self {
let model = OpenAILanguageModel::load(model_name);
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
Self {
model,
credential,
executor,
}
}
}
impl CredentialProvider for OpenAICompletionProvider {
fn has_credentials(&self) -> bool {
match *self.credential.read() {
ProviderCredential::Credentials { .. } => true,
_ => false,
}
}
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
let existing_credential = self.credential.read().clone();
let retrieved_credential = match existing_credential {
ProviderCredential::Credentials { .. } => existing_credential.clone(),
_ => {
if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
ProviderCredential::Credentials { api_key }
} else if let Some(Some((_, api_key))) =
cx.read_credentials(OPENAI_API_URL).log_err()
{
if let Some(api_key) = String::from_utf8(api_key).log_err() {
ProviderCredential::Credentials { api_key }
} else {
ProviderCredential::NoCredentials
}
} else {
ProviderCredential::NoCredentials
}
}
};
*self.credential.write() = retrieved_credential.clone();
retrieved_credential
}
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
*self.credential.write() = credential.clone();
let credential = credential.clone();
match credential {
ProviderCredential::Credentials { api_key } => {
cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
.log_err();
}
_ => {}
}
}
fn delete_credentials(&self, cx: &mut AppContext) {
cx.delete_credentials(OPENAI_API_URL).log_err();
*self.credential.write() = ProviderCredential::NoCredentials;
}
}
impl CompletionProvider for OpenAICompletionProvider {
fn base_model(&self) -> Box<dyn LanguageModel> {
let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
model
}
fn complete(
&self,
prompt: Box<dyn CompletionRequest>,
) -> BoxFuture<'static, Result<BoxStream<'static, Result<String>>>> {
// Currently the CompletionRequest for OpenAI includes a 'model' parameter.
// This means that the model is determined by the CompletionRequest and not by the
// CompletionProvider, which is currently model-based due to the language model.
// At some point in the future we should rectify this.
let credential = self.credential.read().clone();
let request = stream_completion(credential, self.executor.clone(), prompt);
async move {
let response = request.await?;
let stream = response
.filter_map(|response| async move {
match response {
Ok(mut response) => Some(Ok(response.choices.pop()?.delta.content?)),
Err(error) => Some(Err(error)),
}
})
.boxed();
Ok(stream)
}
.boxed()
}
fn box_clone(&self) -> Box<dyn CompletionProvider> {
Box::new((*self).clone())
}
}

View file

@ -1,305 +0,0 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::AsyncReadExt;
use gpui::BackgroundExecutor;
use gpui::{serde_json, AppContext};
use isahc::http::StatusCode;
use isahc::prelude::Configurable;
use isahc::{AsyncBody, Response};
use lazy_static::lazy_static;
use parking_lot::{Mutex, RwLock};
use parse_duration::parse;
use postage::watch;
use serde::{Deserialize, Serialize};
use std::env;
use std::ops::Add;
use std::sync::Arc;
use std::time::{Duration, Instant};
use tiktoken_rs::{cl100k_base, CoreBPE};
use util::http::{HttpClient, Request};
use util::ResultExt;
use crate::auth::{CredentialProvider, ProviderCredential};
use crate::embedding::{Embedding, EmbeddingProvider};
use crate::models::LanguageModel;
use crate::providers::open_ai::OpenAILanguageModel;
use crate::providers::open_ai::OPENAI_API_URL;
lazy_static! {
static ref OPENAI_BPE_TOKENIZER: CoreBPE = cl100k_base().unwrap();
}
#[derive(Clone)]
pub struct OpenAIEmbeddingProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
pub client: Arc<dyn HttpClient>,
pub executor: BackgroundExecutor,
rate_limit_count_rx: watch::Receiver<Option<Instant>>,
rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
}
#[derive(Serialize)]
struct OpenAIEmbeddingRequest<'a> {
model: &'static str,
input: Vec<&'a str>,
}
#[derive(Deserialize)]
struct OpenAIEmbeddingResponse {
data: Vec<OpenAIEmbedding>,
usage: OpenAIEmbeddingUsage,
}
#[derive(Debug, Deserialize)]
struct OpenAIEmbedding {
embedding: Vec<f32>,
index: usize,
object: String,
}
#[derive(Deserialize)]
struct OpenAIEmbeddingUsage {
prompt_tokens: usize,
total_tokens: usize,
}
impl OpenAIEmbeddingProvider {
pub fn new(client: Arc<dyn HttpClient>, executor: BackgroundExecutor) -> Self {
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
let model = OpenAILanguageModel::load("text-embedding-ada-002");
let credential = Arc::new(RwLock::new(ProviderCredential::NoCredentials));
OpenAIEmbeddingProvider {
model,
credential,
client,
executor,
rate_limit_count_rx,
rate_limit_count_tx,
}
}
fn get_api_key(&self) -> Result<String> {
match self.credential.read().clone() {
ProviderCredential::Credentials { api_key } => Ok(api_key),
_ => Err(anyhow!("api credentials not provided")),
}
}
fn resolve_rate_limit(&self) {
let reset_time = *self.rate_limit_count_tx.lock().borrow();
if let Some(reset_time) = reset_time {
if Instant::now() >= reset_time {
*self.rate_limit_count_tx.lock().borrow_mut() = None
}
}
log::trace!(
"resolving reset time: {:?}",
*self.rate_limit_count_tx.lock().borrow()
);
}
fn update_reset_time(&self, reset_time: Instant) {
let original_time = *self.rate_limit_count_tx.lock().borrow();
let updated_time = if let Some(original_time) = original_time {
if reset_time < original_time {
Some(reset_time)
} else {
Some(original_time)
}
} else {
Some(reset_time)
};
log::trace!("updating rate limit time: {:?}", updated_time);
*self.rate_limit_count_tx.lock().borrow_mut() = updated_time;
}
async fn send_request(
&self,
api_key: &str,
spans: Vec<&str>,
request_timeout: u64,
) -> Result<Response<AsyncBody>> {
let request = Request::post("https://api.openai.com/v1/embeddings")
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.timeout(Duration::from_secs(request_timeout))
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_key))
.body(
serde_json::to_string(&OpenAIEmbeddingRequest {
input: spans.clone(),
model: "text-embedding-ada-002",
})
.unwrap()
.into(),
)?;
Ok(self.client.send(request).await?)
}
}
impl CredentialProvider for OpenAIEmbeddingProvider {
fn has_credentials(&self) -> bool {
match *self.credential.read() {
ProviderCredential::Credentials { .. } => true,
_ => false,
}
}
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential {
let existing_credential = self.credential.read().clone();
let retrieved_credential = match existing_credential {
ProviderCredential::Credentials { .. } => existing_credential.clone(),
_ => {
if let Some(api_key) = env::var("OPENAI_API_KEY").log_err() {
ProviderCredential::Credentials { api_key }
} else if let Some(Some((_, api_key))) =
cx.read_credentials(OPENAI_API_URL).log_err()
{
if let Some(api_key) = String::from_utf8(api_key).log_err() {
ProviderCredential::Credentials { api_key }
} else {
ProviderCredential::NoCredentials
}
} else {
ProviderCredential::NoCredentials
}
}
};
*self.credential.write() = retrieved_credential.clone();
retrieved_credential
}
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential) {
*self.credential.write() = credential.clone();
match credential {
ProviderCredential::Credentials { api_key } => {
cx.write_credentials(OPENAI_API_URL, "Bearer", api_key.as_bytes())
.log_err();
}
_ => {}
}
}
fn delete_credentials(&self, cx: &mut AppContext) {
cx.delete_credentials(OPENAI_API_URL).log_err();
*self.credential.write() = ProviderCredential::NoCredentials;
}
}
#[async_trait]
impl EmbeddingProvider for OpenAIEmbeddingProvider {
fn base_model(&self) -> Box<dyn LanguageModel> {
let model: Box<dyn LanguageModel> = Box::new(self.model.clone());
model
}
fn max_tokens_per_batch(&self) -> usize {
50000
}
fn rate_limit_expiration(&self) -> Option<Instant> {
*self.rate_limit_count_rx.borrow()
}
async fn embed_batch(&self, spans: Vec<String>) -> Result<Vec<Embedding>> {
const BACKOFF_SECONDS: [usize; 4] = [3, 5, 15, 45];
const MAX_RETRIES: usize = 4;
let api_key = self.get_api_key()?;
let mut request_number = 0;
let mut rate_limiting = false;
let mut request_timeout: u64 = 15;
let mut response: Response<AsyncBody>;
while request_number < MAX_RETRIES {
response = self
.send_request(
&api_key,
spans.iter().map(|x| &**x).collect(),
request_timeout,
)
.await?;
request_number += 1;
match response.status() {
StatusCode::REQUEST_TIMEOUT => {
request_timeout += 5;
}
StatusCode::OK => {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
let response: OpenAIEmbeddingResponse = serde_json::from_str(&body)?;
log::trace!(
"openai embedding completed. tokens: {:?}",
response.usage.total_tokens
);
// If we successfully complete a request that was previously rate limited,
// resolve the rate limit
if rate_limiting {
self.resolve_rate_limit()
}
return Ok(response
.data
.into_iter()
.map(|embedding| Embedding::from(embedding.embedding))
.collect());
}
StatusCode::TOO_MANY_REQUESTS => {
rate_limiting = true;
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
let delay_duration = {
let delay = Duration::from_secs(BACKOFF_SECONDS[request_number - 1] as u64);
if let Some(time_to_reset) =
response.headers().get("x-ratelimit-reset-tokens")
{
if let Ok(time_str) = time_to_reset.to_str() {
parse(time_str).unwrap_or(delay)
} else {
delay
}
} else {
delay
}
};
// If we've previously rate limited, increment the duration but not the count
let reset_time = Instant::now().add(delay_duration);
self.update_reset_time(reset_time);
log::trace!(
"openai rate limiting: waiting {:?} until lifted",
&delay_duration
);
self.executor.timer(delay_duration).await;
}
_ => {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
return Err(anyhow!(
"open ai bad request: {:?} {:?}",
&response.status(),
body
));
}
}
}
Err(anyhow!("openai max retries"))
}
}

View file

@ -1,9 +0,0 @@
pub mod completion;
pub mod embedding;
pub mod model;
pub use completion::*;
pub use embedding::*;
pub use model::OpenAILanguageModel;
pub const OPENAI_API_URL: &'static str = "https://api.openai.com/v1";

View file

@ -1,57 +0,0 @@
use anyhow::anyhow;
use tiktoken_rs::CoreBPE;
use util::ResultExt;
use crate::models::{LanguageModel, TruncationDirection};
#[derive(Clone)]
pub struct OpenAILanguageModel {
name: String,
bpe: Option<CoreBPE>,
}
impl OpenAILanguageModel {
pub fn load(model_name: &str) -> Self {
let bpe = tiktoken_rs::get_bpe_from_model(model_name).log_err();
OpenAILanguageModel {
name: model_name.to_string(),
bpe,
}
}
}
impl LanguageModel for OpenAILanguageModel {
fn name(&self) -> String {
self.name.clone()
}
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
if let Some(bpe) = &self.bpe {
anyhow::Ok(bpe.encode_with_special_tokens(content).len())
} else {
Err(anyhow!("bpe for open ai model was not retrieved"))
}
}
fn truncate(
&self,
content: &str,
length: usize,
direction: TruncationDirection,
) -> anyhow::Result<String> {
if let Some(bpe) = &self.bpe {
let tokens = bpe.encode_with_special_tokens(content);
if tokens.len() > length {
match direction {
TruncationDirection::End => bpe.decode(tokens[..length].to_vec()),
TruncationDirection::Start => bpe.decode(tokens[length..].to_vec()),
}
} else {
bpe.decode(tokens)
}
} else {
Err(anyhow!("bpe for open ai model was not retrieved"))
}
}
fn capacity(&self) -> anyhow::Result<usize> {
anyhow::Ok(tiktoken_rs::model::get_context_size(&self.name))
}
}
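
A usage sketch (the model name is only an example that tiktoken-rs recognizes; assumes the imports at the top of this file):

fn truncate_example() -> anyhow::Result<String> {
    let model = OpenAILanguageModel::load("gpt-3.5-turbo");
    // Keep at most 8 tokens of the prompt, dropping text from the end.
    model.truncate("fn main() { println!(\"hello, world\"); }", 8, TruncationDirection::End)
}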

View file

@ -1,11 +0,0 @@
pub trait LanguageModel {
fn name(&self) -> String;
fn count_tokens(&self, content: &str) -> anyhow::Result<usize>;
fn truncate(
&self,
content: &str,
length: usize,
direction: TruncationDirection,
) -> anyhow::Result<String>;
fn capacity(&self) -> anyhow::Result<usize>;
}

View file

@ -1,191 +0,0 @@
use std::{
sync::atomic::{self, AtomicUsize, Ordering},
time::Instant,
};
use async_trait::async_trait;
use futures::{channel::mpsc, future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
use gpui::AppContext;
use parking_lot::Mutex;
use crate::{
auth::{CredentialProvider, ProviderCredential},
completion::{CompletionProvider, CompletionRequest},
embedding::{Embedding, EmbeddingProvider},
models::{LanguageModel, TruncationDirection},
};
#[derive(Clone)]
pub struct FakeLanguageModel {
pub capacity: usize,
}
impl LanguageModel for FakeLanguageModel {
fn name(&self) -> String {
"dummy".to_string()
}
fn count_tokens(&self, content: &str) -> anyhow::Result<usize> {
anyhow::Ok(content.chars().collect::<Vec<char>>().len())
}
fn truncate(
&self,
content: &str,
length: usize,
direction: TruncationDirection,
) -> anyhow::Result<String> {
println!("TRYING TO TRUNCATE: {:?}", length.clone());
if length > self.count_tokens(content)? {
println!("NOT TRUNCATING");
return anyhow::Ok(content.to_string());
}
anyhow::Ok(match direction {
TruncationDirection::End => content.chars().collect::<Vec<char>>()[..length]
.into_iter()
.collect::<String>(),
TruncationDirection::Start => content.chars().collect::<Vec<char>>()[length..]
.into_iter()
.collect::<String>(),
})
}
fn capacity(&self) -> anyhow::Result<usize> {
anyhow::Ok(self.capacity)
}
}
pub struct FakeEmbeddingProvider {
pub embedding_count: AtomicUsize,
}
impl Clone for FakeEmbeddingProvider {
fn clone(&self) -> Self {
FakeEmbeddingProvider {
embedding_count: AtomicUsize::new(self.embedding_count.load(Ordering::SeqCst)),
}
}
}
impl Default for FakeEmbeddingProvider {
fn default() -> Self {
FakeEmbeddingProvider {
embedding_count: AtomicUsize::default(),
}
}
}
impl FakeEmbeddingProvider {
pub fn embedding_count(&self) -> usize {
self.embedding_count.load(atomic::Ordering::SeqCst)
}
pub fn embed_sync(&self, span: &str) -> Embedding {
let mut result = vec![1.0; 26];
for letter in span.chars() {
let letter = letter.to_ascii_lowercase();
if letter as u32 >= 'a' as u32 {
let ix = (letter as u32) - ('a' as u32);
if ix < 26 {
result[ix as usize] += 1.0;
}
}
}
let norm = result.iter().map(|x| x * x).sum::<f32>().sqrt();
for x in &mut result {
*x /= norm;
}
result.into()
}
}
impl CredentialProvider for FakeEmbeddingProvider {
fn has_credentials(&self) -> bool {
true
}
fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
ProviderCredential::NotNeeded
}
fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &mut AppContext) {}
}
#[async_trait]
impl EmbeddingProvider for FakeEmbeddingProvider {
fn base_model(&self) -> Box<dyn LanguageModel> {
Box::new(FakeLanguageModel { capacity: 1000 })
}
fn max_tokens_per_batch(&self) -> usize {
1000
}
fn rate_limit_expiration(&self) -> Option<Instant> {
None
}
async fn embed_batch(&self, spans: Vec<String>) -> anyhow::Result<Vec<Embedding>> {
self.embedding_count
.fetch_add(spans.len(), atomic::Ordering::SeqCst);
anyhow::Ok(spans.iter().map(|span| self.embed_sync(span)).collect())
}
}
pub struct FakeCompletionProvider {
last_completion_tx: Mutex<Option<mpsc::Sender<String>>>,
}
impl Clone for FakeCompletionProvider {
fn clone(&self) -> Self {
Self {
last_completion_tx: Mutex::new(None),
}
}
}
impl FakeCompletionProvider {
pub fn new() -> Self {
Self {
last_completion_tx: Mutex::new(None),
}
}
pub fn send_completion(&self, completion: impl Into<String>) {
let mut tx = self.last_completion_tx.lock();
tx.as_mut().unwrap().try_send(completion.into()).unwrap();
}
pub fn finish_completion(&self) {
self.last_completion_tx.lock().take().unwrap();
}
}
impl CredentialProvider for FakeCompletionProvider {
fn has_credentials(&self) -> bool {
true
}
fn retrieve_credentials(&self, _cx: &mut AppContext) -> ProviderCredential {
ProviderCredential::NotNeeded
}
fn save_credentials(&self, _cx: &mut AppContext, _credential: ProviderCredential) {}
fn delete_credentials(&self, _cx: &mut AppContext) {}
}
impl CompletionProvider for FakeCompletionProvider {
fn base_model(&self) -> Box<dyn LanguageModel> {
let model: Box<dyn LanguageModel> = Box::new(FakeLanguageModel { capacity: 8190 });
model
}
fn complete(
&self,
_prompt: Box<dyn CompletionRequest>,
) -> BoxFuture<'static, anyhow::Result<BoxStream<'static, anyhow::Result<String>>>> {
let (tx, rx) = mpsc::channel(1);
*self.last_completion_tx.lock() = Some(tx);
async move { Ok(rx.map(|rx| Ok(rx)).boxed()) }.boxed()
}
fn box_clone(&self) -> Box<dyn CompletionProvider> {
Box::new((*self).clone())
}
}

View file

@ -1,5 +1,5 @@
[package]
name = "zed_actions2"
name = "assets"
version = "0.1.0"
edition = "2021"
publish = false
@ -7,5 +7,6 @@ publish = false
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
gpui = { package = "gpui2", path = "../gpui2" }
serde.workspace = true
gpui = {path = "../gpui"}
rust-embed.workspace = true
anyhow.workspace = true

View file

@ -1,3 +1,4 @@
// This crate was essentially pulled out verbatim from the main `zed` crate to avoid having to run the RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build.
use anyhow::anyhow;
use gpui::{AssetSource, Result, SharedString};

View file

@ -18,13 +18,14 @@ gpui = { path = "../gpui" }
language = { path = "../language" }
menu = { path = "../menu" }
multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
search = { path = "../search" }
semantic_index = { path = "../semantic_index" }
settings = { path = "../settings" }
theme = { path = "../theme" }
ui = { path = "../ui" }
util = { path = "../util" }
workspace = { path = "../workspace" }
semantic_index = { path = "../semantic_index" }
project = { path = "../project" }
uuid.workspace = true
log.workspace = true
@ -43,9 +44,9 @@ smol.workspace = true
tiktoken-rs.workspace = true
[dev-dependencies]
ai = { path = "../ai", features = ["test-support"]}
editor = { path = "../editor", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
ai = { path = "../ai", features = ["test-support"]}
ctor.workspace = true
env_logger.workspace = true

View file

@ -12,12 +12,28 @@ use chrono::{DateTime, Local};
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use gpui::AppContext;
use gpui::{actions, AppContext, SharedString};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
use util::paths::CONVERSATIONS_DIR;
actions!(
assistant,
[
NewConversation,
Assist,
Split,
CycleMessageRole,
QuoteSelection,
ToggleFocus,
ResetKey,
InlineAssist,
ToggleIncludeConversation,
ToggleRetrieveContext,
]
);
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
@ -34,7 +50,7 @@ struct MessageMetadata {
enum MessageStatus {
Pending,
Done,
Error(Arc<str>),
Error(SharedString),
}
#[derive(Serialize, Deserialize)]

File diff suppressed because it is too large

View file

@ -1,7 +1,8 @@
use anyhow;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;
use settings::Settings;
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub enum OpenAIModel {
@ -51,21 +52,37 @@ pub enum AssistantDockPosition {
pub struct AssistantSettings {
pub button: bool,
pub dock: AssistantDockPosition,
pub default_width: f32,
pub default_height: f32,
pub default_width: Pixels,
pub default_height: Pixels,
pub default_open_ai_model: OpenAIModel,
}
/// Assistant panel settings
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContent {
/// Whether to show the assistant panel button in the status bar.
///
/// Default: true
pub button: Option<bool>,
/// Where to dock the assistant.
///
/// Default: right
pub dock: Option<AssistantDockPosition>,
/// Default width in pixels when the assistant is docked to the left or right.
///
/// Default: 640
pub default_width: Option<f32>,
/// Default height in pixels when the assistant is docked to the bottom.
///
/// Default: 320
pub default_height: Option<f32>,
/// The default OpenAI model to use when starting new conversations.
///
/// Default: gpt-4-1106-preview
pub default_open_ai_model: Option<OpenAIModel>,
}
impl Setting for AssistantSettings {
impl Settings for AssistantSettings {
const KEY: Option<&'static str> = Some("assistant");
type FileContent = AssistantSettingsContent;
@ -73,7 +90,7 @@ impl Setting for AssistantSettings {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &gpui::AppContext,
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}

View file

@ -3,7 +3,7 @@ use ai::completion::{CompletionProvider, CompletionRequest};
use anyhow::Result;
use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{Entity, ModelContext, ModelHandle, Task};
use gpui::{EventEmitter, Model, ModelContext, Task};
use language::{Rope, TransactionId};
use multi_buffer;
use std::{cmp, future, ops::Range, sync::Arc};
@ -21,7 +21,7 @@ pub enum CodegenKind {
pub struct Codegen {
provider: Arc<dyn CompletionProvider>,
buffer: ModelHandle<MultiBuffer>,
buffer: Model<MultiBuffer>,
snapshot: MultiBufferSnapshot,
kind: CodegenKind,
last_equal_ranges: Vec<Range<Anchor>>,
@ -32,13 +32,11 @@ pub struct Codegen {
_subscription: gpui::Subscription,
}
impl Entity for Codegen {
type Event = Event;
}
impl EventEmitter<Event> for Codegen {}
impl Codegen {
pub fn new(
buffer: ModelHandle<MultiBuffer>,
buffer: Model<MultiBuffer>,
kind: CodegenKind,
provider: Arc<dyn CompletionProvider>,
cx: &mut ModelContext<Self>,
@ -60,7 +58,7 @@ impl Codegen {
fn handle_buffer_event(
&mut self,
_buffer: ModelHandle<MultiBuffer>,
_buffer: Model<MultiBuffer>,
event: &multi_buffer::Event,
cx: &mut ModelContext<Self>,
) {
@ -111,13 +109,13 @@ impl Codegen {
.unwrap_or_else(|| snapshot.indent_size_for_line(selection_start.row));
let response = self.provider.complete(prompt);
self.generation = cx.spawn_weak(|this, mut cx| {
self.generation = cx.spawn(|this, mut cx| {
async move {
let generate = async {
let mut edit_start = range.start.to_offset(&snapshot);
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
let diff = cx.background().spawn(async move {
let diff = cx.background_executor().spawn(async move {
let chunks = strip_invalid_spans_from_codeblock(response.await?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
@ -183,12 +181,6 @@ impl Codegen {
});
while let Some(hunks) = hunks_rx.next().await {
let this = if let Some(this) = this.upgrade(&cx) {
this
} else {
break;
};
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
@ -245,7 +237,7 @@ impl Codegen {
}
cx.notify();
});
})?;
}
diff.await?;
@ -253,17 +245,16 @@ impl Codegen {
};
let result = generate.await;
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
this.idle = true;
if let Err(error) = result {
this.error = Some(error);
}
cx.emit(Event::Finished);
cx.notify();
});
}
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
this.idle = true;
if let Err(error) = result {
this.error = Some(error);
}
cx.emit(Event::Finished);
cx.notify();
})
.ok();
}
});
self.error.take();
@ -372,7 +363,7 @@ mod tests {
use super::*;
use ai::test::FakeCompletionProvider;
use futures::stream::{self};
use gpui::{executor::Deterministic, TestAppContext};
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use rand::prelude::*;
@ -391,12 +382,8 @@ mod tests {
}
#[gpui::test(iterations = 10)]
async fn test_transform_autoindent(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
@ -408,14 +395,14 @@ mod tests {
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Transform { range },
@ -442,10 +429,10 @@ mod tests {
println!("CHUNK: {:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@ -464,9 +451,8 @@ mod tests {
async fn test_autoindent_when_generating_past_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
@ -475,14 +461,14 @@ mod tests {
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 6))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
@ -508,10 +494,10 @@ mod tests {
let (chunk, suffix) = new_text.split_at(len);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
@ -530,9 +516,8 @@ mod tests {
async fn test_autoindent_when_generating_before_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
deterministic: Arc<Deterministic>,
) {
cx.set_global(cx.read(SettingsStore::test));
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = concat!(
@ -541,14 +526,14 @@ mod tests {
"}\n" //
);
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 2))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.add_model(|cx| {
let codegen = cx.new_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
@ -575,10 +560,10 @@ mod tests {
println!("{:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
}
provider.finish_completion();
deterministic.run_until_parked();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),

View file

@ -176,7 +176,7 @@ pub(crate) mod tests {
use super::*;
use std::sync::Arc;
use gpui::AppContext;
use gpui::{AppContext, Context};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use settings::SettingsStore;
@ -227,7 +227,8 @@ pub(crate) mod tests {
#[gpui::test]
fn test_outline_for_prompt(cx: &mut AppContext) {
cx.set_global(SettingsStore::test(cx));
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language_settings::init(cx);
let text = indoc! {"
struct X {
@ -253,7 +254,7 @@ pub(crate) mod tests {
}
"};
let buffer =
cx.add_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
cx.new_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(

View file

@ -1,54 +0,0 @@
[package]
name = "assistant2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/assistant.rs"
doctest = false
[dependencies]
ai = { package = "ai2", path = "../ai2" }
client = { package = "client2", path = "../client2" }
collections = { path = "../collections"}
editor = { package = "editor2", path = "../editor2" }
fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
menu = { package = "menu2", path = "../menu2" }
multi_buffer = { package = "multi_buffer2", path = "../multi_buffer2" }
project = { package = "project2", path = "../project2" }
search = { package = "search2", path = "../search2" }
semantic_index = { package = "semantic_index2", path = "../semantic_index2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
workspace = { package = "workspace2", path = "../workspace2" }
uuid.workspace = true
log.workspace = true
anyhow.workspace = true
chrono = { version = "0.4", features = ["serde"] }
futures.workspace = true
indoc.workspace = true
isahc.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
tiktoken-rs.workspace = true
[dev-dependencies]
ai = { package = "ai2", path = "../ai2", features = ["test-support"]}
editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
project = { package = "project2", path = "../project2", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
log.workspace = true
rand.workspace = true

View file

@ -1,63 +0,0 @@
> Hello, I'm Nathan Sobo.
I'm currently your sole creator and the leader of Zed Industries.
Our goal is to create a shared mental model, first for ourselves and eventually for the world, that can be used to facilitate human cooperation.
< Hello, Nathan! Welcome back to Zed. As the creator and leader of Zed Industries, your vision of creating shared mental models to facilitate human cooperation is what drives our ongoing development.
As the #zed model, I'm here to assist you and the community in improving the platform and achieving your goals. Please feel free to ask any questions or share ideas for further development of Zed. Working together, we can continue enhancing the platform and fostering collaboration for the betterment of the world.
> I have a bunch of ideas that I'll dump out of my brain and you can help me organize. Today, Zed is a code editor written in Rust whose main selling points are performance, minimal design, and a solid start on real-time collaborative editing that could still use more polish.
We've decided that our next milestone will be open-sourcing Zed on the Zed platform.
< Nathan, it's exciting to hear that you have more ideas to enhance Zed! Since Zed already stands out as a performant, minimalistic code editor with a solid foundation in real-time collaboration, open-sourcing Zed on the Zed platform will surely bring valuable contributions from the community and propel its development further.
Let's organize your ideas and create a plan for your next milestone. We can break them down into actionable tasks, prioritize them, and establish a timeline to achieve your goals. Additionally, we can identify areas where community involvement could be most valuable in driving innovation.
Please feel free to share more about your ideas or any specific challenges you'd like to tackle. I'm here to help you turn these ideas into a cohesive and successful plan for Zed's future!
@nathan> #zed, can you describe the conversation UI as you currently understand it.
< Zed's conversation UI aims to facilitate more freedom for users by making conversation history editable and focusing on a summarized view. Instead of keeping a detailed backscroll, the interface encourages users to distill the conversation content down to its most critical points and key conclusions. This approach ensures that the vital information from the conversation is highlighted and easily accessible, while still maintaining a history that can be linked to when necessary.
Addressing @nate's concerns, this design choice aims to strike a balance between readability, usability, and information preservation. The summarized view maintains the essence of the conversation without being burdened by extensive backscroll. Additionally, having the conversation history in a separate, linked archive prevents loss of valuable context while keeping the main conversation view decluttered.
@nathan> What details around the conversation view are worth exploring?
< @nathan, there are several aspects of the conversation view that are worth exploring and refining:
1. Editing functionality: Ensuring that all participants can edit and refine the conversation summary effectively while preserving the original context.
2. Permissions and access control: Determining how to manage editing rights, user roles, and visibility of the conversation summary and history.
3. Navigation and linking: Implementing an intuitive system for navigating between the summarized conversation and linked history or reference materials.
4. Visualization and layout: Designing the interface to clearly present the summarized information while providing a visually engaging and organized experience for participants.
5. Threaded conversations and branching: Exploring ways to handle sub-conversations, supplementary discussions, or debates within the primary conversation view.
Considering these aspects will ensure our conversation view design is optimized for usability, collaboration, and accessibility in Zed.
@nathan> @nate, which of these would you like to explore?
@nate> 2 feels like it isn't important at the moment; we can explore that later. Let's start with 4, which I think will lead us to discussions 3 and 5.
#zed share your thoughts on the points we need to consider to design a layout and visualization for a conversation view between you (#zed) and multiple people, or between multiple people and multiple bots (you and other bots).
@nathan> Agreed. I'm interested in threading, I think, more than anything. Or 4, yeah. I think we need to scope the threading conversation. Also, asking #zed to propose the solution... not sure it will be that effective, but it's worth a try...
#zed describe a hypothetical conversation UI that meets the description.
< @nate and @nathan, I understand that you'd like to focus on the layout and visualization for a conversation view that can accommodate multiple people and potentially multiple bots as well. Here's a proposal for a conversation UI that meets the requirements:
1. Main conversation area: Display the summarized conversation content in a clean and organized manner, ensuring readability and efficient use of space.
2. Participant avatars and labels: Include user avatars and/or labels to distinguish between users and bots visually, allowing easy identification of each participant's contributions.
3. Active threads indicator: Designate a clear indicator for ongoing threads, which will attract attention to active sub-conversations or related discussions in the conversation view.
4. Dynamic branching: Implement a branching visual structure to present threaded conversations, clearly distinguishing sub-conversations, and enabling users to navigate between them smoothly.
5. Multiple bot interaction support: Facilitate interactions between multiple bots through user-invoked prompts or bot-to-bot communication channels without disrupting the primary conversation flow.
6. Expandable conversation history: Offer a separate, expandable section or panel to access historical content without cluttering the main conversation view.
7. Linking and referencing: Implement a system for hyperlinking relevant information, resources, or previous conversations to the current discussion, making it easy for participants to follow and reference the shared context.
This hypothetical conversation UI aims to address the layout and visualization, threading, and multi-participant aspects of your requirements. Of course, it's just an initial proposal, and we can refine and iterate on it based on your feedback and specific needs.

View file

@ -1,3 +0,0 @@
Push content to a deeper layer.
A context can have multiple sublayers.
You can enable or disable arbitrary sublayers at arbitrary nesting depths when viewing the document.
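A minimal sketch of how these notes might translate into types (hypothetical names, for illustration only; nothing below existed in the codebase):

enum Block {
    Text(String),
    Sublayer { enabled: bool, content: Vec<Block> },
}

// Collect the text of every visible block, descending into enabled
// sublayers at arbitrary nesting depth and skipping disabled ones.
fn visible_text(blocks: &[Block], out: &mut String) {
    for block in blocks {
        match block {
            Block::Text(text) => out.push_str(text),
            Block::Sublayer { enabled: true, content } => visible_text(content, out),
            Block::Sublayer { enabled: false, .. } => {}
        }
    }
}

fn main() {
    let doc = vec![
        Block::Text("Intro. ".into()),
        Block::Sublayer {
            enabled: false,
            content: vec![Block::Text("Deeper detail. ".into())],
        },
        Block::Text("Conclusion.".into()),
    ];
    let mut out = String::new();
    visible_text(&doc, &mut out);
    assert_eq!(out, "Intro. Conclusion.");
}

Toggling a sublayer is then just flipping `enabled` on that node before re-rendering the visible text.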

View file

@ -1,129 +0,0 @@
pub mod assistant_panel;
mod assistant_settings;
mod codegen;
mod prompts;
mod streaming_diff;
use ai::providers::open_ai::Role;
use anyhow::Result;
pub use assistant_panel::AssistantPanel;
use assistant_settings::OpenAIModel;
use chrono::{DateTime, Local};
use collections::HashMap;
use fs::Fs;
use futures::StreamExt;
use gpui::{actions, AppContext, SharedString};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{cmp::Reverse, ffi::OsStr, path::PathBuf, sync::Arc};
use util::paths::CONVERSATIONS_DIR;
actions!(
assistant,
[
NewConversation,
Assist,
Split,
CycleMessageRole,
QuoteSelection,
ToggleFocus,
ResetKey,
InlineAssist,
ToggleIncludeConversation,
ToggleRetrieveContext,
]
);
#[derive(
Copy, Clone, Debug, Default, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)]
struct MessageId(usize);
#[derive(Clone, Debug, Serialize, Deserialize)]
struct MessageMetadata {
role: Role,
sent_at: DateTime<Local>,
status: MessageStatus,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
enum MessageStatus {
Pending,
Done,
Error(SharedString),
}
#[derive(Serialize, Deserialize)]
struct SavedMessage {
id: MessageId,
start: usize,
}
#[derive(Serialize, Deserialize)]
struct SavedConversation {
id: Option<String>,
zed: String,
version: String,
text: String,
messages: Vec<SavedMessage>,
message_metadata: HashMap<MessageId, MessageMetadata>,
summary: String,
model: OpenAIModel,
}
impl SavedConversation {
const VERSION: &'static str = "0.1.0";
}
struct SavedConversationMetadata {
title: String,
path: PathBuf,
mtime: chrono::DateTime<chrono::Local>,
}
impl SavedConversationMetadata {
pub async fn list(fs: Arc<dyn Fs>) -> Result<Vec<Self>> {
fs.create_dir(&CONVERSATIONS_DIR).await?;
let mut paths = fs.read_dir(&CONVERSATIONS_DIR).await?;
let mut conversations = Vec::<SavedConversationMetadata>::new();
while let Some(path) = paths.next().await {
let path = path?;
if path.extension() != Some(OsStr::new("json")) {
continue;
}
let pattern = r" - \d+.zed.json$";
let re = Regex::new(pattern).unwrap();
let metadata = fs.metadata(&path).await?;
if let Some((file_name, metadata)) = path
.file_name()
.and_then(|name| name.to_str())
.zip(metadata)
{
let title = re.replace(file_name, "");
conversations.push(Self {
title: title.into_owned(),
path,
mtime: metadata.mtime.into(),
});
}
}
conversations.sort_unstable_by_key(|conversation| Reverse(conversation.mtime));
Ok(conversations)
}
}
pub fn init(cx: &mut AppContext) {
assistant_panel::init(cx);
}
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
if std::env::var("RUST_LOG").is_ok() {
env_logger::init();
}
}

File diff suppressed because it is too large

View file

@ -1,80 +0,0 @@
use anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub enum OpenAIModel {
#[serde(rename = "gpt-3.5-turbo-0613")]
ThreePointFiveTurbo,
#[serde(rename = "gpt-4-0613")]
Four,
#[serde(rename = "gpt-4-1106-preview")]
FourTurbo,
}
impl OpenAIModel {
pub fn full_name(&self) -> &'static str {
match self {
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
OpenAIModel::Four => "gpt-4-0613",
OpenAIModel::FourTurbo => "gpt-4-1106-preview",
}
}
pub fn short_name(&self) -> &'static str {
match self {
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo",
OpenAIModel::Four => "gpt-4",
OpenAIModel::FourTurbo => "gpt-4-turbo",
}
}
pub fn cycle(&self) -> Self {
match self {
OpenAIModel::ThreePointFiveTurbo => OpenAIModel::Four,
OpenAIModel::Four => OpenAIModel::FourTurbo,
OpenAIModel::FourTurbo => OpenAIModel::ThreePointFiveTurbo,
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
Left,
Right,
Bottom,
}
#[derive(Deserialize, Debug)]
pub struct AssistantSettings {
pub button: bool,
pub dock: AssistantDockPosition,
pub default_width: f32,
pub default_height: f32,
pub default_open_ai_model: OpenAIModel,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContent {
pub button: Option<bool>,
pub dock: Option<AssistantDockPosition>,
pub default_width: Option<f32>,
pub default_height: Option<f32>,
pub default_open_ai_model: Option<OpenAIModel>,
}
impl Settings for AssistantSettings {
const KEY: Option<&'static str> = Some("assistant");
type FileContent = AssistantSettingsContent;
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
}

View file

@ -1,688 +0,0 @@
use crate::streaming_diff::{Hunk, StreamingDiff};
use ai::completion::{CompletionProvider, CompletionRequest};
use anyhow::Result;
use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
use gpui::{EventEmitter, Model, ModelContext, Task};
use language::{Rope, TransactionId};
use multi_buffer;
use std::{cmp, future, ops::Range, sync::Arc};
pub enum Event {
Finished,
Undone,
}
#[derive(Clone)]
pub enum CodegenKind {
Transform { range: Range<Anchor> },
Generate { position: Anchor },
}
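// Drives a single streamed completion against a MultiBuffer: incoming text is
// diffed against the selected range (via StreamingDiff) so unchanged spans are
// preserved, and all resulting edits are grouped into one transaction that can
// be undone as a unit.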
pub struct Codegen {
provider: Arc<dyn CompletionProvider>,
buffer: Model<MultiBuffer>,
snapshot: MultiBufferSnapshot,
kind: CodegenKind,
last_equal_ranges: Vec<Range<Anchor>>,
transaction_id: Option<TransactionId>,
error: Option<anyhow::Error>,
generation: Task<()>,
idle: bool,
_subscription: gpui::Subscription,
}
impl EventEmitter<Event> for Codegen {}
impl Codegen {
pub fn new(
buffer: Model<MultiBuffer>,
kind: CodegenKind,
provider: Arc<dyn CompletionProvider>,
cx: &mut ModelContext<Self>,
) -> Self {
let snapshot = buffer.read(cx).snapshot(cx);
Self {
provider,
buffer: buffer.clone(),
snapshot,
kind,
last_equal_ranges: Default::default(),
transaction_id: Default::default(),
error: Default::default(),
idle: true,
generation: Task::ready(()),
_subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
}
}
fn handle_buffer_event(
&mut self,
_buffer: Model<MultiBuffer>,
event: &multi_buffer::Event,
cx: &mut ModelContext<Self>,
) {
if let multi_buffer::Event::TransactionUndone { transaction_id } = event {
if self.transaction_id == Some(*transaction_id) {
self.transaction_id = None;
self.generation = Task::ready(());
cx.emit(Event::Undone);
}
}
}
pub fn range(&self) -> Range<Anchor> {
match &self.kind {
CodegenKind::Transform { range } => range.clone(),
CodegenKind::Generate { position } => position.bias_left(&self.snapshot)..*position,
}
}
pub fn kind(&self) -> &CodegenKind {
&self.kind
}
pub fn last_equal_ranges(&self) -> &[Range<Anchor>] {
&self.last_equal_ranges
}
pub fn idle(&self) -> bool {
self.idle
}
pub fn error(&self) -> Option<&anyhow::Error> {
self.error.as_ref()
}
pub fn start(&mut self, prompt: Box<dyn CompletionRequest>, cx: &mut ModelContext<Self>) {
let range = self.range();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
.text_for_range(range.start..range.end)
.collect::<Rope>();
let selection_start = range.start.to_point(&snapshot);
let suggested_line_indent = snapshot
.suggested_indents(selection_start.row..selection_start.row + 1, cx)
.into_values()
.next()
.unwrap_or_else(|| snapshot.indent_size_for_line(selection_start.row));
let response = self.provider.complete(prompt);
self.generation = cx.spawn(|this, mut cx| {
async move {
let generate = async {
let mut edit_start = range.start.to_offset(&snapshot);
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
let diff = cx.background_executor().spawn(async move {
let chunks = strip_invalid_spans_from_codeblock(response.await?);
futures::pin_mut!(chunks);
let mut diff = StreamingDiff::new(selected_text.to_string());
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
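// Normalize indentation as lines stream in: the first line's indent becomes
// the baseline, and every subsequent line is re-indented by its delta from
// that baseline on top of the indent suggested at the insertion point.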
while let Some(chunk) = chunks.next().await {
let chunk = chunk?;
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta = line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(selection_start.column as usize);
}
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}
if line_indent.is_some() {
hunks_tx.send(diff.push_new(&new_text)).await?;
new_text.clear();
}
if lines.peek().is_some() {
hunks_tx.send(diff.push_new("\n")).await?;
line_indent = None;
first_line = false;
}
}
}
hunks_tx.send(diff.push_new(&new_text)).await?;
hunks_tx.send(diff.finish()).await?;
anyhow::Ok(())
});
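// Apply hunks to the buffer as they arrive: Insert and Remove become edits,
// Keep ranges are recorded in `last_equal_ranges`, and each transaction is
// merged into the generation's first one so a single undo reverts everything.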
while let Some(hunks) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
let transaction = this.buffer.update(cx, |buffer, cx| {
// Avoid grouping assistant edits with user edits.
buffer.finalize_last_transaction(cx);
buffer.start_transaction(cx);
buffer.edit(
hunks.into_iter().filter_map(|hunk| match hunk {
Hunk::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
Hunk::Remove { len } => {
let edit_end = edit_start + len;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
Hunk::Keep { len } => {
let edit_end = edit_start + len;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
this.last_equal_ranges.push(edit_range);
None
}
}),
None,
cx,
);
buffer.end_transaction(cx)
});
if let Some(transaction) = transaction {
if let Some(first_transaction) = this.transaction_id {
// Group all assistant edits into the first transaction.
this.buffer.update(cx, |buffer, cx| {
buffer.merge_transactions(
transaction,
first_transaction,
cx,
)
});
} else {
this.transaction_id = Some(transaction);
this.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction(cx)
});
}
}
cx.notify();
})?;
}
diff.await?;
anyhow::Ok(())
};
let result = generate.await;
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
this.idle = true;
if let Err(error) = result {
this.error = Some(error);
}
cx.emit(Event::Finished);
cx.notify();
})
.ok();
}
});
self.error.take();
self.idle = false;
cx.notify();
}
pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
if let Some(transaction_id) = self.transaction_id {
self.buffer
.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
}
}
}
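// Strips wrapper artifacts from a streamed completion: a leading (and
// trailing) markdown code fence and the <|S|> / <|E|> span markers, buffering
// only as much text as is needed to disambiguate before re-emitting chunks.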
fn strip_invalid_spans_from_codeblock(
stream: impl Stream<Item = Result<String>>,
) -> impl Stream<Item = Result<String>> {
let mut first_line = true;
let mut buffer = String::new();
let mut starts_with_markdown_codeblock = false;
let mut includes_start_or_end_span = false;
stream.filter_map(move |chunk| {
let chunk = match chunk {
Ok(chunk) => chunk,
Err(err) => return future::ready(Some(Err(err))),
};
buffer.push_str(&chunk);
if buffer.len() > "<|S|".len() && buffer.starts_with("<|S|") {
includes_start_or_end_span = true;
buffer = buffer
.strip_prefix("<|S|>")
.or_else(|| buffer.strip_prefix("<|S|"))
.unwrap_or(&buffer)
.to_string();
} else if buffer.ends_with("|E|>") {
includes_start_or_end_span = true;
} else if buffer.starts_with("<|")
|| buffer.starts_with("<|S")
|| buffer.starts_with("<|S|")
|| buffer.ends_with("|")
|| buffer.ends_with("|E")
|| buffer.ends_with("|E|")
{
return future::ready(None);
}
if first_line {
if buffer == "" || buffer == "`" || buffer == "``" {
return future::ready(None);
} else if buffer.starts_with("```") {
starts_with_markdown_codeblock = true;
if let Some(newline_ix) = buffer.find('\n') {
buffer.replace_range(..newline_ix + 1, "");
first_line = false;
} else {
return future::ready(None);
}
}
}
let mut text = buffer.to_string();
if starts_with_markdown_codeblock {
text = text
.strip_suffix("\n```\n")
.or_else(|| text.strip_suffix("\n```"))
.or_else(|| text.strip_suffix("\n``"))
.or_else(|| text.strip_suffix("\n`"))
.or_else(|| text.strip_suffix('\n'))
.unwrap_or(&text)
.to_string();
}
if includes_start_or_end_span {
text = text
.strip_suffix("|E|>")
.or_else(|| text.strip_suffix("E|>"))
.or_else(|| text.strip_prefix("|>"))
.or_else(|| text.strip_prefix(">"))
.unwrap_or(&text)
.to_string();
};
if text.contains('\n') {
first_line = false;
}
let remainder = buffer.split_off(text.len());
let result = if buffer.is_empty() {
None
} else {
Some(Ok(buffer.clone()))
};
buffer = remainder;
future::ready(result)
})
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::*;
use ai::test::FakeCompletionProvider;
use futures::stream::{self};
use gpui::{Context, TestAppContext};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use rand::prelude::*;
use serde::Serialize;
use settings::SettingsStore;
#[derive(Serialize)]
pub struct DummyCompletionRequest {
pub name: String,
}
impl CompletionRequest for DummyCompletionRequest {
fn data(&self) -> serde_json::Result<String> {
serde_json::to_string(self)
}
}
#[gpui::test(iterations = 10)]
async fn test_transform_autoindent(cx: &mut TestAppContext, mut rng: StdRng) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
fn main() {
let x = 0;
for _ in 0..10 {
x += 1;
}
}
"};
let buffer =
cx.build_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
let range = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(4, 5))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.build_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Transform { range },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
" let mut x = 0;\n",
" while x < 10 {\n",
" x += 1;\n",
" }",
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
println!("CHUNK: {:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test(iterations = 10)]
async fn test_autoindent_when_generating_past_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = indoc! {"
fn main() {
le
}
"};
let buffer =
cx.build_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 6))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.build_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
"t mut x = 0;\n",
"while x < 10 {\n",
" x += 1;\n",
"}", //
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test(iterations = 10)]
async fn test_autoindent_when_generating_before_indentation(
cx: &mut TestAppContext,
mut rng: StdRng,
) {
cx.set_global(cx.update(SettingsStore::test));
cx.update(language_settings::init);
let text = concat!(
"fn main() {\n",
" \n",
"}\n" //
);
let buffer =
cx.build_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
let position = buffer.read_with(cx, |buffer, cx| {
let snapshot = buffer.snapshot(cx);
snapshot.anchor_before(Point::new(1, 2))
});
let provider = Arc::new(FakeCompletionProvider::new());
let codegen = cx.build_model(|cx| {
Codegen::new(
buffer.clone(),
CodegenKind::Generate { position },
provider.clone(),
cx,
)
});
let request = Box::new(DummyCompletionRequest {
name: "test".to_string(),
});
codegen.update(cx, |codegen, cx| codegen.start(request, cx));
let mut new_text = concat!(
"let mut x = 0;\n",
"while x < 10 {\n",
" x += 1;\n",
"}", //
);
while !new_text.is_empty() {
let max_len = cmp::min(new_text.len(), 10);
let len = rng.gen_range(1..=max_len);
let (chunk, suffix) = new_text.split_at(len);
println!("{:?}", &chunk);
provider.send_completion(chunk);
new_text = suffix;
cx.background_executor.run_until_parked();
}
provider.finish_completion();
cx.background_executor.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx).text()),
indoc! {"
fn main() {
let mut x = 0;
while x < 10 {
x += 1;
}
}
"}
);
}
#[gpui::test]
async fn test_strip_invalid_spans_from_codeblock() {
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("Lorem ipsum dolor", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\nLorem ipsum dolor\n```\n", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum dolor"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks(
"```html\n```js\nLorem ipsum dolor\n```\n```",
2
))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"```js\nLorem ipsum dolor\n```"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("``\nLorem ipsum dolor\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"``\nLorem ipsum dolor\n```"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("<|S|Lorem ipsum|E|>", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("<|S|>Lorem ipsum", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\n<|S|>Lorem ipsum\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
assert_eq!(
strip_invalid_spans_from_codeblock(chunks("```\n<|S|Lorem ipsum|E|>\n```", 2))
.map(|chunk| chunk.unwrap())
.collect::<String>()
.await,
"Lorem ipsum"
);
fn chunks(text: &str, size: usize) -> impl Stream<Item = Result<String>> {
stream::iter(
text.chars()
.collect::<Vec<_>>()
.chunks(size)
.map(|chunk| Ok(chunk.iter().collect::<String>()))
.collect::<Vec<_>>(),
)
}
}
fn rust_lang() -> Language {
Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
Some(tree_sitter_rust::language()),
)
.with_indents_query(
r#"
(call_expression) @indent
(field_expression) @indent
(_ "(" ")" @end) @indent
(_ "{" "}" @end) @indent
"#,
)
.unwrap()
}
}

View file

@ -1,389 +0,0 @@
use ai::models::LanguageModel;
use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
use ai::prompts::file_context::FileContext;
use ai::prompts::generate::GenerateInlineContent;
use ai::prompts::preamble::EngineerPreamble;
use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};
use ai::providers::open_ai::OpenAILanguageModel;
use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
use std::cmp::{self, Reverse};
use std::ops::Range;
use std::sync::Arc;
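// Builds a compact outline of a buffer for prompting: ranges captured as
// @collapse by the language's embedding query (typically function bodies) are
// reduced to their @keep captures, while the selected range is emitted
// verbatim, wrapped in <|S| ... |E|> markers.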
#[allow(dead_code)]
fn summarize(buffer: &BufferSnapshot, selected_range: Range<impl ToOffset>) -> String {
#[derive(Debug)]
struct Match {
collapse: Range<usize>,
keep: Vec<Range<usize>>,
}
let selected_range = selected_range.to_offset(buffer);
let mut ts_matches = buffer.matches(0..buffer.len(), |grammar| {
Some(&grammar.embedding_config.as_ref()?.query)
});
let configs = ts_matches
.grammars()
.iter()
.map(|g| g.embedding_config.as_ref().unwrap())
.collect::<Vec<_>>();
let mut matches = Vec::new();
while let Some(mat) = ts_matches.peek() {
let config = &configs[mat.grammar_index];
if let Some(collapse) = mat.captures.iter().find_map(|cap| {
if Some(cap.index) == config.collapse_capture_ix {
Some(cap.node.byte_range())
} else {
None
}
}) {
let mut keep = Vec::new();
for capture in mat.captures.iter() {
if Some(capture.index) == config.keep_capture_ix {
keep.push(capture.node.byte_range());
} else {
continue;
}
}
ts_matches.advance();
matches.push(Match { collapse, keep });
} else {
ts_matches.advance();
}
}
matches.sort_unstable_by_key(|mat| (mat.collapse.start, Reverse(mat.collapse.end)));
let mut matches = matches.into_iter().peekable();
let mut summary = String::new();
let mut offset = 0;
let mut flushed_selection = false;
while let Some(mat) = matches.next() {
// Keep extending the collapsed range if the next match surrounds
// the current one.
while let Some(next_mat) = matches.peek() {
if mat.collapse.start <= next_mat.collapse.start
&& mat.collapse.end >= next_mat.collapse.end
{
matches.next().unwrap();
} else {
break;
}
}
if offset > mat.collapse.start {
// Skip collapsed nodes that have already been summarized.
offset = cmp::max(offset, mat.collapse.end);
continue;
}
if offset <= selected_range.start && selected_range.start <= mat.collapse.end {
if !flushed_selection {
// The collapsed node ends after the selection starts, so we'll flush the selection first.
summary.extend(buffer.text_for_range(offset..selected_range.start));
summary.push_str("<|S|");
if selected_range.end == selected_range.start {
summary.push_str(">");
} else {
summary.extend(buffer.text_for_range(selected_range.clone()));
summary.push_str("|E|>");
}
offset = selected_range.end;
flushed_selection = true;
}
// If the selection intersects the collapsed node, we won't collapse it.
if selected_range.end >= mat.collapse.start {
continue;
}
}
summary.extend(buffer.text_for_range(offset..mat.collapse.start));
for keep in mat.keep {
summary.extend(buffer.text_for_range(keep));
}
offset = mat.collapse.end;
}
// Flush selection if we haven't already done so.
if !flushed_selection && offset <= selected_range.start {
summary.extend(buffer.text_for_range(offset..selected_range.start));
summary.push_str("<|S|");
if selected_range.end == selected_range.start {
summary.push_str(">");
} else {
summary.extend(buffer.text_for_range(selected_range.clone()));
summary.push_str("|E|>");
}
offset = selected_range.end;
}
summary.extend(buffer.text_for_range(offset..buffer.len()));
summary
}
pub fn generate_content_prompt(
user_prompt: String,
language_name: Option<&str>,
buffer: BufferSnapshot,
range: Range<usize>,
search_results: Vec<PromptCodeSnippet>,
model: &str,
project_name: Option<String>,
) -> anyhow::Result<String> {
// Using new Prompt Templates
let openai_model: Arc<dyn LanguageModel> = Arc::new(OpenAILanguageModel::load(model));
let lang_name = language_name.map(|language_name| language_name.to_string());
let args = PromptArguments {
model: openai_model,
language_name: lang_name.clone(),
project_name,
snippets: search_results.clone(),
reserved_tokens: 1000,
buffer: Some(buffer),
selected_range: Some(range),
user_prompt: Some(user_prompt.clone()),
};
let templates: Vec<(PromptPriority, Box<dyn PromptTemplate>)> = vec![
(PromptPriority::Mandatory, Box::new(EngineerPreamble {})),
(
PromptPriority::Ordered { order: 1 },
Box::new(RepositoryContext {}),
),
(
PromptPriority::Ordered { order: 0 },
Box::new(FileContext {}),
),
(
PromptPriority::Mandatory,
Box::new(GenerateInlineContent {}),
),
];
let chain = PromptChain::new(args, templates);
let (prompt, _) = chain.generate(true)?;
anyhow::Ok(prompt)
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use std::sync::Arc;
use gpui::{AppContext, Context};
use indoc::indoc;
use language::{language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, Point};
use settings::SettingsStore;
pub(crate) fn rust_lang() -> Language {
Language::new(
LanguageConfig {
name: "Rust".into(),
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
Some(tree_sitter_rust::language()),
)
.with_embedding_query(
r#"
(
[(line_comment) (attribute_item)]* @context
.
[
(struct_item
name: (_) @name)
(enum_item
name: (_) @name)
(impl_item
trait: (_)? @name
"for"? @name
type: (_) @name)
(trait_item
name: (_) @name)
(function_item
name: (_) @name
body: (block
"{" @keep
"}" @keep) @collapse)
(macro_definition
name: (_) @name)
] @item
)
"#,
)
.unwrap()
}
#[gpui::test]
fn test_outline_for_prompt(cx: &mut AppContext) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language_settings::init(cx);
let text = indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let a = 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {
self.a
}
pub fn b(&self) -> usize {
self.b
}
}
"};
let buffer =
cx.build_model(|cx| Buffer::new(0, 0, text).with_language(Arc::new(rust_lang()), cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
summarize(&snapshot, Point::new(1, 4)..Point::new(1, 4)),
indoc! {"
struct X {
<|S|>a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(8, 12)..Point::new(8, 14)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let <|S|a |E|>= 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(6, 0)..Point::new(6, 0)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
<|S|>
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
assert_eq!(
summarize(&snapshot, Point::new(21, 0)..Point::new(21, 0)),
indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
<|S|>"}
);
// Ensure nested functions get collapsed properly.
let text = indoc! {"
struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {
let a = 1;
let b = 2;
Self { a, b }
}
pub fn a(&self, param: bool) -> usize {
let a = 30;
fn nested() -> usize {
3
}
self.a + nested()
}
pub fn b(&self) -> usize {
self.b
}
}
"};
buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
let snapshot = buffer.read(cx).snapshot();
assert_eq!(
summarize(&snapshot, Point::new(0, 0)..Point::new(0, 0)),
indoc! {"
<|S|>struct X {
a: usize,
b: usize,
}
impl X {
fn new() -> Self {}
pub fn a(&self, param: bool) -> usize {}
pub fn b(&self) -> usize {}
}
"}
);
}
}

View file

@ -1,293 +0,0 @@
use collections::HashMap;
use ordered_float::OrderedFloat;
use std::{
cmp,
fmt::{self, Debug},
ops::Range,
};
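// Dense score matrix in column-major order: entry (row, col) lives at
// cells[col * rows + row], so growing the matrix by one column as new text
// streams in only appends to the end of the vector.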
struct Matrix {
cells: Vec<f64>,
rows: usize,
cols: usize,
}
impl Matrix {
fn new() -> Self {
Self {
cells: Vec::new(),
rows: 0,
cols: 0,
}
}
fn resize(&mut self, rows: usize, cols: usize) {
self.cells.resize(rows * cols, 0.);
self.rows = rows;
self.cols = cols;
}
fn get(&self, row: usize, col: usize) -> f64 {
if row >= self.rows {
panic!("row out of bounds")
}
if col >= self.cols {
panic!("col out of bounds")
}
self.cells[col * self.rows + row]
}
fn set(&mut self, row: usize, col: usize, value: f64) {
if row >= self.rows {
panic!("row out of bounds")
}
if col >= self.cols {
panic!("col out of bounds")
}
self.cells[col * self.rows + row] = value;
}
}
impl Debug for Matrix {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
writeln!(f)?;
for i in 0..self.rows {
for j in 0..self.cols {
write!(f, "{:5}", self.get(i, j))?;
}
writeln!(f)?;
}
Ok(())
}
}
#[derive(Debug)]
pub enum Hunk {
Insert { text: String },
Remove { len: usize },
Keep { len: usize },
}
pub struct StreamingDiff {
old: Vec<char>,
new: Vec<char>,
scores: Matrix,
old_text_ix: usize,
new_text_ix: usize,
equal_runs: HashMap<(usize, usize), u32>,
}
impl StreamingDiff {
const INSERTION_SCORE: f64 = -1.;
const DELETION_SCORE: f64 = -20.;
const EQUALITY_BASE: f64 = 1.8;
const MAX_EQUALITY_EXPONENT: i32 = 16;
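// Character-level dynamic programming over (old, new): insertions are cheap,
// deletions expensive, and runs of equal characters earn an exponentially
// growing (capped) reward, biasing the diff toward long contiguous matches.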
pub fn new(old: String) -> Self {
let old = old.chars().collect::<Vec<_>>();
let mut scores = Matrix::new();
scores.resize(old.len() + 1, 1);
for i in 0..=old.len() {
scores.set(i, 0, i as f64 * Self::DELETION_SCORE);
}
Self {
old,
new: Vec::new(),
scores,
old_text_ix: 0,
new_text_ix: 0,
equal_runs: Default::default(),
}
}
pub fn push_new(&mut self, text: &str) -> Vec<Hunk> {
self.new.extend(text.chars());
self.scores.resize(self.old.len() + 1, self.new.len() + 1);
for j in self.new_text_ix + 1..=self.new.len() {
self.scores.set(0, j, j as f64 * Self::INSERTION_SCORE);
for i in 1..=self.old.len() {
let insertion_score = self.scores.get(i, j - 1) + Self::INSERTION_SCORE;
let deletion_score = self.scores.get(i - 1, j) + Self::DELETION_SCORE;
let equality_score = if self.old[i - 1] == self.new[j - 1] {
let mut equal_run = self.equal_runs.get(&(i - 1, j - 1)).copied().unwrap_or(0);
equal_run += 1;
self.equal_runs.insert((i, j), equal_run);
let exponent = cmp::min(equal_run as i32 / 4, Self::MAX_EQUALITY_EXPONENT);
self.scores.get(i - 1, j - 1) + Self::EQUALITY_BASE.powi(exponent)
} else {
f64::NEG_INFINITY
};
let score = insertion_score.max(deletion_score).max(equality_score);
self.scores.set(i, j, score);
}
}
let mut max_score = f64::NEG_INFINITY;
let mut next_old_text_ix = self.old_text_ix;
let next_new_text_ix = self.new.len();
for i in self.old_text_ix..=self.old.len() {
let score = self.scores.get(i, next_new_text_ix);
if score > max_score {
max_score = score;
next_old_text_ix = i;
}
}
let hunks = self.backtrack(next_old_text_ix, next_new_text_ix);
self.old_text_ix = next_old_text_ix;
self.new_text_ix = next_new_text_ix;
hunks
}
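// Recover the hunk sequence by walking the score matrix backwards from the
// given indices, following the best-scoring predecessor at each step and
// coalescing consecutive moves into Insert, Remove, and Keep hunks.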
fn backtrack(&self, old_text_ix: usize, new_text_ix: usize) -> Vec<Hunk> {
let mut pending_insert: Option<Range<usize>> = None;
let mut hunks = Vec::new();
let mut i = old_text_ix;
let mut j = new_text_ix;
while (i, j) != (self.old_text_ix, self.new_text_ix) {
let insertion_score = if j > self.new_text_ix {
Some((i, j - 1))
} else {
None
};
let deletion_score = if i > self.old_text_ix {
Some((i - 1, j))
} else {
None
};
let equality_score = if i > self.old_text_ix && j > self.new_text_ix {
if self.old[i - 1] == self.new[j - 1] {
Some((i - 1, j - 1))
} else {
None
}
} else {
None
};
let (prev_i, prev_j) = [insertion_score, deletion_score, equality_score]
.iter()
.max_by_key(|cell| cell.map(|(i, j)| OrderedFloat(self.scores.get(i, j))))
.unwrap()
.unwrap();
if prev_i == i && prev_j == j - 1 {
if let Some(pending_insert) = pending_insert.as_mut() {
pending_insert.start = prev_j;
} else {
pending_insert = Some(prev_j..j);
}
} else {
if let Some(range) = pending_insert.take() {
hunks.push(Hunk::Insert {
text: self.new[range].iter().collect(),
});
}
let char_len = self.old[i - 1].len_utf8();
if prev_i == i - 1 && prev_j == j {
if let Some(Hunk::Remove { len }) = hunks.last_mut() {
*len += char_len;
} else {
hunks.push(Hunk::Remove { len: char_len })
}
} else {
if let Some(Hunk::Keep { len }) = hunks.last_mut() {
*len += char_len;
} else {
hunks.push(Hunk::Keep { len: char_len })
}
}
}
i = prev_i;
j = prev_j;
}
if let Some(range) = pending_insert.take() {
hunks.push(Hunk::Insert {
text: self.new[range].iter().collect(),
});
}
hunks.reverse();
hunks
}
pub fn finish(self) -> Vec<Hunk> {
self.backtrack(self.old.len(), self.new.len())
}
}
#[cfg(test)]
mod tests {
use std::env;
use super::*;
use rand::prelude::*;
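// Property test: stream random chunks through StreamingDiff, then replay the
// emitted hunks against the old text and assert the result equals the new text.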
#[gpui::test(iterations = 100)]
fn test_random_diffs(mut rng: StdRng) {
let old_text_len = env::var("OLD_TEXT_LEN")
.map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
.unwrap_or(10);
let new_text_len = env::var("NEW_TEXT_LEN")
.map(|i| i.parse().expect("invalid `NEW_TEXT_LEN` variable"))
.unwrap_or(10);
let old = util::RandomCharIter::new(&mut rng)
.take(old_text_len)
.collect::<String>();
log::info!("old text: {:?}", old);
let mut diff = StreamingDiff::new(old.clone());
let mut hunks = Vec::new();
let mut new_len = 0;
let mut new = String::new();
while new_len < new_text_len {
let new_chunk_len = rng.gen_range(1..=new_text_len - new_len);
let new_chunk = util::RandomCharIter::new(&mut rng)
.take(new_chunk_len)
.collect::<String>();
log::info!("new chunk: {:?}", new_chunk);
new_len += new_chunk_len;
new.push_str(&new_chunk);
let new_hunks = diff.push_new(&new_chunk);
log::info!("hunks: {:?}", new_hunks);
hunks.extend(new_hunks);
}
let final_hunks = diff.finish();
log::info!("final hunks: {:?}", final_hunks);
hunks.extend(final_hunks);
log::info!("new text: {:?}", new);
let mut old_ix = 0;
let mut new_ix = 0;
let mut patched = String::new();
for hunk in hunks {
match hunk {
Hunk::Keep { len } => {
assert_eq!(&old[old_ix..old_ix + len], &new[new_ix..new_ix + len]);
patched.push_str(&old[old_ix..old_ix + len]);
old_ix += len;
new_ix += len;
}
Hunk::Remove { len } => {
old_ix += len;
}
Hunk::Insert { text } => {
assert_eq!(text, &new[new_ix..new_ix + text.len()]);
patched.push_str(&text);
new_ix += text.len();
}
}
}
assert_eq!(patched, new);
}
}

View file

@ -13,11 +13,10 @@ gpui = { path = "../gpui" }
collections = { path = "../collections" }
util = { path = "../util" }
rodio = { version = "0.17.1", default-features = false, features = ["wav"] }
log.workspace = true
futures.workspace = true
anyhow.workspace = true
parking_lot.workspace = true
[dev-dependencies]

View file

@ -60,7 +60,7 @@ impl Audio {
return;
}
cx.update_global::<Self, _, _>(|this, cx| {
cx.update_global::<Self, _>(|this, cx| {
let output_handle = this.ensure_output_exists()?;
let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
output_handle.play_raw(source).log_err()?;
@ -73,7 +73,7 @@ impl Audio {
return;
}
cx.update_global::<Self, _, _>(|this, _| {
cx.update_global::<Self, _>(|this, _| {
this._output_stream.take();
this.output_handle.take();
});

View file

@ -1,24 +0,0 @@
[package]
name = "audio2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/audio2.rs"
doctest = false
[dependencies]
gpui = { package = "gpui2", path = "../gpui2" }
collections = { path = "../collections" }
util = { path = "../util" }
rodio = { version = "0.17.1", default-features = false, features = ["wav"] }
log.workspace = true
futures.workspace = true
anyhow.workspace = true
parking_lot.workspace = true
[dev-dependencies]

View file

@ -1,23 +0,0 @@
[package]
name = "audio"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/audio.rs"
doctest = false
[dependencies]
gpui = { path = "../gpui" }
collections = { path = "../collections" }
util = { path = "../util" }
rodio = { version = "0.17.1", default-features = false, features = ["wav"] }
log.workspace = true
anyhow.workspace = true
parking_lot.workspace = true
[dev-dependencies]

View file

@ -1,44 +0,0 @@
use std::{io::Cursor, sync::Arc};
use anyhow::Result;
use collections::HashMap;
use gpui::{AppContext, AssetSource};
use rodio::{
source::{Buffered, SamplesConverter},
Decoder, Source,
};
type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
pub struct SoundRegistry {
cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
assets: Box<dyn AssetSource>,
}
impl SoundRegistry {
pub fn new(source: impl AssetSource) -> Arc<Self> {
Arc::new(Self {
cache: Default::default(),
assets: Box::new(source),
})
}
pub fn global(cx: &AppContext) -> Arc<Self> {
cx.global::<Arc<Self>>().clone()
}
pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
if let Some(wav) = self.cache.lock().get(name) {
return Ok(wav.clone());
}
let path = format!("sounds/{}.wav", name);
let bytes = self.assets.load(&path)?.into_owned();
let cursor = Cursor::new(bytes);
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
self.cache.lock().insert(name.to_string(), source.clone());
Ok(source)
}
}

View file

@ -1,81 +0,0 @@
use assets::SoundRegistry;
use gpui::{AppContext, AssetSource};
use rodio::{OutputStream, OutputStreamHandle};
use util::ResultExt;
mod assets;
pub fn init(source: impl AssetSource, cx: &mut AppContext) {
cx.set_global(SoundRegistry::new(source));
cx.set_global(Audio::new());
}
pub enum Sound {
Joined,
Leave,
Mute,
Unmute,
StartScreenshare,
StopScreenshare,
}
impl Sound {
fn file(&self) -> &'static str {
match self {
Self::Joined => "joined_call",
Self::Leave => "leave_call",
Self::Mute => "mute",
Self::Unmute => "unmute",
Self::StartScreenshare => "start_screenshare",
Self::StopScreenshare => "stop_screenshare",
}
}
}
pub struct Audio {
_output_stream: Option<OutputStream>,
output_handle: Option<OutputStreamHandle>,
}
impl Audio {
pub fn new() -> Self {
Self {
_output_stream: None,
output_handle: None,
}
}
fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
if self.output_handle.is_none() {
let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
self.output_handle = output_handle;
self._output_stream = _output_stream;
}
self.output_handle.as_ref()
}
pub fn play_sound(sound: Sound, cx: &mut AppContext) {
if !cx.has_global::<Self>() {
return;
}
cx.update_global::<Self, _, _>(|this, cx| {
let output_handle = this.ensure_output_exists()?;
let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
output_handle.play_raw(source).log_err()?;
Some(())
});
}
pub fn end_call(cx: &mut AppContext) {
if !cx.has_global::<Self>() {
return;
}
cx.update_global::<Self, _, _>(|this, _| {
this._output_stream.take();
this.output_handle.take();
});
}
}

View file

@ -1,44 +0,0 @@
use std::{io::Cursor, sync::Arc};
use anyhow::Result;
use collections::HashMap;
use gpui::{AppContext, AssetSource};
use rodio::{
source::{Buffered, SamplesConverter},
Decoder, Source,
};
type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
pub struct SoundRegistry {
cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
assets: Box<dyn AssetSource>,
}
impl SoundRegistry {
pub fn new(source: impl AssetSource) -> Arc<Self> {
Arc::new(Self {
cache: Default::default(),
assets: Box::new(source),
})
}
pub fn global(cx: &AppContext) -> Arc<Self> {
cx.global::<Arc<Self>>().clone()
}
pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
if let Some(wav) = self.cache.lock().get(name) {
return Ok(wav.clone());
}
let path = format!("sounds/{}.wav", name);
let bytes = self.assets.load(&path)?.into_owned();
let cursor = Cursor::new(bytes);
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
self.cache.lock().insert(name.to_string(), source.clone());
Ok(source)
}
}

View file

@ -1,81 +0,0 @@
use assets::SoundRegistry;
use gpui::{AppContext, AssetSource};
use rodio::{OutputStream, OutputStreamHandle};
use util::ResultExt;
mod assets;
pub fn init(source: impl AssetSource, cx: &mut AppContext) {
cx.set_global(SoundRegistry::new(source));
cx.set_global(Audio::new());
}
pub enum Sound {
Joined,
Leave,
Mute,
Unmute,
StartScreenshare,
StopScreenshare,
}
impl Sound {
fn file(&self) -> &'static str {
match self {
Self::Joined => "joined_call",
Self::Leave => "leave_call",
Self::Mute => "mute",
Self::Unmute => "unmute",
Self::StartScreenshare => "start_screenshare",
Self::StopScreenshare => "stop_screenshare",
}
}
}
pub struct Audio {
_output_stream: Option<OutputStream>,
output_handle: Option<OutputStreamHandle>,
}
impl Audio {
pub fn new() -> Self {
Self {
_output_stream: None,
output_handle: None,
}
}
fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
if self.output_handle.is_none() {
let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
self.output_handle = output_handle;
self._output_stream = _output_stream;
}
self.output_handle.as_ref()
}
pub fn play_sound(sound: Sound, cx: &mut AppContext) {
if !cx.has_global::<Self>() {
return;
}
cx.update_global::<Self, _>(|this, cx| {
let output_handle = this.ensure_output_exists()?;
let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
output_handle.play_raw(source).log_err()?;
Some(())
});
}
pub fn end_call(cx: &mut AppContext) {
if !cx.has_global::<Self>() {
return;
}
cx.update_global::<Self, _>(|this, _| {
this._output_stream.take();
this.output_handle.take();
});
}
}

View file

@ -22,6 +22,7 @@ anyhow.workspace = true
isahc.workspace = true
lazy_static.workspace = true
log.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true

View file

@ -3,18 +3,24 @@ mod update_notification;
use anyhow::{anyhow, Context, Result};
use client::{Client, TelemetrySettings, ZED_APP_PATH, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use db::RELEASE_CHANNEL;
use gpui::{
actions, platform::AppVersion, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
Task, WeakViewHandle,
actions, AppContext, AsyncAppContext, Context as _, Model, ModelContext, SemanticVersion, Task,
ViewContext, VisualContext, WindowContext,
};
use isahc::AsyncBody;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
use settings::{Setting, SettingsStore};
use smol::{fs::File, io::AsyncReadExt, process::Command};
use smol::io::AsyncReadExt;
use settings::{Settings, SettingsStore};
use smol::{fs::File, process::Command};
use std::{ffi::OsString, sync::Arc, time::Duration};
use update_notification::UpdateNotification;
use util::channel::ReleaseChannel;
use util::channel::{AppCommitSha, ReleaseChannel};
use util::http::HttpClient;
use workspace::Workspace;
@ -42,9 +48,9 @@ pub enum AutoUpdateStatus {
pub struct AutoUpdater {
status: AutoUpdateStatus,
current_version: AppVersion,
current_version: SemanticVersion,
http_client: Arc<dyn HttpClient>,
pending_poll: Option<Task<()>>,
pending_poll: Option<Task<Option<()>>>,
server_url: String,
}
@ -54,41 +60,59 @@ struct JsonRelease {
url: String,
}
impl Entity for AutoUpdater {
type Event = ();
}
struct AutoUpdateSetting(bool);
impl Setting for AutoUpdateSetting {
/// Whether or not to automatically check for updates.
///
/// Default: true
#[derive(Clone, Default, JsonSchema, Deserialize, Serialize)]
#[serde(transparent)]
struct AutoUpdateSettingOverride(Option<bool>);
impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");
type FileContent = Option<bool>;
type FileContent = AutoUpdateSettingOverride;
fn load(
default_value: &Option<bool>,
user_values: &[&Option<bool>],
_: &AppContext,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self(
Self::json_merge(default_value, user_values)?.ok_or_else(Self::missing_default)?,
Self::json_merge(default_value, user_values)?
.0
.ok_or_else(Self::missing_default)?,
))
}
}
pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut AppContext) {
settings::register::<AutoUpdateSetting>(cx);
AutoUpdateSetting::register(cx);
if let Some(version) = (*ZED_APP_VERSION).or_else(|| cx.platform().app_version().ok()) {
let auto_updater = cx.add_model(|cx| {
cx.observe_new_views(|workspace: &mut Workspace, _cx| {
workspace.register_action(|_, action: &Check, cx| check(action, cx));
workspace.register_action(|_, action, cx| view_release_notes(action, cx));
// @nate - code to trigger update notification on launch
// todo!("remove this when Nate is done")
// workspace.show_notification(0, _cx, |cx| {
// cx.build_view(|_| UpdateNotification::new(SemanticVersion::from_str("1.1.1").unwrap()))
// });
})
.detach();
if let Some(version) = ZED_APP_VERSION.or_else(|| cx.app_metadata().app_version) {
let auto_updater = cx.new_model(|cx| {
let updater = AutoUpdater::new(version, http_client, server_url);
let mut update_subscription = settings::get::<AutoUpdateSetting>(cx)
let mut update_subscription = AutoUpdateSetting::get_global(cx)
.0
.then(|| updater.start_polling(cx));
cx.observe_global::<SettingsStore, _>(move |updater, cx| {
if settings::get::<AutoUpdateSetting>(cx).0 {
cx.observe_global::<SettingsStore>(move |updater, cx| {
if AutoUpdateSetting::get_global(cx).0 {
if update_subscription.is_none() {
update_subscription = Some(updater.start_polling(cx))
}
@ -101,19 +125,22 @@ pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut AppCo
updater
});
cx.set_global(Some(auto_updater));
cx.add_global_action(check);
cx.add_global_action(view_release_notes);
cx.add_action(UpdateNotification::dismiss);
}
}
pub fn check(_: &Check, cx: &mut AppContext) {
pub fn check(_: &Check, cx: &mut WindowContext) {
if let Some(updater) = AutoUpdater::get(cx) {
updater.update(cx, |updater, cx| updater.poll(cx));
} else {
drop(cx.prompt(
gpui::PromptLevel::Info,
"Auto-updates disabled for non-bundled app.",
&["Ok"],
));
}
}
fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
if let Some(auto_updater) = AutoUpdater::get(cx) {
let auto_updater = auto_updater.read(cx);
let server_url = &auto_updater.server_url;
@ -122,31 +149,28 @@ fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
match cx.global::<ReleaseChannel>() {
ReleaseChannel::Dev => {}
ReleaseChannel::Nightly => {}
ReleaseChannel::Preview => cx
.platform()
.open_url(&format!("{server_url}/releases/preview/{current_version}")),
ReleaseChannel::Stable => cx
.platform()
.open_url(&format!("{server_url}/releases/stable/{current_version}")),
ReleaseChannel::Preview => {
cx.open_url(&format!("{server_url}/releases/preview/{current_version}"))
}
ReleaseChannel::Stable => {
cx.open_url(&format!("{server_url}/releases/stable/{current_version}"))
}
}
}
}
}
pub fn notify_of_any_new_update(
workspace: WeakViewHandle<Workspace>,
cx: &mut AppContext,
) -> Option<()> {
pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
let updater = AutoUpdater::get(cx)?;
let version = updater.read(cx).current_version;
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(|mut cx| async move {
cx.spawn(|workspace, mut cx| async move {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
workspace.update(&mut cx, |workspace, cx| {
workspace.show_notification(0, cx, |cx| {
cx.add_view(|_| UpdateNotification::new(version))
cx.new_view(|_| UpdateNotification::new(version))
});
updater
.read(cx)
@@ -162,12 +186,12 @@ pub fn notify_of_any_new_update(
}
impl AutoUpdater {
pub fn get(cx: &mut AppContext) -> Option<ModelHandle<Self>> {
cx.default_global::<Option<ModelHandle<Self>>>().clone()
pub fn get(cx: &mut AppContext) -> Option<Model<Self>> {
cx.default_global::<Option<Model<Self>>>().clone()
}
fn new(
current_version: AppVersion,
current_version: SemanticVersion,
http_client: Arc<dyn HttpClient>,
server_url: String,
) -> Self {
@@ -180,11 +204,11 @@ impl AutoUpdater {
}
}
pub fn start_polling(&self, cx: &mut ModelContext<Self>) -> Task<()> {
pub fn start_polling(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move {
loop {
this.update(&mut cx, |this, cx| this.poll(cx));
cx.background().timer(POLL_INTERVAL).await;
this.update(&mut cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
}
})
}
@@ -198,7 +222,7 @@ impl AutoUpdater {
cx.notify();
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
let result = Self::update(this.clone(), cx.clone()).await;
let result = Self::update(this.upgrade()?, cx.clone()).await;
this.update(&mut cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
@@ -206,7 +230,8 @@ impl AutoUpdater {
this.status = AutoUpdateStatus::Errored;
cx.notify();
}
});
})
.ok()
}));
}
@@ -219,26 +244,26 @@ impl AutoUpdater {
cx.notify();
}
async fn update(this: ModelHandle<Self>, mut cx: AsyncAppContext) -> Result<()> {
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
let (client, server_url, current_version) = this.read_with(&cx, |this, _| {
(
this.http_client.clone(),
this.server_url.clone(),
this.current_version,
)
});
})?;
let mut url_string = format!(
"{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg"
);
cx.read(|cx| {
cx.update(|cx| {
if cx.has_global::<ReleaseChannel>() {
if let Some(param) = cx.global::<ReleaseChannel>().release_query_param() {
url_string += "&";
url_string += param;
}
}
});
})?;
let mut response = client.get(&url_string, Default::default(), true).await?;
@@ -251,26 +276,32 @@ impl AutoUpdater {
let release: JsonRelease =
serde_json::from_slice(body.as_slice()).context("error deserializing release")?;
let latest_version = release.version.parse::<AppVersion>()?;
if latest_version <= current_version {
let should_download = match *RELEASE_CHANNEL {
ReleaseChannel::Nightly => cx
.try_read_global::<AppCommitSha, _>(|sha, _| release.version != sha.0)
.unwrap_or(true),
_ => release.version.parse::<SemanticVersion>()? > current_version,
};
if !should_download {
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Idle;
cx.notify();
});
})?;
return Ok(());
}
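The match above is the behavioral change in this hunk: on the nightly channel the published version string is a commit SHA, so any SHA other than the running one (read from the AppCommitSha global) triggers a download, while the other channels parse it as a semantic version and download only strictly newer releases. Restated as a standalone function under those assumptions (the function itself is illustrative, not part of the diff):

use anyhow::Result;
use gpui::SemanticVersion;
use util::channel::ReleaseChannel;

fn should_download(
    channel: ReleaseChannel,
    release_version: &str,     // commit SHA on nightly, semver elsewhere
    running_sha: Option<&str>, // the AppCommitSha global, when set
    current_version: SemanticVersion,
) -> Result<bool> {
    Ok(match channel {
        // Any differing SHA counts as a new nightly; a missing SHA forces a download.
        ReleaseChannel::Nightly => running_sha.map_or(true, |sha| release_version != sha),
        // Stable/preview only download strictly newer semantic versions.
        _ => release_version.parse::<SemanticVersion>()? > current_version,
    })
}
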
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Downloading;
cx.notify();
});
})?;
let temp_dir = tempdir::TempDir::new("zed-auto-update")?;
let dmg_path = temp_dir.path().join("Zed.dmg");
let mount_path = temp_dir.path().join("Zed");
let running_app_path = ZED_APP_PATH
.clone()
.map_or_else(|| cx.platform().app_path(), Ok)?;
.map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
let running_app_filename = running_app_path
.file_name()
.ok_or_else(|| anyhow!("invalid running app path"))?;
@@ -279,15 +310,15 @@ impl AutoUpdater {
let mut dmg_file = File::create(&dmg_path).await?;
let (installation_id, release_channel, telemetry) = cx.read(|cx| {
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
let installation_id = cx.global::<Arc<Client>>().telemetry().installation_id();
let release_channel = cx
.has_global::<ReleaseChannel>()
.then(|| cx.global::<ReleaseChannel>().display_name());
let telemetry = settings::get::<TelemetrySettings>(cx).metrics;
let telemetry = TelemetrySettings::get_global(cx).metrics;
(installation_id, release_channel, telemetry)
});
})?;
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
installation_id,
@@ -302,7 +333,7 @@ impl AutoUpdater {
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Installing;
cx.notify();
});
})?;
let output = Command::new("hdiutil")
.args(&["attach", "-nobrowse"])
@@ -348,7 +379,7 @@ impl AutoUpdater {
.detach_and_log_err(cx);
this.status = AutoUpdateStatus::Updated;
cx.notify();
});
})?;
Ok(())
}
@@ -357,7 +388,7 @@ impl AutoUpdater {
should_show: bool,
cx: &AppContext,
) -> Task<Result<()>> {
cx.background().spawn(async move {
cx.background_executor().spawn(async move {
if should_show {
KEY_VALUE_STORE
.write_kvp(
@@ -375,7 +406,7 @@ impl AutoUpdater {
}
fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
cx.background().spawn(async move {
cx.background_executor().spawn(async move {
Ok(KEY_VALUE_STORE
.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
.is_some())

View file

@@ -1,106 +1,56 @@
use crate::ViewReleaseNotes;
use gpui::{
elements::{Flex, MouseEventHandler, Padding, ParentElement, Svg, Text},
platform::{AppVersion, CursorStyle, MouseButton},
Element, Entity, View, ViewContext,
div, DismissEvent, EventEmitter, InteractiveElement, IntoElement, ParentElement, Render,
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext,
};
use menu::Cancel;
use util::channel::ReleaseChannel;
use workspace::notifications::Notification;
use workspace::ui::{h_stack, v_stack, Icon, IconName, Label, StyledExt};
pub struct UpdateNotification {
version: AppVersion,
version: SemanticVersion,
}
pub enum Event {
Dismiss,
}
impl Entity for UpdateNotification {
type Event = Event;
}
impl View for UpdateNotification {
fn ui_name() -> &'static str {
"UpdateNotification"
}
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
let theme = theme::current(cx).clone();
let theme = &theme.update_notification;
impl EventEmitter<DismissEvent> for UpdateNotification {}
impl Render for UpdateNotification {
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> impl IntoElement {
let app_name = cx.global::<ReleaseChannel>().display_name();
MouseEventHandler::new::<ViewReleaseNotes, _>(0, cx, |state, cx| {
Flex::column()
.with_child(
Flex::row()
.with_child(
Text::new(
format!("Updated to {app_name} {}", self.version),
theme.message.text.clone(),
)
.contained()
.with_style(theme.message.container)
.aligned()
.top()
.left()
.flex(1., true),
)
.with_child(
MouseEventHandler::new::<Cancel, _>(0, cx, |state, _| {
let style = theme.dismiss_button.style_for(state);
Svg::new("icons/x.svg")
.with_color(style.color)
.constrained()
.with_width(style.icon_width)
.aligned()
.contained()
.with_style(style.container)
.constrained()
.with_width(style.button_width)
.with_height(style.button_width)
})
.with_padding(Padding::uniform(5.))
.on_click(MouseButton::Left, move |_, this, cx| {
this.dismiss(&Default::default(), cx)
})
.aligned()
.constrained()
.with_height(cx.font_cache().line_height(theme.message.text.font_size))
.aligned()
.top()
.flex_float(),
),
)
.with_child({
let style = theme.action_message.style_for(state);
Text::new("View the release notes", style.text.clone())
.contained()
.with_style(style.container)
})
.contained()
})
.with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, |_, _, cx| {
crate::view_release_notes(&Default::default(), cx)
})
.into_any_named("update notification")
}
}
impl Notification for UpdateNotification {
fn should_dismiss_notification_on_event(&self, event: &<Self as Entity>::Event) -> bool {
matches!(event, Event::Dismiss)
v_stack()
.on_action(cx.listener(UpdateNotification::dismiss))
.elevation_3(cx)
.p_4()
.child(
h_stack()
.justify_between()
.child(Label::new(format!(
"Updated to {app_name} {}",
self.version
)))
.child(
div()
.id("cancel")
.child(Icon::new(IconName::Close))
.cursor_pointer()
.on_click(cx.listener(|this, _, cx| this.dismiss(&menu::Cancel, cx))),
),
)
.child(
div()
.id("notes")
.child(Label::new("View the release notes"))
.cursor_pointer()
.on_click(|_, cx| crate::view_release_notes(&Default::default(), cx)),
)
}
}
impl UpdateNotification {
pub fn new(version: AppVersion) -> Self {
pub fn new(version: SemanticVersion) -> Self {
Self { version }
}
pub fn dismiss(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
cx.emit(Event::Dismiss);
cx.emit(DismissEvent);
}
}

View file

@@ -1,29 +0,0 @@
[package]
name = "auto_update2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/auto_update.rs"
doctest = false
[dependencies]
db = { package = "db2", path = "../db2" }
client = { package = "client2", path = "../client2" }
gpui = { package = "gpui2", path = "../gpui2" }
menu = { package = "menu2", path = "../menu2" }
project = { package = "project2", path = "../project2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
workspace = { package = "workspace2", path = "../workspace2" }
util = { path = "../util" }
anyhow.workspace = true
isahc.workspace = true
lazy_static.workspace = true
log.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
smol.workspace = true
tempdir.workspace = true

View file

@@ -1,412 +0,0 @@
mod update_notification;
use anyhow::{anyhow, Context, Result};
use client::{Client, TelemetrySettings, ZED_APP_PATH, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN};
use db::kvp::KEY_VALUE_STORE;
use db::RELEASE_CHANNEL;
use gpui::{
actions, AppContext, AsyncAppContext, Context as _, Model, ModelContext, SemanticVersion, Task,
ViewContext, VisualContext,
};
use isahc::AsyncBody;
use serde::Deserialize;
use serde_derive::Serialize;
use smol::io::AsyncReadExt;
use settings::{Settings, SettingsStore};
use smol::{fs::File, process::Command};
use std::{ffi::OsString, sync::Arc, time::Duration};
use update_notification::UpdateNotification;
use util::channel::{AppCommitSha, ReleaseChannel};
use util::http::HttpClient;
use workspace::Workspace;
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
const POLL_INTERVAL: Duration = Duration::from_secs(60 * 60);
//todo!(remove CheckThatAutoUpdaterWorks)
actions!(
auto_update,
[
Check,
DismissErrorMessage,
ViewReleaseNotes,
CheckThatAutoUpdaterWorks
]
);
#[derive(Serialize)]
struct UpdateRequestBody {
installation_id: Option<Arc<str>>,
release_channel: Option<&'static str>,
telemetry: bool,
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum AutoUpdateStatus {
Idle,
Checking,
Downloading,
Installing,
Updated,
Errored,
}
pub struct AutoUpdater {
status: AutoUpdateStatus,
current_version: SemanticVersion,
http_client: Arc<dyn HttpClient>,
pending_poll: Option<Task<Option<()>>>,
server_url: String,
}
#[derive(Deserialize)]
struct JsonRelease {
version: String,
url: String,
}
struct AutoUpdateSetting(bool);
impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");
type FileContent = Option<bool>;
fn load(
default_value: &Option<bool>,
user_values: &[&Option<bool>],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self(
Self::json_merge(default_value, user_values)?.ok_or_else(Self::missing_default)?,
))
}
}
pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut AppContext) {
AutoUpdateSetting::register(cx);
cx.observe_new_views(|workspace: &mut Workspace, _cx| {
workspace.register_action(|_, action: &Check, cx| check(action, cx));
// @nate - code to trigger update notification on launch
// workspace.show_notification(0, _cx, |cx| {
// cx.build_view(|_| UpdateNotification::new(SemanticVersion::from_str("1.1.1").unwrap()))
// });
})
.detach();
if let Some(version) = ZED_APP_VERSION.or_else(|| cx.app_metadata().app_version) {
let auto_updater = cx.build_model(|cx| {
let updater = AutoUpdater::new(version, http_client, server_url);
let mut update_subscription = AutoUpdateSetting::get_global(cx)
.0
.then(|| updater.start_polling(cx));
cx.observe_global::<SettingsStore>(move |updater, cx| {
if AutoUpdateSetting::get_global(cx).0 {
if update_subscription.is_none() {
update_subscription = Some(updater.start_polling(cx))
}
} else {
update_subscription.take();
}
})
.detach();
updater
});
cx.set_global(Some(auto_updater));
//todo!(action)
// cx.add_global_action(view_release_notes);
// cx.add_action(UpdateNotification::dismiss);
}
}
pub fn check(_: &Check, cx: &mut ViewContext<Workspace>) {
if let Some(updater) = AutoUpdater::get(cx) {
updater.update(cx, |updater, cx| updater.poll(cx));
} else {
drop(cx.prompt(
gpui::PromptLevel::Info,
"Auto-updates disabled for non-bundled app.",
&["Ok"],
));
}
}
pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) {
if let Some(auto_updater) = AutoUpdater::get(cx) {
let auto_updater = auto_updater.read(cx);
let server_url = &auto_updater.server_url;
let current_version = auto_updater.current_version;
if cx.has_global::<ReleaseChannel>() {
match cx.global::<ReleaseChannel>() {
ReleaseChannel::Dev => {}
ReleaseChannel::Nightly => {}
ReleaseChannel::Preview => {
cx.open_url(&format!("{server_url}/releases/preview/{current_version}"))
}
ReleaseChannel::Stable => {
cx.open_url(&format!("{server_url}/releases/stable/{current_version}"))
}
}
}
}
}
pub fn notify_of_any_new_update(cx: &mut ViewContext<Workspace>) -> Option<()> {
let updater = AutoUpdater::get(cx)?;
let version = updater.read(cx).current_version;
let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(|workspace, mut cx| async move {
let should_show_notification = should_show_notification.await?;
if should_show_notification {
workspace.update(&mut cx, |workspace, cx| {
workspace.show_notification(0, cx, |cx| {
cx.build_view(|_| UpdateNotification::new(version))
});
updater
.read(cx)
.set_should_show_update_notification(false, cx)
.detach_and_log_err(cx);
})?;
}
anyhow::Ok(())
})
.detach();
None
}
impl AutoUpdater {
pub fn get(cx: &mut AppContext) -> Option<Model<Self>> {
cx.default_global::<Option<Model<Self>>>().clone()
}
fn new(
current_version: SemanticVersion,
http_client: Arc<dyn HttpClient>,
server_url: String,
) -> Self {
Self {
status: AutoUpdateStatus::Idle,
current_version,
http_client,
server_url,
pending_poll: None,
}
}
pub fn start_polling(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.spawn(|this, mut cx| async move {
loop {
this.update(&mut cx, |this, cx| this.poll(cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
}
})
}
pub fn poll(&mut self, cx: &mut ModelContext<Self>) {
if self.pending_poll.is_some() || self.status == AutoUpdateStatus::Updated {
return;
}
self.status = AutoUpdateStatus::Checking;
cx.notify();
self.pending_poll = Some(cx.spawn(|this, mut cx| async move {
let result = Self::update(this.upgrade()?, cx.clone()).await;
this.update(&mut cx, |this, cx| {
this.pending_poll = None;
if let Err(error) = result {
log::error!("auto-update failed: error:{:?}", error);
this.status = AutoUpdateStatus::Errored;
cx.notify();
}
})
.ok()
}));
}
pub fn status(&self) -> AutoUpdateStatus {
self.status
}
pub fn dismiss_error(&mut self, cx: &mut ModelContext<Self>) {
self.status = AutoUpdateStatus::Idle;
cx.notify();
}
async fn update(this: Model<Self>, mut cx: AsyncAppContext) -> Result<()> {
let (client, server_url, current_version) = this.read_with(&cx, |this, _| {
(
this.http_client.clone(),
this.server_url.clone(),
this.current_version,
)
})?;
let mut url_string = format!(
"{server_url}/api/releases/latest?token={ZED_SECRET_CLIENT_TOKEN}&asset=Zed.dmg"
);
cx.update(|cx| {
if cx.has_global::<ReleaseChannel>() {
if let Some(param) = cx.global::<ReleaseChannel>().release_query_param() {
url_string += "&";
url_string += param;
}
}
})?;
let mut response = client.get(&url_string, Default::default(), true).await?;
let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading release")?;
let release: JsonRelease =
serde_json::from_slice(body.as_slice()).context("error deserializing release")?;
let should_download = match *RELEASE_CHANNEL {
ReleaseChannel::Nightly => cx
.try_read_global::<AppCommitSha, _>(|sha, _| release.version != sha.0)
.unwrap_or(true),
_ => release.version.parse::<SemanticVersion>()? <= current_version,
};
if !should_download {
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Idle;
cx.notify();
})?;
return Ok(());
}
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Downloading;
cx.notify();
})?;
let temp_dir = tempdir::TempDir::new("zed-auto-update")?;
let dmg_path = temp_dir.path().join("Zed.dmg");
let mount_path = temp_dir.path().join("Zed");
let running_app_path = ZED_APP_PATH
.clone()
.map_or_else(|| cx.update(|cx| cx.app_path())?, Ok)?;
let running_app_filename = running_app_path
.file_name()
.ok_or_else(|| anyhow!("invalid running app path"))?;
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
mounted_app_path.push("/");
let mut dmg_file = File::create(&dmg_path).await?;
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
let installation_id = cx.global::<Arc<Client>>().telemetry().installation_id();
let release_channel = cx
.has_global::<ReleaseChannel>()
.then(|| cx.global::<ReleaseChannel>().display_name());
let telemetry = TelemetrySettings::get_global(cx).metrics;
(installation_id, release_channel, telemetry)
})?;
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
installation_id,
release_channel,
telemetry,
})?);
let mut response = client.get(&release.url, request_body, true).await?;
smol::io::copy(response.body_mut(), &mut dmg_file).await?;
log::info!("downloaded update. path:{:?}", dmg_path);
this.update(&mut cx, |this, cx| {
this.status = AutoUpdateStatus::Installing;
cx.notify();
})?;
let output = Command::new("hdiutil")
.args(&["attach", "-nobrowse"])
.arg(&dmg_path)
.arg("-mountroot")
.arg(&temp_dir.path())
.output()
.await?;
if !output.status.success() {
Err(anyhow!(
"failed to mount: {:?}",
String::from_utf8_lossy(&output.stderr)
))?;
}
let output = Command::new("rsync")
.args(&["-av", "--delete"])
.arg(&mounted_app_path)
.arg(&running_app_path)
.output()
.await?;
if !output.status.success() {
Err(anyhow!(
"failed to copy app: {:?}",
String::from_utf8_lossy(&output.stderr)
))?;
}
let output = Command::new("hdiutil")
.args(&["detach"])
.arg(&mount_path)
.output()
.await?;
if !output.status.success() {
Err(anyhow!(
"failed to unmount: {:?}",
String::from_utf8_lossy(&output.stderr)
))?;
}
this.update(&mut cx, |this, cx| {
this.set_should_show_update_notification(true, cx)
.detach_and_log_err(cx);
this.status = AutoUpdateStatus::Updated;
cx.notify();
})?;
Ok(())
}
fn set_should_show_update_notification(
&self,
should_show: bool,
cx: &AppContext,
) -> Task<Result<()>> {
cx.background_executor().spawn(async move {
if should_show {
KEY_VALUE_STORE
.write_kvp(
SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
"".to_string(),
)
.await?;
} else {
KEY_VALUE_STORE
.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
.await?;
}
Ok(())
})
}
fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
cx.background_executor().spawn(async move {
Ok(KEY_VALUE_STORE
.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
.is_some())
})
}
}

View file

@@ -1,56 +0,0 @@
use gpui::{
div, DismissEvent, Div, EventEmitter, InteractiveElement, ParentElement, Render,
SemanticVersion, StatefulInteractiveElement, Styled, ViewContext,
};
use util::channel::ReleaseChannel;
use workspace::ui::{h_stack, v_stack, Icon, IconElement, Label, StyledExt};
pub struct UpdateNotification {
version: SemanticVersion,
}
impl EventEmitter<DismissEvent> for UpdateNotification {}
impl Render for UpdateNotification {
type Element = Div;
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> Self::Element {
let app_name = cx.global::<ReleaseChannel>().display_name();
v_stack()
.elevation_3(cx)
.p_4()
.child(
h_stack()
.justify_between()
.child(Label::new(format!(
"Updated to {app_name} {}",
self.version
)))
.child(
div()
.id("cancel")
.child(IconElement::new(Icon::Close))
.cursor_pointer()
.on_click(cx.listener(|this, _, cx| this.dismiss(cx))),
),
)
.child(
div()
.id("notes")
.child(Label::new("View the release notes"))
.cursor_pointer()
.on_click(|_, cx| crate::view_release_notes(&Default::default(), cx)),
)
}
}
impl UpdateNotification {
pub fn new(version: SemanticVersion) -> Self {
Self { version }
}
pub fn dismiss(&mut self, cx: &mut ViewContext<Self>) {
cx.emit(DismissEvent);
}
}

View file

@@ -12,6 +12,7 @@ doctest = false
collections = { path = "../collections" }
editor = { path = "../editor" }
gpui = { path = "../gpui" }
ui = { path = "../ui" }
language = { path = "../language" }
project = { path = "../project" }
search = { path = "../search" }

View file

@@ -1,108 +1,77 @@
use editor::Editor;
use gpui::{
elements::*, platform::MouseButton, AppContext, Entity, Subscription, View, ViewContext,
ViewHandle, WeakViewHandle,
Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription,
ViewContext,
};
use itertools::Itertools;
use search::ProjectSearchView;
use theme::ActiveTheme;
use ui::{prelude::*, ButtonLike, ButtonStyle, Label, Tooltip};
use workspace::{
item::{ItemEvent, ItemHandle},
ToolbarItemLocation, ToolbarItemView, Workspace,
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
};
pub enum Event {
UpdateLocation,
}
pub struct Breadcrumbs {
pane_focused: bool,
active_item: Option<Box<dyn ItemHandle>>,
project_search: Option<ViewHandle<ProjectSearchView>>,
subscription: Option<Subscription>,
workspace: WeakViewHandle<Workspace>,
}
impl Breadcrumbs {
pub fn new(workspace: &Workspace) -> Self {
pub fn new() -> Self {
Self {
pane_focused: false,
active_item: Default::default(),
subscription: Default::default(),
project_search: Default::default(),
workspace: workspace.weak_handle(),
}
}
}
impl Entity for Breadcrumbs {
type Event = Event;
}
impl EventEmitter<ToolbarItemEvent> for Breadcrumbs {}
impl View for Breadcrumbs {
fn ui_name() -> &'static str {
"Breadcrumbs"
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let active_item = match &self.active_item {
Some(active_item) => active_item,
None => return Empty::new().into_any(),
impl Render for Breadcrumbs {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let element = h_stack().text_ui();
let Some(active_item) = self.active_item.as_ref() else {
return element;
};
let Some(segments) = active_item.breadcrumbs(cx.theme(), cx) else {
return element;
};
let not_editor = active_item.downcast::<editor::Editor>().is_none();
let theme = theme::current(cx).clone();
let style = &theme.workspace.toolbar.breadcrumbs;
let highlighted_segments = segments.into_iter().map(|segment| {
let mut text_style = cx.text_style();
text_style.color = Color::Muted.color(cx);
let breadcrumbs = match active_item.breadcrumbs(&theme, cx) {
Some(breadcrumbs) => breadcrumbs,
None => return Empty::new().into_any(),
}
.into_iter()
.map(|breadcrumb| {
Text::new(
breadcrumb.text,
theme.workspace.toolbar.breadcrumbs.default.text.clone(),
)
.with_highlights(breadcrumb.highlights.unwrap_or_default())
.into_any()
StyledText::new(segment.text)
.with_highlights(&text_style, segment.highlights.unwrap_or_default())
.into_any()
});
let breadcrumbs = Itertools::intersperse_with(highlighted_segments, || {
Label::new("›").color(Color::Muted).into_any_element()
});
let crumbs = Flex::row()
.with_children(Itertools::intersperse_with(breadcrumbs, || {
Label::new(" › ", style.default.text.clone()).into_any()
}))
.constrained()
.with_height(theme.workspace.toolbar.breadcrumb_height)
.contained();
if not_editor || !self.pane_focused {
return crumbs
.with_style(style.default.container)
.aligned()
.left()
.into_any();
let breadcrumbs_stack = h_stack().gap_1().children(breadcrumbs);
match active_item
.downcast::<Editor>()
.map(|editor| editor.downgrade())
{
Some(editor) => element.child(
ButtonLike::new("toggle outline view")
.child(breadcrumbs_stack)
.style(ButtonStyle::Subtle)
.on_click(move |_, cx| {
if let Some(editor) = editor.upgrade() {
outline::toggle(editor, &outline::Toggle, cx)
}
})
.tooltip(|cx| Tooltip::for_action("Show symbol outline", &outline::Toggle, cx)),
),
None => element
// Match the height of the `ButtonLike` in the other arm.
.h(rems(22. / 16.))
.child(breadcrumbs_stack),
}
MouseEventHandler::new::<Breadcrumbs, _>(0, cx, |state, _| {
let style = style.style_for(state);
crumbs.with_style(style.container)
})
.on_click(MouseButton::Left, |_, this, cx| {
if let Some(workspace) = this.workspace.upgrade(cx) {
workspace.update(cx, |workspace, cx| {
outline::toggle(workspace, &Default::default(), cx)
})
}
})
.with_tooltip::<Breadcrumbs>(
0,
"Show symbol outline".to_owned(),
Some(Box::new(outline::Toggle)),
theme.tooltip.clone(),
cx,
)
.aligned()
.left()
.into_any()
}
}
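The rebuilt Render impl above maps each breadcrumb segment to highlighted StyledText and intersperses a muted "›" Label between segments via the fully qualified Itertools::intersperse_with (likely qualified to sidestep the unstable std iterator method of the same name). The same interspersing pattern with plain strings, for reference (names here are illustrative):

use itertools::Itertools;

fn join_segments(segments: Vec<String>) -> String {
    // Insert a separator between adjacent segments, then flatten.
    Itertools::intersperse_with(segments.into_iter(), || "›".to_owned())
        .collect::<Vec<_>>()
        .join(" ")
}

fn main() {
    let crumbs = join_segments(vec!["src".into(), "main.rs".into(), "fn main".into()]);
    assert_eq!(crumbs, "src › main.rs › fn main");
}
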
@@ -114,19 +83,21 @@ impl ToolbarItemView for Breadcrumbs {
) -> ToolbarItemLocation {
cx.notify();
self.active_item = None;
self.project_search = None;
if let Some(item) = active_pane_item {
let this = cx.weak_handle();
let this = cx.view().downgrade();
self.subscription = Some(item.subscribe_to_item_events(
cx,
Box::new(move |event, cx| {
if let Some(this) = this.upgrade(cx) {
if let ItemEvent::UpdateBreadcrumbs = event {
this.update(cx, |_, cx| {
cx.emit(Event::UpdateLocation);
cx.notify();
});
}
if let ItemEvent::UpdateBreadcrumbs = event {
this.update(cx, |this, cx| {
cx.notify();
if let Some(active_item) = this.active_item.as_ref() {
cx.emit(ToolbarItemEvent::ChangeLocation(
active_item.breadcrumb_location(cx),
))
}
})
.ok();
}
}),
));
@@ -137,19 +108,6 @@ impl ToolbarItemView for Breadcrumbs {
}
}
fn location_for_event(
&self,
_: &Event,
current_location: ToolbarItemLocation,
cx: &AppContext,
) -> ToolbarItemLocation {
if let Some(active_item) = self.active_item.as_ref() {
active_item.breadcrumb_location(cx)
} else {
current_location
}
}
fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext<Self>) {
self.pane_focused = pane_focused;
}

View file

@@ -1,28 +0,0 @@
[package]
name = "breadcrumbs2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/breadcrumbs.rs"
doctest = false
[dependencies]
collections = { path = "../collections" }
editor = { package = "editor2", path = "../editor2" }
gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
language = { package = "language2", path = "../language2" }
project = { package = "project2", path = "../project2" }
search = { package = "search2", path = "../search2" }
settings = { package = "settings2", path = "../settings2" }
theme = { package = "theme2", path = "../theme2" }
workspace = { package = "workspace2", path = "../workspace2" }
outline = { package = "outline2", path = "../outline2" }
itertools = "0.10"
[dev-dependencies]
editor = { package = "editor2", path = "../editor2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] }

View file

@@ -1,127 +0,0 @@
use editor::Editor;
use gpui::{
Div, Element, EventEmitter, IntoElement, ParentElement, Render, StyledText, Subscription,
ViewContext,
};
use itertools::Itertools;
use theme::ActiveTheme;
use ui::{prelude::*, ButtonLike, ButtonStyle, Label, Tooltip};
use workspace::{
item::{ItemEvent, ItemHandle},
ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
};
pub enum Event {
UpdateLocation,
}
pub struct Breadcrumbs {
pane_focused: bool,
active_item: Option<Box<dyn ItemHandle>>,
subscription: Option<Subscription>,
}
impl Breadcrumbs {
pub fn new() -> Self {
Self {
pane_focused: false,
active_item: Default::default(),
subscription: Default::default(),
}
}
}
impl EventEmitter<Event> for Breadcrumbs {}
impl EventEmitter<ToolbarItemEvent> for Breadcrumbs {}
impl Render for Breadcrumbs {
type Element = Div;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
let element = h_stack().text_ui();
let Some(active_item) = &self
.active_item
.as_ref()
.filter(|item| item.downcast::<editor::Editor>().is_some())
else {
return element;
};
let Some(segments) = active_item.breadcrumbs(cx.theme(), cx) else {
return element;
};
let highlighted_segments = segments.into_iter().map(|segment| {
StyledText::new(segment.text)
.with_highlights(&cx.text_style(), segment.highlights.unwrap_or_default())
.into_any()
});
let breadcrumbs = Itertools::intersperse_with(highlighted_segments, || {
Label::new("›").into_any_element()
});
let editor = active_item
.downcast::<Editor>()
.map(|editor| editor.downgrade());
element.child(
ButtonLike::new("toggle outline view")
.style(ButtonStyle::Subtle)
.child(h_stack().gap_1().children(breadcrumbs))
.on_click(move |_, cx| {
if let Some(editor) = editor.as_ref().and_then(|editor| editor.upgrade()) {
outline::toggle(editor, &outline::Toggle, cx)
}
})
.tooltip(|cx| Tooltip::for_action("Show symbol outline", &outline::Toggle, cx)),
)
}
}
impl ToolbarItemView for Breadcrumbs {
fn set_active_pane_item(
&mut self,
active_pane_item: Option<&dyn ItemHandle>,
cx: &mut ViewContext<Self>,
) -> ToolbarItemLocation {
cx.notify();
self.active_item = None;
if let Some(item) = active_pane_item {
let this = cx.view().downgrade();
self.subscription = Some(item.subscribe_to_item_events(
cx,
Box::new(move |event, cx| {
if let ItemEvent::UpdateBreadcrumbs = event {
this.update(cx, |_, cx| {
cx.emit(Event::UpdateLocation);
cx.notify();
})
.ok();
}
}),
));
self.active_item = Some(item.boxed_clone());
item.breadcrumb_location(cx)
} else {
ToolbarItemLocation::Hidden
}
}
// fn location_for_event(
// &self,
// _: &Event,
// current_location: ToolbarItemLocation,
// cx: &AppContext,
// ) -> ToolbarItemLocation {
// if let Some(active_item) = self.active_item.as_ref() {
// active_item.breadcrumb_location(cx)
// } else {
// current_location
// }
// }
fn pane_focus_update(&mut self, pane_focused: bool, _: &mut ViewContext<Self>) {
self.pane_focused = pane_focused;
}
}

View file

@@ -35,11 +35,13 @@ util = { path = "../util" }
anyhow.workspace = true
async-broadcast = "0.4"
futures.workspace = true
image = "0.23"
postage.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_derive.workspace = true
smallvec.workspace = true
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }

View file

@@ -5,35 +5,29 @@ pub mod room;
use anyhow::{anyhow, Result};
use audio::Audio;
use call_settings::CallSettings;
use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
use client::{proto, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
use collections::HashSet;
use futures::{channel::oneshot, future::Shared, Future, FutureExt};
use gpui::{
AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
WeakModelHandle,
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
WeakModel,
};
use postage::watch;
use project::Project;
use room::Event;
use settings::Settings;
use std::sync::Arc;
pub use participant::ParticipantLocation;
pub use room::Room;
pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut AppContext) {
settings::register::<CallSettings>(cx);
pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
CallSettings::register(cx);
let active_call = cx.add_model(|cx| ActiveCall::new(client, user_store, cx));
let active_call = cx.new_model(|cx| ActiveCall::new(client, user_store, cx));
cx.set_global(active_call);
}
#[derive(Clone)]
pub struct IncomingCall {
pub room_id: u64,
pub calling_user: Arc<User>,
pub participants: Vec<Arc<User>>,
pub initial_project: Option<proto::ParticipantProject>,
}
pub struct OneAtATime {
cancel: Option<oneshot::Sender<()>>,
}
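The diff doesn't show OneAtATime's methods, but the cancel: Option<oneshot::Sender<()>> field suggests the usual supersede-by-drop pattern: each new job installs a fresh sender, and replacing (hence dropping) the previous sender resolves the previous job's receiver so that job can bail out. A sketch of that pattern under that assumption; this is not the implementation from the diff:

use futures::{channel::oneshot, FutureExt};

struct OneAtATime {
    cancel: Option<oneshot::Sender<()>>,
}

impl OneAtATime {
    /// Run `work`, resolving to None if a later call supersedes this one.
    fn spawn<T, F>(&mut self, work: F) -> impl std::future::Future<Output = Option<T>>
    where
        F: std::future::Future<Output = T>,
    {
        let (tx, rx) = oneshot::channel();
        // Dropping the previous sender wakes the previous receiver below.
        self.cancel = Some(tx);
        async move {
            let work = work.fuse();
            futures::pin_mut!(work);
            let mut rx = rx;
            futures::select_biased! {
                result = work => Some(result),
                _ = rx => None,
            }
        }
    }
}
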
@@ -65,43 +59,44 @@ impl OneAtATime {
}
}
#[derive(Clone)]
pub struct IncomingCall {
pub room_id: u64,
pub calling_user: Arc<User>,
pub participants: Vec<Arc<User>>,
pub initial_project: Option<proto::ParticipantProject>,
}
/// Singleton global maintaining the user's participation in a room across workspaces.
pub struct ActiveCall {
room: Option<(ModelHandle<Room>, Vec<Subscription>)>,
pending_room_creation: Option<Shared<Task<Result<ModelHandle<Room>, Arc<anyhow::Error>>>>>,
room: Option<(Model<Room>, Vec<Subscription>)>,
pending_room_creation: Option<Shared<Task<Result<Model<Room>, Arc<anyhow::Error>>>>>,
location: Option<WeakModel<Project>>,
_join_debouncer: OneAtATime,
location: Option<WeakModelHandle<Project>>,
pending_invites: HashSet<u64>,
incoming_call: (
watch::Sender<Option<IncomingCall>>,
watch::Receiver<Option<IncomingCall>>,
),
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
user_store: Model<UserStore>,
_subscriptions: Vec<client::Subscription>,
}
impl Entity for ActiveCall {
type Event = room::Event;
}
impl EventEmitter<Event> for ActiveCall {}
impl ActiveCall {
fn new(
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
cx: &mut ModelContext<Self>,
) -> Self {
fn new(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut ModelContext<Self>) -> Self {
Self {
room: None,
pending_room_creation: None,
location: None,
pending_invites: Default::default(),
incoming_call: watch::channel(),
_join_debouncer: OneAtATime { cancel: None },
_subscriptions: vec![
client.add_request_handler(cx.handle(), Self::handle_incoming_call),
client.add_message_handler(cx.handle(), Self::handle_call_canceled),
client.add_request_handler(cx.weak_model(), Self::handle_incoming_call),
client.add_message_handler(cx.weak_model(), Self::handle_call_canceled),
],
client,
user_store,
@@ -113,35 +108,35 @@ impl ActiveCall {
}
async fn handle_incoming_call(
this: ModelHandle<Self>,
this: Model<Self>,
envelope: TypedEnvelope<proto::IncomingCall>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<proto::Ack> {
let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
let call = IncomingCall {
room_id: envelope.payload.room_id,
participants: user_store
.update(&mut cx, |user_store, cx| {
user_store.get_users(envelope.payload.participant_user_ids, cx)
})
})?
.await?,
calling_user: user_store
.update(&mut cx, |user_store, cx| {
user_store.get_user(envelope.payload.calling_user_id, cx)
})
})?
.await?,
initial_project: envelope.payload.initial_project,
};
this.update(&mut cx, |this, _| {
*this.incoming_call.0.borrow_mut() = Some(call);
});
})?;
Ok(proto::Ack {})
}
async fn handle_call_canceled(
this: ModelHandle<Self>,
this: Model<Self>,
envelope: TypedEnvelope<proto::CallCanceled>,
_: Arc<Client>,
mut cx: AsyncAppContext,
@@ -154,18 +149,18 @@ impl ActiveCall {
{
incoming_call.take();
}
});
})?;
Ok(())
}
pub fn global(cx: &AppContext) -> ModelHandle<Self> {
cx.global::<ModelHandle<Self>>().clone()
pub fn global(cx: &AppContext) -> Model<Self> {
cx.global::<Model<Self>>().clone()
}
pub fn invite(
&mut self,
called_user_id: u64,
initial_project: Option<ModelHandle<Project>>,
initial_project: Option<Model<Project>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
if !self.pending_invites.insert(called_user_id) {
@@ -184,21 +179,21 @@ impl ActiveCall {
};
let invite = if let Some(room) = room {
cx.spawn_weak(|_, mut cx| async move {
cx.spawn(move |_, mut cx| async move {
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
let initial_project_id = if let Some(initial_project) = initial_project {
Some(
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))
room.update(&mut cx, |room, cx| room.share_project(initial_project, cx))?
.await?,
)
} else {
None
};
room.update(&mut cx, |room, cx| {
room.update(&mut cx, move |room, cx| {
room.call(called_user_id, initial_project_id, cx)
})
})?
.await?;
anyhow::Ok(())
@@ -207,7 +202,7 @@ impl ActiveCall {
let client = self.client.clone();
let user_store = self.user_store.clone();
let room = cx
.spawn(|this, mut cx| async move {
.spawn(move |this, mut cx| async move {
let create_room = async {
let room = cx
.update(|cx| {
@@ -218,31 +213,31 @@ impl ActiveCall {
user_store,
cx,
)
})
})?
.await?;
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))
this.update(&mut cx, |this, cx| this.set_room(Some(room.clone()), cx))?
.await?;
anyhow::Ok(room)
};
let room = create_room.await;
this.update(&mut cx, |this, _| this.pending_room_creation = None);
this.update(&mut cx, |this, _| this.pending_room_creation = None)?;
room.map_err(Arc::new)
})
.shared();
self.pending_room_creation = Some(room.clone());
cx.foreground().spawn(async move {
cx.background_executor().spawn(async move {
room.await.map_err(|err| anyhow!("{:?}", err))?;
anyhow::Ok(())
})
};
cx.spawn(|this, mut cx| async move {
cx.spawn(move |this, mut cx| async move {
let result = invite.await;
if result.is_ok() {
this.update(&mut cx, |this, cx| this.report_call_event("invite", cx));
this.update(&mut cx, |this, cx| this.report_call_event("invite", cx))?;
} else {
// TODO: Report collaboration error
}
@@ -250,7 +245,7 @@ impl ActiveCall {
this.update(&mut cx, |this, cx| {
this.pending_invites.remove(&called_user_id);
cx.notify();
});
})?;
result
})
}
@@ -267,7 +262,7 @@ impl ActiveCall {
};
let client = self.client.clone();
cx.foreground().spawn(async move {
cx.background_executor().spawn(async move {
client
.request(proto::CancelCall {
room_id,
@@ -306,23 +301,23 @@ impl ActiveCall {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.report_call_event("accept incoming", cx)
});
})?;
Ok(())
})
}
pub fn decline_incoming(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
pub fn decline_incoming(&mut self, _: &mut ModelContext<Self>) -> Result<()> {
let call = self
.incoming_call
.0
.borrow_mut()
.take()
.ok_or_else(|| anyhow!("no incoming call"))?;
report_call_event_for_room("decline incoming", call.room_id, None, &self.client, cx);
report_call_event_for_room("decline incoming", call.room_id, None, &self.client);
self.client.send(proto::DeclineCall {
room_id: call.room_id,
})?;
@@ -333,7 +328,7 @@ impl ActiveCall {
&mut self,
channel_id: u64,
cx: &mut ModelContext<Self>,
) -> Task<Result<Option<ModelHandle<Room>>>> {
) -> Task<Result<Option<Model<Room>>>> {
if let Some(room) = self.room().cloned() {
if room.read(cx).channel_id() == Some(channel_id) {
return Task::ready(Ok(Some(room)));
@@ -352,13 +347,13 @@ impl ActiveCall {
Room::join_channel(channel_id, client, user_store, cx).await
});
cx.spawn(move |this, mut cx| async move {
cx.spawn(|this, mut cx| async move {
let room = join.await?;
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))
this.update(&mut cx, |this, cx| this.set_room(room.clone(), cx))?
.await?;
this.update(&mut cx, |this, cx| {
this.report_call_event("join channel", cx)
});
})?;
Ok(room)
})
}
@@ -366,6 +361,7 @@ impl ActiveCall {
pub fn hang_up(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.notify();
self.report_call_event("hang up", cx);
Audio::end_call(cx);
if let Some((room, _)) = self.room.take() {
room.update(cx, |room, cx| room.leave(cx))
@@ -376,7 +372,7 @@ impl ActiveCall {
pub fn share_project(
&mut self,
project: ModelHandle<Project>,
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Task<Result<u64>> {
if let Some((room, _)) = self.room.as_ref() {
@@ -389,7 +385,7 @@ impl ActiveCall {
pub fn unshare_project(
&mut self,
project: ModelHandle<Project>,
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Result<()> {
if let Some((room, _)) = self.room.as_ref() {
@@ -400,13 +396,13 @@ impl ActiveCall {
}
}
pub fn location(&self) -> Option<&WeakModelHandle<Project>> {
pub fn location(&self) -> Option<&WeakModel<Project>> {
self.location.as_ref()
}
pub fn set_location(
&mut self,
project: Option<&ModelHandle<Project>>,
project: Option<&Model<Project>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
if project.is_some() || !*ZED_ALWAYS_ACTIVE {
@@ -420,7 +416,7 @@ impl ActiveCall {
fn set_room(
&mut self,
room: Option<ModelHandle<Room>>,
room: Option<Model<Room>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
if room.as_ref() != self.room.as_ref().map(|room| &room.0) {
@@ -441,7 +437,10 @@ impl ActiveCall {
cx.subscribe(&room, |_, _, event, cx| cx.emit(event.clone())),
];
self.room = Some((room.clone(), subscriptions));
let location = self.location.and_then(|location| location.upgrade(cx));
let location = self
.location
.as_ref()
.and_then(|location| location.upgrade());
room.update(cx, |room, cx| room.set_location(location.as_ref(), cx))
}
} else {
@@ -453,7 +452,7 @@ impl ActiveCall {
}
}
pub fn room(&self) -> Option<&ModelHandle<Room>> {
pub fn room(&self) -> Option<&Model<Room>> {
self.room.as_ref().map(|(room, _)| room)
}
@@ -465,10 +464,10 @@ impl ActiveCall {
&self.pending_invites
}
pub fn report_call_event(&self, operation: &'static str, cx: &AppContext) {
pub fn report_call_event(&self, operation: &'static str, cx: &mut AppContext) {
if let Some(room) = self.room() {
let room = room.read(cx);
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client, cx);
report_call_event_for_room(operation, room.id(), room.channel_id(), &self.client);
}
}
}
@@ -478,12 +477,10 @@ pub fn report_call_event_for_room(
room_id: u64,
channel_id: Option<u64>,
client: &Arc<Client>,
cx: &AppContext,
) {
let telemetry = client.telemetry();
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
telemetry.report_call_event(telemetry_settings, operation, Some(room_id), channel_id)
telemetry.report_call_event(operation, Some(room_id), channel_id)
}
pub fn report_call_event_for_channel(
@@ -495,14 +492,8 @@ pub fn report_call_event_for_channel(
let room = ActiveCall::global(cx).read(cx).room();
let telemetry = client.telemetry();
let telemetry_settings = *settings::get::<TelemetrySettings>(cx);
telemetry.report_call_event(
telemetry_settings,
operation,
room.map(|r| r.read(cx).id()),
Some(channel_id),
)
telemetry.report_call_event(operation, room.map(|r| r.read(cx).id()), Some(channel_id))
}
#[cfg(test)]

Some files were not shown because too many files have changed in this diff.