Merge remote-tracking branch 'origin/main' into assistant-2

commit 7a78e64831

68 changed files with 781 additions and 597 deletions
.github/workflows/ci.yml (vendored, 1 change)

@@ -93,7 +93,6 @@ jobs:
 MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
 APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
 APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
-ZED_MIXPANEL_TOKEN: ${{ secrets.ZED_MIXPANEL_TOKEN }}
 steps:
 - name: Install Rust
 run: |
.github/workflows/release_actions.yml (vendored, 16 changes)

@@ -21,19 +21,3 @@ jobs:

 ${{ github.event.release.body }}
 ```
-mixpanel_release:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v3
-- uses: actions/setup-python@v4
-with:
-python-version: "3.10.5"
-architecture: "x64"
-cache: "pip"
-- run: pip install -r script/mixpanel_release/requirements.txt
-- run: >
-python script/mixpanel_release/main.py
-${{ github.event.release.tag_name }}
-${{ secrets.MIXPANEL_PROJECT_ID }}
-${{ secrets.MIXPANEL_SERVICE_ACCOUNT_USERNAME }}
-${{ secrets.MIXPANEL_SERVICE_ACCOUNT_SECRET }}
Cargo.lock (generated, 113 changes)

@@ -190,15 +190,6 @@ dependencies = [
 "libc",
 ]

-[[package]]
-name = "ansi_term"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
-dependencies = [
-"winapi 0.3.9",
-]
-
 [[package]]
 name = "anyhow"
 version = "1.0.71"

@@ -414,7 +405,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -462,7 +453,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -505,7 +496,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -725,24 +716,24 @@ dependencies = [

 [[package]]
 name = "bindgen"
-version = "0.59.2"
+version = "0.65.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8"
+checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5"
 dependencies = [
 "bitflags",
 "cexpr",
 "clang-sys",
-"clap 2.34.0",
-"env_logger 0.9.3",
 "lazy_static",
 "lazycell",
 "log",
 "peeking_take_while",
+"prettyplease",
 "proc-macro2",
 "quote",
 "regex",
 "rustc-hash",
 "shlex",
+"syn 2.0.18",
 "which",
 ]

@@ -1112,21 +1103,6 @@ dependencies = [
 "libloading",
 ]

-[[package]]
-name = "clap"
-version = "2.34.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
-dependencies = [
-"ansi_term",
-"atty",
-"bitflags",
-"strsim 0.8.0",
-"textwrap 0.11.0",
-"unicode-width",
-"vec_map",
-]
-
 [[package]]
 name = "clap"
 version = "3.2.25"

@@ -1139,9 +1115,9 @@ dependencies = [
 "clap_lex",
 "indexmap",
 "once_cell",
-"strsim 0.10.0",
+"strsim",
 "termcolor",
-"textwrap 0.16.0",
+"textwrap",
 ]

 [[package]]

@@ -1171,7 +1147,7 @@ name = "cli"
 version = "0.1.0"
 dependencies = [
 "anyhow",
-"clap 3.2.25",
+"clap",
 "core-foundation",
 "core-services",
 "dirs 3.0.2",

@@ -1281,7 +1257,7 @@ dependencies = [
 "axum-extra",
 "base64 0.13.1",
 "call",
-"clap 3.2.25",
+"clap",
 "client",
 "collections",
 "ctor",

@@ -1824,7 +1800,7 @@ dependencies = [
 "proc-macro2",
 "quote",
 "scratch",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -1841,7 +1817,7 @@ checksum = "2345488264226bf682893e25de0769f3360aac9957980ec49361b083ddaa5bc5"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -2624,7 +2600,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -4385,7 +4361,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -4830,6 +4806,16 @@ dependencies = [
 "yansi",
 ]

+[[package]]
+name = "prettyplease"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b69d39aab54d069e7f2fe8cb970493e7834601ca2d8c65fd7bbd183578080d1"
+dependencies = [
+"proc-macro2",
+"syn 2.0.18",
+]
+
 [[package]]
 name = "proc-macro-crate"
 version = "0.1.5"

@@ -4865,9 +4851,9 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.56"
+version = "1.0.59"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
+checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
 dependencies = [
 "unicode-ident",
 ]

@@ -5144,9 +5130,9 @@ dependencies = [

 [[package]]
 name = "quote"
-version = "1.0.27"
+version = "1.0.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500"
+checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
 dependencies = [
 "proc-macro2",
 ]

@@ -6090,7 +6076,7 @@ checksum = "a2a0814352fd64b58489904a44ea8d90cb1a91dcb6b4f5ebabc32c8318e93cb6"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -6133,7 +6119,7 @@ checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -6620,12 +6606,6 @@ dependencies = [
 "unicode-normalization",
 ]

-[[package]]
-name = "strsim"
-version = "0.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
-
 [[package]]
 name = "strsim"
 version = "0.10.0"

@@ -6697,9 +6677,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.15"
+version = "2.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
 dependencies = [
 "proc-macro2",
 "quote",

@@ -6885,15 +6865,6 @@ dependencies = [
 "util",
 ]

-[[package]]
-name = "textwrap"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
-dependencies = [
-"unicode-width",
-]
-
 [[package]]
 name = "textwrap"
 version = "0.16.0"

@@ -6967,7 +6938,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -7140,7 +7111,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -7328,7 +7299,7 @@ checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -7889,12 +7860,6 @@ version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"

-[[package]]
-name = "vec_map"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
-
 [[package]]
 name = "version_check"
 version = "0.9.4"

@@ -8058,7 +8023,7 @@ dependencies = [
 "once_cell",
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 "wasm-bindgen-shared",
 ]

@@ -8092,7 +8057,7 @@ checksum = "4783ce29f09b9d93134d41297aded3a712b7b979e9c6f28c32cb88c973a94869"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 "wasm-bindgen-backend",
 "wasm-bindgen-shared",
 ]

@@ -8959,7 +8924,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]
@@ -16,6 +16,12 @@
 "replace_newest": true
 }
 ],
+"ctrl-cmd-g": [
+"editor::SelectPrevious",
+{
+"replace_newest": true
+}
+],
 "ctrl-shift-down": "editor::AddSelectionBelow",
 "ctrl-shift-up": "editor::AddSelectionAbove",
 "cmd-shift-backspace": "editor::DeleteToBeginningOfLine",

@@ -252,12 +252,24 @@
 "replace_newest": false
 }
 ],
+"ctrl-cmd-d": [
+"editor::SelectPrevious",
+{
+"replace_newest": false
+}
+],
 "cmd-k cmd-d": [
 "editor::SelectNext",
 {
 "replace_newest": true
 }
 ],
+"cmd-k ctrl-cmd-d": [
+"editor::SelectPrevious",
+{
+"replace_newest": true
+}
+],
 "cmd-k cmd-i": "editor::Hover",
 "cmd-/": [
 "editor::ToggleComments",

@@ -506,7 +518,7 @@
 "terminal::SendText",
 "\u0001"
 ],
-// Terminal.app compatability
+// Terminal.app compatibility
 "alt-left": [
 "terminal::SendText",
 "\u001bb"

@@ -26,6 +26,12 @@
 "replace_newest": false
 }
 ],
+"ctrl-cmd-g": [
+"editor::SelectPrevious",
+{
+"replace_newest": false
+}
+],
 "cmd-/": [
 "editor::ToggleComments",
 {

@@ -253,7 +253,7 @@
 // copy to the system clipboard.
 "copy_on_select": false,
 // Any key-value pairs added to this list will be added to the terminal's
-// enviroment. Use `:` to seperate multiple values.
+// environment. Use `:` to separate multiple values.
 "env": {
 // "KEY": "value1:value2"
 },
assets/settings/initial_local_settings.json (new file, 11 additions)

@@ -0,0 +1,11 @@
+// Folder-specific Zed settings
+//
+// A subset of Zed's settings can be configured on a per-folder basis.
+//
+// For information on how to configure Zed, see the Zed
+// documentation: https://zed.dev/docs/configuring-zed
+//
+// To see all of Zed's default settings without changing your
+// custom settings, run the `open default settings` command
+// from the command palette or from `Zed` application menu.
+{}
@@ -159,10 +159,7 @@ impl Bundle {
 fn path(&self) -> &Path {
 match self {
 Self::App { app_bundle, .. } => app_bundle,
-Self::LocalPath {
-executable: excutable,
-..
-} => excutable,
+Self::LocalPath { executable, .. } => executable,
 }
 }

@@ -776,15 +776,6 @@ impl Client {
 if credentials.is_none() && try_keychain {
 credentials = read_credentials_from_keychain(cx);
 read_from_keychain = credentials.is_some();
-if read_from_keychain {
-cx.read(|cx| {
-self.telemetry().report_mixpanel_event(
-"read credentials from keychain",
-Default::default(),
-*settings::get::<TelemetrySettings>(cx),
-);
-});
-}
 }
 if credentials.is_none() {
 let mut status_rx = self.status();

@@ -1072,11 +1063,8 @@ impl Client {
 ) -> Task<Result<Credentials>> {
 let platform = cx.platform();
 let executor = cx.background();
-let telemetry = self.telemetry.clone();
 let http = self.http.clone();

-let telemetry_settings = cx.read(|cx| *settings::get::<TelemetrySettings>(cx));
-
 executor.clone().spawn(async move {
 // Generate a pair of asymmetric encryption keys. The public key will be used by the
 // zed server to encrypt the user's access token, so that it can'be intercepted by

@@ -1159,12 +1147,6 @@ impl Client {
 .context("failed to decrypt access token")?;
 platform.activate(true);

-telemetry.report_mixpanel_event(
-"authenticate with browser",
-Default::default(),
-telemetry_settings,
-);
-
 Ok(Credentials {
 user_id: user_id.parse()?,
 access_token,
@@ -1,14 +1,9 @@
 use crate::{TelemetrySettings, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
 use db::kvp::KEY_VALUE_STORE;
-use gpui::{
-executor::Background,
-serde_json::{self, value::Map, Value},
-AppContext, Task,
-};
+use gpui::{executor::Background, serde_json, AppContext, Task};
 use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use serde::Serialize;
-use serde_json::json;
 use std::{
 env,
 io::Write,

@@ -19,7 +14,7 @@ use std::{
 };
 use tempfile::NamedTempFile;
 use util::http::HttpClient;
-use util::{channel::ReleaseChannel, post_inc, ResultExt, TryFutureExt};
+use util::{channel::ReleaseChannel, TryFutureExt};
 use uuid::Uuid;

 pub struct Telemetry {

@@ -37,23 +32,15 @@ struct TelemetryState {
 os_name: &'static str,
 os_version: Option<Arc<str>>,
 architecture: &'static str,
-mixpanel_events_queue: Vec<MixpanelEvent>,
 clickhouse_events_queue: Vec<ClickhouseEventWrapper>,
-next_mixpanel_event_id: usize,
-flush_mixpanel_events_task: Option<Task<()>>,
 flush_clickhouse_events_task: Option<Task<()>>,
 log_file: Option<NamedTempFile>,
 is_staff: Option<bool>,
 }

-const MIXPANEL_EVENTS_URL: &'static str = "https://api.mixpanel.com/track";
-const MIXPANEL_ENGAGE_URL: &'static str = "https://api.mixpanel.com/engage#profile-set";
 const CLICKHOUSE_EVENTS_URL_PATH: &'static str = "/api/events";

 lazy_static! {
-static ref MIXPANEL_TOKEN: Option<String> = std::env::var("ZED_MIXPANEL_TOKEN")
-.ok()
-.or_else(|| option_env!("ZED_MIXPANEL_TOKEN").map(|key| key.to_string()));
 static ref CLICKHOUSE_EVENTS_URL: String =
 format!("{}{}", *ZED_SERVER_URL, CLICKHOUSE_EVENTS_URL_PATH);
 }

@@ -95,47 +82,6 @@ pub enum ClickhouseEvent {
 },
 }

-#[derive(Serialize, Debug)]
-struct MixpanelEvent {
-event: String,
-properties: MixpanelEventProperties,
-}
-
-#[derive(Serialize, Debug)]
-struct MixpanelEventProperties {
-// Mixpanel required fields
-#[serde(skip_serializing_if = "str::is_empty")]
-token: &'static str,
-time: u128,
-#[serde(rename = "distinct_id")]
-installation_id: Option<Arc<str>>,
-#[serde(rename = "$insert_id")]
-insert_id: usize,
-// Custom fields
-#[serde(skip_serializing_if = "Option::is_none", flatten)]
-event_properties: Option<Map<String, Value>>,
-#[serde(rename = "OS Name")]
-os_name: &'static str,
-#[serde(rename = "OS Version")]
-os_version: Option<Arc<str>>,
-#[serde(rename = "Release Channel")]
-release_channel: Option<&'static str>,
-#[serde(rename = "App Version")]
-app_version: Option<Arc<str>>,
-#[serde(rename = "Signed In")]
-signed_in: bool,
-}
-
-#[derive(Serialize)]
-struct MixpanelEngageRequest {
-#[serde(rename = "$token")]
-token: &'static str,
-#[serde(rename = "$distinct_id")]
-installation_id: Arc<str>,
-#[serde(rename = "$set")]
-set: Value,
-}
-
 #[cfg(debug_assertions)]
 const MAX_QUEUE_LEN: usize = 1;

@@ -168,29 +114,13 @@ impl Telemetry {
 release_channel,
 installation_id: None,
 metrics_id: None,
-mixpanel_events_queue: Default::default(),
 clickhouse_events_queue: Default::default(),
-flush_mixpanel_events_task: Default::default(),
 flush_clickhouse_events_task: Default::default(),
-next_mixpanel_event_id: 0,
 log_file: None,
 is_staff: None,
 }),
 });

-if MIXPANEL_TOKEN.is_some() {
-this.executor
-.spawn({
-let this = this.clone();
-async move {
-if let Some(tempfile) = NamedTempFile::new().log_err() {
-this.state.lock().log_file = Some(tempfile);
-}
-}
-})
-.detach();
-}
-
 this
 }

@@ -218,20 +148,9 @@ impl Telemetry {
 let mut state = this.state.lock();
 state.installation_id = Some(installation_id.clone());

-for event in &mut state.mixpanel_events_queue {
-event
-.properties
-.installation_id
-.get_or_insert_with(|| installation_id.clone());
-}
-
-let has_mixpanel_events = !state.mixpanel_events_queue.is_empty();
 let has_clickhouse_events = !state.clickhouse_events_queue.is_empty();
 drop(state);

-if has_mixpanel_events {
-this.flush_mixpanel_events();
-}
-
 if has_clickhouse_events {
 this.flush_clickhouse_events();

@@ -256,37 +175,11 @@ impl Telemetry {
 return;
 }

-let this = self.clone();
 let mut state = self.state.lock();
-let installation_id = state.installation_id.clone();
 let metrics_id: Option<Arc<str>> = metrics_id.map(|id| id.into());
 state.metrics_id = metrics_id.clone();
 state.is_staff = Some(is_staff);
 drop(state);

-if let Some((token, installation_id)) = MIXPANEL_TOKEN.as_ref().zip(installation_id) {
-self.executor
-.spawn(
-async move {
-let json_bytes = serde_json::to_vec(&[MixpanelEngageRequest {
-token,
-installation_id,
-set: json!({
-"Staff": is_staff,
-"ID": metrics_id,
-"App": true
-}),
-}])?;
-
-this.http_client
-.post_json(MIXPANEL_ENGAGE_URL, json_bytes.into())
-.await?;
-anyhow::Ok(())
-}
-.log_err(),
-)
-.detach();
-}
 }

 pub fn report_clickhouse_event(

@@ -310,7 +203,7 @@ impl Telemetry {
 });

 if state.installation_id.is_some() {
-if state.mixpanel_events_queue.len() >= MAX_QUEUE_LEN {
+if state.clickhouse_events_queue.len() >= MAX_QUEUE_LEN {
 drop(state);
 self.flush_clickhouse_events();
 } else {

@@ -324,55 +217,6 @@ impl Telemetry {
 }
 }

-pub fn report_mixpanel_event(
-self: &Arc<Self>,
-kind: &str,
-properties: Value,
-telemetry_settings: TelemetrySettings,
-) {
-if !telemetry_settings.metrics {
-return;
-}
-
-let mut state = self.state.lock();
-let event = MixpanelEvent {
-event: kind.into(),
-properties: MixpanelEventProperties {
-token: "",
-time: SystemTime::now()
-.duration_since(UNIX_EPOCH)
-.unwrap()
-.as_millis(),
-installation_id: state.installation_id.clone(),
-insert_id: post_inc(&mut state.next_mixpanel_event_id),
-event_properties: if let Value::Object(properties) = properties {
-Some(properties)
-} else {
-None
-},
-os_name: state.os_name,
-os_version: state.os_version.clone(),
-release_channel: state.release_channel,
-app_version: state.app_version.clone(),
-signed_in: state.metrics_id.is_some(),
-},
-};
-state.mixpanel_events_queue.push(event);
-if state.installation_id.is_some() {
-if state.mixpanel_events_queue.len() >= MAX_QUEUE_LEN {
-drop(state);
-self.flush_mixpanel_events();
-} else {
-let this = self.clone();
-let executor = self.executor.clone();
-state.flush_mixpanel_events_task = Some(self.executor.spawn(async move {
-executor.timer(DEBOUNCE_INTERVAL).await;
-this.flush_mixpanel_events();
-}));
-}
-}
-}
-
 pub fn metrics_id(self: &Arc<Self>) -> Option<Arc<str>> {
 self.state.lock().metrics_id.clone()
 }

@@ -385,44 +229,6 @@ impl Telemetry {
 self.state.lock().is_staff
 }

-fn flush_mixpanel_events(self: &Arc<Self>) {
-let mut state = self.state.lock();
-let mut events = mem::take(&mut state.mixpanel_events_queue);
-state.flush_mixpanel_events_task.take();
-drop(state);
-
-if let Some(token) = MIXPANEL_TOKEN.as_ref() {
-let this = self.clone();
-self.executor
-.spawn(
-async move {
-let mut json_bytes = Vec::new();
-
-if let Some(file) = &mut this.state.lock().log_file {
-let file = file.as_file_mut();
-for event in &mut events {
-json_bytes.clear();
-serde_json::to_writer(&mut json_bytes, event)?;
-file.write_all(&json_bytes)?;
-file.write(b"\n")?;
-
-event.properties.token = token;
-}
-}
-
-json_bytes.clear();
-serde_json::to_writer(&mut json_bytes, &events)?;
-this.http_client
-.post_json(MIXPANEL_EVENTS_URL, json_bytes.into())
-.await?;
-anyhow::Ok(())
-}
-.log_err(),
-)
-.detach();
-}
-}
-
 fn flush_clickhouse_events(self: &Arc<Self>) {
 let mut state = self.state.lock();
 let mut events = mem::take(&mut state.clickhouse_events_queue);
@@ -1424,7 +1424,7 @@ async fn join_project(
 )?;
 }

-for settings_file in dbg!(worktree.settings_files) {
+for settings_file in worktree.settings_files {
 session.peer.send(
 session.connection_id,
 proto::UpdateWorktreeSettings {

@@ -1554,8 +1554,6 @@ async fn update_worktree_settings(
 message: proto::UpdateWorktreeSettings,
 session: Session,
 ) -> Result<()> {
-dbg!(&message);
-
 let guest_connection_ids = session
 .db()
 .await
@@ -472,7 +472,7 @@ impl CollabTitlebarItem {
 Stack::new()
 .with_child(
 MouseEventHandler::<ShareUnshare, Self>::new(0, cx, |state, _| {
-//TODO: Ensure this button has consistant width for both text variations
+//TODO: Ensure this button has consistent width for both text variations
 let style = titlebar.share_button.style_for(state, false);
 Label::new(label, style.text.clone())
 .contained()
@@ -4,7 +4,7 @@ mod sign_in;
 use anyhow::{anyhow, Context, Result};
 use async_compression::futures::bufread::GzipDecoder;
 use async_tar::Archive;
-use collections::HashMap;
+use collections::{HashMap, HashSet};
 use futures::{channel::oneshot, future::Shared, Future, FutureExt, TryFutureExt};
 use gpui::{
 actions, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle,

@@ -127,7 +127,7 @@ impl CopilotServer {
 struct RunningCopilotServer {
 lsp: Arc<LanguageServer>,
 sign_in_status: SignInStatus,
-registered_buffers: HashMap<u64, RegisteredBuffer>,
+registered_buffers: HashMap<usize, RegisteredBuffer>,
 }

 #[derive(Clone, Debug)]

@@ -163,7 +163,6 @@ impl Status {
 }

 struct RegisteredBuffer {
-id: u64,
 uri: lsp::Url,
 language_id: String,
 snapshot: BufferSnapshot,

@@ -178,13 +177,13 @@ impl RegisteredBuffer {
 buffer: &ModelHandle<Buffer>,
 cx: &mut ModelContext<Copilot>,
 ) -> oneshot::Receiver<(i32, BufferSnapshot)> {
-let id = self.id;
 let (done_tx, done_rx) = oneshot::channel();

 if buffer.read(cx).version() == self.snapshot.version {
 let _ = done_tx.send((self.snapshot_version, self.snapshot.clone()));
 } else {
 let buffer = buffer.downgrade();
+let id = buffer.id();
 let prev_pending_change =
 mem::replace(&mut self.pending_buffer_change, Task::ready(None));
 self.pending_buffer_change = cx.spawn_weak(|copilot, mut cx| async move {

@@ -268,7 +267,7 @@ pub struct Copilot {
 http: Arc<dyn HttpClient>,
 node_runtime: Arc<NodeRuntime>,
 server: CopilotServer,
-buffers: HashMap<u64, WeakModelHandle<Buffer>>,
+buffers: HashSet<WeakModelHandle<Buffer>>,
 }

 impl Entity for Copilot {

@@ -375,7 +374,7 @@ impl Copilot {
 server
 .on_notification::<LogMessage, _>(|params, _cx| {
 match params.level {
-// Copilot is pretty agressive about logging
+// Copilot is pretty aggressive about logging
 0 => debug!("copilot: {}", params.message),
 1 => debug!("copilot: {}", params.message),
 _ => error!("copilot: {}", params.message),

@@ -559,8 +558,8 @@ impl Copilot {
 }

 pub fn register_buffer(&mut self, buffer: &ModelHandle<Buffer>, cx: &mut ModelContext<Self>) {
-let buffer_id = buffer.read(cx).remote_id();
-self.buffers.insert(buffer_id, buffer.downgrade());
+let weak_buffer = buffer.downgrade();
+self.buffers.insert(weak_buffer.clone());

 if let CopilotServer::Running(RunningCopilotServer {
 lsp: server,

@@ -573,8 +572,7 @@ impl Copilot {
 return;
 }

-let buffer_id = buffer.read(cx).remote_id();
-registered_buffers.entry(buffer_id).or_insert_with(|| {
+registered_buffers.entry(buffer.id()).or_insert_with(|| {
 let uri: lsp::Url = uri_for_buffer(buffer, cx);
 let language_id = id_for_language(buffer.read(cx).language());
 let snapshot = buffer.read(cx).snapshot();

@@ -592,7 +590,6 @@ impl Copilot {
 .log_err();

 RegisteredBuffer {
-id: buffer_id,
 uri,
 language_id,
 snapshot,

@@ -603,8 +600,8 @@ impl Copilot {
 this.handle_buffer_event(buffer, event, cx).log_err();
 }),
 cx.observe_release(buffer, move |this, _buffer, _cx| {
-this.buffers.remove(&buffer_id);
-this.unregister_buffer(buffer_id);
+this.buffers.remove(&weak_buffer);
+this.unregister_buffer(&weak_buffer);
 }),
 ],
 }

@@ -619,8 +616,7 @@ impl Copilot {
 cx: &mut ModelContext<Self>,
 ) -> Result<()> {
 if let Ok(server) = self.server.as_running() {
-let buffer_id = buffer.read(cx).remote_id();
-if let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer_id) {
+if let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.id()) {
 match event {
 language::Event::Edited => {
 let _ = registered_buffer.report_changes(&buffer, cx);

@@ -674,9 +670,9 @@ impl Copilot {
 Ok(())
 }

-fn unregister_buffer(&mut self, buffer_id: u64) {
+fn unregister_buffer(&mut self, buffer: &WeakModelHandle<Buffer>) {
 if let Ok(server) = self.server.as_running() {
-if let Some(buffer) = server.registered_buffers.remove(&buffer_id) {
+if let Some(buffer) = server.registered_buffers.remove(&buffer.id()) {
 server
 .lsp
 .notify::<lsp::notification::DidCloseTextDocument>(

@@ -779,8 +775,7 @@ impl Copilot {
 Err(error) => return Task::ready(Err(error)),
 };
 let lsp = server.lsp.clone();
-let buffer_id = buffer.read(cx).remote_id();
-let registered_buffer = server.registered_buffers.get_mut(&buffer_id).unwrap();
+let registered_buffer = server.registered_buffers.get_mut(&buffer.id()).unwrap();
 let snapshot = registered_buffer.report_changes(buffer, cx);
 let buffer = buffer.read(cx);
 let uri = registered_buffer.uri.clone();

@@ -850,7 +845,7 @@ impl Copilot {
 lsp_status: request::SignInStatus,
 cx: &mut ModelContext<Self>,
 ) {
-self.buffers.retain(|_, buffer| buffer.is_upgradable(cx));
+self.buffers.retain(|buffer| buffer.is_upgradable(cx));

 if let Ok(server) = self.server.as_running() {
 match lsp_status {

@@ -858,7 +853,7 @@ impl Copilot {
 | request::SignInStatus::MaybeOk { .. }
 | request::SignInStatus::AlreadySignedIn { .. } => {
 server.sign_in_status = SignInStatus::Authorized;
-for buffer in self.buffers.values().cloned().collect::<Vec<_>>() {
+for buffer in self.buffers.iter().cloned().collect::<Vec<_>>() {
 if let Some(buffer) = buffer.upgrade(cx) {
 self.register_buffer(&buffer, cx);
 }

@@ -866,14 +861,14 @@ impl Copilot {
 }
 request::SignInStatus::NotAuthorized { .. } => {
 server.sign_in_status = SignInStatus::Unauthorized;
-for buffer_id in self.buffers.keys().copied().collect::<Vec<_>>() {
-self.unregister_buffer(buffer_id);
+for buffer in self.buffers.iter().copied().collect::<Vec<_>>() {
+self.unregister_buffer(&buffer);
 }
 }
 request::SignInStatus::NotSignedIn => {
 server.sign_in_status = SignInStatus::SignedOut;
-for buffer_id in self.buffers.keys().copied().collect::<Vec<_>>() {
-self.unregister_buffer(buffer_id);
+for buffer in self.buffers.iter().copied().collect::<Vec<_>>() {
+self.unregister_buffer(&buffer);
 }
 }
 }

@@ -896,9 +891,7 @@ fn uri_for_buffer(buffer: &ModelHandle<Buffer>, cx: &AppContext) -> lsp::Url {
 if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
 lsp::Url::from_file_path(file.abs_path(cx)).unwrap()
 } else {
-format!("buffer://{}", buffer.read(cx).remote_id())
-.parse()
-.unwrap()
+format!("buffer://{}", buffer.id()).parse().unwrap()
 }
 }

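Note on the change from `buffers: HashMap<u64, WeakModelHandle<Buffer>>` to `buffers: HashSet<WeakModelHandle<Buffer>>` above: keeping weak handles directly in a set only works if the handle hashes and compares by buffer identity. The sketch below is a rough standalone analogy built on std's Rc/Weak; the `WeakHandle` wrapper is a hypothetical stand-in, not gpui's WeakModelHandle API.

    use std::collections::HashSet;
    use std::hash::{Hash, Hasher};
    use std::rc::{Rc, Weak};

    // Hypothetical stand-in for a weak entity handle: equality and hashing go
    // through the underlying allocation pointer, so two handles to the same
    // entity collapse into a single set entry.
    struct WeakHandle<T>(Weak<T>);

    impl<T> PartialEq for WeakHandle<T> {
        fn eq(&self, other: &Self) -> bool {
            Weak::ptr_eq(&self.0, &other.0)
        }
    }

    impl<T> Eq for WeakHandle<T> {}

    impl<T> Hash for WeakHandle<T> {
        fn hash<H: Hasher>(&self, state: &mut H) {
            (self.0.as_ptr() as usize).hash(state);
        }
    }

    fn main() {
        let buffer = Rc::new(String::from("buffer contents"));
        let mut buffers: HashSet<WeakHandle<String>> = HashSet::new();

        // Registering the same buffer twice still yields a single entry.
        buffers.insert(WeakHandle(Rc::downgrade(&buffer)));
        buffers.insert(WeakHandle(Rc::downgrade(&buffer)));
        assert_eq!(buffers.len(), 1);

        // Once the strong handle is gone, prune dangling entries, analogous to
        // the diff's `self.buffers.retain(|buffer| buffer.is_upgradable(cx))`.
        drop(buffer);
        buffers.retain(|handle| handle.0.upgrade().is_some());
        assert!(buffers.is_empty());
    }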
@@ -475,7 +475,7 @@ impl DisplaySnapshot {
 })
 }

-/// Returns an iterator of the start positions of the occurances of `target` in the `self` after `from`
+/// Returns an iterator of the start positions of the occurrences of `target` in the `self` after `from`
 /// Stops if `condition` returns false for any of the character position pairs observed.
 pub fn find_while<'a>(
 &'a self,

@@ -486,7 +486,7 @@ impl DisplaySnapshot {
 Self::find_internal(self.chars_at(from), target.chars().collect(), condition)
 }

-/// Returns an iterator of the end positions of the occurances of `target` in the `self` before `from`
+/// Returns an iterator of the end positions of the occurrences of `target` in the `self` before `from`
 /// Stops if `condition` returns false for any of the character position pairs observed.
 pub fn reverse_find_while<'a>(
 &'a self,
@@ -37,6 +37,7 @@ pub use element::{
 };
 use futures::FutureExt;
 use fuzzy::{StringMatch, StringMatchCandidate};
+use gpui::LayoutContext;
 use gpui::{
 actions,
 color::Color,

@@ -47,9 +48,8 @@ use gpui::{
 impl_actions,
 keymap_matcher::KeymapContext,
 platform::{CursorStyle, MouseButton},
-serde_json::{self, json},
-AnyElement, AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, Entity,
-LayoutContext, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
+serde_json, AnyElement, AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element,
+Entity, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 WindowContext,
 };
 use highlight_matching_bracket::refresh_matching_bracket_highlights;

@@ -113,6 +113,12 @@ pub struct SelectNext {
 pub replace_newest: bool,
 }

+#[derive(Clone, Deserialize, PartialEq, Default)]
+pub struct SelectPrevious {
+#[serde(default)]
+pub replace_newest: bool,
+}
+
 #[derive(Clone, Deserialize, PartialEq)]
 pub struct SelectToBeginningOfLine {
 #[serde(default)]

@@ -274,6 +280,7 @@ impl_actions!(
 editor,
 [
 SelectNext,
+SelectPrevious,
 SelectToBeginningOfLine,
 SelectToEndOfLine,
 ToggleCodeActions,

@@ -369,6 +376,7 @@ pub fn init(cx: &mut AppContext) {
 cx.add_action(Editor::add_selection_above);
 cx.add_action(Editor::add_selection_below);
 cx.add_action(Editor::select_next);
+cx.add_action(Editor::select_previous);
 cx.add_action(Editor::toggle_comments);
 cx.add_action(Editor::select_larger_syntax_node);
 cx.add_action(Editor::select_smaller_syntax_node);

@@ -486,6 +494,7 @@ pub struct Editor {
 columnar_selection_tail: Option<Anchor>,
 add_selections_state: Option<AddSelectionsState>,
 select_next_state: Option<SelectNextState>,
+select_prev_state: Option<SelectNextState>,
 selection_history: SelectionHistory,
 autoclose_regions: Vec<AutocloseRegion>,
 snippet_stack: InvalidationStack<SnippetState>,

@@ -544,6 +553,7 @@ pub struct EditorSnapshot {
 struct SelectionHistoryEntry {
 selections: Arc<[Selection<Anchor>]>,
 select_next_state: Option<SelectNextState>,
+select_prev_state: Option<SelectNextState>,
 add_selections_state: Option<AddSelectionsState>,
 }

@@ -1291,6 +1301,7 @@ impl Editor {
 columnar_selection_tail: None,
 add_selections_state: None,
 select_next_state: None,
+select_prev_state: None,
 selection_history: Default::default(),
 autoclose_regions: Default::default(),
 snippet_stack: Default::default(),

@@ -1515,6 +1526,7 @@ impl Editor {
 let buffer = &display_map.buffer_snapshot;
 self.add_selections_state = None;
 self.select_next_state = None;
+self.select_prev_state = None;
 self.select_larger_syntax_node_stack.clear();
 self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer);
 self.snippet_stack

@@ -2535,7 +2547,7 @@ impl Editor {
 .read(cx)
 .text_anchor_for_position(position.clone(), cx)?;

-// OnTypeFormatting retuns a list of edits, no need to pass them between Zed instances,
+// OnTypeFormatting returns a list of edits, no need to pass them between Zed instances,
 // hence we do LSP request & edit on host side only — add formats to host's history.
 let push_to_lsp_host_history = true;
 // If this is not the host, append its history with new edits.

@@ -5223,6 +5235,101 @@ impl Editor {
 }
 }

+pub fn select_previous(&mut self, action: &SelectPrevious, cx: &mut ViewContext<Self>) {
+self.push_to_selection_history();
+let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
+let buffer = &display_map.buffer_snapshot;
+let mut selections = self.selections.all::<usize>(cx);
+if let Some(mut select_prev_state) = self.select_prev_state.take() {
+let query = &select_prev_state.query;
+if !select_prev_state.done {
+let first_selection = selections.iter().min_by_key(|s| s.id).unwrap();
+let last_selection = selections.iter().max_by_key(|s| s.id).unwrap();
+let mut next_selected_range = None;
+// When we're iterating matches backwards, the oldest match will actually be the furthest one in the buffer.
+let bytes_before_last_selection =
+buffer.reversed_bytes_in_range(0..last_selection.start);
+let bytes_after_first_selection =
+buffer.reversed_bytes_in_range(first_selection.end..buffer.len());
+let query_matches = query
+.stream_find_iter(bytes_before_last_selection)
+.map(|result| (last_selection.start, result))
+.chain(
+query
+.stream_find_iter(bytes_after_first_selection)
+.map(|result| (buffer.len(), result)),
+);
+for (end_offset, query_match) in query_matches {
+let query_match = query_match.unwrap(); // can only fail due to I/O
+let offset_range =
+end_offset - query_match.end()..end_offset - query_match.start();
+let display_range = offset_range.start.to_display_point(&display_map)
+..offset_range.end.to_display_point(&display_map);
+
+if !select_prev_state.wordwise
+|| (!movement::is_inside_word(&display_map, display_range.start)
+&& !movement::is_inside_word(&display_map, display_range.end))
+{
+next_selected_range = Some(offset_range);
+break;
+}
+}
+
+if let Some(next_selected_range) = next_selected_range {
+self.unfold_ranges([next_selected_range.clone()], false, true, cx);
+self.change_selections(Some(Autoscroll::newest()), cx, |s| {
+if action.replace_newest {
+s.delete(s.newest_anchor().id);
+}
+s.insert_range(next_selected_range);
+});
+} else {
+select_prev_state.done = true;
+}
+}
+
+self.select_prev_state = Some(select_prev_state);
+} else if selections.len() == 1 {
+let selection = selections.last_mut().unwrap();
+if selection.start == selection.end {
+let word_range = movement::surrounding_word(
+&display_map,
+selection.start.to_display_point(&display_map),
+);
+selection.start = word_range.start.to_offset(&display_map, Bias::Left);
+selection.end = word_range.end.to_offset(&display_map, Bias::Left);
+selection.goal = SelectionGoal::None;
+selection.reversed = false;
+
+let query = buffer
+.text_for_range(selection.start..selection.end)
+.collect::<String>();
+let query = query.chars().rev().collect::<String>();
+let select_state = SelectNextState {
+query: AhoCorasick::new_auto_configured(&[query]),
+wordwise: true,
+done: false,
+};
+self.unfold_ranges([selection.start..selection.end], false, true, cx);
+self.change_selections(Some(Autoscroll::newest()), cx, |s| {
+s.select(selections);
+});
+self.select_prev_state = Some(select_state);
+} else {
+let query = buffer
+.text_for_range(selection.start..selection.end)
+.collect::<String>();
+let query = query.chars().rev().collect::<String>();
+self.select_prev_state = Some(SelectNextState {
+query: AhoCorasick::new_auto_configured(&[query]),
+wordwise: false,
+done: false,
+});
+self.select_previous(action, cx);
+}
+}
+}
+
 pub fn toggle_comments(&mut self, action: &ToggleComments, cx: &mut ViewContext<Self>) {
 self.transact(cx, |this, cx| {
 let mut selections = this.selections.all::<Point>(cx);
@@ -5596,6 +5703,7 @@ impl Editor {
         if let Some(entry) = self.selection_history.undo_stack.pop_back() {
             self.change_selections(None, cx, |s| s.select_anchors(entry.selections.to_vec()));
             self.select_next_state = entry.select_next_state;
+            self.select_prev_state = entry.select_prev_state;
             self.add_selections_state = entry.add_selections_state;
             self.request_autoscroll(Autoscroll::newest(), cx);
         }
@@ -5608,6 +5716,7 @@ impl Editor {
         if let Some(entry) = self.selection_history.redo_stack.pop_back() {
             self.change_selections(None, cx, |s| s.select_anchors(entry.selections.to_vec()));
             self.select_next_state = entry.select_next_state;
+            self.select_prev_state = entry.select_prev_state;
             self.add_selections_state = entry.add_selections_state;
             self.request_autoscroll(Autoscroll::newest(), cx);
         }
@@ -6385,6 +6494,7 @@ impl Editor {
         self.selection_history.push(SelectionHistoryEntry {
             selections: self.selections.disjoint_anchors(),
             select_next_state: self.select_next_state.clone(),
+            select_prev_state: self.select_prev_state.clone(),
             add_selections_state: self.add_selections_state.clone(),
         });
     }
@@ -7130,15 +7240,6 @@ impl Editor {
             .show_copilot_suggestions;

         let telemetry = project.read(cx).client().telemetry().clone();
-        telemetry.report_mixpanel_event(
-            match name {
-                "open" => "open editor",
-                "save" => "save editor",
-                _ => name,
-            },
-            json!({ "File Extension": file_extension, "Vim Mode": vim_mode, "In Clickhouse": true }),
-            telemetry_settings,
-        );
         let event = ClickhouseEvent::Editor {
             file_extension,
             vim_mode,
@@ -7836,13 +7937,13 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, is_valid: bool) -> Rend
 }

 pub fn highlight_diagnostic_message(
-    inital_highlights: Vec<usize>,
+    initial_highlights: Vec<usize>,
     message: &str,
 ) -> (String, Vec<usize>) {
     let mut message_without_backticks = String::new();
     let mut prev_offset = 0;
     let mut inside_block = false;
-    let mut highlights = inital_highlights;
+    let mut highlights = initial_highlights;
     for (match_ix, (offset, _)) in message
         .match_indices('`')
         .chain([(message.len(), "")])
@@ -9,7 +9,8 @@ use gpui::{
     executor::Deterministic,
     geometry::{rect::RectF, vector::vec2f},
     platform::{WindowBounds, WindowOptions},
-    serde_json, TestAppContext,
+    serde_json::{self, json},
+    TestAppContext,
 };
 use indoc::indoc;
 use language::{
@@ -3107,6 +3108,57 @@ async fn test_select_next(cx: &mut gpui::TestAppContext) {
     cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
 }

+#[gpui::test]
+async fn test_select_previous(cx: &mut gpui::TestAppContext) {
+    init_test(cx, |_| {});
+    {
+        // `Select previous` without a selection (selects wordwise)
+        let mut cx = EditorTestContext::new(cx).await;
+        cx.set_state("abc\nˇabc abc\ndefabc\nabc");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\nabc");
+
+        cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx));
+        cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc");
+
+        cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx));
+        cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\nabc");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«abcˇ» abc\ndefabc\n«abcˇ»");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
+    }
+    {
+        // `Select previous` with a selection
+        let mut cx = EditorTestContext::new(cx).await;
+        cx.set_state("abc\n«ˇabc» abc\ndefabc\nabc");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\nabc");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\n«abcˇ»");
+
+        cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\nabc");
+
+        cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndefabc\n«abcˇ»");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» abc\ndef«abcˇ»\n«abcˇ»");
+
+        cx.update_editor(|e, cx| e.select_previous(&SelectPrevious::default(), cx));
+        cx.assert_editor_state("«abcˇ»\n«ˇabc» «abcˇ»\ndef«abcˇ»\n«abcˇ»");
+    }
+}
+
 #[gpui::test]
 async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
     init_test(cx, |_| {});
@@ -4270,7 +4322,7 @@ async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
     );
     assert!(!cx.read(|cx| editor.is_dirty(cx)));

-    // Set rust language override and assert overriden tabsize is sent to language server
+    // Set rust language override and assert overridden tabsize is sent to language server
     update_test_settings(cx, |settings| {
         settings.languages.insert(
             "Rust".into(),
@@ -4384,7 +4436,7 @@ async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
     );
     assert!(!cx.read(|cx| editor.is_dirty(cx)));

-    // Set rust language override and assert overriden tabsize is sent to language server
+    // Set rust language override and assert overridden tabsize is sent to language server
     update_test_settings(cx, |settings| {
         settings.languages.insert(
             "Rust".into(),
@@ -4725,7 +4777,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
                 two
                 threeˇ
             "},
-            "overlapping aditional edit",
+            "overlapping additional edit",
         ),
         (
             indoc! {"
@@ -3115,7 +3115,7 @@ mod tests {
         editor_width: f32,
     ) -> Vec<Invisible> {
         info!(
-            "Creating editor with mode {editor_mode:?}, witdh {editor_width} and text '{input_text}'"
+            "Creating editor with mode {editor_mode:?}, width {editor_width} and text '{input_text}'"
         );
         let (_, editor) = cx.add_window(|cx| {
             let buffer = MultiBuffer::build_simple(&input_text, cx);
@@ -199,6 +199,13 @@ pub struct MultiBufferBytes<'a> {
     chunk: &'a [u8],
 }

+pub struct ReversedMultiBufferBytes<'a> {
+    range: Range<usize>,
+    excerpts: Cursor<'a, Excerpt, usize>,
+    excerpt_bytes: Option<ExcerptBytes<'a>>,
+    chunk: &'a [u8],
+}
+
 struct ExcerptChunks<'a> {
     content_chunks: BufferChunks<'a>,
     footer_height: usize,
@@ -1978,7 +1985,6 @@ impl MultiBufferSnapshot {
         } else {
             None
         };
-
         MultiBufferBytes {
             range,
             excerpts,
@@ -1987,6 +1993,33 @@ impl MultiBufferSnapshot {
         }
     }

+    pub fn reversed_bytes_in_range<T: ToOffset>(
+        &self,
+        range: Range<T>,
+    ) -> ReversedMultiBufferBytes {
+        let range = range.start.to_offset(self)..range.end.to_offset(self);
+        let mut excerpts = self.excerpts.cursor::<usize>();
+        excerpts.seek(&range.end, Bias::Left, &());
+
+        let mut chunk = &[][..];
+        let excerpt_bytes = if let Some(excerpt) = excerpts.item() {
+            let mut excerpt_bytes = excerpt.reversed_bytes_in_range(
+                range.start - excerpts.start()..range.end - excerpts.start(),
+            );
+            chunk = excerpt_bytes.next().unwrap_or(&[][..]);
+            Some(excerpt_bytes)
+        } else {
+            None
+        };
+
+        ReversedMultiBufferBytes {
+            range,
+            excerpts,
+            excerpt_bytes,
+            chunk,
+        }
+    }
+
     pub fn buffer_rows(&self, start_row: u32) -> MultiBufferRows {
         let mut result = MultiBufferRows {
             buffer_row_range: 0..0,
@@ -3420,6 +3453,26 @@ impl Excerpt {
         }
     }

+    fn reversed_bytes_in_range(&self, range: Range<usize>) -> ExcerptBytes {
+        let content_start = self.range.context.start.to_offset(&self.buffer);
+        let bytes_start = content_start + range.start;
+        let bytes_end = content_start + cmp::min(range.end, self.text_summary.len);
+        let footer_height = if self.has_trailing_newline
+            && range.start <= self.text_summary.len
+            && range.end > self.text_summary.len
+        {
+            1
+        } else {
+            0
+        };
+        let content_bytes = self.buffer.reversed_bytes_in_range(bytes_start..bytes_end);
+
+        ExcerptBytes {
+            content_bytes,
+            footer_height,
+        }
+    }
+
     fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor {
         if text_anchor
             .cmp(&self.range.context.start, &self.buffer)
@@ -3738,6 +3791,38 @@ impl<'a> io::Read for MultiBufferBytes<'a> {
     }
 }

+impl<'a> ReversedMultiBufferBytes<'a> {
+    fn consume(&mut self, len: usize) {
+        self.range.end -= len;
+        self.chunk = &self.chunk[..self.chunk.len() - len];
+
+        if !self.range.is_empty() && self.chunk.is_empty() {
+            if let Some(chunk) = self.excerpt_bytes.as_mut().and_then(|bytes| bytes.next()) {
+                self.chunk = chunk;
+            } else {
+                self.excerpts.next(&());
+                if let Some(excerpt) = self.excerpts.item() {
+                    let mut excerpt_bytes =
+                        excerpt.bytes_in_range(0..self.range.end - self.excerpts.start());
+                    self.chunk = excerpt_bytes.next().unwrap();
+                    self.excerpt_bytes = Some(excerpt_bytes);
+                }
+            }
+        }
+    }
+}
+
+impl<'a> io::Read for ReversedMultiBufferBytes<'a> {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+        let len = cmp::min(buf.len(), self.chunk.len());
+        buf[..len].copy_from_slice(&self.chunk[..len]);
+        buf[..len].reverse();
+        if len > 0 {
+            self.consume(len);
+        }
+        Ok(len)
+    }
+}
+
 impl<'a> Iterator for ExcerptBytes<'a> {
     type Item = &'a [u8];

@@ -5258,7 +5343,7 @@ mod tests {
     assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678");

     // An undo in the multibuffer undoes the multibuffer transaction
-    // and also any individual buffer edits that have occured since
+    // and also any individual buffer edits that have occurred since
     // that transaction.
     multibuffer.undo(cx);
     assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
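`ReversedMultiBufferBytes` above walks excerpts back to front while still exposing a forward `io::Read`. An illustrative, self-contained sketch of the same shape over a plain chunk list (not the real multibuffer types):

```rust
use std::io::{self, Read};

// Chunks are stored in forward order and consumed from the back, with each
// read copied out of a chunk's tail and then reversed in place.
struct ReversedChunks<'a> {
    chunks: Vec<&'a [u8]>,
}

impl<'a> Read for ReversedChunks<'a> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let Some(chunk) = self.chunks.last_mut() else {
            return Ok(0); // no chunks left: EOF
        };
        let len = buf.len().min(chunk.len());
        let (rest, tail) = chunk.split_at(chunk.len() - len);
        buf[..len].copy_from_slice(tail);
        buf[..len].reverse();
        *chunk = rest;
        if chunk.is_empty() {
            self.chunks.pop();
        }
        Ok(len)
    }
}

fn main() -> io::Result<()> {
    let mut reader = ReversedChunks {
        chunks: vec![b"hello ".as_slice(), b"world".as_slice()],
    };
    let mut out = Vec::new();
    reader.read_to_end(&mut out)?;
    assert_eq!(out, b"dlrow olleh".to_vec());
    Ok(())
}
```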
@@ -48,8 +48,8 @@ pub fn marked_display_snapshot(
 }

 pub fn select_ranges(editor: &mut Editor, marked_text: &str, cx: &mut ViewContext<Editor>) {
-    let (umarked_text, text_ranges) = marked_text_ranges(marked_text, true);
-    assert_eq!(editor.text(cx), umarked_text);
+    let (unmarked_text, text_ranges) = marked_text_ranges(marked_text, true);
+    assert_eq!(editor.text(cx), unmarked_text);
     editor.change_selections(None, cx, |s| s.select_ranges(text_ranges));
 }

@@ -6,6 +6,8 @@ use std::{env, fmt::Display};
 use sysinfo::{System, SystemExt};
 use util::channel::ReleaseChannel;

+// TODO: Move this file out of feedback and into a more general place
+
 #[derive(Clone, Debug, Serialize)]
 pub struct SystemSpecs {
     #[serde(serialize_with = "serialize_app_version")]
@@ -32,7 +32,7 @@ use repository::{FakeGitRepositoryState, GitFileStatus};
 use std::sync::Weak;

 lazy_static! {
-    static ref LINE_SEPERATORS_REGEX: Regex = Regex::new("\r\n|\r|\u{2028}|\u{2029}").unwrap();
+    static ref LINE_SEPARATORS_REGEX: Regex = Regex::new("\r\n|\r|\u{2028}|\u{2029}").unwrap();
 }

 #[derive(Clone, Copy, Debug, PartialEq)]
@@ -77,13 +77,13 @@ impl LineEnding {
 }

 pub fn normalize(text: &mut String) {
-    if let Cow::Owned(replaced) = LINE_SEPERATORS_REGEX.replace_all(text, "\n") {
+    if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
         *text = replaced;
     }
 }

 pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
-    if let Cow::Owned(replaced) = LINE_SEPERATORS_REGEX.replace_all(&text, "\n") {
+    if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
         replaced.into()
     } else {
         text
@@ -53,7 +53,7 @@ uuid = { version = "1.1.2", features = ["v4"] }
 waker-fn = "1.1.0"

 [build-dependencies]
-bindgen = "0.59.2"
+bindgen = "0.65.1"
 cc = "1.0.67"

 [dev-dependencies]
@@ -6335,9 +6335,9 @@ mod tests {
     #[crate::test(self)]
     async fn test_labeled_tasks(cx: &mut TestAppContext) {
         assert_eq!(None, cx.update(|cx| cx.active_labeled_tasks().next()));
-        let (mut sender, mut reciever) = postage::oneshot::channel::<()>();
+        let (mut sender, mut receiver) = postage::oneshot::channel::<()>();
         let task = cx
-            .update(|cx| cx.spawn_labeled("Test Label", |_| async move { reciever.recv().await }));
+            .update(|cx| cx.spawn_labeled("Test Label", |_| async move { receiver.recv().await }));

         assert_eq!(
             Some("Test Label"),
@@ -965,10 +965,10 @@ impl<'a> WindowContext<'a> {
     }

     pub fn rect_for_text_range(&self, range_utf16: Range<usize>) -> Option<RectF> {
-        let root_view_id = self.window.root_view().id();
+        let focused_view_id = self.window.focused_view_id?;
         self.window
             .rendered_views
-            .get(&root_view_id)?
+            .get(&focused_view_id)?
             .rect_for_text_range(range_utf16, self)
             .log_err()
             .flatten()
@@ -84,8 +84,8 @@ impl InputHandler for WindowInputHandler {

     fn rect_for_range(&self, range_utf16: Range<usize>) -> Option<RectF> {
         self.app
-            .borrow_mut()
-            .update_window(self.window_id, |cx| cx.rect_for_text_range(range_utf16))
+            .borrow()
+            .read_window(self.window_id, |cx| cx.rect_for_text_range(range_utf16))
             .flatten()
     }
 }
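The `rect_for_range` change swaps a mutable borrow plus `update_window` for a shared borrow plus `read_window`. A small sketch of why that distinction matters with `RefCell`-style interior mutability, using a plain `RefCell` rather than the real app state:

```rust
use std::cell::RefCell;

fn main() {
    let app = RefCell::new(vec!["window"]);

    // Shared borrows can coexist, so a read-only query is safe even while
    // another reader is active higher up the call stack.
    let first = app.borrow();
    let second = app.borrow();
    assert_eq!(first.len(), second.len());

    // app.borrow_mut(); // would panic here: the cell is already borrowed
    drop((first, second));

    // An exclusive borrow is fine once all readers are gone.
    app.borrow_mut().push("another");
    assert_eq!(app.borrow().len(), 2);
}
```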
@@ -67,7 +67,7 @@ impl KeymapMatcher {
     /// MatchResult::Pending =>
     ///     There exist bindings which are still waiting for more keys.
     /// MatchResult::Complete(matches) =>
-    ///     1 or more bindings have recieved the necessary key presses.
+    ///     1 or more bindings have received the necessary key presses.
     ///     The order of the matched actions is by position of the matching first,
     //      and order in the keymap second.
     pub fn push_keystroke(
@@ -264,7 +264,7 @@ impl settings::Setting for AllLanguageSettings {
         let mut root_schema = generator.root_schema_for::<Self::FileContent>();

         // Create a schema for a 'languages overrides' object, associating editor
-        // settings with specific langauges.
+        // settings with specific languages.
         assert!(root_schema
             .definitions
             .contains_key("LanguageSettingsContent"));
@@ -773,7 +773,7 @@ impl<'a> SyntaxMapCaptures<'a> {
     } in layers
     {
         let grammar = match &language.grammar {
-            Some(grammer) => grammer,
+            Some(grammar) => grammar,
             None => continue,
         };
         let query = match query(&grammar) {
@@ -896,7 +896,7 @@ impl<'a> SyntaxMapMatches<'a> {
     } in layers
     {
         let grammar = match &language.grammar {
-            Some(grammer) => grammer,
+            Some(grammar) => grammar,
             None => continue,
         };
         let query = match query(&grammar) {
@@ -18,4 +18,4 @@ metal = "0.21.0"
 objc = "0.2"

 [build-dependencies]
-bindgen = "0.59.2"
+bindgen = "0.65.1"
@@ -11,7 +11,7 @@ use syn::{parse_macro_input, Block, FnArg, ForeignItemFn, Ident, ItemFn, Pat, Ty
 ///     "Hello from Wasm".into()
 /// }
 /// ```
-/// This macro makes a function defined guest-side avaliable host-side.
+/// This macro makes a function defined guest-side available host-side.
 /// Note that all arguments and return types must be `serde`.
 #[proc_macro_attribute]
 pub fn export(args: TokenStream, function: TokenStream) -> TokenStream {
@@ -92,7 +92,7 @@ pub fn export(args: TokenStream, function: TokenStream) -> TokenStream {
 /// #[import]
 /// pub fn operating_system_name() -> String;
 /// ```
-/// This macro makes a function defined host-side avaliable guest-side.
+/// This macro makes a function defined host-side available guest-side.
 /// Note that all arguments and return types must be `serde`.
 /// All that's provided is a signature, as the function is implemented host-side.
 #[proc_macro_attribute]
@@ -127,7 +127,7 @@ use plugin_handles::RopeHandle;
 pub fn append(rope: RopeHandle, string: &str);
 ```

-This allows us to perform an operation on a `Rope`, but how do we get a `RopeHandle` into a plugin? Well, as plugins, we can only aquire resources to handles we're given, so we'd need to expose a fuction that takes a handle.
+This allows us to perform an operation on a `Rope`, but how do we get a `RopeHandle` into a plugin? Well, as plugins, we can only acquire resources to handles we're given, so we'd need to expose a function that takes a handle.

 To illustrate that point, here's an example. First, we'd define a plugin-side function as follows:

@@ -177,7 +177,7 @@ So here's what calling `append_newline` would do, from the top:

 6. And from here on out we return up the callstack, through Wasm, to Rust all the way back to where we started. Right before we return, we clear out the `ResourcePool`, so that we're no longer holding onto the underlying resource.

-Throughout this entire chain of calls, the resource remain host-side. By temporarilty checking it into a `ResourcePool`, we're able to keep a reference to the resource that we can use, while avoiding copying the uncopyable resource.
+Throughout this entire chain of calls, the resource remain host-side. By temporarily checking it into a `ResourcePool`, we're able to keep a reference to the resource that we can use, while avoiding copying the uncopyable resource.

 ## Final Notes
@@ -132,7 +132,7 @@ impl PluginBuilder {
             "env",
             &format!("__{}", name),
             move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
-                // TODO: use try block once avaliable
+                // TODO: use try block once available
                 let result: Result<(WasiBuffer, Memory, _), Trap> = (|| {
                     // grab a handle to the memory
                     let plugin_memory = match caller.get_export("memory") {
@@ -211,7 +211,7 @@ impl PluginBuilder {
             "env",
             &format!("__{}", name),
             move |mut caller: Caller<'_, WasiCtxAlloc>, packed_buffer: u64| {
-                // TODO: use try block once avaliable
+                // TODO: use try block once available
                 let result: Result<(WasiBuffer, Memory, Vec<u8>), Trap> = (|| {
                     // grab a handle to the memory
                     let plugin_memory = match caller.get_export("memory") {
@@ -297,7 +297,7 @@ pub enum PluginBinary<'a> {
     Precompiled(&'a [u8]),
 }

-/// Represents a WebAssembly plugin, with access to the WebAssembly System Inferface.
+/// Represents a WebAssembly plugin, with access to the WebAssembly System Interface.
 /// Build a new plugin using [`PluginBuilder`].
 pub struct Plugin {
     store: Store<WasiCtxAlloc>,
@@ -559,7 +559,7 @@ impl Plugin {
             .ok_or_else(|| anyhow!("Could not grab slice of plugin memory"))?;

         // write the argument to linear memory
-        // this returns a (ptr, lentgh) pair
+        // this returns a (ptr, length) pair
         let arg_buffer = Self::bytes_to_buffer(
             self.store.data().alloc_buffer(),
             &mut plugin_memory,
@@ -569,7 +569,7 @@ impl Plugin {
             .await?;

         // call the function, passing in the buffer and its length
-        // this returns a ptr to a (ptr, lentgh) pair
+        // this returns a ptr to a (ptr, length) pair
         let result_buffer = handle
             .function
             .call_async(&mut self.store, arg_buffer.into_u64())
|
||||||
let end_within = range.start.cmp(&primary.end, buffer).is_le()
|
let end_within = range.start.cmp(&primary.end, buffer).is_le()
|
||||||
&& range.end.cmp(&primary.end, buffer).is_ge();
|
&& range.end.cmp(&primary.end, buffer).is_ge();
|
||||||
|
|
||||||
//Skip addtional edits which overlap with the primary completion edit
|
//Skip additional edits which overlap with the primary completion edit
|
||||||
//https://github.com/zed-industries/zed/pull/1871
|
//https://github.com/zed-industries/zed/pull/1871
|
||||||
if !start_within && !end_within {
|
if !start_within && !end_within {
|
||||||
buffer.edit([(range, text)], None, cx);
|
buffer.edit([(range, text)], None, cx);
|
||||||
|
|
|
@ -157,7 +157,7 @@ impl RepositoryEntry {
|
||||||
self.statuses
|
self.statuses
|
||||||
.iter_from(&repo_path)
|
.iter_from(&repo_path)
|
||||||
.take_while(|(key, _)| key.starts_with(&repo_path))
|
.take_while(|(key, _)| key.starts_with(&repo_path))
|
||||||
// Short circut once we've found the highest level
|
// Short circuit once we've found the highest level
|
||||||
.take_until(|(_, status)| status == &&GitFileStatus::Conflict)
|
.take_until(|(_, status)| status == &&GitFileStatus::Conflict)
|
||||||
.map(|(_, status)| status)
|
.map(|(_, status)| status)
|
||||||
.reduce(
|
.reduce(
|
||||||
|
@ -3623,7 +3623,7 @@ pub trait WorktreeHandle {
|
||||||
|
|
||||||
impl WorktreeHandle for ModelHandle<Worktree> {
|
impl WorktreeHandle for ModelHandle<Worktree> {
|
||||||
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
|
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
|
||||||
// occurred before the worktree was constructed. These events can cause the worktree to perfrom
|
// occurred before the worktree was constructed. These events can cause the worktree to perform
|
||||||
// extra directory scans, and emit extra scan-state notifications.
|
// extra directory scans, and emit extra scan-state notifications.
|
||||||
//
|
//
|
||||||
// This function mutates the worktree's directory and waits for those mutations to be picked up,
|
// This function mutates the worktree's directory and waits for those mutations to be picked up,
|
||||||
|
|
|
@ -276,7 +276,7 @@ mod tests {
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Set up fake langauge server to return fuzzy matches against
|
// Set up fake language server to return fuzzy matches against
|
||||||
// a fixed set of symbol names.
|
// a fixed set of symbol names.
|
||||||
let fake_symbols = [
|
let fake_symbols = [
|
||||||
symbol("one", "/external"),
|
symbol("one", "/external"),
|
||||||
|
|
|
@@ -179,7 +179,11 @@ impl Rope {
     }

     pub fn bytes_in_range(&self, range: Range<usize>) -> Bytes {
-        Bytes::new(self, range)
+        Bytes::new(self, range, false)
+    }
+
+    pub fn reversed_bytes_in_range(&self, range: Range<usize>) -> Bytes {
+        Bytes::new(self, range, true)
     }

     pub fn chunks(&self) -> Chunks {
@@ -579,22 +583,33 @@ impl<'a> Iterator for Chunks<'a> {
 pub struct Bytes<'a> {
     chunks: sum_tree::Cursor<'a, Chunk, usize>,
     range: Range<usize>,
+    reversed: bool,
 }

 impl<'a> Bytes<'a> {
-    pub fn new(rope: &'a Rope, range: Range<usize>) -> Self {
+    pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
         let mut chunks = rope.chunks.cursor();
-        chunks.seek(&range.start, Bias::Right, &());
-        Self { chunks, range }
+        if reversed {
+            chunks.seek(&range.end, Bias::Left, &());
+        } else {
+            chunks.seek(&range.start, Bias::Right, &());
+        }
+        Self {
+            chunks,
+            range,
+            reversed,
+        }
     }

     pub fn peek(&self) -> Option<&'a [u8]> {
         let chunk = self.chunks.item()?;
+        if self.reversed && self.range.start >= self.chunks.end(&()) {
+            return None;
+        }
         let chunk_start = *self.chunks.start();
         if self.range.end <= chunk_start {
             return None;
         }

         let start = self.range.start.saturating_sub(chunk_start);
         let end = self.range.end - chunk_start;
         Some(&chunk.0.as_bytes()[start..chunk.0.len().min(end)])
@@ -607,8 +622,12 @@ impl<'a> Iterator for Bytes<'a> {
     fn next(&mut self) -> Option<Self::Item> {
         let result = self.peek();
         if result.is_some() {
-            self.chunks.next(&());
+            if self.reversed {
+                self.chunks.prev(&());
+            } else {
+                self.chunks.next(&());
+            }
         }
         result
     }
 }
@@ -617,11 +636,22 @@ impl<'a> io::Read for Bytes<'a> {
     fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
         if let Some(chunk) = self.peek() {
             let len = cmp::min(buf.len(), chunk.len());
-            buf[..len].copy_from_slice(&chunk[..len]);
-            self.range.start += len;
+            if self.reversed {
+                buf[..len].copy_from_slice(&chunk[chunk.len() - len..]);
+                buf[..len].reverse();
+                self.range.end -= len;
+            } else {
+                buf[..len].copy_from_slice(&chunk[..len]);
+                self.range.start += len;
+            }

             if len == chunk.len() {
-                self.chunks.next(&());
+                if self.reversed {
+                    self.chunks.prev(&());
+                } else {
+                    self.chunks.next(&());
+                }
             }
             Ok(len)
         } else {
             Ok(0)
@@ -476,7 +476,7 @@ message Symbol {
     string name = 4;
     int32 kind = 5;
     string path = 6;
-    // Cannot use generate anchors for unopend files,
+    // Cannot use generate anchors for unopened files,
     // so we are forced to use point coords instead
     PointUtf16 start = 7;
     PointUtf16 end = 8;
@@ -42,7 +42,7 @@ impl PublicKey {
 }

 impl PrivateKey {
-    /// Decrypt a base64-encoded string that was encrypted by the correspoding public key.
+    /// Decrypt a base64-encoded string that was encrypted by the corresponding public key.
     pub fn decrypt_string(&self, encrypted_string: &str) -> Result<String> {
         let encrypted_bytes = base64::decode_config(encrypted_string, base64::URL_SAFE)
             .context("failed to base64-decode encrypted string")?;
@@ -25,7 +25,7 @@ use std::{
     borrow::Cow,
     collections::HashSet,
     mem,
-    ops::Range,
+    ops::{Not, Range},
     path::PathBuf,
     sync::Arc,
 };
@@ -242,7 +242,13 @@ impl View for ProjectSearchView {

 impl Item for ProjectSearchView {
     fn tab_tooltip_text(&self, cx: &AppContext) -> Option<Cow<str>> {
-        Some(self.query_editor.read(cx).text(cx).into())
+        let query_text = self.query_editor.read(cx).text(cx);
+
+        query_text
+            .is_empty()
+            .not()
+            .then(|| query_text.into())
+            .or_else(|| Some("Project Search".into()))
     }

     fn act_as_type<'a>(
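The new `tab_tooltip_text` builds its `Option` with `Not` and `bool::then` instead of an explicit `if`/`else`. A tiny standalone sketch of the same pattern, with a hypothetical `tab_title` helper standing in for the real method:

```rust
use std::ops::Not;

// Non-empty query -> use it; empty query -> fall back to a fixed title.
fn tab_title(query: &str) -> String {
    query
        .is_empty()
        .not()
        .then(|| query.to_string())
        .unwrap_or_else(|| "Project Search".to_string())
}

fn main() {
    assert_eq!(tab_title("needle"), "needle");
    assert_eq!(tab_title(""), "Project Search");
}
```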
@@ -9,10 +9,23 @@ pub use settings_store::{Setting, SettingsJsonSchemaParams, SettingsStore};
 use std::{borrow::Cow, str};

 pub const DEFAULT_SETTINGS_ASSET_PATH: &str = "settings/default.json";
-pub const INITIAL_USER_SETTINGS_ASSET_PATH: &str = "settings/initial_user_settings.json";
+const INITIAL_USER_SETTINGS_ASSET_PATH: &str = "settings/initial_user_settings.json";
+const INITIAL_LOCAL_SETTINGS_ASSET_PATH: &str = "settings/initial_local_settings.json";

-pub fn initial_user_settings_content(assets: &'static impl AssetSource) -> Cow<'static, str> {
-    match assets.load(INITIAL_USER_SETTINGS_ASSET_PATH).unwrap() {
+pub fn default_settings() -> Cow<'static, str> {
+    asset_str(&assets::Assets, DEFAULT_SETTINGS_ASSET_PATH)
+}
+
+pub fn initial_user_settings_content(assets: &dyn AssetSource) -> Cow<'_, str> {
+    asset_str(assets, INITIAL_USER_SETTINGS_ASSET_PATH)
+}
+
+pub fn initial_local_settings_content(assets: &dyn AssetSource) -> Cow<'_, str> {
+    asset_str(assets, INITIAL_LOCAL_SETTINGS_ASSET_PATH)
+}
+
+fn asset_str<'a>(assets: &'a dyn AssetSource, path: &str) -> Cow<'a, str> {
+    match assets.load(path).unwrap() {
         Cow::Borrowed(s) => Cow::Borrowed(str::from_utf8(s).unwrap()),
         Cow::Owned(s) => Cow::Owned(String::from_utf8(s).unwrap()),
     }
@@ -1,11 +1,10 @@
-use crate::{settings_store::SettingsStore, Setting, DEFAULT_SETTINGS_ASSET_PATH};
+use crate::{settings_store::SettingsStore, Setting};
 use anyhow::Result;
 use assets::Assets;
 use fs::Fs;
 use futures::{channel::mpsc, StreamExt};
-use gpui::{executor::Background, AppContext, AssetSource};
+use gpui::{executor::Background, AppContext};
 use std::{
-    borrow::Cow,
     io::ErrorKind,
     path::{Path, PathBuf},
     str,
@@ -28,19 +27,12 @@ pub fn get_local<'a, T: Setting>(location: Option<(usize, &Path)>, cx: &'a AppCo
     cx.global::<SettingsStore>().get(location)
 }

-pub fn default_settings() -> Cow<'static, str> {
-    match Assets.load(DEFAULT_SETTINGS_ASSET_PATH).unwrap() {
-        Cow::Borrowed(s) => Cow::Borrowed(str::from_utf8(s).unwrap()),
-        Cow::Owned(s) => Cow::Owned(String::from_utf8(s).unwrap()),
-    }
-}
-
 pub const EMPTY_THEME_NAME: &'static str = "empty-theme";

 #[cfg(any(test, feature = "test-support"))]
 pub fn test_settings() -> String {
     let mut value = crate::settings_store::parse_json_with_comments::<serde_json::Value>(
-        default_settings().as_ref(),
+        crate::default_settings().as_ref(),
     )
     .unwrap();
     util::merge_non_null_json_value_into(
@@ -623,22 +623,6 @@ impl<T: Setting> AnySettingValue for SettingValue<T> {
     }
 }

-// impl Debug for SettingsStore {
-//     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-//         return f
-//             .debug_struct("SettingsStore")
-//             .field(
-//                 "setting_value_sets_by_type",
-//                 &self
-//                     .setting_values
-//                     .values()
-//                     .map(|set| (set.setting_type_name(), set))
-//                     .collect::<HashMap<_, _>>(),
-//             )
-//             .finish_non_exhaustive();
-//     }
-// }
-
 fn update_value_in_json_text<'a>(
     text: &mut String,
     key_path: &mut Vec<&'a str>,
@@ -681,6 +665,10 @@ fn update_value_in_json_text<'a>(
             key_path.pop();
         }
     } else if old_value != new_value {
+        let mut new_value = new_value.clone();
+        if let Some(new_object) = new_value.as_object_mut() {
+            new_object.retain(|_, v| !v.is_null());
+        }
         let (range, replacement) =
             replace_value_in_json_text(text, &key_path, tab_size, &new_value);
         text.replace_range(range.clone(), &replacement);
@@ -692,7 +680,7 @@ fn replace_value_in_json_text(
     text: &str,
     key_path: &[&str],
     tab_size: usize,
-    new_value: impl Serialize,
+    new_value: &serde_json::Value,
 ) -> (Range<usize>, String) {
     const LANGUAGE_OVERRIDES: &'static str = "language_overrides";
     const LANGUAGES: &'static str = "languages";
@@ -1039,24 +1027,32 @@ mod tests {
         r#"{
             "languages": {
                 "JSON": {
-                    "is_enabled": true
+                    "language_setting_1": true
                 }
             }
         }"#
        .unindent(),
         |settings| {
-            settings.languages.get_mut("JSON").unwrap().is_enabled = false;
             settings
                 .languages
-                .insert("Rust".into(), LanguageSettingEntry { is_enabled: true });
+                .get_mut("JSON")
+                .unwrap()
+                .language_setting_1 = Some(false);
+            settings.languages.insert(
+                "Rust".into(),
+                LanguageSettingEntry {
+                    language_setting_2: Some(true),
+                    ..Default::default()
+                },
+            );
         },
         r#"{
             "languages": {
                 "Rust": {
-                    "is_enabled": true
+                    "language_setting_2": true
                 },
                 "JSON": {
-                    "is_enabled": false
+                    "language_setting_1": false
                 }
             }
         }"#
@@ -1119,6 +1115,23 @@ mod tests {
         .unindent(),
         cx,
     );
+
+    check_settings_update::<UserSettings>(
+        &mut store,
+        r#"{
+        }
+        "#
+        .unindent(),
+        |settings| settings.age = Some(37),
+        r#"{
+            "user": {
+                "age": 37
+            }
+        }
+        "#
+        .unindent(),
+        cx,
+    );
 }

 fn check_settings_update<T: Setting>(
@@ -1247,9 +1260,10 @@ mod tests {
     languages: HashMap<String, LanguageSettingEntry>,
 }

-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
 struct LanguageSettingEntry {
-    is_enabled: bool,
+    language_setting_1: Option<bool>,
+    language_setting_2: Option<bool>,
 }

 impl Setting for LanguageSettings {
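The `update_value_in_json_text` hunk above clones the incoming value and drops null entries before writing it into the settings file, so optional fields that are unset do not overwrite existing keys. A short sketch of that pruning step on its own, assuming the serde_json crate:

```rust
use serde_json::json;

fn main() {
    // A hypothetical settings patch where one optional field is unset (null).
    let mut new_value = json!({ "age": 37, "staff": null });

    // Keep only the keys that actually carry a value.
    if let Some(new_object) = new_value.as_object_mut() {
        new_object.retain(|_, v| !v.is_null());
    }

    assert_eq!(new_value, json!({ "age": 37 }));
    println!("{new_value}");
}
```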
@@ -160,7 +160,7 @@ impl<M: Migrator> ThreadSafeConnection<M> {

         // Create a one shot channel for the result of the queued write
         // so we can await on the result
-        let (sender, reciever) = oneshot::channel();
+        let (sender, receiver) = oneshot::channel();

         let thread_safe_connection = (*self).clone();
         write_channel(Box::new(move || {
@@ -168,7 +168,7 @@ impl<M: Migrator> ThreadSafeConnection<M> {
             let result = connection.with_write(|connection| callback(connection));
             sender.send(result).ok();
         }));
-        reciever.map(|response| response.expect("Write queue unexpectedly closed"))
+        receiver.map(|response| response.expect("Write queue unexpectedly closed"))
     }

     pub(crate) fn create_connection(
@@ -245,10 +245,10 @@ pub fn background_thread_queue() -> WriteQueueConstructor {
     use std::sync::mpsc::channel;

     Box::new(|| {
-        let (sender, reciever) = channel::<QueuedWrite>();
+        let (sender, receiver) = channel::<QueuedWrite>();

         thread::spawn(move || {
-            while let Ok(write) = reciever.recv() {
+            while let Ok(write) = receiver.recv() {
                 write()
             }
         });
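`background_thread_queue` above is a single-writer queue: callers enqueue boxed closures onto a channel, one thread drains them in order, and each caller waits on its own per-call channel for the result. A std-only sketch of that shape (the real code hands back a future via `oneshot` instead of blocking):

```rust
use std::sync::mpsc;
use std::thread;

type QueuedWrite = Box<dyn FnOnce() + Send>;

fn main() {
    let (queue_tx, queue_rx) = mpsc::channel::<QueuedWrite>();

    // Single writer thread: executes queued writes strictly in order.
    let writer = thread::spawn(move || {
        while let Ok(write) = queue_rx.recv() {
            write();
        }
    });

    // Enqueue a "write" and wait for its result on a per-call channel.
    let (result_tx, result_rx) = mpsc::channel::<usize>();
    queue_tx
        .send(Box::new(move || {
            let rows_written = 42; // stand-in for the real database write
            result_tx.send(rows_written).ok();
        }))
        .unwrap();

    assert_eq!(result_rx.recv().unwrap(), 42);
    drop(queue_tx); // closing the queue lets the writer thread exit
    writer.join().unwrap();
}
```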
@@ -45,7 +45,7 @@ pub fn convert_color(alac_color: &AnsiColor, style: &TerminalStyle) -> Color {
 }

 ///Converts an 8 bit ANSI color to it's GPUI equivalent.
-///Accepts usize for compatability with the alacritty::Colors interface,
+///Accepts usize for compatibility with the alacritty::Colors interface,
 ///Other than that use case, should only be called with values in the [0,255] range
 pub fn get_color_at_index(index: &usize, style: &TerminalStyle) -> Color {
     match index {
@@ -78,7 +78,7 @@ pub fn get_color_at_index(index: &usize, style: &TerminalStyle) -> Color {
             let step = (u8::MAX as f32 / 24.).floor() as u8; //Split the RGB grayscale values into 24 chunks
             Color::new(i * step, i * step, i * step, u8::MAX) //Map the ANSI-grayscale components to the RGB-grayscale
         }
-        //For compatability with the alacritty::Colors interface
+        //For compatibility with the alacritty::Colors interface
         256 => style.foreground,
         257 => style.background,
         258 => style.cursor,
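The grayscale arm of `get_color_at_index` spreads the 24 ANSI grayscale entries across the 0..=255 RGB range with a fixed step. A simplified sketch of just that arithmetic; how the raw color index maps to the ramp position `i` is not shown in the hunk, so it is left as a parameter here:

```rust
// `i` is the position on the 24-entry grayscale ramp (an assumption; the
// real index-to-position mapping lives outside the quoted hunk).
fn grayscale_level(i: u8) -> u8 {
    assert!(i < 24);
    let step = (u8::MAX as f32 / 24.).floor() as u8; // 255 / 24 ~= 10
    i * step
}

fn main() {
    assert_eq!(grayscale_level(0), 0);
    assert_eq!(grayscale_level(23), 230);
}
```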
@@ -18,6 +18,6 @@ There are currently many distinct paths for getting keystrokes to the terminal:

 3. IME text. When the special character mappings fail, we pass the keystroke back to GPUI to hand it to the IME system. This comes back to us in the `View::replace_text_in_range()` method, and we then send that to the terminal directly, bypassing `try_keystroke()`.

-4. Pasted text has a seperate pathway.
+4. Pasted text has a separate pathway.

 Generally, there's a distinction between 'keystrokes that need to be mapped' and 'strings which need to be written'. I've attempted to unify these under the '.try_keystroke()' API and the `.input()` API (which try_keystroke uses) so we have consistent input handling across the terminal
@@ -40,7 +40,7 @@ function contrast_colour {

     # Uncomment the below for more precise luminance calculations

-    # # Calculate percieved brightness
+    # # Calculate perceived brightness
     # # See https://www.w3.org/TR/AERT#color-contrast
     # # and http://www.itu.int/rec/R-REC-BT.601
     # # Luminance is in range 0..5000 as each value is 0..5
@@ -34,7 +34,7 @@ use std::{mem, ops::Range};

 use crate::TerminalView;

-///The information generated during layout that is nescessary for painting
+///The information generated during layout that is necessary for painting
 pub struct LayoutState {
     cells: Vec<LayoutCell>,
     rects: Vec<LayoutRect>,
@@ -206,7 +206,7 @@ impl TerminalElement {
             //Expand background rect range
             {
                 if matches!(bg, Named(NamedColor::Background)) {
-                    //Continue to next cell, resetting variables if nescessary
+                    //Continue to next cell, resetting variables if necessary
                     cur_alac_color = None;
                     if let Some(rect) = cur_rect {
                         rects.push(rect);
@@ -804,7 +804,7 @@ mod tests {
         let workspace = workspace.read(cx);
         let active_entry = project.read(cx).active_entry();

-        //Make sure enviroment is as expeted
+        //Make sure environment is as expected
         assert!(active_entry.is_none());
         assert!(workspace.worktrees(cx).next().is_none());

@@ -825,7 +825,7 @@ mod tests {
         let workspace = workspace.read(cx);
         let active_entry = project.read(cx).active_entry();

-        //Make sure enviroment is as expeted
+        //Make sure environment is as expected
         assert!(active_entry.is_none());
         assert!(workspace.worktrees(cx).next().is_some());
@@ -193,7 +193,7 @@ fn test_line_len() {
 }

 #[test]
-fn test_common_prefix_at_positionn() {
+fn test_common_prefix_at_position() {
     let text = "a = str; b = δα";
     let buffer = Buffer::new(0, 0, text.into());

@@ -216,7 +216,7 @@ fn test_common_prefix_at_positionn() {
         empty_range_after(text, "str"),
     );

-    // prefix matching is case insenstive.
+    // prefix matching is case insensitive.
     assert_eq!(
         buffer.common_prefix_at(offset1, "Strαngε"),
         range_of(text, "str"),
@@ -1749,6 +1749,12 @@ impl BufferSnapshot {
         self.visible_text.bytes_in_range(start..end)
     }

+    pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+        let start = range.start.to_offset(self);
+        let end = range.end.to_offset(self);
+        self.visible_text.reversed_bytes_in_range(start..end)
+    }
+
     pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
         let start = range.start.to_offset(self);
         let end = range.end.to_offset(self);
@@ -9,6 +9,7 @@ pub mod test;
 use std::{
     cmp::{self, Ordering},
     ops::{AddAssign, Range, RangeInclusive},
+    panic::Location,
     pin::Pin,
     task::{Context, Poll},
 };
@@ -129,11 +130,13 @@ where
 {
     type Ok = T;

+    #[track_caller]
     fn log_err(self) -> Option<T> {
         match self {
             Ok(value) => Some(value),
             Err(error) => {
-                log::error!("{:?}", error);
+                let caller = Location::caller();
+                log::error!("{}:{}: {:?}", caller.file(), caller.line(), error);
                 None
             }
         }
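With `#[track_caller]` on `log_err`, `Location::caller()` reports the file and line of the call site rather than a line inside the helper. A runnable sketch of the same idea, using a free function and `eprintln!` in place of the real trait method and the `log` crate:

```rust
use std::panic::Location;

// Stand-in for the real `log_err`: with #[track_caller], Location::caller()
// names the line that called this function, not a line inside it.
#[track_caller]
fn log_err<T, E: std::fmt::Debug>(result: Result<T, E>) -> Option<T> {
    match result {
        Ok(value) => Some(value),
        Err(error) => {
            let caller = Location::caller();
            eprintln!("{}:{}: {:?}", caller.file(), caller.line(), error);
            None
        }
    }
}

fn main() {
    // The path below is a deliberately missing placeholder.
    let missing = std::fs::read_to_string("/definitely/not/here");
    assert!(log_err(missing).is_none()); // the logged location points at this line
}
```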
@@ -756,7 +756,7 @@ mod test {
             ˇ
             The quick"})
         .await;
-    // Indoc disallows trailing whitspace.
+    // Indoc disallows trailing whitespace.
     cx.assert(" ˇ \nThe quick").await;
 }

@@ -29,7 +29,7 @@ use tokio::{
 use crate::state::Mode;
 use collections::VecDeque;

-// Neovim doesn't like to be started simultaneously from multiple threads. We use thsi lock
+// Neovim doesn't like to be started simultaneously from multiple threads. We use this lock
 // to ensure we are only constructing one neovim connection at a time.
 #[cfg(feature = "neovim")]
 lazy_static! {
@@ -1022,7 +1022,7 @@ impl Pane {
         let is_active_item = target_item_id == active_item_id;
         let target_pane = cx.weak_handle();

-        // The `CloseInactiveItems` action should really be called "CloseOthers" and the behaviour should be dynamically based on the tab the action is ran on. Currenlty, this is a weird action because you can run it on a non-active tab and it will close everything by the actual active tab
+        // The `CloseInactiveItems` action should really be called "CloseOthers" and the behaviour should be dynamically based on the tab the action is ran on. Currently, this is a weird action because you can run it on a non-active tab and it will close everything by the actual active tab

         self.tab_context_menu.update(cx, |menu, cx| {
             menu.show(
@@ -15,7 +15,6 @@ mod toolbar;
 mod workspace_settings;

 use anyhow::{anyhow, Context, Result};
-use assets::Assets;
 use call::ActiveCall;
 use client::{
     proto::{self, PeerId},
@@ -83,7 +82,7 @@ use status_bar::StatusBar;
 pub use status_bar::StatusItemView;
 use theme::{Theme, ThemeSettings};
 pub use toolbar::{ToolbarItemLocation, ToolbarItemView};
-use util::{async_iife, paths, ResultExt};
+use util::{async_iife, ResultExt};
 pub use workspace_settings::{AutosaveSetting, GitGutterSetting, WorkspaceSettings};

 lazy_static! {
@@ -133,8 +132,6 @@ actions!(
     ]
 );

-actions!(zed, [OpenSettings]);
-
 #[derive(Clone, PartialEq)]
 pub struct OpenPaths {
     pub paths: Vec<PathBuf>,
@@ -295,17 +292,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
         .detach();
     });

-    cx.add_action(
-        move |_: &mut Workspace, _: &OpenSettings, cx: &mut ViewContext<Workspace>| {
-            create_and_open_local_file(&paths::SETTINGS, cx, || {
-                settings::initial_user_settings_content(&Assets)
-                    .as_ref()
-                    .into()
-            })
-            .detach_and_log_err(cx);
-        },
-    );
-
     let client = &app_state.client;
     client.add_view_request_handler(Workspace::handle_follow);
     client.add_view_message_handler(Workspace::handle_unfollow);
@@ -765,25 +751,21 @@ impl Workspace {
             DB.next_id().await.unwrap_or(0)
         };

-        let window_bounds_override =
-            ZED_WINDOW_POSITION
-                .zip(*ZED_WINDOW_SIZE)
-                .map(|(position, size)| {
-                    WindowBounds::Fixed(RectF::new(
-                        cx.platform().screens()[0].bounds().origin() + position,
-                        size,
-                    ))
-                });
-
-        let build_workspace = |cx: &mut ViewContext<Workspace>| {
-            Workspace::new(workspace_id, project_handle.clone(), app_state.clone(), cx)
-        };
-
         let workspace = requesting_window_id
             .and_then(|window_id| {
-                cx.update(|cx| cx.replace_root_view(window_id, |cx| build_workspace(cx)))
+                cx.update(|cx| {
+                    cx.replace_root_view(window_id, |cx| {
+                        Workspace::new(
+                            workspace_id,
+                            project_handle.clone(),
+                            app_state.clone(),
+                            cx,
+                        )
+                    })
+                })
             })
             .unwrap_or_else(|| {
+                let window_bounds_override = window_bounds_env_override(&cx);
                 let (bounds, display) = if let Some(bounds) = window_bounds_override {
                     (Some(bounds), None)
                 } else {
@@ -819,7 +801,14 @@ impl Workspace {
             // Use the serialized workspace to construct the new window
             cx.add_window(
                 (app_state.build_window_options)(bounds, display, cx.platform().as_ref()),
-                |cx| build_workspace(cx),
+                |cx| {
+                    Workspace::new(
+                        workspace_id,
+                        project_handle.clone(),
+                        app_state.clone(),
+                        cx,
+                    )
+                },
             )
             .1
         });
@@ -3120,6 +3109,17 @@ impl Workspace {
     }
 }

+fn window_bounds_env_override(cx: &AsyncAppContext) -> Option<WindowBounds> {
+    ZED_WINDOW_POSITION
+        .zip(*ZED_WINDOW_SIZE)
+        .map(|(position, size)| {
+            WindowBounds::Fixed(RectF::new(
+                cx.platform().screens()[0].bounds().origin() + position,
+                size,
+            ))
+        })
+}
+
 async fn open_items(
     serialized_workspace: Option<SerializedWorkspace>,
     workspace: &WeakViewHandle<Workspace>,
@@ -3652,8 +3652,13 @@ pub fn join_remote_project(
         })
         .await?;

+    let window_bounds_override = window_bounds_env_override(&cx);
     let (_, workspace) = cx.add_window(
-        (app_state.build_window_options)(None, None, cx.platform().as_ref()),
+        (app_state.build_window_options)(
+            window_bounds_override,
+            None,
+            cx.platform().as_ref(),
+        ),
         |cx| Workspace::new(0, project, app_state.clone(), cx),
     );
     (app_state.initialize_workspace)(
@@ -4434,7 +4439,7 @@ mod tests {
             assert!(!panel.has_focus(cx));
         });

-        // Transfering focus back to the panel keeps it zoomed
+        // Transferring focus back to the panel keeps it zoomed
         workspace.update(cx, |workspace, cx| {
             workspace.toggle_panel_focus::<TestPanel>(cx);
         });
@@ -1,9 +1,6 @@
 fn main() {
     println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");

-    if let Ok(value) = std::env::var("ZED_MIXPANEL_TOKEN") {
-        println!("cargo:rustc-env=ZED_MIXPANEL_TOKEN={value}");
-    }
     if let Ok(value) = std::env::var("ZED_PREVIEW_CHANNEL") {
         println!("cargo:rustc-env=ZED_PREVIEW_CHANNEL={value}");
     }
@@ -207,7 +207,7 @@ impl LspAdapter for EsLintLspAdapter {
         http: Arc<dyn HttpClient>,
     ) -> Result<Box<dyn 'static + Send + Any>> {
         // At the time of writing the latest vscode-eslint release was released in 2020 and requires
-        // special custom LSP protocol extensions be handled to fully initalize. Download the latest
+        // special custom LSP protocol extensions be handled to fully initialize. Download the latest
         // prerelease instead to sidestep this issue
         let release = latest_github_release("microsoft/vscode-eslint", true, http).await?;
         Ok(Box::new(GitHubLspBinaryVersion {
@@ -1,4 +1,4 @@
-// Allow binary to be called Zed for a nice application menu when running executable direcly
+// Allow binary to be called Zed for a nice application menu when running executable directly
 #![allow(non_snake_case)]

 use anyhow::{anyhow, Context, Result};
@@ -32,6 +32,7 @@ use std::{
     ffi::OsStr,
     fs::OpenOptions,
     io::Write as _,
+    ops::Not,
     os::unix::prelude::OsStrExt,
     panic,
     path::{Path, PathBuf},
@@ -55,9 +56,7 @@ use fs::RealFs;
 #[cfg(debug_assertions)]
 use staff_mode::StaffMode;
 use util::{channel::RELEASE_CHANNEL, paths, ResultExt, TryFutureExt};
-use workspace::{
-    item::ItemHandle, notifications::NotifyResultExt, AppState, OpenSettings, Workspace,
-};
+use workspace::{item::ItemHandle, notifications::NotifyResultExt, AppState, Workspace};
 use zed::{
     self, build_window_options, handle_keymap_file_changes, initialize_workspace, languages, menus,
 };
@@ -70,10 +69,7 @@ fn main() {
     log::info!("========== starting zed ==========");
     let mut app = gpui::App::new(Assets).unwrap();

-    let app_version = ZED_APP_VERSION
-        .or_else(|| app.platform().app_version().ok())
-        .map_or("dev".to_string(), |v| v.to_string());
-    init_panic_hook(app_version);
+    init_panic_hook(&app);

     app.background();

@@ -173,11 +169,6 @@ fn main() {
         .detach();

         client.telemetry().start();
-        client.telemetry().report_mixpanel_event(
-            "start app",
-            Default::default(),
-            *settings::get::<TelemetrySettings>(cx),
-        );

         let app_state = Arc::new(AppState {
             languages,
@@ -374,33 +365,96 @@ struct Panic {
     #[serde(skip_serializing_if = "Option::is_none")]
     location_data: Option<LocationData>,
     backtrace: Vec<String>,
-    // TODO
-    // stripped_backtrace: String,
-    time: u128,
+    app_version: String,
+    release_channel: String,
+    os_name: String,
+    os_version: Option<String>,
+    architecture: String,
+    panicked_on: u128,
+    identifying_backtrace: Option<Vec<String>>,
 }

 #[derive(Serialize)]
 struct PanicRequest {
     panic: Panic,
-    version: String,
     token: String,
 }

-fn init_panic_hook(app_version: String) {
+fn init_panic_hook(app: &App) {
     let is_pty = stdout_is_a_pty();
+    let platform = app.platform();

     panic::set_hook(Box::new(move |info| {
-        let backtrace = Backtrace::new();
+        let app_version = ZED_APP_VERSION
+            .or_else(|| platform.app_version().ok())
+            .map_or("dev".to_string(), |v| v.to_string());

         let thread = thread::current();
         let thread = thread.name().unwrap_or("<unnamed>");

-        let payload = match info.payload().downcast_ref::<&'static str>() {
-            Some(s) => *s,
-            None => match info.payload().downcast_ref::<String>() {
-                Some(s) => &**s,
-                None => "Box<Any>",
-            },
-        };
+        let payload = info.payload();
+        let payload = None
+            .or_else(|| payload.downcast_ref::<&str>().map(|s| s.to_string()))
+            .or_else(|| payload.downcast_ref::<String>().map(|s| s.clone()))
+            .unwrap_or_else(|| "Box<Any>".to_string());
+        let backtrace = Backtrace::new();
+        let backtrace = backtrace
+            .frames()
+            .iter()
+            .filter_map(|frame| {
+                let symbol = frame.symbols().first()?;
+                let path = symbol.filename()?;
+                Some((path, symbol.lineno(), format!("{:#}", symbol.name()?)))
+            })
+            .collect::<Vec<_>>();
+
+        let this_file_path = Path::new(file!());
+
+        // Find the first frame in the backtrace for this panic hook itself. Exclude
+        // that frame and all frames before it.
+        let mut start_frame_ix = 0;
+        let mut codebase_root_path = None;
+        for (ix, (path, _, _)) in backtrace.iter().enumerate() {
+            if path.ends_with(this_file_path) {
+                start_frame_ix = ix + 1;
+                codebase_root_path = path.ancestors().nth(this_file_path.components().count());
+                break;
+            }
+        }
+
+        // Exclude any subsequent frames inside of rust's panic handling system.
+        while let Some((path, _, _)) = backtrace.get(start_frame_ix) {
+            if path.starts_with("/rustc") {
+                start_frame_ix += 1;
+            } else {
+                break;
+            }
+        }
+
+        // Build two backtraces:
+        // * one for display, which includes symbol names for all frames, and files
+        // and line numbers for symbols in this codebase
+        // * one for identification and de-duplication, which only includes symbol
+        // names for symbols in this codebase.
+        let mut display_backtrace = Vec::new();
+        let mut identifying_backtrace = Vec::new();
+        for (path, line, symbol) in &backtrace[start_frame_ix..] {
+            display_backtrace.push(symbol.clone());
+
+            if let Some(codebase_root_path) = &codebase_root_path {
+                if let Ok(suffix) = path.strip_prefix(&codebase_root_path) {
+                    identifying_backtrace.push(symbol.clone());
+
+                    let display_path = suffix.to_string_lossy();
+                    if let Some(line) = line {
+                        display_backtrace.push(format!(" {display_path}:{line}"));
+                    } else {
+                        display_backtrace.push(format!(" {display_path}"));
+                    }
+                }
+            }
+        }
+
         let panic_data = Panic {
             thread: thread.into(),
@@ -409,15 +463,23 @@ fn init_panic_hook(app_version: String) {
                 file: location.file().into(),
                 line: location.line(),
             }),
-            backtrace: format!("{:?}", backtrace)
-                .split("\n")
-                .map(|line| line.to_string())
-                .collect(),
-            // modified_backtrace: None,
-            time: SystemTime::now()
+            app_version: app_version.clone(),
+            release_channel: RELEASE_CHANNEL.dev_name().into(),
+            os_name: platform.os_name().into(),
+            os_version: platform
+                .os_version()
+                .ok()
+                .map(|os_version| os_version.to_string()),
+            architecture: env::consts::ARCH.into(),
+            panicked_on: SystemTime::now()
                 .duration_since(UNIX_EPOCH)
                 .unwrap()
                 .as_millis(),
+            backtrace: display_backtrace,
+            identifying_backtrace: identifying_backtrace
+                .is_empty()
+                .not()
+                .then_some(identifying_backtrace),
         };

         if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
@@ -427,8 +489,7 @@ fn init_panic_hook(app_version: String) {
         }

         let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
-        let panic_file_path =
-            paths::LOGS_DIR.join(format!("zed-{}-{}.panic", app_version, timestamp));
+        let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
         let panic_file = std::fs::OpenOptions::new()
             .append(true)
             .create(true)
@@ -463,15 +524,9 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
                 continue;
             };

-            let mut components = filename.split('-');
-            if components.next() != Some("zed") {
+            if !filename.starts_with("zed") {
                 continue;
             }
-            let version = if let Some(version) = components.next() {
-                version
-            } else {
-                continue;
-            };

             if telemetry_settings.diagnostics {
                 let panic_data_text = smol::fs::read_to_string(&child_path)
@@ -480,7 +535,6 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {

                     let body = serde_json::to_string(&PanicRequest {
                         panic: serde_json::from_str(&panic_data_text)?,
-                        version: version.to_string(),
                         token: ZED_SECRET_CLIENT_TOKEN.into(),
                     })
                     .unwrap();
@@ -821,6 +875,6 @@ pub fn background_actions() -> &'static [(&'static str, &'static dyn Action)] {
         ("Go to file", &file_finder::Toggle),
         ("Open command palette", &command_palette::Toggle),
         ("Open recent projects", &recent_projects::OpenRecent),
-        ("Change your settings", &OpenSettings),
+        ("Change your settings", &zed::OpenSettings),
     ]
 }
@@ -12,10 +12,11 @@ pub fn menus() -> Vec<Menu<'static>> {
         MenuItem::submenu(Menu {
             name: "Preferences",
             items: vec![
-                MenuItem::action("Open Settings", workspace::OpenSettings),
+                MenuItem::action("Open Settings", super::OpenSettings),
                 MenuItem::action("Open Key Bindings", super::OpenKeymap),
                 MenuItem::action("Open Default Settings", super::OpenDefaultSettings),
                 MenuItem::action("Open Default Key Bindings", super::OpenDefaultKeymap),
+                MenuItem::action("Open Local Settings", super::OpenLocalSettings),
                 MenuItem::action("Select Theme", theme_selector::Toggle),
             ],
         }),
@@ -31,16 +31,23 @@ use project_panel::ProjectPanel;
 use search::{BufferSearchBar, ProjectSearchBar};
 use serde::Deserialize;
 use serde_json::to_string_pretty;
-use settings::{KeymapFileContent, SettingsStore, DEFAULT_SETTINGS_ASSET_PATH};
+use settings::{
+    initial_local_settings_content, KeymapFileContent, SettingsStore, DEFAULT_SETTINGS_ASSET_PATH,
+};
 use std::{borrow::Cow, str, sync::Arc};
 use terminal_view::terminal_panel::{self, TerminalPanel};
-use util::{channel::ReleaseChannel, paths, ResultExt};
+use util::{
+    channel::ReleaseChannel,
+    paths::{self, LOCAL_SETTINGS_RELATIVE_PATH},
+    ResultExt,
+};
 use uuid::Uuid;
 use welcome::BaseKeymap;
 pub use workspace;
 use workspace::{
-    create_and_open_local_file, dock::PanelHandle, open_new, AppState, NewFile, NewWindow,
-    Workspace, WorkspaceSettings,
+    create_and_open_local_file, dock::PanelHandle,
+    notifications::simple_message_notification::MessageNotification, open_new, AppState, NewFile,
+    NewWindow, Workspace, WorkspaceSettings,
 };

 #[derive(Deserialize, Clone, PartialEq)]
@@ -66,6 +73,8 @@ actions!(
         OpenLicenses,
         OpenTelemetryLog,
         OpenKeymap,
+        OpenSettings,
+        OpenLocalSettings,
         OpenDefaultSettings,
         OpenDefaultKeymap,
         IncreaseBufferFontSize,
@@ -158,6 +167,17 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut gpui::AppContext) {
             create_and_open_local_file(&paths::KEYMAP, cx, Default::default).detach_and_log_err(cx);
         },
     );
+    cx.add_action(
+        move |_: &mut Workspace, _: &OpenSettings, cx: &mut ViewContext<Workspace>| {
+            create_and_open_local_file(&paths::SETTINGS, cx, || {
+                settings::initial_user_settings_content(&Assets)
+                    .as_ref()
+                    .into()
+            })
+            .detach_and_log_err(cx);
+        },
+    );
+    cx.add_action(open_local_settings_file);
     cx.add_action(
         move |workspace: &mut Workspace, _: &OpenDefaultKeymap, cx: &mut ViewContext<Workspace>| {
             open_bundled_file(
@@ -555,6 +575,76 @@ pub fn handle_keymap_file_changes(
         .detach();
 }

+fn open_local_settings_file(
+    workspace: &mut Workspace,
+    _: &OpenLocalSettings,
+    cx: &mut ViewContext<Workspace>,
+) {
+    let project = workspace.project().clone();
+    let worktree = project
+        .read(cx)
+        .visible_worktrees(cx)
+        .find_map(|tree| tree.read(cx).root_entry()?.is_dir().then_some(tree));
+    if let Some(worktree) = worktree {
+        let tree_id = worktree.read(cx).id();
+        cx.spawn(|workspace, mut cx| async move {
+            let file_path = &*LOCAL_SETTINGS_RELATIVE_PATH;
+
+            if let Some(dir_path) = file_path.parent() {
+                if worktree.read_with(&cx, |tree, _| tree.entry_for_path(dir_path).is_none()) {
+                    project
+                        .update(&mut cx, |project, cx| {
+                            project.create_entry((tree_id, dir_path), true, cx)
+                        })
+                        .ok_or_else(|| anyhow!("worktree was removed"))?
+                        .await?;
+                }
+            }
+
+            if worktree.read_with(&cx, |tree, _| tree.entry_for_path(file_path).is_none()) {
+                project
+                    .update(&mut cx, |project, cx| {
+                        project.create_entry((tree_id, file_path), false, cx)
+                    })
+                    .ok_or_else(|| anyhow!("worktree was removed"))?
+                    .await?;
+            }
+
+            let editor = workspace
+                .update(&mut cx, |workspace, cx| {
+                    workspace.open_path((tree_id, file_path), None, true, cx)
+                })?
+                .await?
+                .downcast::<Editor>()
+                .ok_or_else(|| anyhow!("unexpected item type"))?;
+
+            editor
+                .downgrade()
+                .update(&mut cx, |editor, cx| {
+                    if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
+                        if buffer.read(cx).is_empty() {
+                            buffer.update(cx, |buffer, cx| {
+                                buffer.edit(
+                                    [(0..0, initial_local_settings_content(&Assets))],
+                                    None,
+                                    cx,
+                                )
+                            });
+                        }
+                    }
+                })
+                .ok();
+
+            anyhow::Ok(())
+        })
+        .detach();
+    } else {
+        workspace.show_notification(0, cx, |cx| {
+            cx.add_view(|_| MessageNotification::new("This project has no folders open."))
+        })
+    }
+}
+
 fn open_telemetry_log_file(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
     workspace.with_local_workspace(cx, move |workspace, cx| {
         let app_state = workspace.app_state().clone();
@@ -1,30 +0,0 @@
-import datetime
-import sys
-import requests
-
-def main():
-    version = sys.argv[1]
-    version = version.removeprefix("v")
-    project_id = sys.argv[2]
-    account_username = sys.argv[3]
-    account_secret = sys.argv[4]
-
-    current_datetime = datetime.datetime.now(datetime.timezone.utc)
-    current_datetime = current_datetime.strftime("%Y-%m-%d %H:%M:%S")
-
-    url = f"https://mixpanel.com/api/app/projects/{project_id}/annotations"
-
-    payload = {
-        "date": current_datetime,
-        "description": version
-    }
-
-    response = requests.post(
-        url,
-        auth=(account_username, account_secret),
-        json=payload
-    )
-
-
-if __name__ == "__main__":
-    main()
@@ -1 +0,0 @@
-requests==2.28.1
@@ -23,7 +23,7 @@ function checkLicenses(
     licenses: string[]
 ) {
     for (const { meta } of schemeMetaWithLicense) {
-        // FIXME: Add support for conjuctions and conditions
+        // FIXME: Add support for conjunctions and conditions
         if (licenses.indexOf(meta.license.SPDX) < 0) {
             throw Error(
                 `License for theme ${meta.name} (${meta.license.SPDX}) is not supported`
@@ -112,7 +112,7 @@ export default function editor(colorScheme: ColorScheme) {
             widthEm: 0.15,
             cornerRadius: 0.05,
         },
-        /** Highlights matching occurences of what is under the cursor
+        /** Highlights matching occurrences of what is under the cursor
          * as well as matched brackets
          */
         documentHighlightReadBackground: withOpacity(
@@ -8,10 +8,10 @@ export default function projectPanel(colorScheme: ColorScheme) {
     let layer = colorScheme.middle

     let baseEntry = {
-        height: 24,
+        height: 22,
         iconColor: foreground(layer, "variant"),
-        iconSize: 8,
-        iconSpacing: 8,
+        iconSize: 7,
+        iconSpacing: 5,
     }

     let status = {
@@ -71,8 +71,8 @@ export default function projectPanel(colorScheme: ColorScheme) {
             },
         },
         background: background(layer),
-        padding: { left: 12, right: 12, top: 6, bottom: 6 },
-        indentWidth: 8,
+        padding: { left: 6, right: 6, top: 0, bottom: 6 },
+        indentWidth: 12,
         entry,
         draggedEntry: {
             ...baseEntry,
|
@ -83,7 +83,12 @@ export default function projectPanel(colorScheme: ColorScheme) {
|
||||||
},
|
},
|
||||||
ignoredEntry: {
|
ignoredEntry: {
|
||||||
...entry,
|
...entry,
|
||||||
|
iconColor: foreground(layer, "disabled"),
|
||||||
text: text(layer, "mono", "disabled"),
|
text: text(layer, "mono", "disabled"),
|
||||||
|
active: {
|
||||||
|
...entry.active,
|
||||||
|
iconColor: foreground(layer, "variant"),
|
||||||
|
}
|
||||||
},
|
},
|
||||||
cutEntry: {
|
cutEntry: {
|
||||||
...entry,
|
...entry,
|
||||||
|
|
|
@ -33,7 +33,7 @@ export default function search(colorScheme: ColorScheme) {
|
||||||
};
|
};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
// TODO: Add an activeMatchBackground on the rust side to differenciate between active and inactive
|
// TODO: Add an activeMatchBackground on the rust side to differentiate between active and inactive
|
||||||
matchBackground: withOpacity(foreground(layer, "accent"), 0.4),
|
matchBackground: withOpacity(foreground(layer, "accent"), 0.4),
|
||||||
optionButton: {
|
optionButton: {
|
||||||
...text(layer, "mono", "on"),
|
...text(layer, "mono", "on"),
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
import { Curve } from "./ref/curves"
|
import { Curve } from "./ref/curves"
|
||||||
|
|
||||||
export interface ColorAccessiblityValue {
|
export interface ColorAccessibilityValue {
|
||||||
value: number
|
value: number
|
||||||
aaPass: boolean
|
aaPass: boolean
|
||||||
aaaPass: boolean
|
aaaPass: boolean
|
||||||
|
@@ -12,14 +12,14 @@ export interface ColorAccessiblityValue {
  * @note This implementation is currently basic – Currently we only calculate contrasts against black and white, in the future will allow for dynamic color contrast calculation based on the colors present in a given palette.
  * @note The goal is to align with WCAG3 accessibility standards as they become stabilized. See the [WCAG 3 Introduction](https://www.w3.org/WAI/standards-guidelines/wcag/wcag3-intro/) for more information.
  */
-export interface ColorAccessiblity {
-    black: ColorAccessiblityValue
-    white: ColorAccessiblityValue
+export interface ColorAccessibility {
+    black: ColorAccessibilityValue
+    white: ColorAccessibilityValue
 }

 export type Color = {
     step: number
-    contrast: ColorAccessiblity
+    contrast: ColorAccessibility
     hex: string
     lch: number[]
     rgba: number[]