catchup with main

This commit is contained in:
KCaverly 2023-08-01 10:40:38 -04:00
commit 300c693d55
34 changed files with 1443 additions and 415 deletions

View file

@ -6,6 +6,16 @@ jobs:
discord_release: discord_release:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Get appropriate URL
id: get-appropriate-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
fi
echo "::set-output name=URL::$URL"
- name: Discord Webhook Action - name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0 uses: tsickert/discord-webhook@v5.3.0
with: with:
@ -13,6 +23,6 @@ jobs:
content: | content: |
📣 Zed ${{ github.event.release.tag_name }} was just released! 📣 Zed ${{ github.event.release.tag_name }} was just released!
Restart your Zed or head to https://zed.dev/releases/stable/latest to grab it. Restart your Zed or head to ${{ steps.get-appropriate-url.outputs.URL }} to grab it.
${{ github.event.release.body }} ${{ github.event.release.body }}

5
.zed/settings.json Normal file
View file

@ -0,0 +1,5 @@
{
"JSON": {
"tab_size": 4
}
}

147
Cargo.lock generated
View file

@ -141,7 +141,7 @@ source = "git+https://github.com/alacritty/alacritty?rev=7b9f32300ee0a249c087230
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -187,9 +187,9 @@ checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]] [[package]]
name = "alsa" name = "alsa"
version = "0.7.0" version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8512c9117059663fb5606788fbca3619e2a91dac0e3fe516242eab1fa6be5e44" checksum = "e2562ad8dcf0f789f65c6fdaad8a8a9708ed6b488e649da28c01656ad66b8b47"
dependencies = [ dependencies = [
"alsa-sys", "alsa-sys",
"bitflags 1.3.2", "bitflags 1.3.2",
@ -215,9 +215,9 @@ checksum = "ec8ad6edb4840b78c5c3d88de606b22252d552b55f3a4699fbb10fc070ec3049"
[[package]] [[package]]
name = "android-activity" name = "android-activity"
version = "0.4.2" version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40bc1575e653f158cbdc6ebcd917b9564e66321c5325c232c3591269c257be69" checksum = "64529721f27c2314ced0890ce45e469574a73e5e6fdd6e9da1860eb29285f5e0"
dependencies = [ dependencies = [
"android-properties", "android-properties",
"bitflags 1.3.2", "bitflags 1.3.2",
@ -507,7 +507,7 @@ checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -555,7 +555,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -598,7 +598,7 @@ checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -856,7 +856,7 @@ dependencies = [
"regex", "regex",
"rustc-hash", "rustc-hash",
"shlex", "shlex",
"syn 2.0.27", "syn 2.0.28",
"which", "which",
] ]
@ -1040,7 +1040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05"
dependencies = [ dependencies = [
"memchr", "memchr",
"regex-automata 0.3.3", "regex-automata 0.3.4",
"serde", "serde",
] ]
@ -1358,7 +1358,7 @@ dependencies = [
"heck 0.4.1", "heck 0.4.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -1425,7 +1425,7 @@ dependencies = [
"sum_tree", "sum_tree",
"tempfile", "tempfile",
"thiserror", "thiserror",
"time 0.3.23", "time 0.3.24",
"tiny_http", "tiny_http",
"url", "url",
"util", "util",
@ -1527,7 +1527,7 @@ dependencies = [
"sha-1 0.9.8", "sha-1 0.9.8",
"sqlx", "sqlx",
"theme", "theme",
"time 0.3.23", "time 0.3.24",
"tokio", "tokio",
"tokio-tungstenite", "tokio-tungstenite",
"toml 0.5.11", "toml 0.5.11",
@ -2124,6 +2124,15 @@ dependencies = [
"byteorder", "byteorder",
] ]
[[package]]
name = "deranged"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8810e7e2cf385b1e9b50d68264908ec367ba642c96d02edfe61c39e88e2a3c01"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "dhat" name = "dhat"
version = "0.3.2" version = "0.3.2"
@ -2425,9 +2434,9 @@ dependencies = [
[[package]] [[package]]
name = "errno" name = "errno"
version = "0.3.1" version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f"
dependencies = [ dependencies = [
"errno-dragonfly", "errno-dragonfly",
"libc", "libc",
@ -2731,7 +2740,7 @@ dependencies = [
"smol", "smol",
"sum_tree", "sum_tree",
"tempfile", "tempfile",
"time 0.3.23", "time 0.3.24",
"util", "util",
] ]
@ -2881,7 +2890,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -3121,7 +3130,7 @@ dependencies = [
"smol", "smol",
"sqlez", "sqlez",
"sum_tree", "sum_tree",
"time 0.3.23", "time 0.3.24",
"tiny-skia", "tiny-skia",
"usvg", "usvg",
"util", "util",
@ -4034,9 +4043,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]] [[package]]
name = "linux-raw-sys" name = "linux-raw-sys"
version = "0.4.3" version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]] [[package]]
name = "lipsum" name = "lipsum"
@ -4744,7 +4753,7 @@ dependencies = [
"proc-macro-crate 1.3.1", "proc-macro-crate 1.3.1",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -4895,7 +4904,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -5134,9 +5143,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]] [[package]]
name = "pest" name = "pest"
version = "2.7.1" version = "2.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5" checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a"
dependencies = [ dependencies = [
"thiserror", "thiserror",
"ucd-trie", "ucd-trie",
@ -5192,7 +5201,7 @@ checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -5230,7 +5239,7 @@ dependencies = [
"line-wrap", "line-wrap",
"quick-xml", "quick-xml",
"serde", "serde",
"time 0.3.23", "time 0.3.24",
] ]
[[package]] [[package]]
@ -5345,7 +5354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62" checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -5905,7 +5914,7 @@ checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575"
dependencies = [ dependencies = [
"aho-corasick 1.0.2", "aho-corasick 1.0.2",
"memchr", "memchr",
"regex-automata 0.3.3", "regex-automata 0.3.4",
"regex-syntax 0.7.4", "regex-syntax 0.7.4",
] ]
@ -5920,9 +5929,9 @@ dependencies = [
[[package]] [[package]]
name = "regex-automata" name = "regex-automata"
version = "0.3.3" version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" checksum = "b7b6d6190b7594385f61bd3911cd1be99dfddcfc365a4160cc2ab5bff4aed294"
dependencies = [ dependencies = [
"aho-corasick 1.0.2", "aho-corasick 1.0.2",
"memchr", "memchr",
@ -6217,7 +6226,7 @@ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"rust-embed-utils", "rust-embed-utils",
"syn 2.0.27", "syn 2.0.28",
"walkdir", "walkdir",
] ]
@ -6234,13 +6243,12 @@ dependencies = [
[[package]] [[package]]
name = "rust_decimal" name = "rust_decimal"
version = "1.30.0" version = "1.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0446843641c69436765a35a5a77088e28c2e6a12da93e84aa3ab1cd4aa5a042" checksum = "4a2ab0025103a60ecaaf3abf24db1db240a4e1c15837090d2c32f625ac98abea"
dependencies = [ dependencies = [
"arrayvec 0.7.4", "arrayvec 0.7.4",
"borsh", "borsh",
"bytecheck",
"byteorder", "byteorder",
"bytes 1.4.0", "bytes 1.4.0",
"num-traits", "num-traits",
@ -6294,7 +6302,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06"
dependencies = [ dependencies = [
"bitflags 1.3.2", "bitflags 1.3.2",
"errno 0.3.1", "errno 0.3.2",
"io-lifetimes 1.0.11", "io-lifetimes 1.0.11",
"libc", "libc",
"linux-raw-sys 0.3.8", "linux-raw-sys 0.3.8",
@ -6308,9 +6316,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5"
dependencies = [ dependencies = [
"bitflags 2.3.3", "bitflags 2.3.3",
"errno 0.3.1", "errno 0.3.2",
"libc", "libc",
"linux-raw-sys 0.4.3", "linux-raw-sys 0.4.5",
"windows-sys", "windows-sys",
] ]
@ -6509,7 +6517,7 @@ dependencies = [
"serde_json", "serde_json",
"sqlx", "sqlx",
"thiserror", "thiserror",
"time 0.3.23", "time 0.3.24",
"tracing", "tracing",
"url", "url",
"uuid 1.4.1", "uuid 1.4.1",
@ -6537,7 +6545,7 @@ dependencies = [
"rust_decimal", "rust_decimal",
"sea-query-derive", "sea-query-derive",
"serde_json", "serde_json",
"time 0.3.23", "time 0.3.24",
"uuid 1.4.1", "uuid 1.4.1",
] ]
@ -6552,7 +6560,7 @@ dependencies = [
"sea-query", "sea-query",
"serde_json", "serde_json",
"sqlx", "sqlx",
"time 0.3.23", "time 0.3.24",
"uuid 1.4.1", "uuid 1.4.1",
] ]
@ -6725,22 +6733,22 @@ checksum = "5a9f47faea3cad316faa914d013d24f471cd90bfca1a0c70f05a3f42c6441e99"
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.177" version = "1.0.180"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63ba2516aa6bf82e0b19ca8b50019d52df58455d3cf9bdaf6315225fdd0c560a" checksum = "0ea67f183f058fe88a4e3ec6e2788e003840893b91bac4559cabedd00863b3ed"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.177" version = "1.0.180"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "401797fe7833d72109fedec6bfcbe67c0eed9b99772f26eb8afd261f0abc6fd3" checksum = "24e744d7782b686ab3b73267ef05697159cc0e5abbed3f47f9933165e5219036"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -6795,7 +6803,7 @@ checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -7241,7 +7249,7 @@ dependencies = [
"sqlx-rt", "sqlx-rt",
"stringprep", "stringprep",
"thiserror", "thiserror",
"time 0.3.23", "time 0.3.24",
"tokio-stream", "tokio-stream",
"url", "url",
"uuid 1.4.1", "uuid 1.4.1",
@ -7490,9 +7498,9 @@ dependencies = [
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.27" version = "2.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -7560,9 +7568,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]] [[package]]
name = "target-lexicon" name = "target-lexicon"
version = "0.12.10" version = "0.12.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e" checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"
[[package]] [[package]]
name = "tempdir" name = "tempdir"
@ -7745,7 +7753,7 @@ checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -7818,10 +7826,11 @@ dependencies = [
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.23" version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" checksum = "b79eabcd964882a646b3584543ccabeae7869e9ac32a46f6f22b7a5bd405308b"
dependencies = [ dependencies = [
"deranged",
"itoa 1.0.9", "itoa 1.0.9",
"serde", "serde",
"time-core", "time-core",
@ -7836,9 +7845,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb"
[[package]] [[package]]
name = "time-macros" name = "time-macros"
version = "0.2.10" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" checksum = "eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd"
dependencies = [ dependencies = [
"time-core", "time-core",
] ]
@ -7934,7 +7943,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -8156,7 +8165,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]
@ -8270,7 +8279,7 @@ dependencies = [
[[package]] [[package]]
name = "tree-sitter-elixir" name = "tree-sitter-elixir"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/elixir-lang/tree-sitter-elixir?rev=4ba9dab6e2602960d95b2b625f3386c27e08084e#4ba9dab6e2602960d95b2b625f3386c27e08084e" source = "git+https://github.com/elixir-lang/tree-sitter-elixir?rev=a2861e88a730287a60c11ea9299c033c7d076e30#a2861e88a730287a60c11ea9299c033c7d076e30"
dependencies = [ dependencies = [
"cc", "cc",
"tree-sitter", "tree-sitter",
@ -8370,6 +8379,15 @@ dependencies = [
"tree-sitter", "tree-sitter",
] ]
[[package]]
name = "tree-sitter-nix"
version = "0.0.1"
source = "git+https://github.com/nix-community/tree-sitter-nix?rev=66e3e9ce9180ae08fc57372061006ef83f0abde7#66e3e9ce9180ae08fc57372061006ef83f0abde7"
dependencies = [
"cc",
"tree-sitter",
]
[[package]] [[package]]
name = "tree-sitter-php" name = "tree-sitter-php"
version = "0.19.1" version = "0.19.1"
@ -8947,7 +8965,7 @@ dependencies = [
"once_cell", "once_cell",
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -8981,7 +8999,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
"wasm-bindgen-backend", "wasm-bindgen-backend",
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
@ -9611,9 +9629,9 @@ dependencies = [
[[package]] [[package]]
name = "winnow" name = "winnow"
version = "0.5.1" version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25b5872fa2e10bd067ae946f927e726d7d603eaeb6e02fa6a350e0722d2b8c11" checksum = "8bd122eb777186e60c3fdf765a58ac76e41c582f1f535fbf3314434c6b58f3f7"
dependencies = [ dependencies = [
"memchr", "memchr",
] ]
@ -9896,6 +9914,7 @@ dependencies = [
"tree-sitter-json 0.20.0", "tree-sitter-json 0.20.0",
"tree-sitter-lua", "tree-sitter-lua",
"tree-sitter-markdown", "tree-sitter-markdown",
"tree-sitter-nix",
"tree-sitter-php", "tree-sitter-php",
"tree-sitter-python", "tree-sitter-python",
"tree-sitter-racket", "tree-sitter-racket",
@ -9941,7 +9960,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"syn 2.0.27", "syn 2.0.28",
] ]
[[package]] [[package]]

View file

@ -111,7 +111,7 @@ tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", re
tree-sitter-c = "0.20.1" tree-sitter-c = "0.20.1"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" } tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev="f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" } tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "4ba9dab6e2602960d95b2b625f3386c27e08084e" } tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"} tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40"}
tree-sitter-embedded-template = "0.20.0" tree-sitter-embedded-template = "0.20.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" } tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
@ -131,6 +131,7 @@ tree-sitter-svelte = { git = "https://github.com/Himujjal/tree-sitter-svelte", r
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"} tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a"}
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"} tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930"}
tree-sitter-lua = "0.0.14" tree-sitter-lua = "0.0.14"
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
[patch.crates-io] [patch.crates-io]
tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" } tree-sitter = { git = "https://github.com/tree-sitter/tree-sitter", rev = "1c65ca24bc9a734ab70115188f465e12eecf224e" }

View file

@ -1,159 +1,179 @@
{ {
"suffixes": { "suffixes": {
"aac": "audio", "aac": "audio",
"bash": "terminal", "accdb": "storage",
"bmp": "image", "bak": "backup",
"c": "code", "bash": "terminal",
"conf": "settings", "bash_aliases": "terminal",
"cpp": "code", "bash_logout": "terminal",
"cc": "code", "bash_profile": "terminal",
"css": "code", "bashrc": "terminal",
"doc": "document", "bmp": "image",
"docx": "document", "c": "code",
"eslintrc": "eslint", "cc": "code",
"eslintrc.js": "eslint", "conf": "settings",
"eslintrc.json": "eslint", "cpp": "code",
"flac": "audio", "css": "code",
"fish": "terminal", "csv": "storage",
"gitattributes": "vcs", "dat": "storage",
"gitignore": "vcs", "db": "storage",
"gitmodules": "vcs", "dbf": "storage",
"gif": "image", "dll": "storage",
"go": "code", "doc": "document",
"h": "code", "docx": "document",
"handlebars": "code", "eslintrc": "eslint",
"hbs": "template", "eslintrc.js": "eslint",
"htm": "template", "eslintrc.json": "eslint",
"html": "template", "fmp": "storage",
"svelte": "template", "fp7": "storage",
"hpp": "code", "flac": "audio",
"ico": "image", "fish": "terminal",
"ini": "settings", "frm": "storage",
"java": "code", "gdb": "storage",
"jpeg": "image", "gitattributes": "vcs",
"jpg": "image", "gitignore": "vcs",
"js": "code", "gitmodules": "vcs",
"json": "storage", "gif": "image",
"lock": "lock", "go": "code",
"log": "log", "h": "code",
"md": "document", "handlebars": "code",
"mdx": "document", "hbs": "template",
"mp3": "audio", "htm": "template",
"mp4": "video", "html": "template",
"ods": "document", "ib": "storage",
"odp": "document", "ico": "image",
"odt": "document", "ini": "settings",
"ogg": "video", "java": "code",
"pdf": "document", "jpeg": "image",
"php": "code", "jpg": "image",
"png": "image", "js": "code",
"ppt": "document", "json": "storage",
"pptx": "document", "ldf": "storage",
"prettierrc": "prettier", "lock": "lock",
"prettierignore": "prettier", "log": "log",
"ps1": "terminal", "mdb": "storage",
"psd": "image", "md": "document",
"py": "code", "mdf": "storage",
"rb": "code", "mdx": "document",
"rkt": "code", "mp3": "audio",
"rs": "rust", "mp4": "video",
"rtf": "document", "myd": "storage",
"scm": "code", "myi": "storage",
"sh": "terminal", "ods": "document",
"bashrc": "terminal", "odp": "document",
"bash_profile": "terminal", "odt": "document",
"bash_aliases": "terminal", "ogg": "video",
"bash_logout": "terminal", "pdb": "storage",
"profile": "terminal", "pdf": "document",
"zshrc": "terminal", "php": "code",
"zshenv": "terminal", "png": "image",
"zsh_profile": "terminal", "ppt": "document",
"zsh_aliases": "terminal", "pptx": "document",
"zsh_histfile": "terminal", "prettierignore": "prettier",
"zlogin": "terminal", "prettierrc": "prettier",
"sql": "code", "profile": "terminal",
"svg": "image", "ps1": "terminal",
"swift": "code", "psd": "image",
"tiff": "image", "py": "code",
"toml": "toml", "rb": "code",
"ts": "typescript", "rkt": "code",
"tsx": "code", "rs": "rust",
"txt": "document", "rtf": "document",
"wav": "audio", "sav": "storage",
"webm": "video", "scm": "code",
"xls": "document", "sh": "terminal",
"xlsx": "document", "sqlite": "storage",
"xml": "template", "sdf": "storage",
"yaml": "settings", "svelte": "template",
"yml": "settings", "svg": "image",
"zsh": "terminal" "swift": "code",
}, "ts": "typescript",
"types": { "tsx": "code",
"audio": { "tiff": "image",
"icon": "icons/file_icons/audio.svg" "toml": "toml",
"tsv": "storage",
"txt": "document",
"wav": "audio",
"webm": "video",
"xls": "document",
"xlsx": "document",
"xml": "template",
"yaml": "settings",
"yml": "settings",
"zlogin": "terminal",
"zsh": "terminal",
"zsh_aliases": "terminal",
"zshenv": "terminal",
"zsh_histfile": "terminal",
"zsh_profile": "terminal",
"zshrc": "terminal"
}, },
"code": { "types": {
"icon": "icons/file_icons/code.svg" "audio": {
}, "icon": "icons/file_icons/audio.svg"
"collapsed_chevron": { },
"icon": "icons/file_icons/chevron_right.svg" "code": {
}, "icon": "icons/file_icons/code.svg"
"collapsed_folder": { },
"icon": "icons/file_icons/folder.svg" "collapsed_chevron": {
}, "icon": "icons/file_icons/chevron_right.svg"
"default": { },
"icon": "icons/file_icons/file.svg" "collapsed_folder": {
}, "icon": "icons/file_icons/folder.svg"
"document": { },
"icon": "icons/file_icons/book.svg" "default": {
}, "icon": "icons/file_icons/file.svg"
"eslint": { },
"icon": "icons/file_icons/eslint.svg" "document": {
}, "icon": "icons/file_icons/book.svg"
"expanded_chevron": { },
"icon": "icons/file_icons/chevron_down.svg" "eslint": {
}, "icon": "icons/file_icons/eslint.svg"
"expanded_folder": { },
"icon": "icons/file_icons/folder_open.svg" "expanded_chevron": {
}, "icon": "icons/file_icons/chevron_down.svg"
"image": { },
"icon": "icons/file_icons/image.svg" "expanded_folder": {
}, "icon": "icons/file_icons/folder_open.svg"
"lock": { },
"icon": "icons/file_icons/lock.svg" "image": {
}, "icon": "icons/file_icons/image.svg"
"log": { },
"icon": "icons/file_icons/info.svg" "lock": {
}, "icon": "icons/file_icons/lock.svg"
"prettier": { },
"icon": "icons/file_icons/prettier.svg" "log": {
}, "icon": "icons/file_icons/info.svg"
"rust": { },
"icon": "icons/file_icons/rust.svg" "prettier": {
}, "icon": "icons/file_icons/prettier.svg"
"settings": { },
"icon": "icons/file_icons/settings.svg" "rust": {
}, "icon": "icons/file_icons/rust.svg"
"storage": { },
"icon": "icons/file_icons/database.svg" "settings": {
}, "icon": "icons/file_icons/settings.svg"
"template": { },
"icon": "icons/file_icons/html.svg" "storage": {
}, "icon": "icons/file_icons/database.svg"
"terminal": { },
"icon": "icons/file_icons/terminal.svg" "template": {
}, "icon": "icons/file_icons/html.svg"
"toml": { },
"icon": "icons/file_icons/toml.svg" "terminal": {
}, "icon": "icons/file_icons/terminal.svg"
"typescript": { },
"icon": "icons/file_icons/typescript.svg" "toml": {
}, "icon": "icons/file_icons/toml.svg"
"vcs": { },
"icon": "icons/file_icons/git.svg" "typescript": {
}, "icon": "icons/file_icons/typescript.svg"
"video": { },
"icon": "icons/file_icons/video.svg" "vcs": {
"icon": "icons/file_icons/git.svg"
},
"video": {
"icon": "icons/file_icons/video.svg"
}
} }
}
} }

View file

@ -22,6 +22,7 @@
"alt-cmd-right": "pane::ActivateNextItem", "alt-cmd-right": "pane::ActivateNextItem",
"cmd-w": "pane::CloseActiveItem", "cmd-w": "pane::CloseActiveItem",
"alt-cmd-t": "pane::CloseInactiveItems", "alt-cmd-t": "pane::CloseInactiveItems",
"ctrl-alt-cmd-w": "workspace::CloseInactiveTabsAndPanes",
"cmd-k u": "pane::CloseCleanItems", "cmd-k u": "pane::CloseCleanItems",
"cmd-k cmd-w": "pane::CloseAllItems", "cmd-k cmd-w": "pane::CloseAllItems",
"cmd-shift-w": "workspace::CloseWindow", "cmd-shift-w": "workspace::CloseWindow",
@ -226,12 +227,26 @@
"alt-enter": "search::SelectAllMatches" "alt-enter": "search::SelectAllMatches"
} }
}, },
{
"context": "BufferSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{ {
"context": "ProjectSearchBar", "context": "ProjectSearchBar",
"bindings": { "bindings": {
"escape": "project_search::ToggleFocus" "escape": "project_search::ToggleFocus"
} }
}, },
{
"context": "ProjectSearchBar > Editor",
"bindings": {
"up": "search::PreviousHistoryQuery",
"down": "search::NextHistoryQuery"
}
},
{ {
"context": "ProjectSearchView", "context": "ProjectSearchView",
"bindings": { "bindings": {

View file

@ -1637,6 +1637,7 @@ impl ConversationEditor {
let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx); let mut editor = Editor::for_buffer(conversation.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx); editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx); editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);
editor editor
}); });

View file

@ -338,9 +338,9 @@ impl Copilot {
let (server, fake_server) = let (server, fake_server) =
LanguageServer::fake("copilot".into(), Default::default(), cx.to_async()); LanguageServer::fake("copilot".into(), Default::default(), cx.to_async());
let http = util::http::FakeHttpClient::create(|_| async { unreachable!() }); let http = util::http::FakeHttpClient::create(|_| async { unreachable!() });
let this = cx.add_model(|cx| Self { let this = cx.add_model(|_| Self {
http: http.clone(), http: http.clone(),
node_runtime: NodeRuntime::instance(http, cx.background().clone()), node_runtime: NodeRuntime::instance(http),
server: CopilotServer::Running(RunningCopilotServer { server: CopilotServer::Running(RunningCopilotServer {
lsp: Arc::new(server), lsp: Arc::new(server),
sign_in_status: SignInStatus::Authorized, sign_in_status: SignInStatus::Authorized,

View file

@ -543,6 +543,7 @@ pub struct Editor {
show_local_selections: bool, show_local_selections: bool,
mode: EditorMode, mode: EditorMode,
show_gutter: bool, show_gutter: bool,
show_wrap_guides: Option<bool>,
placeholder_text: Option<Arc<str>>, placeholder_text: Option<Arc<str>>,
highlighted_rows: Option<Range<u32>>, highlighted_rows: Option<Range<u32>>,
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
@ -1375,6 +1376,7 @@ impl Editor {
show_local_selections: true, show_local_selections: true,
mode, mode,
show_gutter: mode == EditorMode::Full, show_gutter: mode == EditorMode::Full,
show_wrap_guides: None,
placeholder_text: None, placeholder_text: None,
highlighted_rows: None, highlighted_rows: None,
background_highlights: Default::default(), background_highlights: Default::default(),
@ -1537,7 +1539,7 @@ impl Editor {
self.collapse_matches = collapse_matches; self.collapse_matches = collapse_matches;
} }
fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> { pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches { if self.collapse_matches {
return range.start..range.start; return range.start..range.start;
} }
@ -6374,8 +6376,8 @@ impl Editor {
.range .range
.to_offset(definition.target.buffer.read(cx)); .to_offset(definition.target.buffer.read(cx));
let range = self.range_for_match(&range);
if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() { if Some(&definition.target.buffer) == self.buffer.read(cx).as_singleton().as_ref() {
let range = self.range_for_match(&range);
self.change_selections(Some(Autoscroll::fit()), cx, |s| { self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]); s.select_ranges([range]);
}); });
@ -6392,7 +6394,6 @@ impl Editor {
// When selecting a definition in a different buffer, disable the nav history // When selecting a definition in a different buffer, disable the nav history
// to avoid creating a history entry at the previous cursor location. // to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history()); pane.update(cx, |pane, _| pane.disable_history());
let range = target_editor.range_for_match(&range);
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| { target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]); s.select_ranges([range]);
}); });
@ -7188,6 +7189,10 @@ impl Editor {
pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> { pub fn wrap_guides(&self, cx: &AppContext) -> SmallVec<[(usize, bool); 2]> {
let mut wrap_guides = smallvec::smallvec![]; let mut wrap_guides = smallvec::smallvec![];
if self.show_wrap_guides == Some(false) {
return wrap_guides;
}
let settings = self.buffer.read(cx).settings_at(0, cx); let settings = self.buffer.read(cx).settings_at(0, cx);
if settings.show_wrap_guides { if settings.show_wrap_guides {
if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) { if let SoftWrap::Column(soft_wrap) = self.soft_wrap_mode(cx) {
@ -7245,6 +7250,11 @@ impl Editor {
cx.notify(); cx.notify();
} }
pub fn set_show_wrap_guides(&mut self, show_gutter: bool, cx: &mut ViewContext<Self>) {
self.show_wrap_guides = Some(show_gutter);
cx.notify();
}
pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) { pub fn reveal_in_finder(&mut self, _: &RevealInFinder, cx: &mut ViewContext<Self>) {
if let Some(buffer) = self.buffer().read(cx).as_singleton() { if let Some(buffer) = self.buffer().read(cx).as_singleton() {
if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) { if let Some(file) = buffer.read(cx).file().and_then(|f| f.as_local()) {

View file

@ -546,8 +546,20 @@ impl EditorElement {
}); });
} }
let scroll_left =
layout.position_map.snapshot.scroll_position().x() * layout.position_map.em_width;
for (wrap_position, active) in layout.wrap_guides.iter() { for (wrap_position, active) in layout.wrap_guides.iter() {
let x = text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.; let x =
(text_bounds.origin_x() + wrap_position + layout.position_map.em_width / 2.)
- scroll_left;
if x < text_bounds.origin_x()
|| (layout.show_scrollbars && x > self.scrollbar_left(&bounds))
{
continue;
}
let color = if *active { let color = if *active {
self.style.active_wrap_guide self.style.active_wrap_guide
} else { } else {
@ -1036,6 +1048,10 @@ impl EditorElement {
scene.pop_layer(); scene.pop_layer();
} }
fn scrollbar_left(&self, bounds: &RectF) -> f32 {
bounds.max_x() - self.style.theme.scrollbar.width
}
fn paint_scrollbar( fn paint_scrollbar(
&mut self, &mut self,
scene: &mut SceneBuilder, scene: &mut SceneBuilder,
@ -1054,7 +1070,7 @@ impl EditorElement {
let top = bounds.min_y(); let top = bounds.min_y();
let bottom = bounds.max_y(); let bottom = bounds.max_y();
let right = bounds.max_x(); let right = bounds.max_x();
let left = right - style.width; let left = self.scrollbar_left(&bounds);
let row_range = &layout.scrollbar_row_range; let row_range = &layout.scrollbar_row_range;
let max_row = layout.max_row as f32 + (row_range.end - row_range.start); let max_row = layout.max_row as f32 + (row_range.end - row_range.start);

View file

@ -1128,6 +1128,12 @@ impl AppContext {
self.keystroke_matcher.clear_bindings(); self.keystroke_matcher.clear_bindings();
} }
pub fn binding_for_action(&self, action: &dyn Action) -> Option<&Binding> {
self.keystroke_matcher
.bindings_for_action(action.id())
.find(|binding| binding.action().eq(action))
}
pub fn default_global<T: 'static + Default>(&mut self) -> &T { pub fn default_global<T: 'static + Default>(&mut self) -> &T {
let type_id = TypeId::of::<T>(); let type_id = TypeId::of::<T>();
self.update(|this| { self.update(|this| {

View file

@ -58,11 +58,14 @@ fn build_bridge(swift_target: &SwiftTarget) {
"cargo:rerun-if-changed={}/Package.resolved", "cargo:rerun-if-changed={}/Package.resolved",
SWIFT_PACKAGE_NAME SWIFT_PACKAGE_NAME
); );
let swift_package_root = swift_package_root(); let swift_package_root = swift_package_root();
let swift_target_folder = swift_target_folder();
if !Command::new("swift") if !Command::new("swift")
.arg("build") .arg("build")
.args(["--configuration", &env::var("PROFILE").unwrap()]) .args(["--configuration", &env::var("PROFILE").unwrap()])
.args(["--triple", &swift_target.target.triple]) .args(["--triple", &swift_target.target.triple])
.args(["--build-path".into(), swift_target_folder])
.current_dir(&swift_package_root) .current_dir(&swift_package_root)
.status() .status()
.unwrap() .unwrap()
@ -128,6 +131,12 @@ fn swift_package_root() -> PathBuf {
env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME) env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
} }
fn swift_target_folder() -> PathBuf {
env::current_dir()
.unwrap()
.join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
}
fn copy_dir(source: &Path, destination: &Path) { fn copy_dir(source: &Path, destination: &Path) {
assert!( assert!(
Command::new("rm") Command::new("rm")
@ -155,8 +164,7 @@ fn copy_dir(source: &Path, destination: &Path) {
impl SwiftTarget { impl SwiftTarget {
fn out_dir_path(&self) -> PathBuf { fn out_dir_path(&self) -> PathBuf {
swift_package_root() swift_target_folder()
.join(".build")
.join(&self.target.unversioned_triple) .join(&self.target.unversioned_triple)
.join(env::var("PROFILE").unwrap()) .join(env::var("PROFILE").unwrap())
} }

View file

@ -1,9 +1,6 @@
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder; use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive; use async_tar::Archive;
use futures::lock::Mutex;
use futures::{future::Shared, FutureExt};
use gpui::{executor::Background, Task};
use serde::Deserialize; use serde::Deserialize;
use smol::{fs, io::BufReader, process::Command}; use smol::{fs, io::BufReader, process::Command};
use std::process::{Output, Stdio}; use std::process::{Output, Stdio};
@ -33,20 +30,12 @@ pub struct NpmInfoDistTags {
pub struct NodeRuntime { pub struct NodeRuntime {
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
background: Arc<Background>,
installation_path: Mutex<Option<Shared<Task<Result<PathBuf, Arc<anyhow::Error>>>>>>,
} }
impl NodeRuntime { impl NodeRuntime {
pub fn instance(http: Arc<dyn HttpClient>, background: Arc<Background>) -> Arc<NodeRuntime> { pub fn instance(http: Arc<dyn HttpClient>) -> Arc<NodeRuntime> {
RUNTIME_INSTANCE RUNTIME_INSTANCE
.get_or_init(|| { .get_or_init(|| Arc::new(NodeRuntime { http }))
Arc::new(NodeRuntime {
http,
background,
installation_path: Mutex::new(None),
})
})
.clone() .clone()
} }
@ -61,7 +50,9 @@ impl NodeRuntime {
subcommand: &str, subcommand: &str,
args: &[&str], args: &[&str],
) -> Result<Output> { ) -> Result<Output> {
let attempt = |installation_path: PathBuf| async move { let attempt = || async move {
let installation_path = self.install_if_needed().await?;
let mut env_path = installation_path.join("bin").into_os_string(); let mut env_path = installation_path.join("bin").into_os_string();
if let Some(existing_path) = std::env::var_os("PATH") { if let Some(existing_path) = std::env::var_os("PATH") {
if !existing_path.is_empty() { if !existing_path.is_empty() {
@ -92,10 +83,9 @@ impl NodeRuntime {
command.output().await.map_err(|e| anyhow!("{e}")) command.output().await.map_err(|e| anyhow!("{e}"))
}; };
let installation_path = self.install_if_needed().await?; let mut output = attempt().await;
let mut output = attempt(installation_path.clone()).await;
if output.is_err() { if output.is_err() {
output = attempt(installation_path).await; output = attempt().await;
if output.is_err() { if output.is_err() {
return Err(anyhow!( return Err(anyhow!(
"failed to launch npm subcommand {subcommand} subcommand" "failed to launch npm subcommand {subcommand} subcommand"
@ -167,23 +157,8 @@ impl NodeRuntime {
} }
async fn install_if_needed(&self) -> Result<PathBuf> { async fn install_if_needed(&self) -> Result<PathBuf> {
let task = self log::info!("Node runtime install_if_needed");
.installation_path
.lock()
.await
.get_or_insert_with(|| {
let http = self.http.clone();
self.background
.spawn(async move { Self::install(http).await.map_err(Arc::new) })
.shared()
})
.clone();
task.await.map_err(|e| anyhow!("{}", e))
}
async fn install(http: Arc<dyn HttpClient>) -> Result<PathBuf> {
log::info!("installing Node runtime");
let arch = match consts::ARCH { let arch = match consts::ARCH {
"x86_64" => "x64", "x86_64" => "x64",
"aarch64" => "arm64", "aarch64" => "arm64",
@ -214,7 +189,8 @@ impl NodeRuntime {
let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz"); let file_name = format!("node-{VERSION}-darwin-{arch}.tar.gz");
let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}"); let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
let mut response = http let mut response = self
.http
.get(&url, Default::default(), true) .get(&url, Default::default(), true)
.await .await
.context("error downloading Node binary tarball")?; .context("error downloading Node binary tarball")?;

View file

@ -1,7 +1,6 @@
use crate::{worktree::WorktreeHandle, Event, *}; use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs}; use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext}; use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{ use language::{
language_settings::{AllLanguageSettings, LanguageSettingsContent}, language_settings::{AllLanguageSettings, LanguageSettingsContent},
@ -3641,7 +3640,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
Vec::new() Vec::new()
), ),
cx cx
@ -3659,7 +3658,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.rs").unwrap().compile_matcher()], vec![PathMatcher::new("*.rs").unwrap()],
Vec::new() Vec::new()
), ),
cx cx
@ -3681,8 +3680,8 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
Vec::new() Vec::new()
), ),
@ -3705,9 +3704,9 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
Vec::new() Vec::new()
), ),
@ -3752,7 +3751,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
Vec::new(), Vec::new(),
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
), ),
cx cx
) )
@ -3775,7 +3774,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false, false,
true, true,
Vec::new(), Vec::new(),
vec![Glob::new("*.rs").unwrap().compile_matcher()], vec![PathMatcher::new("*.rs").unwrap()],
), ),
cx cx
) )
@ -3797,8 +3796,8 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true, true,
Vec::new(), Vec::new(),
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
), ),
cx cx
@ -3821,9 +3820,9 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true, true,
Vec::new(), Vec::new(),
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher(), PathMatcher::new("*.odd").unwrap(),
], ],
), ),
cx cx
@ -3860,8 +3859,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
vec![Glob::new("*.odd").unwrap().compile_matcher()], vec![PathMatcher::new("*.odd").unwrap()],
), ),
cx cx
) )
@ -3878,8 +3877,8 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
search_query, search_query,
false, false,
true, true,
vec![Glob::new("*.ts").unwrap().compile_matcher()], vec![PathMatcher::new("*.ts").unwrap()],
vec![Glob::new("*.ts").unwrap().compile_matcher()], vec![PathMatcher::new("*.ts").unwrap()],
), ),
cx cx
) )
@ -3897,12 +3896,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
), ),
cx cx
@ -3921,12 +3920,12 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false, false,
true, true,
vec![ vec![
Glob::new("*.ts").unwrap().compile_matcher(), PathMatcher::new("*.ts").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
vec![ vec![
Glob::new("*.rs").unwrap().compile_matcher(), PathMatcher::new("*.rs").unwrap(),
Glob::new("*.odd").unwrap().compile_matcher() PathMatcher::new("*.odd").unwrap()
], ],
), ),
cx cx

View file

@ -1,5 +1,5 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::Result; use anyhow::{Context, Result};
use client::proto; use client::proto;
use globset::{Glob, GlobMatcher}; use globset::{Glob, GlobMatcher};
use itertools::Itertools; use itertools::Itertools;
@ -9,7 +9,7 @@ use smol::future::yield_now;
use std::{ use std::{
io::{BufRead, BufReader, Read}, io::{BufRead, BufReader, Read},
ops::Range, ops::Range,
path::Path, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
@ -20,8 +20,8 @@ pub enum SearchQuery {
query: Arc<str>, query: Arc<str>,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
}, },
Regex { Regex {
regex: Regex, regex: Regex,
@ -29,18 +29,43 @@ pub enum SearchQuery {
multiline: bool, multiline: bool,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
}, },
} }
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery { impl SearchQuery {
pub fn text( pub fn text(
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Self { ) -> Self {
let query = query.to_string(); let query = query.to_string();
let search = AhoCorasickBuilder::new() let search = AhoCorasickBuilder::new()
@ -61,8 +86,8 @@ impl SearchQuery {
query: impl ToString, query: impl ToString,
whole_word: bool, whole_word: bool,
case_sensitive: bool, case_sensitive: bool,
files_to_include: Vec<GlobMatcher>, files_to_include: Vec<PathMatcher>,
files_to_exclude: Vec<GlobMatcher>, files_to_exclude: Vec<PathMatcher>,
) -> Result<Self> { ) -> Result<Self> {
let mut query = query.to_string(); let mut query = query.to_string();
let initial_query = Arc::from(query.as_str()); let initial_query = Arc::from(query.as_str());
@ -96,16 +121,16 @@ impl SearchQuery {
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
deserialize_globs(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
) )
} else { } else {
Ok(Self::text( Ok(Self::text(
message.query, message.query,
message.whole_word, message.whole_word,
message.case_sensitive, message.case_sensitive,
deserialize_globs(&message.files_to_include)?, deserialize_path_matches(&message.files_to_include)?,
deserialize_globs(&message.files_to_exclude)?, deserialize_path_matches(&message.files_to_exclude)?,
)) ))
} }
} }
@ -120,12 +145,12 @@ impl SearchQuery {
files_to_include: self files_to_include: self
.files_to_include() .files_to_include()
.iter() .iter()
.map(|g| g.glob().to_string()) .map(|matcher| matcher.to_string())
.join(","), .join(","),
files_to_exclude: self files_to_exclude: self
.files_to_exclude() .files_to_exclude()
.iter() .iter()
.map(|g| g.glob().to_string()) .map(|matcher| matcher.to_string())
.join(","), .join(","),
} }
} }
@ -266,7 +291,7 @@ impl SearchQuery {
matches!(self, Self::Regex { .. }) matches!(self, Self::Regex { .. })
} }
pub fn files_to_include(&self) -> &[GlobMatcher] { pub fn files_to_include(&self) -> &[PathMatcher] {
match self { match self {
Self::Text { Self::Text {
files_to_include, .. files_to_include, ..
@ -277,7 +302,7 @@ impl SearchQuery {
} }
} }
pub fn files_to_exclude(&self) -> &[GlobMatcher] { pub fn files_to_exclude(&self) -> &[PathMatcher] {
match self { match self {
Self::Text { Self::Text {
files_to_exclude, .. files_to_exclude, ..
@ -306,11 +331,63 @@ impl SearchQuery {
} }
} }
fn deserialize_globs(glob_set: &str) -> Result<Vec<GlobMatcher>> { fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
glob_set glob_set
.split(',') .split(',')
.map(str::trim) .map(str::trim)
.filter(|glob_str| !glob_str.is_empty()) .filter(|glob_str| !glob_str.is_empty())
.map(|glob_str| Ok(Glob::new(glob_str)?.compile_matcher())) .map(|glob_str| {
PathMatcher::new(glob_str)
.with_context(|| format!("deserializing path match glob {glob_str}"))
})
.collect() .collect()
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn path_matcher_creation_for_valid_paths() {
for valid_path in [
"file",
"Cargo.toml",
".DS_Store",
"~/dir/another_dir/",
"./dir/file",
"dir/[a-z].txt",
"../dir/filé",
] {
let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
panic!("Valid path {valid_path} should be accepted, but got: {e}")
});
assert!(
path_matcher.is_match(valid_path),
"Path matcher for valid path {valid_path} should match itself"
)
}
}
#[test]
fn path_matcher_creation_for_globs() {
for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
match PathMatcher::new(invalid_glob) {
Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
Err(_expected) => {}
}
}
for valid_glob in [
"dir/?ile",
"dir/*.txt",
"dir/**/file",
"dir/[a-z].txt",
"{dir,file}",
] {
match PathMatcher::new(valid_glob) {
Ok(_expected) => {}
Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
}
}
}
}

View file

@ -115,6 +115,7 @@ actions!(
[ [
ExpandSelectedEntry, ExpandSelectedEntry,
CollapseSelectedEntry, CollapseSelectedEntry,
CollapseAllEntries,
NewDirectory, NewDirectory,
NewFile, NewFile,
Copy, Copy,
@ -140,6 +141,7 @@ pub fn init(assets: impl AssetSource, cx: &mut AppContext) {
file_associations::init(assets, cx); file_associations::init(assets, cx);
cx.add_action(ProjectPanel::expand_selected_entry); cx.add_action(ProjectPanel::expand_selected_entry);
cx.add_action(ProjectPanel::collapse_selected_entry); cx.add_action(ProjectPanel::collapse_selected_entry);
cx.add_action(ProjectPanel::collapse_all_entries);
cx.add_action(ProjectPanel::select_prev); cx.add_action(ProjectPanel::select_prev);
cx.add_action(ProjectPanel::select_next); cx.add_action(ProjectPanel::select_next);
cx.add_action(ProjectPanel::new_file); cx.add_action(ProjectPanel::new_file);
@ -514,6 +516,12 @@ impl ProjectPanel {
} }
} }
pub fn collapse_all_entries(&mut self, _: &CollapseAllEntries, cx: &mut ViewContext<Self>) {
self.expanded_dir_ids.clear();
self.update_visible_entries(None, cx);
cx.notify();
}
fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) { fn toggle_expanded(&mut self, entry_id: ProjectEntryId, cx: &mut ViewContext<Self>) {
if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) { if let Some(worktree_id) = self.project.read(cx).worktree_id_for_entry(entry_id, cx) {
if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) { if let Some(expanded_dir_ids) = self.expanded_dir_ids.get_mut(&worktree_id) {
@ -2678,6 +2686,63 @@ mod tests {
); );
} }
#[gpui::test]
async fn test_collapse_all_entries(cx: &mut gpui::TestAppContext) {
init_test_with_editor(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/project_root",
json!({
"dir_1": {
"nested_dir": {
"file_a.py": "# File contents",
"file_b.py": "# File contents",
"file_c.py": "# File contents",
},
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
},
"dir_2": {
"file_1.py": "# File contents",
"file_2.py": "# File contents",
"file_3.py": "# File contents",
}
}),
)
.await;
let project = Project::test(fs.clone(), ["/project_root".as_ref()], cx).await;
let (_, workspace) = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let panel = workspace.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx));
panel.update(cx, |panel, cx| {
panel.collapse_all_entries(&CollapseAllEntries, cx)
});
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&["v project_root", " > dir_1", " > dir_2",]
);
// Open dir_1 and make sure nested_dir was collapsed when running collapse_all_entries
toggle_expand_dir(&panel, "project_root/dir_1", cx);
cx.foreground().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..10, cx),
&[
"v project_root",
" v dir_1 <== selected",
" > nested_dir",
" file_1.py",
" file_2.py",
" file_3.py",
" > dir_2",
]
);
}
fn toggle_expand_dir( fn toggle_expand_dir(
panel: &ViewHandle<ProjectPanel>, panel: &ViewHandle<ProjectPanel>,
path: impl AsRef<Path>, path: impl AsRef<Path>,
@ -2878,3 +2943,4 @@ mod tests {
}); });
} }
} }
// TODO - a workspace command?

View file

@ -1,6 +1,6 @@
use crate::{ use crate::{
SearchOptions, SelectAllMatches, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectAllMatches,
ToggleRegex, ToggleWholeWord, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
}; };
use collections::HashMap; use collections::HashMap;
use editor::Editor; use editor::Editor;
@ -46,6 +46,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(BufferSearchBar::select_prev_match_on_pane); cx.add_action(BufferSearchBar::select_prev_match_on_pane);
cx.add_action(BufferSearchBar::select_all_matches_on_pane); cx.add_action(BufferSearchBar::select_all_matches_on_pane);
cx.add_action(BufferSearchBar::handle_editor_cancel); cx.add_action(BufferSearchBar::handle_editor_cancel);
cx.add_action(BufferSearchBar::next_history_query);
cx.add_action(BufferSearchBar::previous_history_query);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx); add_toggle_option_action::<ToggleWholeWord>(SearchOptions::WHOLE_WORD, cx);
add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx); add_toggle_option_action::<ToggleRegex>(SearchOptions::REGEX, cx);
@ -65,7 +67,7 @@ fn add_toggle_option_action<A: Action>(option: SearchOptions, cx: &mut AppContex
} }
pub struct BufferSearchBar { pub struct BufferSearchBar {
pub query_editor: ViewHandle<Editor>, query_editor: ViewHandle<Editor>,
active_searchable_item: Option<Box<dyn SearchableItemHandle>>, active_searchable_item: Option<Box<dyn SearchableItemHandle>>,
active_match_index: Option<usize>, active_match_index: Option<usize>,
active_searchable_item_subscription: Option<Subscription>, active_searchable_item_subscription: Option<Subscription>,
@ -76,6 +78,7 @@ pub struct BufferSearchBar {
default_options: SearchOptions, default_options: SearchOptions,
query_contains_error: bool, query_contains_error: bool,
dismissed: bool, dismissed: bool,
search_history: SearchHistory,
} }
impl Entity for BufferSearchBar { impl Entity for BufferSearchBar {
@ -106,6 +109,48 @@ impl View for BufferSearchBar {
.map(|active_searchable_item| active_searchable_item.supported_options()) .map(|active_searchable_item| active_searchable_item.supported_options())
.unwrap_or_default(); .unwrap_or_default();
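// Surface the keybindings for the history-navigation actions (when bound) in the query editor's placeholder text.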
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes = cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
Flex::row() Flex::row()
.with_child( .with_child(
Flex::row() Flex::row()
@ -258,6 +303,7 @@ impl BufferSearchBar {
pending_search: None, pending_search: None,
query_contains_error: false, query_contains_error: false,
dismissed: true, dismissed: true,
search_history: SearchHistory::default(),
} }
} }
@ -341,7 +387,7 @@ impl BufferSearchBar {
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> oneshot::Receiver<()> { ) -> oneshot::Receiver<()> {
let options = options.unwrap_or(self.default_options); let options = options.unwrap_or(self.default_options);
if query != self.query_editor.read(cx).text(cx) || self.search_options != options { if query != self.query(cx) || self.search_options != options {
self.query_editor.update(cx, |query_editor, cx| { self.query_editor.update(cx, |query_editor, cx| {
query_editor.buffer().update(cx, |query_buffer, cx| { query_editor.buffer().update(cx, |query_buffer, cx| {
let len = query_buffer.len(cx); let len = query_buffer.len(cx);
@ -674,7 +720,7 @@ impl BufferSearchBar {
fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> { fn update_matches(&mut self, cx: &mut ViewContext<Self>) -> oneshot::Receiver<()> {
let (done_tx, done_rx) = oneshot::channel(); let (done_tx, done_rx) = oneshot::channel();
let query = self.query_editor.read(cx).text(cx); let query = self.query(cx);
self.pending_search.take(); self.pending_search.take();
if let Some(active_searchable_item) = self.active_searchable_item.as_ref() { if let Some(active_searchable_item) = self.active_searchable_item.as_ref() {
if query.is_empty() { if query.is_empty() {
@ -707,6 +753,7 @@ impl BufferSearchBar {
) )
}; };
let query_text = query.as_str().to_string();
let matches = active_searchable_item.find_matches(query, cx); let matches = active_searchable_item.find_matches(query, cx);
let active_searchable_item = active_searchable_item.downgrade(); let active_searchable_item = active_searchable_item.downgrade();
@ -720,6 +767,7 @@ impl BufferSearchBar {
.insert(active_searchable_item.downgrade(), matches); .insert(active_searchable_item.downgrade(), matches);
this.update_match_index(cx); this.update_match_index(cx);
this.search_history.add(query_text);
if !this.dismissed { if !this.dismissed {
let matches = this let matches = this
.searchable_items_with_matches .searchable_items_with_matches
@ -753,6 +801,28 @@ impl BufferSearchBar {
cx.notify(); cx.notify();
} }
} }
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
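// Advancing past the newest entry resets the history selection and clears the query.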
if let Some(new_query) = self.search_history.next().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
} else {
self.search_history.reset_selection();
let _ = self.search("", Some(self.search_options), cx);
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
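// With an empty query, first recall the currently selected history entry before stepping further back.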
if self.query(cx).is_empty() {
if let Some(new_query) = self.search_history.current().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
return;
}
}
if let Some(new_query) = self.search_history.previous().map(str::to_string) {
let _ = self.search(&new_query, Some(self.search_options), cx);
}
}
} }
#[cfg(test)] #[cfg(test)]
@ -1333,4 +1403,154 @@ mod tests {
); );
}); });
} }
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
crate::project_search::tests::init_test(cx);
let buffer_text = r#"
A regular expression (shortened as regex or regexp;[1] also referred to as
rational expression[2][3]) is a sequence of characters that specifies a search
pattern in text. Usually such patterns are used by string-searching algorithms
for "find" or "find and replace" operations on strings, or for input validation.
"#
.unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, buffer_text, cx));
let (window_id, _root_view) = cx.add_window(|_| EmptyView);
let editor = cx.add_view(window_id, |cx| Editor::for_buffer(buffer.clone(), None, cx));
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = BufferSearchBar::new(cx);
search_bar.set_active_pane_item(Some(&editor), cx);
search_bar.show(cx);
search_bar
});
// Add 3 search items into the history.
search_bar
.update(cx, |search_bar, cx| search_bar.search("a", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| search_bar.search("b", None, cx))
.await
.unwrap();
search_bar
.update(cx, |search_bar, cx| {
search_bar.search("c", Some(SearchOptions::CASE_SENSITIVE), cx)
})
.await
.unwrap();
// Ensure that the latest search is active.
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go back past the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "a");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar
.update(cx, |search_bar, cx| search_bar.search("ba", None, cx))
.await
.unwrap();
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "b");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "c");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "ba");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_bar.read_with(cx, |search_bar, cx| {
assert_eq!(search_bar.query(cx), "");
assert_eq!(search_bar.search_options, SearchOptions::NONE);
});
}
} }

View file

@ -1,15 +1,14 @@
use crate::{ use crate::{
SearchOptions, SelectNextMatch, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, NextHistoryQuery, PreviousHistoryQuery, SearchHistory, SearchOptions, SelectNextMatch,
ToggleWholeWord, SelectPrevMatch, ToggleCaseSensitive, ToggleRegex, ToggleWholeWord,
}; };
use anyhow::Result; use anyhow::Context;
use collections::HashMap; use collections::HashMap;
use editor::{ use editor::{
items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer, items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
SelectAll, MAX_TAB_TITLE_LEN, SelectAll, MAX_TAB_TITLE_LEN,
}; };
use futures::StreamExt; use futures::StreamExt;
use globset::{Glob, GlobMatcher};
use gpui::{ use gpui::{
actions, actions,
elements::*, elements::*,
@ -19,7 +18,10 @@ use gpui::{
}; };
use menu::Confirm; use menu::Confirm;
use postage::stream::Stream; use postage::stream::Stream;
use project::{search::SearchQuery, Entry, Project}; use project::{
search::{PathMatcher, SearchQuery},
Entry, Project,
};
use semantic_index::SemanticIndex; use semantic_index::SemanticIndex;
use smallvec::SmallVec; use smallvec::SmallVec;
use std::{ use std::{
@ -54,6 +56,8 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(ProjectSearchBar::search_in_new); cx.add_action(ProjectSearchBar::search_in_new);
cx.add_action(ProjectSearchBar::select_next_match); cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match); cx.add_action(ProjectSearchBar::select_prev_match);
cx.add_action(ProjectSearchBar::next_history_query);
cx.add_action(ProjectSearchBar::previous_history_query);
cx.capture_action(ProjectSearchBar::tab); cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous); cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx); add_toggle_option_action::<ToggleCaseSensitive>(SearchOptions::CASE_SENSITIVE, cx);
@ -81,6 +85,7 @@ struct ProjectSearch {
match_ranges: Vec<Range<Anchor>>, match_ranges: Vec<Range<Anchor>>,
active_query: Option<SearchQuery>, active_query: Option<SearchQuery>,
search_id: usize, search_id: usize,
search_history: SearchHistory,
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -129,6 +134,7 @@ impl ProjectSearch {
match_ranges: Default::default(), match_ranges: Default::default(),
active_query: None, active_query: None,
search_id: 0, search_id: 0,
search_history: SearchHistory::default(),
} }
} }
@ -142,6 +148,7 @@ impl ProjectSearch {
match_ranges: self.match_ranges.clone(), match_ranges: self.match_ranges.clone(),
active_query: self.active_query.clone(), active_query: self.active_query.clone(),
search_id: self.search_id, search_id: self.search_id,
search_history: self.search_history.clone(),
}) })
} }
@ -150,6 +157,7 @@ impl ProjectSearch {
.project .project
.update(cx, |project, cx| project.search(query.clone(), cx)); .update(cx, |project, cx| project.search(query.clone(), cx));
self.search_id += 1; self.search_id += 1;
self.search_history.add(query.as_str().to_string());
self.active_query = Some(query); self.active_query = Some(query);
self.match_ranges.clear(); self.match_ranges.clear();
self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move { self.pending_search = Some(cx.spawn_weak(|this, mut cx| async move {
@ -185,27 +193,22 @@ impl ProjectSearch {
cx.notify(); cx.notify();
} }
fn semantic_search( fn semantic_search(&mut self, query: SearchQuery, cx: &mut ModelContext<Self>) {
&mut self,
query: String,
include_files: Vec<GlobMatcher>,
exclude_files: Vec<GlobMatcher>,
cx: &mut ModelContext<Self>,
) {
let search = SemanticIndex::global(cx).map(|index| { let search = SemanticIndex::global(cx).map(|index| {
index.update(cx, |semantic_index, cx| { index.update(cx, |semantic_index, cx| {
semantic_index.search_project( semantic_index.search_project(
self.project.clone(), self.project.clone(),
query.clone(), query.as_str().to_owned(),
10, 10,
include_files, query.files_to_include().to_vec(),
exclude_files, query.files_to_exclude().to_vec(),
cx, cx,
) )
}) })
}); });
self.search_id += 1; self.search_id += 1;
self.match_ranges.clear(); self.match_ranges.clear();
self.search_history.add(query.as_str().to_string());
self.pending_search = Some(cx.spawn(|this, mut cx| async move { self.pending_search = Some(cx.spawn(|this, mut cx| async move {
let results = search?.await.log_err()?; let results = search?.await.log_err()?;
@ -282,6 +285,49 @@ impl View for ProjectSearchView {
Cow::Borrowed("No results") Cow::Borrowed("No results")
}; };
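// As in the buffer search bar, show the history-navigation keybindings (when bound) in the query editor's placeholder text.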
let previous_query_keystrokes =
cx.binding_for_action(&PreviousHistoryQuery {})
.map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let next_query_keystrokes =
cx.binding_for_action(&NextHistoryQuery {}).map(|binding| {
binding
.keystrokes()
.iter()
.map(|k| k.to_string())
.collect::<Vec<_>>()
});
let new_placeholder_text = match (previous_query_keystrokes, next_query_keystrokes) {
(Some(previous_query_keystrokes), Some(next_query_keystrokes)) => {
format!(
"Search ({}/{} for previous/next query)",
previous_query_keystrokes.join(" "),
next_query_keystrokes.join(" ")
)
}
(None, Some(next_query_keystrokes)) => {
format!(
"Search ({} for next query)",
next_query_keystrokes.join(" ")
)
}
(Some(previous_query_keystrokes), None) => {
format!(
"Search ({} for previous query)",
previous_query_keystrokes.join(" ")
)
}
(None, None) => String::new(),
};
self.query_editor.update(cx, |editor, cx| {
editor.set_placeholder_text(new_placeholder_text, cx);
});
MouseEventHandler::<Status, _>::new(0, cx, |_, _| { MouseEventHandler::<Status, _>::new(0, cx, |_, _| {
Label::new(text, theme.search.results_status.clone()) Label::new(text, theme.search.results_status.clone())
.aligned() .aligned()
@ -590,8 +636,7 @@ impl ProjectSearchView {
if !dir_entry.is_dir() { if !dir_entry.is_dir() {
return; return;
} }
let filter_path = dir_entry.path.join("**"); let Some(filter_str) = dir_entry.path.to_str() else { return; };
let Some(filter_str) = filter_path.to_str() else { return; };
let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx)); let model = cx.add_model(|cx| ProjectSearch::new(workspace.project().clone(), cx));
let search = cx.add_view(|cx| ProjectSearchView::new(model, cx)); let search = cx.add_view(|cx| ProjectSearchView::new(model, cx));
@ -662,16 +707,10 @@ impl ProjectSearchView {
if semantic.outstanding_file_count > 0 { if semantic.outstanding_file_count > 0 {
return; return;
} }
if let Some(query) = self.build_search_query(cx) {
let query = self.query_editor.read(cx).text(cx); self.model
if let Some((included_files, exclude_files)) = .update(cx, |model, cx| model.semantic_search(query, cx));
self.get_included_and_excluded_globsets(cx)
{
self.model.update(cx, |model, cx| {
model.semantic_search(query, included_files, exclude_files, cx)
});
} }
return;
} }
if let Some(query) = self.build_search_query(cx) { if let Some(query) = self.build_search_query(cx) {
@ -679,42 +718,10 @@ impl ProjectSearchView {
} }
} }
fn get_included_and_excluded_globsets(
&mut self,
cx: &mut ViewContext<Self>,
) -> Option<(Vec<GlobMatcher>, Vec<GlobMatcher>)> {
let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include);
included_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Include);
cx.notify();
return None;
}
};
let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files
}
Err(_e) => {
self.panels_with_errors.insert(InputPanel::Exclude);
cx.notify();
return None;
}
};
Some((included_files, excluded_files))
}
fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> { fn build_search_query(&mut self, cx: &mut ViewContext<Self>) -> Option<SearchQuery> {
let text = self.query_editor.read(cx).text(cx); let text = self.query_editor.read(cx).text(cx);
let included_files = let included_files =
match Self::load_glob_set(&self.included_files_editor.read(cx).text(cx)) { match Self::parse_path_matches(&self.included_files_editor.read(cx).text(cx)) {
Ok(included_files) => { Ok(included_files) => {
self.panels_with_errors.remove(&InputPanel::Include); self.panels_with_errors.remove(&InputPanel::Include);
included_files included_files
@ -726,7 +733,7 @@ impl ProjectSearchView {
} }
}; };
let excluded_files = let excluded_files =
match Self::load_glob_set(&self.excluded_files_editor.read(cx).text(cx)) { match Self::parse_path_matches(&self.excluded_files_editor.read(cx).text(cx)) {
Ok(excluded_files) => { Ok(excluded_files) => {
self.panels_with_errors.remove(&InputPanel::Exclude); self.panels_with_errors.remove(&InputPanel::Exclude);
excluded_files excluded_files
@ -766,11 +773,14 @@ impl ProjectSearchView {
} }
} }
fn load_glob_set(text: &str) -> Result<Vec<GlobMatcher>> { fn parse_path_matches(text: &str) -> anyhow::Result<Vec<PathMatcher>> {
text.split(',') text.split(',')
.map(str::trim) .map(str::trim)
.filter(|glob_str| !glob_str.is_empty()) .filter(|maybe_glob_str| !maybe_glob_str.is_empty())
.map(|glob_str| anyhow::Ok(Glob::new(glob_str)?.compile_matcher())) .map(|maybe_glob_str| {
PathMatcher::new(maybe_glob_str)
.with_context(|| format!("parsing {maybe_glob_str} as path matcher"))
})
.collect() .collect()
} }
@ -783,6 +793,7 @@ impl ProjectSearchView {
let range_to_select = match_ranges[new_index].clone(); let range_to_select = match_ranges[new_index].clone();
self.results_editor.update(cx, |editor, cx| { self.results_editor.update(cx, |editor, cx| {
let range_to_select = editor.range_for_match(&range_to_select);
editor.unfold_ranges([range_to_select.clone()], false, true, cx); editor.unfold_ranges([range_to_select.clone()], false, true, cx);
editor.change_selections(Some(Autoscroll::fit()), cx, |s| { editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range_to_select]) s.select_ranges([range_to_select])
@ -824,8 +835,12 @@ impl ProjectSearchView {
let is_new_search = self.search_id != prev_search_id; let is_new_search = self.search_id != prev_search_id;
self.results_editor.update(cx, |editor, cx| { self.results_editor.update(cx, |editor, cx| {
if is_new_search { if is_new_search {
let range_to_select = match_ranges
.first()
.clone()
.map(|range| editor.range_for_match(range));
editor.change_selections(Some(Autoscroll::fit()), cx, |s| { editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges(match_ranges.first().cloned()) s.select_ranges(range_to_select)
}); });
} }
editor.highlight_background::<Self>( editor.highlight_background::<Self>(
@ -1187,6 +1202,47 @@ impl ProjectSearchBar {
false false
} }
} }
fn next_history_query(&mut self, _: &NextHistoryQuery, cx: &mut ViewContext<Self>) {
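// Stepping past the newest history entry resets the selection and clears the query editor.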
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
let new_query = search_view.model.update(cx, |model, _| {
if let Some(new_query) = model.search_history.next().map(str::to_string) {
new_query
} else {
model.search_history.reset_selection();
String::new()
}
});
search_view.set_query(&new_query, cx);
});
}
}
fn previous_history_query(&mut self, _: &PreviousHistoryQuery, cx: &mut ViewContext<Self>) {
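// An empty query editor first recalls the currently selected history entry; otherwise step back to the previous one.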
if let Some(search_view) = self.active_project_search.as_ref() {
search_view.update(cx, |search_view, cx| {
if search_view.query_editor.read(cx).text(cx).is_empty() {
if let Some(new_query) = search_view
.model
.read(cx)
.search_history
.current()
.map(str::to_string)
{
search_view.set_query(&new_query, cx);
return;
}
}
if let Some(new_query) = search_view.model.update(cx, |model, _| {
model.search_history.previous().map(str::to_string)
}) {
search_view.set_query(&new_query, cx);
}
});
}
}
} }
impl Entity for ProjectSearchBar { impl Entity for ProjectSearchBar {
@ -1368,6 +1424,7 @@ pub mod tests {
use editor::DisplayPoint; use editor::DisplayPoint;
use gpui::{color::Color, executor::Deterministic, TestAppContext}; use gpui::{color::Color, executor::Deterministic, TestAppContext};
use project::FakeFs; use project::FakeFs;
use semantic_index::semantic_index_settings::SemanticIndexSettings;
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use std::sync::Arc; use std::sync::Arc;
@ -1769,7 +1826,7 @@ pub mod tests {
search_view.included_files_editor.update(cx, |editor, cx| { search_view.included_files_editor.update(cx, |editor, cx| {
assert_eq!( assert_eq!(
editor.display_text(cx), editor.display_text(cx),
a_dir_entry.path.join("**").display().to_string(), a_dir_entry.path.to_str().unwrap(),
"New search in directory should have included dir entry path" "New search in directory should have included dir entry path"
); );
}); });
@ -1793,6 +1850,192 @@ pub mod tests {
}); });
} }
#[gpui::test]
async fn test_search_query_history(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/dir",
json!({
"one.rs": "const ONE: usize = 1;",
"two.rs": "const TWO: usize = one::ONE + one::ONE;",
"three.rs": "const THREE: usize = one::ONE + two::TWO;",
"four.rs": "const FOUR: usize = one::ONE + three::THREE;",
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
workspace.update(cx, |workspace, cx| {
ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
});
let search_view = cx.read(|cx| {
workspace
.read(cx)
.active_pane()
.read(cx)
.active_item()
.and_then(|item| item.downcast::<ProjectSearchView>())
.expect("Search view expected to appear after new search event trigger")
});
let search_bar = cx.add_view(window_id, |cx| {
let mut search_bar = ProjectSearchBar::new();
search_bar.set_active_pane_item(Some(&search_view), cx);
// search_bar.show(cx);
search_bar
});
// Add 3 search items into the history + another unsubmitted one.
search_view.update(cx, |search_view, cx| {
search_view.search_options = SearchOptions::CASE_SENSITIVE;
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("ONE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("THREE", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
search_view.query_editor.update(cx, |query_editor, cx| {
query_editor.set_text("JUST_TEXT_INPUT", cx)
});
});
cx.foreground().run_until_parked();
// Ensure that the latest input with search settings is active.
search_view.update(cx, |search_view, cx| {
assert_eq!(
search_view.query_editor.read(cx).text(cx),
"JUST_TEXT_INPUT"
);
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next history query after the latest should set the query to the empty string.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// First previous query for empty current query should set the query to the latest submitted one.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Further previous items should go over the history in reverse order.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Previous items should never go back past the first history item.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "ONE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// Next items should go over the history in the original order.
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_view.update(cx, |search_view, cx| {
search_view
.query_editor
.update(cx, |query_editor, cx| query_editor.set_text("TWO_NEW", cx));
search_view.search(cx);
});
cx.foreground().run_until_parked();
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
// New search input should add another entry to history and move the selection to the end of the history.
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.previous_history_query(&PreviousHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "THREE");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "TWO_NEW");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
search_bar.update(cx, |search_bar, cx| {
search_bar.next_history_query(&NextHistoryQuery, cx);
});
search_view.update(cx, |search_view, cx| {
assert_eq!(search_view.query_editor.read(cx).text(cx), "");
assert_eq!(search_view.search_options, SearchOptions::CASE_SENSITIVE);
});
}
pub fn init_test(cx: &mut TestAppContext) { pub fn init_test(cx: &mut TestAppContext) {
cx.foreground().forbid_parking(); cx.foreground().forbid_parking();
let fonts = cx.font_cache(); let fonts = cx.font_cache();
@ -1802,6 +2045,7 @@ pub mod tests {
cx.update(|cx| { cx.update(|cx| {
cx.set_global(SettingsStore::test(cx)); cx.set_global(SettingsStore::test(cx));
cx.set_global(ActiveSearches::default()); cx.set_global(ActiveSearches::default());
settings::register::<SemanticIndexSettings>(cx);
theme::init((), cx); theme::init((), cx);
cx.update_global::<SettingsStore, _, _>(|store, _| { cx.update_global::<SettingsStore, _, _>(|store, _| {

View file

@ -3,6 +3,7 @@ pub use buffer_search::BufferSearchBar;
use gpui::{actions, Action, AppContext}; use gpui::{actions, Action, AppContext};
use project::search::SearchQuery; use project::search::SearchQuery;
pub use project_search::{ProjectSearchBar, ProjectSearchView}; pub use project_search::{ProjectSearchBar, ProjectSearchView};
use smallvec::SmallVec;
pub mod buffer_search; pub mod buffer_search;
pub mod project_search; pub mod project_search;
@ -21,6 +22,8 @@ actions!(
SelectNextMatch, SelectNextMatch,
SelectPrevMatch, SelectPrevMatch,
SelectAllMatches, SelectAllMatches,
NextHistoryQuery,
PreviousHistoryQuery,
] ]
); );
@ -65,3 +68,187 @@ impl SearchOptions {
options options
} }
} }
const SEARCH_HISTORY_LIMIT: usize = 20;
#[derive(Default, Debug, Clone)]
pub struct SearchHistory {
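// The inline capacity matches SEARCH_HISTORY_LIMIT, so the history stays on the stack while within the limit.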
history: SmallVec<[String; SEARCH_HISTORY_LIMIT]>,
selected: Option<usize>,
}
impl SearchHistory {
pub fn add(&mut self, search_string: String) {
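// Ignore the query if it is identical to the currently selected entry.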
if let Some(i) = self.selected {
if search_string == self.history[i] {
return;
}
}
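// If the new query contains the previous one (e.g. the user kept typing), overwrite that entry instead of adding a near-duplicate.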
if let Some(previously_searched) = self.history.last_mut() {
if search_string.find(previously_searched.as_str()).is_some() {
*previously_searched = search_string;
self.selected = Some(self.history.len() - 1);
return;
}
}
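// Otherwise append the query, evict the oldest entry once the limit is exceeded, and select the new entry.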
self.history.push(search_string);
if self.history.len() > SEARCH_HISTORY_LIMIT {
self.history.remove(0);
}
self.selected = Some(self.history.len() - 1);
}
pub fn next(&mut self) -> Option<&str> {
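// Step toward newer entries; returns None when nothing is selected or the newest entry is already selected.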
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let selected = self.selected?;
if selected == history_size - 1 {
return None;
}
let next_index = selected + 1;
self.selected = Some(next_index);
Some(&self.history[next_index])
}
pub fn current(&self) -> Option<&str> {
Some(&self.history[self.selected?])
}
pub fn previous(&mut self) -> Option<&str> {
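// Step toward older entries, starting from the newest when nothing is selected; returns None at the oldest entry (or when the history is empty).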
let history_size = self.history.len();
if history_size == 0 {
return None;
}
let prev_index = match self.selected {
Some(selected_index) => {
if selected_index == 0 {
return None;
} else {
selected_index - 1
}
}
None => history_size - 1,
};
self.selected = Some(prev_index);
Some(&self.history[prev_index])
}
pub fn reset_selection(&mut self) {
self.selected = None;
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_add() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.current(),
None,
"No current selection should be set fo the default search history"
);
search_history.add("rust".to_string());
assert_eq!(
search_history.current(),
Some("rust"),
"Newly added item should be selected"
);
// check that duplicates are not added
search_history.add("rust".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should not add a duplicate"
);
assert_eq!(search_history.current(), Some("rust"));
// check that a new string containing the previous string replaces it
search_history.add("rustlang".to_string());
assert_eq!(
search_history.history.len(),
1,
"Should replace previous item if it's a substring"
);
assert_eq!(search_history.current(), Some("rustlang"));
// push enough items to test SEARCH_HISTORY_LIMIT
for i in 0..SEARCH_HISTORY_LIMIT * 2 {
search_history.add(format!("item{i}"));
}
assert!(search_history.history.len() <= SEARCH_HISTORY_LIMIT);
}
#[test]
fn test_next_and_previous() {
let mut search_history = SearchHistory::default();
assert_eq!(
search_history.next(),
None,
"Default search history should not have a next item"
);
search_history.add("Rust".to_string());
assert_eq!(search_history.next(), None);
search_history.add("JavaScript".to_string());
assert_eq!(search_history.next(), None);
search_history.add("TypeScript".to_string());
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.previous(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.previous(), Some("Rust"));
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.previous(), None);
assert_eq!(search_history.current(), Some("Rust"));
assert_eq!(search_history.next(), Some("JavaScript"));
assert_eq!(search_history.current(), Some("JavaScript"));
assert_eq!(search_history.next(), Some("TypeScript"));
assert_eq!(search_history.current(), Some("TypeScript"));
assert_eq!(search_history.next(), None);
assert_eq!(search_history.current(), Some("TypeScript"));
}
#[test]
fn test_reset_selection() {
let mut search_history = SearchHistory::default();
search_history.add("Rust".to_string());
search_history.add("JavaScript".to_string());
search_history.add("TypeScript".to_string());
assert_eq!(search_history.current(), Some("TypeScript"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
assert_eq!(
search_history.previous(),
Some("TypeScript"),
"Should start from the end after reset on previous item query"
);
search_history.previous();
assert_eq!(search_history.current(), Some("JavaScript"));
search_history.previous();
assert_eq!(search_history.current(), Some("Rust"));
search_history.reset_selection();
assert_eq!(search_history.current(), None);
}
}

View file

@ -1,7 +1,6 @@
use crate::{parsing::Document, SEMANTIC_INDEX_VERSION}; use crate::{parsing::Document, SEMANTIC_INDEX_VERSION};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use globset::GlobMatcher; use project::{search::PathMatcher, Fs};
use project::Fs;
use rpc::proto::Timestamp; use rpc::proto::Timestamp;
use rusqlite::{ use rusqlite::{
params, params,
@ -290,8 +289,8 @@ impl VectorDatabase {
pub fn retrieve_included_file_ids( pub fn retrieve_included_file_ids(
&self, &self,
worktree_ids: &[i64], worktree_ids: &[i64],
include_globs: Vec<GlobMatcher>, includes: &[PathMatcher],
exclude_globs: Vec<GlobMatcher>, excludes: &[PathMatcher],
) -> Result<Vec<i64>> { ) -> Result<Vec<i64>> {
let mut file_query = self.db.prepare( let mut file_query = self.db.prepare(
" "
@ -310,13 +309,9 @@ impl VectorDatabase {
while let Some(row) = rows.next()? { while let Some(row) = rows.next()? {
let file_id = row.get(0)?; let file_id = row.get(0)?;
let relative_path = row.get_ref(1)?.as_str()?; let relative_path = row.get_ref(1)?.as_str()?;
let included = include_globs.is_empty() let included =
|| include_globs includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
.iter() let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
.any(|glob| glob.is_match(relative_path));
let excluded = exclude_globs
.iter()
.any(|glob| glob.is_match(relative_path));
if included && !excluded { if included && !excluded {
file_ids.push(file_id); file_ids.push(file_id);
} }

View file

@ -11,13 +11,12 @@ use anyhow::{anyhow, Result};
use db::VectorDatabase; use db::VectorDatabase;
use embedding::{EmbeddingProvider, OpenAIEmbeddings}; use embedding::{EmbeddingProvider, OpenAIEmbeddings};
use futures::{channel::oneshot, Future}; use futures::{channel::oneshot, Future};
use globset::GlobMatcher;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle}; use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use language::{Anchor, Buffer, Language, LanguageRegistry}; use language::{Anchor, Buffer, Language, LanguageRegistry};
use parking_lot::Mutex; use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES}; use parsing::{CodeContextRetriever, Document, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch; use postage::watch;
use project::{Fs, Project, WorktreeId}; use project::{search::PathMatcher, Fs, Project, WorktreeId};
use smol::channel; use smol::channel;
use std::{ use std::{
cmp::Ordering, cmp::Ordering,
@ -683,8 +682,8 @@ impl SemanticIndex {
project: ModelHandle<Project>, project: ModelHandle<Project>,
phrase: String, phrase: String,
limit: usize, limit: usize,
include_globs: Vec<GlobMatcher>, includes: Vec<PathMatcher>,
exclude_globs: Vec<GlobMatcher>, excludes: Vec<PathMatcher>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<SearchResult>>> { ) -> Task<Result<Vec<SearchResult>>> {
let project_state = if let Some(state) = self.projects.get(&project.downgrade()) { let project_state = if let Some(state) = self.projects.get(&project.downgrade()) {
@ -715,11 +714,8 @@ impl SemanticIndex {
.next() .next()
.unwrap(); .unwrap();
let file_ids = database.retrieve_included_file_ids( let file_ids =
&worktree_db_ids, database.retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)?;
include_globs,
exclude_globs,
)?;
let batch_n = cx.background().num_cpus(); let batch_n = cx.background().num_cpus();
let ids_len = file_ids.clone().len(); let ids_len = file_ids.clone().len();

View file

@ -7,11 +7,10 @@ use crate::{
}; };
use anyhow::Result; use anyhow::Result;
use async_trait::async_trait; use async_trait::async_trait;
use globset::Glob;
use gpui::{Task, TestAppContext}; use gpui::{Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset}; use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, FakeFs, Fs, Project}; use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng}; use rand::{rngs::StdRng, Rng};
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
@ -121,8 +120,8 @@ async fn test_semantic_index(cx: &mut TestAppContext) {
); );
// Test Include Files Functionality // Test Include Files Functionality
let include_files = vec![Glob::new("*.rs").unwrap().compile_matcher()]; let include_files = vec![PathMatcher::new("*.rs").unwrap()];
let exclude_files = vec![Glob::new("*.rs").unwrap().compile_matcher()]; let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
let rust_only_search_results = store let rust_only_search_results = store
.update(cx, |store, cx| { .update(cx, |store, cx| {
store.search_project( store.search_project(

View file

@ -44,7 +44,11 @@ impl ModeIndicator {
// Vim doesn't exist in some tests // Vim doesn't exist in some tests
let mode = cx let mode = cx
.has_global::<Vim>() .has_global::<Vim>()
.then(|| cx.global::<Vim>().state.mode); .then(|| {
let vim = cx.global::<Vim>();
vim.enabled.then(|| vim.state.mode)
})
.flatten();
Self { Self {
mode, mode,

View file

@ -222,7 +222,7 @@ mod test {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc"); assert_eq!(bar.query(cx), "cc");
}); });
deterministic.run_until_parked(); deterministic.run_until_parked();

View file

@ -99,7 +99,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), ""); assert_eq!(bar.query(cx), "");
}) })
} }
@ -175,7 +175,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) {
}); });
search_bar.read_with(cx.cx, |bar, cx| { search_bar.read_with(cx.cx, |bar, cx| {
assert_eq!(bar.query_editor.read(cx).text(cx), "cc"); assert_eq!(bar.query(cx), "cc");
}); });
// wait for the query editor change event to fire. // wait for the query editor change event to fire.

View file

@ -746,6 +746,10 @@ impl Pane {
_: &CloseAllItems, _: &CloseAllItems,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> { ) -> Option<Task<Result<()>>> {
if self.items.is_empty() {
return None;
}
Some(self.close_items(cx, move |_| true)) Some(self.close_items(cx, move |_| true))
} }

View file

@ -122,6 +122,7 @@ actions!(
NewFile, NewFile,
NewWindow, NewWindow,
CloseWindow, CloseWindow,
CloseInactiveTabsAndPanes,
AddFolderToProject, AddFolderToProject,
Unfollow, Unfollow,
Save, Save,
@ -240,6 +241,7 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
cx.add_async_action(Workspace::follow_next_collaborator); cx.add_async_action(Workspace::follow_next_collaborator);
cx.add_async_action(Workspace::close); cx.add_async_action(Workspace::close);
cx.add_async_action(Workspace::close_inactive_items_and_panes);
cx.add_global_action(Workspace::close_global); cx.add_global_action(Workspace::close_global);
cx.add_global_action(restart); cx.add_global_action(restart);
cx.add_async_action(Workspace::save_all); cx.add_async_action(Workspace::save_all);
@ -1671,6 +1673,45 @@ impl Workspace {
} }
} }
pub fn close_inactive_items_and_panes(
&mut self,
_: &CloseInactiveTabsAndPanes,
cx: &mut ViewContext<Self>,
) -> Option<Task<Result<()>>> {
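// Close inactive items in the active pane, then close every item in all other panes; return None if there is nothing to close.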
let current_pane = self.active_pane();
let mut tasks = Vec::new();
if let Some(current_pane_close) = current_pane.update(cx, |pane, cx| {
pane.close_inactive_items(&CloseInactiveItems, cx)
}) {
tasks.push(current_pane_close);
};
for pane in self.panes() {
if pane.id() == current_pane.id() {
continue;
}
if let Some(close_pane_items) = pane.update(cx, |pane: &mut Pane, cx| {
pane.close_all_items(&CloseAllItems, cx)
}) {
tasks.push(close_pane_items)
}
}
if tasks.is_empty() {
None
} else {
Some(cx.spawn(|_, _| async move {
for task in tasks {
task.await?
}
Ok(())
}))
}
}
pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) { pub fn toggle_dock(&mut self, dock_side: DockPosition, cx: &mut ViewContext<Self>) {
let dock = match dock_side { let dock = match dock_side {
DockPosition::Left => &self.left_dock, DockPosition::Left => &self.left_dock,

View file

@ -128,6 +128,7 @@ tree-sitter-svelte.workspace = true
tree-sitter-racket.workspace = true tree-sitter-racket.workspace = true
tree-sitter-yaml.workspace = true tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true tree-sitter-lua.workspace = true
tree-sitter-nix.workspace = true
url = "2.2" url = "2.2"
urlencoding = "2.1.2" urlencoding = "2.1.2"

View file

@ -152,8 +152,10 @@ pub fn init(languages: Arc<LanguageRegistry>, node_runtime: Arc<NodeRuntime>) {
tree_sitter_php::language(), tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))], vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
); );
language("elm", tree_sitter_elm::language(), vec![]); language("elm", tree_sitter_elm::language(), vec![]);
language("glsl", tree_sitter_glsl::language(), vec![]); language("glsl", tree_sitter_glsl::language(), vec![]);
language("nix", tree_sitter_nix::language(), vec![]);
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]

View file

@ -0,0 +1,11 @@
name = "Nix"
path_suffixes = ["nix"]
line_comment = "# "
block_comment = ["/* ", " */"]
autoclose_before = ";:.,=}])>` \n\t\""
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = true, newline = true },
]

View file

@ -0,0 +1,95 @@
(comment) @comment
[
"if"
"then"
"else"
"let"
"inherit"
"in"
"rec"
"with"
"assert"
"or"
] @keyword
[
(string_expression)
(indented_string_expression)
] @string
[
(path_expression)
(hpath_expression)
(spath_expression)
] @string.special.path
(uri_expression) @link_uri
[
(integer_expression)
(float_expression)
] @number
(interpolation
"${" @punctuation.special
"}" @punctuation.special) @embedded
(escape_sequence) @escape
(dollar_escape) @escape
(function_expression
universal: (identifier) @parameter
)
(formal
name: (identifier) @parameter
"?"? @punctuation.delimiter)
(select_expression
attrpath: (attrpath (identifier)) @property)
(apply_expression
function: [
(variable_expression (identifier)) @function
(select_expression
attrpath: (attrpath
attr: (identifier) @function .))])
(unary_expression
operator: _ @operator)
(binary_expression
operator: _ @operator)
(variable_expression (identifier) @variable)
(binding
attrpath: (attrpath (identifier)) @property)
"=" @operator
[
";"
"."
","
] @punctuation.delimiter
[
"("
")"
"["
"]"
"{"
"}"
] @punctuation.bracket
(identifier) @variable
((identifier) @function.builtin
(#match? @function.builtin "^(__add|__addErrorContext|__all|__any|__appendContext|__attrNames|__attrValues|__bitAnd|__bitOr|__bitXor|__catAttrs|__compareVersions|__concatLists|__concatMap|__concatStringsSep|__deepSeq|__div|__elem|__elemAt|__fetchurl|__filter|__filterSource|__findFile|__foldl'|__fromJSON|__functionArgs|__genList|__genericClosure|__getAttr|__getContext|__getEnv|__hasAttr|__hasContext|__hashFile|__hashString|__head|__intersectAttrs|__isAttrs|__isBool|__isFloat|__isFunction|__isInt|__isList|__isPath|__isString|__langVersion|__length|__lessThan|__listToAttrs|__mapAttrs|__match|__mul|__parseDrvName|__partition|__path|__pathExists|__readDir|__readFile|__replaceStrings|__seq|__sort|__split|__splitVersion|__storePath|__stringLength|__sub|__substring|__tail|__toFile|__toJSON|__toPath|__toXML|__trace|__tryEval|__typeOf|__unsafeDiscardOutputDependency|__unsafeDiscardStringContext|__unsafeGetAttrPos|__valueSize|abort|baseNameOf|derivation|derivationStrict|dirOf|fetchGit|fetchMercurial|fetchTarball|fromTOML|import|isNull|map|placeholder|removeAttrs|scopedImport|throw|toString)$")
(#is-not? local))
((identifier) @variable.builtin
(#match? @variable.builtin "^(__currentSystem|__currentTime|__nixPath|__nixVersion|__storeDir|builtins|false|null|true)$")
(#is-not? local))

View file

@ -136,7 +136,7 @@ fn main() {
languages.set_executor(cx.background().clone()); languages.set_executor(cx.background().clone());
languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone()); languages.set_language_server_download_dir(paths::LANGUAGES_DIR.clone());
let languages = Arc::new(languages); let languages = Arc::new(languages);
let node_runtime = NodeRuntime::instance(http.clone(), cx.background().to_owned()); let node_runtime = NodeRuntime::instance(http.clone());
languages::init(languages.clone(), node_runtime.clone()); languages::init(languages.clone(), node_runtime.clone());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx)); let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
@ -717,7 +717,7 @@ async fn watch_languages(_: Arc<dyn Fs>, _: Arc<LanguageRegistry>) -> Option<()>
} }
#[cfg(not(debug_assertions))] #[cfg(not(debug_assertions))]
fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {} fn watch_file_types(_fs: Arc<dyn Fs>, _cx: &mut AppContext) {}
fn connect_to_cli( fn connect_to_cli(
server_name: &str, server_name: &str,

View file

@ -2364,7 +2364,7 @@ mod tests {
languages.set_executor(cx.background().clone()); languages.set_executor(cx.background().clone());
let languages = Arc::new(languages); let languages = Arc::new(languages);
let http = FakeHttpClient::with_404_response(); let http = FakeHttpClient::with_404_response();
let node_runtime = NodeRuntime::instance(http, cx.background().to_owned()); let node_runtime = NodeRuntime::instance(http);
languages::init(languages.clone(), node_runtime); languages::init(languages.clone(), node_runtime);
for name in languages.language_names() { for name in languages.language_names() {
languages.language_for_name(&name); languages.language_for_name(&name);

View file

@ -182,8 +182,8 @@ export default function editor(): any {
line_number: with_opacity(foreground(layer), 0.35), line_number: with_opacity(foreground(layer), 0.35),
line_number_active: foreground(layer), line_number_active: foreground(layer),
rename_fade: 0.6, rename_fade: 0.6,
wrap_guide: with_opacity(foreground(layer), 0.1), wrap_guide: with_opacity(foreground(layer), 0.05),
active_wrap_guide: with_opacity(foreground(layer), 0.2), active_wrap_guide: with_opacity(foreground(layer), 0.1),
unnecessary_code_fade: 0.5, unnecessary_code_fade: 0.5,
selection: theme.players[0], selection: theme.players[0],
whitespace: theme.ramps.neutral(0.5).hex(), whitespace: theme.ramps.neutral(0.5).hex(),