Remove 2 suffix for project
Co-authored-by: Mikayla <mikayla@zed.dev>
parent 4ddb26204f
commit c5a1950522
48 changed files with 1800 additions and 25868 deletions
Cargo.lock (generated): 103 changed lines

@@ -12,7 +12,7 @@ dependencies = [
  "futures 0.3.28",
  "gpui2",
  "language2",
- "project2",
+ "project",
  "settings2",
  "smallvec",
  "theme2",

@@ -321,7 +321,7 @@ dependencies = [
  "multi_buffer",
  "ordered-float 2.10.0",
  "parking_lot 0.11.2",
- "project2",
+ "project",
  "rand 0.8.5",
  "regex",
  "schemars",

@@ -694,7 +694,7 @@ dependencies = [
  "lazy_static",
  "log",
  "menu2",
- "project2",
+ "project",
  "serde",
  "serde_derive",
  "serde_json",

@@ -1027,7 +1027,7 @@ dependencies = [
  "itertools 0.10.5",
  "language2",
  "outline",
- "project2",
+ "project",
  "search",
  "settings2",
  "theme2",

@@ -1157,7 +1157,7 @@ dependencies = [
  "log",
  "media",
  "postage",
- "project2",
+ "project",
  "schemars",
  "serde",
  "serde_derive",

@@ -1671,7 +1671,7 @@ dependencies = [
  "notifications2",
  "parking_lot 0.11.2",
  "pretty_assertions",
- "project2",
+ "project",
  "prometheus",
  "prost 0.8.0",
  "rand 0.8.5",

@@ -1729,7 +1729,7 @@ dependencies = [
  "picker",
  "postage",
  "pretty_assertions",
- "project2",
+ "project",
  "recent_projects",
  "rich_text",
  "rpc2",

@@ -1793,7 +1793,7 @@ dependencies = [
  "language2",
  "menu2",
  "picker",
- "project2",
+ "project",
  "serde",
  "serde_json",
  "settings2",

@@ -2442,7 +2442,7 @@ dependencies = [
  "log",
  "lsp2",
  "postage",
- "project2",
+ "project",
  "schemars",
  "serde",
  "serde_derive",

@@ -2616,7 +2616,7 @@ dependencies = [
  "ordered-float 2.10.0",
  "parking_lot 0.11.2",
  "postage",
- "project2",
+ "project",
  "rand 0.8.5",
  "rich_text",
  "rpc2",

@@ -2835,7 +2835,7 @@ dependencies = [
  "log",
  "menu2",
  "postage",
- "project2",
+ "project",
  "regex",
  "search",
  "serde",

@@ -2866,7 +2866,7 @@ dependencies = [
  "menu2",
  "picker",
  "postage",
- "project2",
+ "project",
  "serde",
  "serde_json",
  "settings2",

@@ -4307,7 +4307,7 @@ dependencies = [
  "gpui2",
  "language2",
  "picker",
- "project2",
+ "project",
  "settings2",
  "theme2",
  "ui2",

@@ -4328,7 +4328,7 @@ dependencies = [
  "gpui2",
  "language2",
  "lsp2",
- "project2",
+ "project",
  "serde",
  "settings2",
  "theme2",

@@ -4961,7 +4961,7 @@ dependencies = [
  "ordered-float 2.10.0",
  "parking_lot 0.11.2",
  "postage",
- "project2",
+ "project",
  "pulldown-cmark",
  "rand 0.8.5",
  "rich_text",

@@ -6091,61 +6091,6 @@ dependencies = [
 [[package]]
 name = "project"
 version = "0.1.0"
-dependencies = [
- "aho-corasick",
- "anyhow",
- "async-trait",
- "backtrace",
- "client",
- "clock",
- "collections",
- "copilot",
- "ctor",
- "db",
- "env_logger",
- "fs",
- "fsevent",
- "futures 0.3.28",
- "fuzzy",
- "git",
- "git2",
- "globset",
- "gpui",
- "ignore",
- "itertools 0.10.5",
- "language",
- "lazy_static",
- "log",
- "lsp",
- "node_runtime",
- "parking_lot 0.11.2",
- "postage",
- "prettier",
- "pretty_assertions",
- "rand 0.8.5",
- "regex",
- "rpc",
- "schemars",
- "serde",
- "serde_derive",
- "serde_json",
- "settings",
- "sha2 0.10.7",
- "similar",
- "smol",
- "sum_tree",
- "tempdir",
- "terminal",
- "text",
- "thiserror",
- "toml 0.5.11",
- "unindent",
- "util",
-]
-
-[[package]]
-name = "project2"
-version = "0.1.0"
 dependencies = [
  "aho-corasick",
  "anyhow",

@@ -6213,7 +6158,7 @@ dependencies = [
  "menu2",
  "postage",
  "pretty_assertions",
- "project2",
+ "project",
  "schemars",
  "search",
  "serde",

@@ -6242,7 +6187,7 @@ dependencies = [
  "ordered-float 2.10.0",
  "picker",
  "postage",
- "project2",
+ "project",
  "settings2",
  "smol",
  "text2",

@@ -7415,7 +7360,7 @@ dependencies = [
  "log",
  "menu2",
  "postage",
- "project2",
+ "project",
  "semantic_index2",
  "serde",
  "serde_derive",

@@ -7530,7 +7475,7 @@ dependencies = [
  "parking_lot 0.11.2",
  "postage",
  "pretty_assertions",
- "project2",
+ "project",
  "rand 0.8.5",
  "rpc2",
  "rusqlite",

@@ -8707,7 +8652,7 @@ dependencies = [
  "mio-extras",
  "ordered-float 2.10.0",
  "procinfo",
- "project2",
+ "project",
  "rand 0.8.5",
  "serde",
  "serde_derive",

@@ -9950,7 +9895,7 @@ dependencies = [
  "lsp2",
  "nvim-rs",
  "parking_lot 0.11.2",
- "project2",
+ "project",
  "search",
  "serde",
  "serde_derive",

@@ -10365,7 +10310,7 @@ dependencies = [
  "install_cli",
  "log",
  "picker",
- "project2",
+ "project",
  "schemars",
  "serde",
  "settings2",

@@ -10640,7 +10585,7 @@ dependencies = [
  "node_runtime",
  "parking_lot 0.11.2",
  "postage",
- "project2",
+ "project",
  "schemars",
  "serde",
  "serde_derive",

@@ -10791,7 +10736,7 @@ dependencies = [
  "outline",
  "parking_lot 0.11.2",
  "postage",
- "project2",
+ "project",
  "project_panel",
  "project_symbols",
  "quick_action_bar",

Cargo.toml

@@ -70,7 +70,6 @@ members = [
     "crates/prettier",
     "crates/prettier2",
     "crates/project",
-    "crates/project2",
     "crates/project_panel",
     "crates/project_symbols",
     "crates/quick_action_bar",

@@ -13,7 +13,7 @@ auto_update = { path = "../auto_update" }
 editor = { path = "../editor" }
 language = { path = "../language2", package = "language2" }
 gpui = { path = "../gpui2", package = "gpui2" }
-project = { path = "../project2", package = "project2" }
+project = { path = "../project" }
 settings = { path = "../settings2", package = "settings2" }
 ui = { path = "../ui2", package = "ui2" }
 util = { path = "../util" }

@@ -18,7 +18,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
 language = { package = "language2", path = "../language2" }
 menu = { package = "menu2", path = "../menu2" }
 multi_buffer = { path = "../multi_buffer" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 search = { path = "../search" }
 semantic_index = { package = "semantic_index2", path = "../semantic_index2" }
 settings = { package = "settings2", path = "../settings2" }

@@ -46,7 +46,7 @@ tiktoken-rs.workspace = true
 [dev-dependencies]
 ai = { path = "../ai", features = ["test-support"]}
 editor = { path = "../editor", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }

 ctor.workspace = true
 env_logger.workspace = true

@@ -13,7 +13,7 @@ db = { package = "db2", path = "../db2" }
 client = { package = "client2", path = "../client2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 menu = { package = "menu2", path = "../menu2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }
 workspace = { path = "../workspace" }

@@ -14,7 +14,7 @@ editor = { path = "../editor" }
 gpui = { package = "gpui2", path = "../gpui2" }
 ui = { package = "ui2", path = "../ui2" }
 language = { package = "language2", path = "../language2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 search = { path = "../search" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }

@@ -28,7 +28,7 @@ live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2" }
 fs = { package = "fs2", path = "../fs2" }
 language = { package = "language2", path = "../language2" }
 media = { path = "../media" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 util = { path = "../util" }

@@ -50,5 +50,5 @@ language = { package = "language2", path = "../language2", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

@@ -75,7 +75,7 @@ lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
 node_runtime = { path = "../node_runtime" }
 notifications = { package = "notifications2", path = "../notifications2", features = ["test-support"] }

-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
 theme = { package = "theme2", path = "../theme2" }

@@ -40,7 +40,7 @@ menu = { package = "menu2", path = "../menu2" }
 notifications = { package = "notifications2", path = "../notifications2" }
 rich_text = { path = "../rich_text" }
 picker = { path = "../picker" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 recent_projects = { path = "../recent_projects" }
 rpc = { package ="rpc2", path = "../rpc2" }
 settings = { package = "settings2", path = "../settings2" }

@@ -71,7 +71,7 @@ collections = { path = "../collections", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 notifications = { package = "notifications2", path = "../notifications2", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }

@@ -14,7 +14,7 @@ editor = { path = "../editor" }
 fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 picker = { path = "../picker" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 ui = { package = "ui2", path = "../ui2" }
 util = { path = "../util" }

@@ -28,7 +28,7 @@ serde.workspace = true
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 editor = { path = "../editor", features = ["test-support"] }
 language = { package="language2", path = "../language2", features = ["test-support"] }
-project = { package="project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 menu = { package = "menu2", path = "../menu2" }
 go_to_line = { path = "../go_to_line" }
 serde_json.workspace = true

@@ -15,7 +15,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
 ui = { package = "ui2", path = "../ui2" }
 language = { package = "language2", path = "../language2" }
 lsp = { package = "lsp2", path = "../lsp2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }
 util = { path = "../util" }

@@ -35,7 +35,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
 language = { package = "language2", path = "../language2" }
 lsp = { package = "lsp2", path = "../lsp2" }
 multi_buffer = { path = "../multi_buffer" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 rpc = { package = "rpc2", path = "../rpc2" }
 rich_text = { path = "../rich_text" }
 settings = { package="settings2", path = "../settings2" }

@@ -78,7 +78,7 @@ language = { package="language2", path = "../language2", features = ["test-support"] }
 lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
 workspace = { path = "../workspace", features = ["test-support"] }
 multi_buffer = { path = "../multi_buffer", features = ["test-support"] }

@@ -17,7 +17,7 @@ editor = { path = "../editor" }
 gpui = { package = "gpui2", path = "../gpui2" }
 language = { package = "language2", path = "../language2" }
 menu = { package = "menu2", path = "../menu2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 search = { path = "../search" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }

@@ -15,7 +15,7 @@ fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 menu = { package = "menu2", path = "../menu2" }
 picker = { path = "../picker" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 text = { package = "text2", path = "../text2" }
 util = { path = "../util" }

@@ -14,7 +14,7 @@ fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
 language = { package = "language2", path = "../language2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 picker = { path = "../picker" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 theme = { package = "theme2", path = "../theme2" }
 ui = { package = "ui2", path = "../ui2" }
 settings = { package = "settings2", path = "../settings2" }

@@ -14,7 +14,7 @@ editor = { path = "../editor" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }
 language = { package = "language2", path = "../language2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 workspace = { path = "../workspace" }
 gpui = { package = "gpui2", path = "../gpui2" }
 ui = { package = "ui2", path = "../ui2" }

@@ -65,7 +65,7 @@ language = { package = "language2", path = "../language2", features = ["test-support"] }
 lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"] }

 ctor.workspace = true

@@ -16,28 +16,29 @@ test-support = [
     "settings/test-support",
     "text/test-support",
     "prettier/test-support",
+    "gpui/test-support",
 ]

 [dependencies]
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
 copilot = { path = "../copilot" }
-client = { path = "../client" }
+client = { package = "client2", path = "../client2" }
 clock = { path = "../clock" }
 collections = { path = "../collections" }
-db = { path = "../db" }
-fs = { path = "../fs" }
+db = { package = "db2", path = "../db2" }
+fs = { package = "fs2", path = "../fs2" }
 fsevent = { path = "../fsevent" }
-fuzzy = { path = "../fuzzy" }
-git = { path = "../git" }
-gpui = { path = "../gpui" }
-language = { path = "../language" }
-lsp = { path = "../lsp" }
+fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
+git = { package = "git3", path = "../git3" }
+gpui = { package = "gpui2", path = "../gpui2" }
+language = { package = "language2", path = "../language2" }
+lsp = { package = "lsp2", path = "../lsp2" }
 node_runtime = { path = "../node_runtime" }
-prettier = { path = "../prettier" }
-rpc = { path = "../rpc" }
-settings = { path = "../settings" }
+prettier = { package = "prettier2", path = "../prettier2" }
+rpc = { package = "rpc2", path = "../rpc2" }
+settings = { package = "settings2", path = "../settings2" }
 sum_tree = { path = "../sum_tree" }
-terminal = { path = "../terminal" }
+terminal = { package = "terminal2", path = "../terminal2" }
 util = { path = "../util" }

 aho-corasick = "1.1"

@@ -68,17 +69,17 @@ itertools = "0.10"
 ctor.workspace = true
 env_logger.workspace = true
 pretty_assertions.workspace = true
-client = { path = "../client", features = ["test-support"] }
+client = { package = "client2", path = "../client2", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
-db = { path = "../db", features = ["test-support"] }
-fs = { path = "../fs", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
-language = { path = "../language", features = ["test-support"] }
-lsp = { path = "../lsp", features = ["test-support"] }
-settings = { path = "../settings", features = ["test-support"] }
-prettier = { path = "../prettier", features = ["test-support"] }
+db = { package = "db2", path = "../db2", features = ["test-support"] }
+fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+language = { package = "language2", path = "../language2", features = ["test-support"] }
+lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
+settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+prettier = { package = "prettier2", path = "../prettier2", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
-rpc = { path = "../rpc", features = ["test-support"] }
+rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 git2.workspace = true
 tempdir.workspace = true
 unindent.workspace = true

@@ -7,7 +7,7 @@ use anyhow::{anyhow, Context, Result};
 use async_trait::async_trait;
 use client::proto::{self, PeerId};
 use futures::future;
-use gpui::{AppContext, AsyncAppContext, ModelHandle};
+use gpui::{AppContext, AsyncAppContext, Model};
 use language::{
     language_settings::{language_settings, InlayHintKind},
     point_from_lsp, point_to_lsp, prepare_completion_documentation,

@@ -33,7 +33,7 @@ pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
 }

 #[async_trait(?Send)]
-pub trait LspCommand: 'static + Sized {
+pub trait LspCommand: 'static + Sized + Send {
     type Response: 'static + Default + Send;
     type LspRequest: 'static + Send + lsp::request::Request;
     type ProtoRequest: 'static + Send + proto::RequestMessage;

@@ -53,8 +53,8 @@ pub trait LspCommand: 'static + Sized {
     async fn response_from_lsp(
         self,
         message: <Self::LspRequest as lsp::request::Request>::Result,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         server_id: LanguageServerId,
         cx: AsyncAppContext,
     ) -> Result<Self::Response>;

@@ -63,8 +63,8 @@ pub trait LspCommand: 'static + Sized {

     async fn from_proto(
         message: Self::ProtoRequest,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         cx: AsyncAppContext,
     ) -> Result<Self>;

@@ -79,8 +79,8 @@ pub trait LspCommand: 'static + Sized {
     async fn response_from_proto(
         self,
         message: <Self::ProtoRequest as proto::RequestMessage>::Response,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         cx: AsyncAppContext,
     ) -> Result<Self::Response>;


@@ -180,12 +180,12 @@ impl LspCommand for PrepareRename {
     async fn response_from_lsp(
         self,
         message: Option<lsp::PrepareRenameResponse>,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         _: LanguageServerId,
-        cx: AsyncAppContext,
+        mut cx: AsyncAppContext,
     ) -> Result<Option<Range<Anchor>>> {
-        buffer.read_with(&cx, |buffer, _| {
+        buffer.update(&mut cx, |buffer, _| {
             if let Some(
                 lsp::PrepareRenameResponse::Range(range)
                 | lsp::PrepareRenameResponse::RangeWithPlaceholder { range, .. },

@@ -199,7 +199,7 @@ impl LspCommand for PrepareRename {
                 }
             }
             Ok(None)
-        })
+        })?
     }

     fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {

@@ -215,8 +215,8 @@ impl LspCommand for PrepareRename {

     async fn from_proto(
         message: proto::PrepareRename,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -226,11 +226,11 @@ impl LspCommand for PrepareRename {
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;

         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
         })
     }

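The Rust hunks in this commit all apply one mechanical migration: gpui's ModelHandle<T> parameter type becomes Model<T>, and read access through read_with(&cx, ...) on an AsyncAppContext becomes update(&mut cx, ...), which is now fallible and is propagated with ?, hence the many "})" to "})?" edits. Below is a minimal, self-contained sketch of that call-shape change; the Model and AsyncAppContext types here are stand-ins for illustration, not the real gpui API.

// Stand-in types that mimic only the shape of gpui's `Model<T>` and
// `AsyncAppContext` as used in this commit; they are not the real gpui API.
struct Model<T>(T);
struct AsyncAppContext;

type Result<T> = std::result::Result<T, String>;

impl<T> Model<T> {
    // New style: `update` takes `&mut AsyncAppContext` and returns a `Result`,
    // since an async context can outlive the app the handle points into.
    fn update<R>(
        &mut self,
        cx: &mut AsyncAppContext,
        f: impl FnOnce(&mut T, &mut AsyncAppContext) -> R,
    ) -> Result<R> {
        Ok(f(&mut self.0, cx))
    }
}

fn main() -> Result<()> {
    let mut buffer = Model(String::from("fn main() {}"));
    let mut cx = AsyncAppContext;

    // Before this commit a call site read roughly:
    //     let len = buffer.read_with(&cx, |buffer, _| buffer.len());
    // Afterwards, access goes through `update`, whose Result is propagated
    // with `?`, which is why so many hunks here turn `})` into `})?`.
    let len = buffer.update(&mut cx, |buffer, _| buffer.len())?;
    assert_eq!(len, 12);
    Ok(())
}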

@@ -256,15 +256,15 @@ impl LspCommand for PrepareRename {
     async fn response_from_proto(
         self,
         message: proto::PrepareRenameResponse,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Option<Range<Anchor>>> {
         if message.can_rename {
             buffer
                 .update(&mut cx, |buffer, _| {
                     buffer.wait_for_version(deserialize_version(&message.version))
-                })
+                })?
                 .await?;
             let start = message.start.and_then(deserialize_anchor);
             let end = message.end.and_then(deserialize_anchor);

@@ -307,8 +307,8 @@ impl LspCommand for PerformRename {
     async fn response_from_lsp(
         self,
         message: Option<lsp::WorkspaceEdit>,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         server_id: LanguageServerId,
         mut cx: AsyncAppContext,
     ) -> Result<ProjectTransaction> {

@@ -343,8 +343,8 @@ impl LspCommand for PerformRename {

     async fn from_proto(
         message: proto::PerformRename,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -354,10 +354,10 @@ impl LspCommand for PerformRename {
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;
         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
             new_name: message.new_name,
             push_to_history: false,
         })

@@ -379,8 +379,8 @@ impl LspCommand for PerformRename {
     async fn response_from_proto(
         self,
         message: proto::PerformRenameResponse,
-        project: ModelHandle<Project>,
-        _: ModelHandle<Buffer>,
+        project: Model<Project>,
+        _: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<ProjectTransaction> {
         let message = message

@@ -389,7 +389,7 @@ impl LspCommand for PerformRename {
         project
             .update(&mut cx, |project, cx| {
                 project.deserialize_project_transaction(message, self.push_to_history, cx)
-            })
+            })?
             .await
     }


@@ -426,8 +426,8 @@ impl LspCommand for GetDefinition {
     async fn response_from_lsp(
         self,
         message: Option<lsp::GotoDefinitionResponse>,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         server_id: LanguageServerId,
         cx: AsyncAppContext,
     ) -> Result<Vec<LocationLink>> {

@@ -447,8 +447,8 @@ impl LspCommand for GetDefinition {

     async fn from_proto(
         message: proto::GetDefinition,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -458,10 +458,10 @@ impl LspCommand for GetDefinition {
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;
         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
         })
     }

@@ -479,8 +479,8 @@ impl LspCommand for GetDefinition {
     async fn response_from_proto(
         self,
         message: proto::GetDefinitionResponse,
-        project: ModelHandle<Project>,
-        _: ModelHandle<Buffer>,
+        project: Model<Project>,
+        _: Model<Buffer>,
         cx: AsyncAppContext,
     ) -> Result<Vec<LocationLink>> {
         location_links_from_proto(message.links, project, cx).await

@@ -527,8 +527,8 @@ impl LspCommand for GetTypeDefinition {
     async fn response_from_lsp(
         self,
         message: Option<lsp::GotoTypeDefinitionResponse>,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         server_id: LanguageServerId,
         cx: AsyncAppContext,
     ) -> Result<Vec<LocationLink>> {

@@ -548,8 +548,8 @@

     async fn from_proto(
         message: proto::GetTypeDefinition,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -559,10 +559,10 @@
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;
         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
         })
     }

@@ -580,8 +580,8 @@ impl LspCommand for GetTypeDefinition {
     async fn response_from_proto(
         self,
         message: proto::GetTypeDefinitionResponse,
-        project: ModelHandle<Project>,
-        _: ModelHandle<Buffer>,
+        project: Model<Project>,
+        _: Model<Buffer>,
         cx: AsyncAppContext,
     ) -> Result<Vec<LocationLink>> {
         location_links_from_proto(message.links, project, cx).await

@@ -593,23 +593,23 @@
 }

 fn language_server_for_buffer(
-    project: &ModelHandle<Project>,
-    buffer: &ModelHandle<Buffer>,
+    project: &Model<Project>,
+    buffer: &Model<Buffer>,
     server_id: LanguageServerId,
     cx: &mut AsyncAppContext,
 ) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
     project
-        .read_with(cx, |project, cx| {
+        .update(cx, |project, cx| {
             project
                 .language_server_for_buffer(buffer.read(cx), server_id, cx)
                 .map(|(adapter, server)| (adapter.clone(), server.clone()))
-        })
+        })?
         .ok_or_else(|| anyhow!("no language server found for buffer"))
 }

 async fn location_links_from_proto(
     proto_links: Vec<proto::LocationLink>,
-    project: ModelHandle<Project>,
+    project: Model<Project>,
     mut cx: AsyncAppContext,
 ) -> Result<Vec<LocationLink>> {
     let mut links = Vec::new();

@@ -620,7 +620,7 @@ async fn location_links_from_proto(
         let buffer = project
             .update(&mut cx, |this, cx| {
                 this.wait_for_remote_buffer(origin.buffer_id, cx)
-            })
+            })?
             .await?;
         let start = origin
             .start

@@ -631,7 +631,7 @@
             .and_then(deserialize_anchor)
             .ok_or_else(|| anyhow!("missing origin end"))?;
         buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
             .await?;
         Some(Location {
             buffer,

@@ -645,7 +645,7 @@
         let buffer = project
             .update(&mut cx, |this, cx| {
                 this.wait_for_remote_buffer(target.buffer_id, cx)
-            })
+            })?
             .await?;
         let start = target
             .start

@@ -656,7 +656,7 @@
             .and_then(deserialize_anchor)
             .ok_or_else(|| anyhow!("missing target end"))?;
         buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
             .await?;
         let target = Location {
             buffer,

@@ -671,8 +671,8 @@

 async fn location_links_from_lsp(
     message: Option<lsp::GotoDefinitionResponse>,
-    project: ModelHandle<Project>,
-    buffer: ModelHandle<Buffer>,
+    project: Model<Project>,
+    buffer: Model<Buffer>,
     server_id: LanguageServerId,
     mut cx: AsyncAppContext,
 ) -> Result<Vec<LocationLink>> {

@@ -714,10 +714,10 @@ async fn location_links_from_lsp(
                     lsp_adapter.name.clone(),
                     cx,
                 )
-            })
+            })?
             .await?;

-        cx.read(|cx| {
+        cx.update(|cx| {
             let origin_location = origin_range.map(|origin_range| {
                 let origin_buffer = buffer.read(cx);
                 let origin_start =

@@ -746,7 +746,7 @@
                 origin: origin_location,
                 target: target_location,
             })
-        });
+        })?;
     }
     Ok(definitions)
 }

@@ -815,8 +815,8 @@ impl LspCommand for GetReferences {
     async fn response_from_lsp(
         self,
         locations: Option<Vec<lsp::Location>>,
-        project: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        project: Model<Project>,
+        buffer: Model<Buffer>,
         server_id: LanguageServerId,
         mut cx: AsyncAppContext,
     ) -> Result<Vec<Location>> {

@@ -834,21 +834,22 @@ impl LspCommand for GetReferences {
                         lsp_adapter.name.clone(),
                         cx,
                     )
-                })
+                })?
                 .await?;

-            cx.read(|cx| {
-                let target_buffer = target_buffer_handle.read(cx);
-                let target_start = target_buffer
-                    .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
-                let target_end = target_buffer
-                    .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
-                references.push(Location {
-                    buffer: target_buffer_handle,
-                    range: target_buffer.anchor_after(target_start)
-                        ..target_buffer.anchor_before(target_end),
-                });
-            });
+            target_buffer_handle
+                .clone()
+                .update(&mut cx, |target_buffer, _| {
+                    let target_start = target_buffer
+                        .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
+                    let target_end = target_buffer
+                        .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
+                    references.push(Location {
+                        buffer: target_buffer_handle,
+                        range: target_buffer.anchor_after(target_start)
+                            ..target_buffer.anchor_before(target_end),
+                    });
+                })?;
         }
     }


@@ -868,8 +869,8 @@

     async fn from_proto(
         message: proto::GetReferences,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -879,10 +880,10 @@
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;
         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
         })
     }

@@ -910,8 +911,8 @@
     async fn response_from_proto(
         self,
         message: proto::GetReferencesResponse,
-        project: ModelHandle<Project>,
-        _: ModelHandle<Buffer>,
+        project: Model<Project>,
+        _: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Vec<Location>> {
         let mut locations = Vec::new();

@@ -919,7 +920,7 @@
         let target_buffer = project
             .update(&mut cx, |this, cx| {
                 this.wait_for_remote_buffer(location.buffer_id, cx)
-            })
+            })?
             .await?;
         let start = location
             .start

@@ -930,7 +931,7 @@
             .and_then(deserialize_anchor)
             .ok_or_else(|| anyhow!("missing target end"))?;
         target_buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
             .await?;
         locations.push(Location {
             buffer: target_buffer,

@@ -977,15 +978,15 @@ impl LspCommand for GetDocumentHighlights {
     async fn response_from_lsp(
         self,
         lsp_highlights: Option<Vec<lsp::DocumentHighlight>>,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         _: LanguageServerId,
-        cx: AsyncAppContext,
+        mut cx: AsyncAppContext,
     ) -> Result<Vec<DocumentHighlight>> {
-        buffer.read_with(&cx, |buffer, _| {
+        buffer.update(&mut cx, |buffer, _| {
             let mut lsp_highlights = lsp_highlights.unwrap_or_default();
             lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end)));
-            Ok(lsp_highlights
+            lsp_highlights
                 .into_iter()
                 .map(|lsp_highlight| {
                     let start = buffer

@@ -999,7 +1000,7 @@
                             .unwrap_or(lsp::DocumentHighlightKind::READ),
                     }
                 })
-                .collect())
+                .collect()
         })
     }


@@ -1016,8 +1017,8 @@ impl LspCommand for GetDocumentHighlights {

     async fn from_proto(
         message: proto::GetDocumentHighlights,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Self> {
         let position = message

@@ -1027,10 +1028,10 @@
         buffer
             .update(&mut cx, |buffer, _| {
                 buffer.wait_for_version(deserialize_version(&message.version))
-            })
+            })?
             .await?;
         Ok(Self {
-            position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
         })
     }

@@ -1060,8 +1061,8 @@
     async fn response_from_proto(
         self,
         message: proto::GetDocumentHighlightsResponse,
-        _: ModelHandle<Project>,
-        buffer: ModelHandle<Buffer>,
+        _: Model<Project>,
+        buffer: Model<Buffer>,
         mut cx: AsyncAppContext,
     ) -> Result<Vec<DocumentHighlight>> {
         let mut highlights = Vec::new();

@@ -1075,7 +1076,7 @@
             .and_then(deserialize_anchor)
             .ok_or_else(|| anyhow!("missing target end"))?;
         buffer
-            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+            .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
             .await?;
         let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
             Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
@ -1123,73 +1124,71 @@ impl LspCommand for GetHover {
|
||||||
async fn response_from_lsp(
|
async fn response_from_lsp(
|
||||||
self,
|
self,
|
||||||
message: Option<lsp::Hover>,
|
message: Option<lsp::Hover>,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
_: LanguageServerId,
|
_: LanguageServerId,
|
||||||
cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self::Response> {
|
) -> Result<Self::Response> {
|
||||||
Ok(message.and_then(|hover| {
|
let Some(hover) = message else {
|
||||||
let (language, range) = cx.read(|cx| {
|
return Ok(None);
|
||||||
let buffer = buffer.read(cx);
|
};
|
||||||
(
|
|
||||||
buffer.language().cloned(),
|
|
||||||
hover.range.map(|range| {
|
|
||||||
let token_start =
|
|
||||||
buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
|
|
||||||
let token_end =
|
|
||||||
buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
|
|
||||||
buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
fn hover_blocks_from_marked_string(
|
let (language, range) = buffer.update(&mut cx, |buffer, _| {
|
||||||
marked_string: lsp::MarkedString,
|
(
|
||||||
) -> Option<HoverBlock> {
|
buffer.language().cloned(),
|
||||||
let block = match marked_string {
|
hover.range.map(|range| {
|
||||||
lsp::MarkedString::String(content) => HoverBlock {
|
let token_start =
|
||||||
text: content,
|
buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
|
||||||
kind: HoverBlockKind::Markdown,
|
let token_end = buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
|
||||||
},
|
buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
|
||||||
lsp::MarkedString::LanguageString(lsp::LanguageString { language, value }) => {
|
}),
|
||||||
HoverBlock {
|
)
|
||||||
text: value,
|
})?;
|
||||||
kind: HoverBlockKind::Code { language },
|
|
||||||
}
|
fn hover_blocks_from_marked_string(marked_string: lsp::MarkedString) -> Option<HoverBlock> {
|
||||||
|
let block = match marked_string {
|
||||||
|
lsp::MarkedString::String(content) => HoverBlock {
|
||||||
|
text: content,
|
||||||
|
kind: HoverBlockKind::Markdown,
|
||||||
|
},
|
||||||
|
lsp::MarkedString::LanguageString(lsp::LanguageString { language, value }) => {
|
||||||
|
HoverBlock {
|
||||||
|
text: value,
|
||||||
|
kind: HoverBlockKind::Code { language },
|
||||||
}
|
}
|
||||||
};
|
|
||||||
if block.text.is_empty() {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(block)
|
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
if block.text.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(block)
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let contents = cx.read(|_| match hover.contents {
|
let contents = match hover.contents {
|
||||||
lsp::HoverContents::Scalar(marked_string) => {
|
lsp::HoverContents::Scalar(marked_string) => {
|
||||||
hover_blocks_from_marked_string(marked_string)
|
hover_blocks_from_marked_string(marked_string)
|
||||||
.into_iter()
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
lsp::HoverContents::Array(marked_strings) => marked_strings
|
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(hover_blocks_from_marked_string)
|
.collect()
|
||||||
.collect(),
|
}
|
||||||
lsp::HoverContents::Markup(markup_content) => vec![HoverBlock {
|
lsp::HoverContents::Array(marked_strings) => marked_strings
|
||||||
text: markup_content.value,
|
.into_iter()
|
||||||
kind: if markup_content.kind == lsp::MarkupKind::Markdown {
|
.filter_map(hover_blocks_from_marked_string)
|
||||||
HoverBlockKind::Markdown
|
.collect(),
|
||||||
} else {
|
lsp::HoverContents::Markup(markup_content) => vec![HoverBlock {
|
||||||
HoverBlockKind::PlainText
|
text: markup_content.value,
|
||||||
},
|
kind: if markup_content.kind == lsp::MarkupKind::Markdown {
|
||||||
}],
|
HoverBlockKind::Markdown
|
||||||
});
|
} else {
|
||||||
|
HoverBlockKind::PlainText
|
||||||
|
},
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
|
||||||
Some(Hover {
|
Ok(Some(Hover {
|
||||||
contents,
|
contents,
|
||||||
range,
|
range,
|
||||||
language,
|
language,
|
||||||
})
|
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1206,8 +1205,8 @@ impl LspCommand for GetHover {
|
||||||
|
|
||||||
async fn from_proto(
|
async fn from_proto(
|
||||||
message: Self::ProtoRequest,
|
message: Self::ProtoRequest,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let position = message
|
let position = message
|
||||||
|
@ -1217,10 +1216,10 @@ impl LspCommand for GetHover {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
|
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1272,9 +1271,9 @@ impl LspCommand for GetHover {
|
||||||
async fn response_from_proto(
|
async fn response_from_proto(
|
||||||
self,
|
self,
|
||||||
message: proto::GetHoverResponse,
|
message: proto::GetHoverResponse,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self::Response> {
|
) -> Result<Self::Response> {
|
||||||
let contents: Vec<_> = message
|
let contents: Vec<_> = message
|
||||||
.contents
|
.contents
|
||||||
|
@ -1294,7 +1293,7 @@ impl LspCommand for GetHover {
|
||||||
return Ok(None);
|
return Ok(None);
|
||||||
}
|
}
|
||||||
|
|
||||||
let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
|
let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
|
||||||
let range = if let (Some(start), Some(end)) = (message.start, message.end) {
|
let range = if let (Some(start), Some(end)) = (message.start, message.end) {
|
||||||
language::proto::deserialize_anchor(start)
|
language::proto::deserialize_anchor(start)
|
||||||
.and_then(|start| language::proto::deserialize_anchor(end).map(|end| start..end))
|
.and_then(|start| language::proto::deserialize_anchor(end).map(|end| start..end))
|
||||||
|
@ -1341,10 +1340,10 @@ impl LspCommand for GetCompletions {
|
||||||
async fn response_from_lsp(
|
async fn response_from_lsp(
|
||||||
self,
|
self,
|
||||||
completions: Option<lsp::CompletionResponse>,
|
completions: Option<lsp::CompletionResponse>,
|
||||||
project: ModelHandle<Project>,
|
project: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
server_id: LanguageServerId,
|
server_id: LanguageServerId,
|
||||||
cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Vec<Completion>> {
|
) -> Result<Vec<Completion>> {
|
||||||
let mut response_list = None;
|
let mut response_list = None;
|
||||||
let completions = if let Some(completions) = completions {
|
let completions = if let Some(completions) = completions {
|
||||||
|
@ -1358,10 +1357,10 @@ impl LspCommand for GetCompletions {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
Vec::new()
|
Default::default()
|
||||||
};
|
};
|
||||||
|
|
||||||
let completions = buffer.read_with(&cx, |buffer, cx| {
|
let completions = buffer.update(&mut cx, |buffer, cx| {
|
||||||
let language_registry = project.read(cx).languages().clone();
|
let language_registry = project.read(cx).languages().clone();
|
||||||
let language = buffer.language().cloned();
|
let language = buffer.language().cloned();
|
||||||
let snapshot = buffer.snapshot();
|
let snapshot = buffer.snapshot();
|
||||||
|
@ -1371,14 +1370,6 @@ impl LspCommand for GetCompletions {
|
||||||
completions
|
completions
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(move |mut lsp_completion| {
|
.filter_map(move |mut lsp_completion| {
|
||||||
if let Some(response_list) = &response_list {
|
|
||||||
if let Some(item_defaults) = &response_list.item_defaults {
|
|
||||||
if let Some(data) = &item_defaults.data {
|
|
||||||
lsp_completion.data = Some(data.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
|
let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
|
||||||
// If the language server provides a range to overwrite, then
|
// If the language server provides a range to overwrite, then
|
||||||
// check that the range is valid.
|
// check that the range is valid.
|
||||||
|
@ -1454,10 +1445,9 @@ impl LspCommand for GetCompletions {
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
LineEnding::normalize(&mut new_text);
|
|
||||||
let language_registry = language_registry.clone();
|
let language_registry = language_registry.clone();
|
||||||
let language = language.clone();
|
let language = language.clone();
|
||||||
|
LineEnding::normalize(&mut new_text);
|
||||||
Some(async move {
|
Some(async move {
|
||||||
let mut label = None;
|
let mut label = None;
|
||||||
if let Some(language) = language.as_ref() {
|
if let Some(language) = language.as_ref() {
|
||||||
|
@ -1493,7 +1483,7 @@ impl LspCommand for GetCompletions {
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
});
|
})?;
|
||||||
|
|
||||||
Ok(future::join_all(completions).await)
|
Ok(future::join_all(completions).await)
|
||||||
}
|
}
|
||||||
|
@ -1510,23 +1500,23 @@ impl LspCommand for GetCompletions {
|
||||||
|
|
||||||
async fn from_proto(
|
async fn from_proto(
|
||||||
message: proto::GetCompletions,
|
message: proto::GetCompletions,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let version = deserialize_version(&message.version);
|
let version = deserialize_version(&message.version);
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))
|
.update(&mut cx, |buffer, _| buffer.wait_for_version(version))?
|
||||||
.await?;
|
.await?;
|
||||||
let position = message
|
let position = message
|
||||||
.position
|
.position
|
||||||
.and_then(language::proto::deserialize_anchor)
|
.and_then(language::proto::deserialize_anchor)
|
||||||
.map(|p| {
|
.map(|p| {
|
||||||
buffer.read_with(&cx, |buffer, _| {
|
buffer.update(&mut cx, |buffer, _| {
|
||||||
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
|
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
.ok_or_else(|| anyhow!("invalid position"))?;
|
.ok_or_else(|| anyhow!("invalid position"))??;
|
||||||
Ok(Self { position })
|
Ok(Self { position })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1549,17 +1539,17 @@ impl LspCommand for GetCompletions {
|
||||||
async fn response_from_proto(
|
async fn response_from_proto(
|
||||||
self,
|
self,
|
||||||
message: proto::GetCompletionsResponse,
|
message: proto::GetCompletionsResponse,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Vec<Completion>> {
|
) -> Result<Vec<Completion>> {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
|
let language = buffer.update(&mut cx, |buffer, _| buffer.language().cloned())?;
|
||||||
let completions = message.completions.into_iter().map(|completion| {
|
let completions = message.completions.into_iter().map(|completion| {
|
||||||
language::proto::deserialize_completion(completion, language.clone())
|
language::proto::deserialize_completion(completion, language.clone())
|
||||||
});
|
});
|
||||||
|
@ -1615,8 +1605,8 @@ impl LspCommand for GetCodeActions {
|
||||||
async fn response_from_lsp(
|
async fn response_from_lsp(
|
||||||
self,
|
self,
|
||||||
actions: Option<lsp::CodeActionResponse>,
|
actions: Option<lsp::CodeActionResponse>,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
_: ModelHandle<Buffer>,
|
_: Model<Buffer>,
|
||||||
server_id: LanguageServerId,
|
server_id: LanguageServerId,
|
||||||
_: AsyncAppContext,
|
_: AsyncAppContext,
|
||||||
) -> Result<Vec<CodeAction>> {
|
) -> Result<Vec<CodeAction>> {
|
||||||
|
@ -1649,8 +1639,8 @@ impl LspCommand for GetCodeActions {
|
||||||
|
|
||||||
async fn from_proto(
|
async fn from_proto(
|
||||||
message: proto::GetCodeActions,
|
message: proto::GetCodeActions,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let start = message
|
let start = message
|
||||||
|
@ -1664,7 +1654,7 @@ impl LspCommand for GetCodeActions {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(Self { range: start..end })
|
Ok(Self { range: start..end })
|
||||||
|
@ -1689,14 +1679,14 @@ impl LspCommand for GetCodeActions {
|
||||||
async fn response_from_proto(
|
async fn response_from_proto(
|
||||||
self,
|
self,
|
||||||
message: proto::GetCodeActionsResponse,
|
message: proto::GetCodeActionsResponse,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Vec<CodeAction>> {
|
) -> Result<Vec<CodeAction>> {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
message
|
message
|
||||||
.actions
|
.actions
|
||||||
|
@ -1752,8 +1742,8 @@ impl LspCommand for OnTypeFormatting {
|
||||||
async fn response_from_lsp(
|
async fn response_from_lsp(
|
||||||
self,
|
self,
|
||||||
message: Option<Vec<lsp::TextEdit>>,
|
message: Option<Vec<lsp::TextEdit>>,
|
||||||
project: ModelHandle<Project>,
|
project: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
server_id: LanguageServerId,
|
server_id: LanguageServerId,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Option<Transaction>> {
|
) -> Result<Option<Transaction>> {
|
||||||
|
@ -1789,8 +1779,8 @@ impl LspCommand for OnTypeFormatting {
|
||||||
|
|
||||||
async fn from_proto(
|
async fn from_proto(
|
||||||
message: proto::OnTypeFormatting,
|
message: proto::OnTypeFormatting,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let position = message
|
let position = message
|
||||||
|
@ -1800,15 +1790,15 @@ impl LspCommand for OnTypeFormatting {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let tab_size = buffer.read_with(&cx, |buffer, cx| {
|
let tab_size = buffer.update(&mut cx, |buffer, cx| {
|
||||||
language_settings(buffer.language(), buffer.file(), cx).tab_size
|
language_settings(buffer.language(), buffer.file(), cx).tab_size
|
||||||
});
|
})?;
|
||||||
|
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
|
position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
|
||||||
trigger: message.trigger.clone(),
|
trigger: message.trigger.clone(),
|
||||||
options: lsp_formatting_options(tab_size.get()).into(),
|
options: lsp_formatting_options(tab_size.get()).into(),
|
||||||
push_to_history: false,
|
push_to_history: false,
|
||||||
|
@ -1831,8 +1821,8 @@ impl LspCommand for OnTypeFormatting {
|
||||||
async fn response_from_proto(
|
async fn response_from_proto(
|
||||||
self,
|
self,
|
||||||
message: proto::OnTypeFormattingResponse,
|
message: proto::OnTypeFormattingResponse,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
_: ModelHandle<Buffer>,
|
_: Model<Buffer>,
|
||||||
_: AsyncAppContext,
|
_: AsyncAppContext,
|
||||||
) -> Result<Option<Transaction>> {
|
) -> Result<Option<Transaction>> {
|
||||||
let Some(transaction) = message.transaction else {
|
let Some(transaction) = message.transaction else {
|
||||||
|
@ -1849,7 +1839,7 @@ impl LspCommand for OnTypeFormatting {
|
||||||
impl InlayHints {
|
impl InlayHints {
|
||||||
pub async fn lsp_to_project_hint(
|
pub async fn lsp_to_project_hint(
|
||||||
lsp_hint: lsp::InlayHint,
|
lsp_hint: lsp::InlayHint,
|
||||||
buffer_handle: &ModelHandle<Buffer>,
|
buffer_handle: &Model<Buffer>,
|
||||||
server_id: LanguageServerId,
|
server_id: LanguageServerId,
|
||||||
resolve_state: ResolveState,
|
resolve_state: ResolveState,
|
||||||
force_no_type_left_padding: bool,
|
force_no_type_left_padding: bool,
|
||||||
|
@ -1861,15 +1851,14 @@ impl InlayHints {
|
||||||
_ => None,
|
_ => None,
|
||||||
});
|
});
|
||||||
|
|
||||||
let position = cx.update(|cx| {
|
let position = buffer_handle.update(cx, |buffer, _| {
|
||||||
let buffer = buffer_handle.read(cx);
|
|
||||||
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
|
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
|
||||||
if kind == Some(InlayHintKind::Parameter) {
|
if kind == Some(InlayHintKind::Parameter) {
|
||||||
buffer.anchor_before(position)
|
buffer.anchor_before(position)
|
||||||
} else {
|
} else {
|
||||||
buffer.anchor_after(position)
|
buffer.anchor_after(position)
|
||||||
}
|
}
|
||||||
});
|
})?;
|
||||||
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
|
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
|
||||||
.await
|
.await
|
||||||
.context("lsp to project inlay hint conversion")?;
|
.context("lsp to project inlay hint conversion")?;
|
||||||
|
@ -2255,8 +2244,8 @@ impl LspCommand for InlayHints {
|
||||||
async fn response_from_lsp(
|
async fn response_from_lsp(
|
||||||
self,
|
self,
|
||||||
message: Option<Vec<lsp::InlayHint>>,
|
message: Option<Vec<lsp::InlayHint>>,
|
||||||
project: ModelHandle<Project>,
|
project: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
server_id: LanguageServerId,
|
server_id: LanguageServerId,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> anyhow::Result<Vec<InlayHint>> {
|
) -> anyhow::Result<Vec<InlayHint>> {
|
||||||
|
@ -2280,7 +2269,7 @@ impl LspCommand for InlayHints {
|
||||||
};
|
};
|
||||||
|
|
||||||
let buffer = buffer.clone();
|
let buffer = buffer.clone();
|
||||||
cx.spawn(|mut cx| async move {
|
cx.spawn(move |mut cx| async move {
|
||||||
InlayHints::lsp_to_project_hint(
|
InlayHints::lsp_to_project_hint(
|
||||||
lsp_hint,
|
lsp_hint,
|
||||||
&buffer,
|
&buffer,
|
||||||
|
@ -2311,8 +2300,8 @@ impl LspCommand for InlayHints {
|
||||||
|
|
||||||
async fn from_proto(
|
async fn from_proto(
|
||||||
message: proto::InlayHints,
|
message: proto::InlayHints,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let start = message
|
let start = message
|
||||||
|
@ -2326,7 +2315,7 @@ impl LspCommand for InlayHints {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
Ok(Self { range: start..end })
|
Ok(Self { range: start..end })
|
||||||
|
@ -2351,14 +2340,14 @@ impl LspCommand for InlayHints {
|
||||||
async fn response_from_proto(
|
async fn response_from_proto(
|
||||||
self,
|
self,
|
||||||
message: proto::InlayHintsResponse,
|
message: proto::InlayHintsResponse,
|
||||||
_: ModelHandle<Project>,
|
_: Model<Project>,
|
||||||
buffer: ModelHandle<Buffer>,
|
buffer: Model<Buffer>,
|
||||||
mut cx: AsyncAppContext,
|
mut cx: AsyncAppContext,
|
||||||
) -> anyhow::Result<Vec<InlayHint>> {
|
) -> anyhow::Result<Vec<InlayHint>> {
|
||||||
buffer
|
buffer
|
||||||
.update(&mut cx, |buffer, _| {
|
.update(&mut cx, |buffer, _| {
|
||||||
buffer.wait_for_version(deserialize_version(&message.version))
|
buffer.wait_for_version(deserialize_version(&message.version))
|
||||||
})
|
})?
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let mut hints = Vec::new();
|
let mut hints = Vec::new();
|
||||||
|
|
|
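The change repeated across the hunks above is mechanical: `ModelHandle<T>` becomes `Model<T>`, read-only `read_with` calls become `update`, and every `update` made through an `AsyncAppContext` now returns a `Result` (the app or entity may already have been dropped by the time the async task runs), hence the `?` appended after each closing `})`. A minimal self-contained sketch of that shape follows; it is a toy handle using the anyhow crate, not gpui itself.

use std::sync::{Arc, Mutex, Weak};

// Toy stand-in for an entity handle whose `update` can fail once the
// entity has been released, mirroring why the diff appends `?` to updates.
struct Handle<T>(Weak<Mutex<T>>);

impl<T> Handle<T> {
    fn update<R>(&self, f: impl FnOnce(&mut T) -> R) -> anyhow::Result<R> {
        let strong = self
            .0
            .upgrade()
            .ok_or_else(|| anyhow::anyhow!("entity was released"))?;
        let mut guard = strong.lock().unwrap();
        Ok(f(&mut guard))
    }
}

fn main() -> anyhow::Result<()> {
    let entity = Arc::new(Mutex::new(0i32));
    let handle = Handle(Arc::downgrade(&entity));
    // Succeeds while the entity is alive; the caller propagates failure with `?`.
    let value = handle.update(|n| {
        *n += 1;
        *n
    })?;
    assert_eq!(value, 1);
    drop(entity);
    // After the entity is gone, the same call is an error instead of a panic.
    assert!(handle.update(|n| *n).is_err());
    Ok(())
}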
@@ -2,7 +2,7 @@ use std::{path::Path, sync::Arc};

 use anyhow::Context;
 use async_trait::async_trait;
-use gpui::{AppContext, AsyncAppContext, ModelHandle};
+use gpui::{AppContext, AsyncAppContext, Model};
 use language::{point_to_lsp, proto::deserialize_anchor, Buffer};
 use lsp::{LanguageServer, LanguageServerId};
 use rpc::proto::{self, PeerId};
@@ -67,8 +67,8 @@ impl LspCommand for ExpandMacro {
 async fn response_from_lsp(
 self,
 message: Option<ExpandedMacro>,
-_: ModelHandle<Project>,
-_: ModelHandle<Buffer>,
+_: Model<Project>,
+_: Model<Buffer>,
 _: LanguageServerId,
 _: AsyncAppContext,
 ) -> anyhow::Result<ExpandedMacro> {
@@ -92,8 +92,8 @@ impl LspCommand for ExpandMacro {

 async fn from_proto(
 message: Self::ProtoRequest,
-_: ModelHandle<Project>,
-buffer: ModelHandle<Buffer>,
+_: Model<Project>,
+buffer: Model<Buffer>,
 mut cx: AsyncAppContext,
 ) -> anyhow::Result<Self> {
 let position = message
@@ -101,7 +101,7 @@ impl LspCommand for ExpandMacro {
 .and_then(deserialize_anchor)
 .context("invalid position")?;
 Ok(Self {
-position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer)),
+position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
 })
 }

@@ -121,8 +121,8 @@ impl LspCommand for ExpandMacro {
 async fn response_from_proto(
 self,
 message: proto::LspExtExpandMacroResponse,
-_: ModelHandle<Project>,
-_: ModelHandle<Buffer>,
+_: Model<Project>,
+_: Model<Buffer>,
 _: AsyncAppContext,
 ) -> anyhow::Result<ExpandedMacro> {
 Ok(ExpandedMacro {

@@ -11,7 +11,7 @@ use futures::{
 future::{self, Shared},
 FutureExt,
 };
-use gpui::{AsyncAppContext, ModelContext, ModelHandle, Task};
+use gpui::{AsyncAppContext, Model, ModelContext, Task, WeakModel};
 use language::{
 language_settings::{Formatter, LanguageSettings},
 Buffer, Language, LanguageServerName, LocalFile,
@@ -49,21 +49,24 @@ pub fn prettier_plugins_for_language(
 }

 pub(super) async fn format_with_prettier(
-project: &ModelHandle<Project>,
-buffer: &ModelHandle<Buffer>,
+project: &WeakModel<Project>,
+buffer: &Model<Buffer>,
 cx: &mut AsyncAppContext,
 ) -> Option<FormatOperation> {
 if let Some((prettier_path, prettier_task)) = project
 .update(cx, |project, cx| {
 project.prettier_instance_for_buffer(buffer, cx)
 })
+.ok()?
 .await
 {
 match prettier_task.await {
 Ok(prettier) => {
-let buffer_path = buffer.update(cx, |buffer, cx| {
-File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
-});
+let buffer_path = buffer
+.update(cx, |buffer, cx| {
+File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
+})
+.ok()?;
 match prettier.format(buffer, buffer_path, cx).await {
 Ok(new_diff) => return Some(FormatOperation::Prettier(new_diff)),
 Err(e) => {
@@ -73,28 +76,30 @@ pub(super) async fn format_with_prettier(
 }
 }
 }
-Err(e) => project.update(cx, |project, _| {
-let instance_to_update = match prettier_path {
-Some(prettier_path) => {
-log::error!(
+Err(e) => project
+.update(cx, |project, _| {
+let instance_to_update = match prettier_path {
+Some(prettier_path) => {
+log::error!(
 "Prettier instance from path {prettier_path:?} failed to spawn: {e:#}"
 );
 project.prettier_instances.get_mut(&prettier_path)
-}
-None => {
-log::error!("Default prettier instance failed to spawn: {e:#}");
-match &mut project.default_prettier.prettier {
-PrettierInstallation::NotInstalled { .. } => None,
-PrettierInstallation::Installed(instance) => Some(instance),
 }
-}
-};
+None => {
+log::error!("Default prettier instance failed to spawn: {e:#}");
+match &mut project.default_prettier.prettier {
+PrettierInstallation::NotInstalled { .. } => None,
+PrettierInstallation::Installed(instance) => Some(instance),
+}
+}
+};

 if let Some(instance) = instance_to_update {
 instance.attempt += 1;
 instance.prettier = None;
 }
-}),
+})
+.ok()?,
 }
 }

@@ -200,7 +205,7 @@ impl PrettierInstance {
 project
 .update(&mut cx, |_, cx| {
 start_default_prettier(node, worktree_id, cx)
-})
+})?
 .await
 })
 }
@@ -225,7 +230,7 @@ fn start_default_prettier(
 ControlFlow::Break(default_prettier.clone())
 }
 }
-});
+})?;
 match installation_task {
 ControlFlow::Continue(None) => {
 anyhow::bail!("Default prettier is not installed and cannot be started")
@@ -243,7 +248,7 @@ fn start_default_prettier(
 *installation_task = None;
 *attempts += 1;
 }
-});
+})?;
 anyhow::bail!(
 "Cannot start default prettier due to its installation failure: {e:#}"
 );
@@ -257,7 +262,7 @@ fn start_default_prettier(
 prettier: Some(new_default_prettier.clone()),
 });
 new_default_prettier
-});
+})?;
 return Ok(new_default_prettier);
 }
 ControlFlow::Break(instance) => match instance.prettier {
@@ -272,7 +277,7 @@ fn start_default_prettier(
 prettier: Some(new_default_prettier.clone()),
 });
 new_default_prettier
-});
+})?;
 return Ok(new_default_prettier);
 }
 },
@@ -291,7 +296,7 @@ fn start_prettier(
 log::info!("Starting prettier at path {prettier_dir:?}");
 let new_server_id = project.update(&mut cx, |project, _| {
 project.languages.next_language_server_id()
-});
+})?;

 let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
 .await
@@ -305,7 +310,7 @@ fn start_prettier(
 }

 fn register_new_prettier(
-project: &ModelHandle<Project>,
+project: &WeakModel<Project>,
 prettier: &Prettier,
 worktree_id: Option<WorktreeId>,
 new_server_id: LanguageServerId,
@@ -319,38 +324,40 @@ fn register_new_prettier(
 log::info!("Started prettier in {prettier_dir:?}");
 }
 if let Some(prettier_server) = prettier.server() {
-project.update(cx, |project, cx| {
-let name = if is_default {
-LanguageServerName(Arc::from("prettier (default)"))
-} else {
-let worktree_path = worktree_id
-.and_then(|id| project.worktree_for_id(id, cx))
-.map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
-let name = match worktree_path {
-Some(worktree_path) => {
-if prettier_dir == worktree_path.as_ref() {
-let name = prettier_dir
-.file_name()
-.and_then(|name| name.to_str())
-.unwrap_or_default();
-format!("prettier ({name})")
-} else {
-let dir_to_display = prettier_dir
-.strip_prefix(worktree_path.as_ref())
-.ok()
-.unwrap_or(prettier_dir);
-format!("prettier ({})", dir_to_display.display())
+project
+.update(cx, |project, cx| {
+let name = if is_default {
+LanguageServerName(Arc::from("prettier (default)"))
+} else {
+let worktree_path = worktree_id
+.and_then(|id| project.worktree_for_id(id, cx))
+.map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
+let name = match worktree_path {
+Some(worktree_path) => {
+if prettier_dir == worktree_path.as_ref() {
+let name = prettier_dir
+.file_name()
+.and_then(|name| name.to_str())
+.unwrap_or_default();
+format!("prettier ({name})")
+} else {
+let dir_to_display = prettier_dir
+.strip_prefix(worktree_path.as_ref())
+.ok()
+.unwrap_or(prettier_dir);
+format!("prettier ({})", dir_to_display.display())
+}
 }
-}
-None => format!("prettier ({})", prettier_dir.display()),
+None => format!("prettier ({})", prettier_dir.display()),
+};
+LanguageServerName(Arc::from(name))
 };
-LanguageServerName(Arc::from(name))
-};
-project
-.supplementary_language_servers
-.insert(new_server_id, (name, Arc::clone(prettier_server)));
-cx.emit(Event::LanguageServerAdded(new_server_id));
-});
+project
+.supplementary_language_servers
+.insert(new_server_id, (name, Arc::clone(prettier_server)));
+cx.emit(Event::LanguageServerAdded(new_server_id));
+})
+.ok();
 }
 }

@@ -405,7 +412,7 @@ async fn save_prettier_server_file(fs: &dyn Fs) -> Result<(), anyhow::Error> {
 impl Project {
 pub fn update_prettier_settings(
 &self,
-worktree: &ModelHandle<Worktree>,
+worktree: &Model<Worktree>,
 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
 cx: &mut ModelContext<'_, Project>,
 ) {
@@ -446,7 +453,7 @@ impl Project {
 }))
 .collect::<Vec<_>>();

-cx.background()
+cx.background_executor()
 .spawn(async move {
 let _: Vec<()> = future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_instance)| {
 async move {
@@ -477,7 +484,7 @@ impl Project {

 fn prettier_instance_for_buffer(
 &mut self,
-buffer: &ModelHandle<Buffer>,
+buffer: &Model<Buffer>,
 cx: &mut ModelContext<Self>,
 ) -> Task<Option<(Option<PathBuf>, PrettierTask)>> {
 let buffer = buffer.read(cx);
@@ -500,7 +507,7 @@ impl Project {
 let installed_prettiers = self.prettier_instances.keys().cloned().collect();
 return cx.spawn(|project, mut cx| async move {
 match cx
-.background()
+.background_executor()
 .spawn(async move {
 Prettier::locate_prettier_installation(
 fs.as_ref(),
@@ -515,30 +522,34 @@ impl Project {
 return None;
 }
 Ok(ControlFlow::Continue(None)) => {
-let default_instance = project.update(&mut cx, |project, cx| {
-project
-.prettiers_per_worktree
-.entry(worktree_id)
-.or_default()
-.insert(None);
-project.default_prettier.prettier_task(
-&node,
-Some(worktree_id),
-cx,
-)
-});
+let default_instance = project
+.update(&mut cx, |project, cx| {
+project
+.prettiers_per_worktree
+.entry(worktree_id)
+.or_default()
+.insert(None);
+project.default_prettier.prettier_task(
+&node,
+Some(worktree_id),
+cx,
+)
+})
+.ok()?;
 Some((None, default_instance?.log_err().await?))
 }
 Ok(ControlFlow::Continue(Some(prettier_dir))) => {
-project.update(&mut cx, |project, _| {
-project
-.prettiers_per_worktree
-.entry(worktree_id)
-.or_default()
-.insert(Some(prettier_dir.clone()))
-});
-if let Some(prettier_task) =
-project.update(&mut cx, |project, cx| {
+project
+.update(&mut cx, |project, _| {
+project
+.prettiers_per_worktree
+.entry(worktree_id)
+.or_default()
+.insert(Some(prettier_dir.clone()))
+})
+.ok()?;
+if let Some(prettier_task) = project
+.update(&mut cx, |project, cx| {
 project.prettier_instances.get_mut(&prettier_dir).map(
 |existing_instance| {
 existing_instance.prettier_task(
@@ -550,6 +561,7 @@ impl Project {
 },
 )
 })
+.ok()?
 {
 log::debug!(
 "Found already started prettier in {prettier_dir:?}"
@@ -561,22 +573,24 @@ impl Project {
 }

 log::info!("Found prettier in {prettier_dir:?}, starting.");
-let new_prettier_task = project.update(&mut cx, |project, cx| {
-let new_prettier_task = start_prettier(
-node,
-prettier_dir.clone(),
-Some(worktree_id),
-cx,
-);
-project.prettier_instances.insert(
-prettier_dir.clone(),
-PrettierInstance {
-attempt: 0,
-prettier: Some(new_prettier_task.clone()),
-},
-);
-new_prettier_task
-});
+let new_prettier_task = project
+.update(&mut cx, |project, cx| {
+let new_prettier_task = start_prettier(
+node,
+prettier_dir.clone(),
+Some(worktree_id),
+cx,
+);
+project.prettier_instances.insert(
+prettier_dir.clone(),
+PrettierInstance {
+attempt: 0,
+prettier: Some(new_prettier_task.clone()),
+},
+);
+new_prettier_task
+})
+.ok()?;
 Some((Some(prettier_dir), new_prettier_task))
 }
 Err(e) => {
@@ -633,7 +647,7 @@ impl Project {
 }) {
 Some(locate_from) => {
 let installed_prettiers = self.prettier_instances.keys().cloned().collect();
-cx.background().spawn(async move {
+cx.background_executor().spawn(async move {
 Prettier::locate_prettier_installation(
 fs.as_ref(),
 &installed_prettiers,
@@ -696,7 +710,7 @@ impl Project {
 installation_attempt = *attempts;
 needs_install = true;
 };
-});
+})?;
 }
 };
 if installation_attempt > prettier::FAIL_THRESHOLD {
@@ -704,7 +718,7 @@ impl Project {
 if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut project.default_prettier.prettier {
 *installation_task = None;
 };
-});
+})?;
 log::warn!(
 "Default prettier installation had failed {installation_attempt} times, not attempting again",
 );
@@ -721,10 +735,10 @@ impl Project {
 not_installed_plugins.extend(new_plugins.iter());
 }
 needs_install |= !new_plugins.is_empty();
-});
+})?;
 if needs_install {
 let installed_plugins = new_plugins.clone();
-cx.background()
+cx.background_executor()
 .spawn(async move {
 save_prettier_server_file(fs.as_ref()).await?;
 install_prettier_packages(new_plugins, node).await
@@ -742,7 +756,7 @@ impl Project {
 project.default_prettier
 .installed_plugins
 .extend(installed_plugins);
-});
+})?;
 }
 }
 }

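The prettier hunks also change `project: &ModelHandle<Project>` to `project: &WeakModel<Project>`; updating through a weak handle is fallible, and because `format_with_prettier` returns an `Option`, failures are swallowed with `.ok()?` rather than propagated with `?`. A toy sketch of that `.ok()?` shape, using assumed stand-in types rather than the real gpui/prettier ones:

use std::sync::{Arc, Mutex, Weak};

struct WeakHandle<T>(Weak<Mutex<T>>);

impl<T> WeakHandle<T> {
    fn update<R>(&self, f: impl FnOnce(&mut T) -> R) -> Result<R, &'static str> {
        let strong = self.0.upgrade().ok_or("project was released")?;
        let mut guard = strong.lock().unwrap();
        Ok(f(&mut guard))
    }
}

// Returns Option, so a dropped project just means "no formatting happened":
// the update error is converted with `.ok()?` instead of bubbling up.
fn format_with(project: &WeakHandle<Vec<String>>) -> Option<usize> {
    let count = project
        .update(|diffs| {
            diffs.push("new diff".to_string());
            diffs.len()
        })
        .ok()?;
    Some(count)
}

fn main() {
    let project = Arc::new(Mutex::new(Vec::new()));
    let weak = WeakHandle(Arc::downgrade(&project));
    assert_eq!(format_with(&weak), Some(1));
    drop(project);
    assert_eq!(format_with(&weak), None);
}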
File diff suppressed because it is too large
@@ -1,7 +1,8 @@
 use collections::HashMap;
+use gpui::AppContext;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::Setting;
+use settings::Settings;
 use std::sync::Arc;

 #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
@@ -34,7 +35,7 @@ pub struct LspSettings {
 pub initialization_options: Option<serde_json::Value>,
 }

-impl Setting for ProjectSettings {
+impl Settings for ProjectSettings {
 const KEY: Option<&'static str> = None;

 type FileContent = Self;
@@ -42,7 +43,7 @@ impl Setting for ProjectSettings {
 fn load(
 default_value: &Self::FileContent,
 user_values: &[&Self::FileContent],
-_: &gpui::AppContext,
+_: &mut AppContext,
 ) -> anyhow::Result<Self> {
 Self::load_via_json_merge(default_value, user_values)
 }

File diff suppressed because it is too large
@@ -1,5 +1,6 @@
 use crate::Project;
-use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
+use gpui::{AnyWindowHandle, Context, Entity, Model, ModelContext, WeakModel};
+use settings::Settings;
 use std::path::{Path, PathBuf};
 use terminal::{
 terminal_settings::{self, TerminalSettings, VenvSettingsContent},
@@ -10,7 +11,7 @@ use terminal::{
 use std::os::unix::ffi::OsStrExt;

 pub struct Terminals {
-pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
+pub(crate) local_handles: Vec<WeakModel<terminal::Terminal>>,
 }

 impl Project {
@@ -19,13 +20,13 @@ impl Project {
 working_directory: Option<PathBuf>,
 window: AnyWindowHandle,
 cx: &mut ModelContext<Self>,
-) -> anyhow::Result<ModelHandle<Terminal>> {
+) -> anyhow::Result<Model<Terminal>> {
 if self.is_remote() {
 return Err(anyhow::anyhow!(
 "creating terminals as a guest is not supported yet"
 ));
 } else {
-let settings = settings::get::<TerminalSettings>(cx);
+let settings = TerminalSettings::get_global(cx);
 let python_settings = settings.detect_venv.clone();
 let shell = settings.shell.clone();

@@ -38,17 +39,20 @@ impl Project {
 window,
 )
 .map(|builder| {
-let terminal_handle = cx.add_model(|cx| builder.subscribe(cx));
+let terminal_handle = cx.new_model(|cx| builder.subscribe(cx));

 self.terminals
 .local_handles
 .push(terminal_handle.downgrade());

-let id = terminal_handle.id();
+let id = terminal_handle.entity_id();
 cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
 let handles = &mut project.terminals.local_handles;

-if let Some(index) = handles.iter().position(|terminal| terminal.id() == id) {
+if let Some(index) = handles
+.iter()
+.position(|terminal| terminal.entity_id() == id)
+{
 handles.remove(index);
 cx.notify();
 }
@@ -103,7 +107,7 @@ impl Project {
 fn activate_python_virtual_environment(
 &mut self,
 activate_script: Option<PathBuf>,
-terminal_handle: &ModelHandle<Terminal>,
+terminal_handle: &Model<Terminal>,
 cx: &mut ModelContext<Project>,
 ) {
 if let Some(activate_script) = activate_script {
@@ -116,7 +120,7 @@ impl Project {
 }
 }

-pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
+pub fn local_terminal_handles(&self) -> &Vec<WeakModel<terminal::Terminal>> {
 &self.terminals.local_handles
 }
 }

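In the terminals hunks the settings lookup moves from a free function (`settings::get::<TerminalSettings>(cx)`) to a trait method (`TerminalSettings::get_global(cx)`). A rough, self-contained sketch of that trait-method-over-a-global-store shape; the types here are toys, not the real `settings` crate:

use std::any::{Any, TypeId};
use std::collections::HashMap;

// Toy global store keyed by type, standing in for the app context.
#[derive(Default)]
struct AppContext {
    globals: HashMap<TypeId, Box<dyn Any>>,
}

trait Settings: Sized + 'static {
    // The new style: each settings type knows how to fetch itself.
    fn get_global(cx: &AppContext) -> &Self {
        cx.globals
            .get(&TypeId::of::<Self>())
            .and_then(|value| value.downcast_ref::<Self>())
            .expect("settings type was never registered")
    }
}

struct TerminalSettings {
    shell: String,
}

impl Settings for TerminalSettings {}

fn main() {
    let mut cx = AppContext::default();
    cx.globals.insert(
        TypeId::of::<TerminalSettings>(),
        Box::new(TerminalSettings { shell: "zsh".into() }),
    );
    // Call site mirrors the new code: the type name drives the lookup.
    println!("{}", TerminalSettings::get_global(&cx).shell);
}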
|
@ -3,7 +3,7 @@ use crate::{
|
||||||
ProjectEntryId, RemoveOptions,
|
ProjectEntryId, RemoveOptions,
|
||||||
};
|
};
|
||||||
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context as _, Result};
|
||||||
use client::{proto, Client};
|
use client::{proto, Client};
|
||||||
use clock::ReplicaId;
|
use clock::ReplicaId;
|
||||||
use collections::{HashMap, HashSet, VecDeque};
|
use collections::{HashMap, HashSet, VecDeque};
|
||||||
|
@ -18,12 +18,13 @@ use futures::{
|
||||||
},
|
},
|
||||||
select_biased,
|
select_biased,
|
||||||
task::Poll,
|
task::Poll,
|
||||||
FutureExt, Stream, StreamExt,
|
FutureExt as _, Stream, StreamExt,
|
||||||
};
|
};
|
||||||
use fuzzy::CharBag;
|
use fuzzy::CharBag;
|
||||||
use git::{DOT_GIT, GITIGNORE};
|
use git::{DOT_GIT, GITIGNORE};
|
||||||
use gpui::{
|
use gpui::{
|
||||||
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription, Task,
|
AppContext, AsyncAppContext, BackgroundExecutor, Context, EventEmitter, Model, ModelContext,
|
||||||
|
Task,
|
||||||
};
|
};
|
||||||
use itertools::Itertools;
|
use itertools::Itertools;
|
||||||
use language::{
|
use language::{
|
||||||
|
@ -40,7 +41,7 @@ use postage::{
|
||||||
prelude::{Sink as _, Stream as _},
|
prelude::{Sink as _, Stream as _},
|
||||||
watch,
|
watch,
|
||||||
};
|
};
|
||||||
use settings::SettingsStore;
|
use settings::{Settings, SettingsStore};
|
||||||
use smol::channel::{self, Sender};
|
use smol::channel::{self, Sender};
|
||||||
use std::{
|
use std::{
|
||||||
any::Any,
|
any::Any,
|
||||||
|
@ -78,7 +79,6 @@ pub struct LocalWorktree {
|
||||||
scan_requests_tx: channel::Sender<ScanRequest>,
|
scan_requests_tx: channel::Sender<ScanRequest>,
|
||||||
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
|
path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
|
||||||
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
|
is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
|
||||||
_settings_subscription: Subscription,
|
|
||||||
_background_scanner_tasks: Vec<Task<()>>,
|
_background_scanner_tasks: Vec<Task<()>>,
|
||||||
share: Option<ShareState>,
|
share: Option<ShareState>,
|
||||||
diagnostics: HashMap<
|
diagnostics: HashMap<
|
||||||
|
@ -283,14 +283,13 @@ struct ShareState {
|
||||||
_maintain_remote_snapshot: Task<Option<()>>,
|
_maintain_remote_snapshot: Task<Option<()>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
pub enum Event {
|
pub enum Event {
|
||||||
UpdatedEntries(UpdatedEntriesSet),
|
UpdatedEntries(UpdatedEntriesSet),
|
||||||
UpdatedGitRepositories(UpdatedGitRepositoriesSet),
|
UpdatedGitRepositories(UpdatedGitRepositoriesSet),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Entity for Worktree {
|
impl EventEmitter<Event> for Worktree {}
|
||||||
type Event = Event;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Worktree {
|
impl Worktree {
|
||||||
pub async fn local(
|
pub async fn local(
|
||||||
|
@ -300,10 +299,11 @@ impl Worktree {
|
||||||
fs: Arc<dyn Fs>,
|
fs: Arc<dyn Fs>,
|
||||||
next_entry_id: Arc<AtomicUsize>,
|
next_entry_id: Arc<AtomicUsize>,
|
||||||
cx: &mut AsyncAppContext,
|
cx: &mut AsyncAppContext,
|
||||||
) -> Result<ModelHandle<Self>> {
|
) -> Result<Model<Self>> {
|
||||||
// After determining whether the root entry is a file or a directory, populate the
|
// After determining whether the root entry is a file or a directory, populate the
|
||||||
// snapshot's "root name", which will be used for the purpose of fuzzy matching.
|
// snapshot's "root name", which will be used for the purpose of fuzzy matching.
|
||||||
let abs_path = path.into();
|
let abs_path = path.into();
|
||||||
|
|
||||||
let metadata = fs
|
let metadata = fs
|
||||||
.metadata(&abs_path)
|
.metadata(&abs_path)
|
||||||
.await
|
.await
|
||||||
|
@ -312,11 +312,11 @@ impl Worktree {
|
||||||
let closure_fs = Arc::clone(&fs);
|
let closure_fs = Arc::clone(&fs);
|
||||||
let closure_next_entry_id = Arc::clone(&next_entry_id);
|
let closure_next_entry_id = Arc::clone(&next_entry_id);
|
||||||
let closure_abs_path = abs_path.to_path_buf();
|
let closure_abs_path = abs_path.to_path_buf();
|
||||||
Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
|
cx.new_model(move |cx: &mut ModelContext<Worktree>| {
|
||||||
let settings_subscription = cx.observe_global::<SettingsStore, _>(move |this, cx| {
|
cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||||
if let Self::Local(this) = this {
|
if let Self::Local(this) = this {
|
||||||
let new_file_scan_exclusions =
|
let new_file_scan_exclusions =
|
||||||
file_scan_exclusions(settings::get::<ProjectSettings>(cx));
|
file_scan_exclusions(ProjectSettings::get_global(cx));
|
||||||
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
|
if new_file_scan_exclusions != this.snapshot.file_scan_exclusions {
|
||||||
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
|
this.snapshot.file_scan_exclusions = new_file_scan_exclusions;
|
||||||
log::info!(
|
log::info!(
|
||||||
|
@ -345,17 +345,19 @@ impl Worktree {
|
||||||
this.is_scanning = watch::channel_with(true);
|
this.is_scanning = watch::channel_with(true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
})
|
||||||
|
.detach();
|
||||||
|
|
||||||
let root_name = abs_path
|
let root_name = abs_path
|
||||||
.file_name()
|
.file_name()
|
||||||
.map_or(String::new(), |f| f.to_string_lossy().to_string());
|
.map_or(String::new(), |f| f.to_string_lossy().to_string());
|
||||||
|
|
||||||
let mut snapshot = LocalSnapshot {
|
let mut snapshot = LocalSnapshot {
|
||||||
file_scan_exclusions: file_scan_exclusions(settings::get::<ProjectSettings>(cx)),
|
file_scan_exclusions: file_scan_exclusions(ProjectSettings::get_global(cx)),
|
||||||
ignores_by_parent_abs_path: Default::default(),
|
ignores_by_parent_abs_path: Default::default(),
|
||||||
git_repositories: Default::default(),
|
git_repositories: Default::default(),
|
||||||
snapshot: Snapshot {
|
snapshot: Snapshot {
|
||||||
id: WorktreeId::from_usize(cx.model_id()),
|
id: WorktreeId::from_usize(cx.entity_id().as_u64() as usize),
|
||||||
abs_path: abs_path.to_path_buf().into(),
|
abs_path: abs_path.to_path_buf().into(),
|
||||||
root_name: root_name.clone(),
|
root_name: root_name.clone(),
|
||||||
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
|
root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
|
||||||
|
@ -388,7 +390,6 @@ impl Worktree {
|
||||||
share: None,
|
share: None,
|
||||||
scan_requests_tx,
|
scan_requests_tx,
|
||||||
path_prefixes_to_scan_tx,
|
path_prefixes_to_scan_tx,
|
||||||
_settings_subscription: settings_subscription,
|
|
||||||
_background_scanner_tasks: start_background_scan_tasks(
|
_background_scanner_tasks: start_background_scan_tasks(
|
||||||
&abs_path,
|
&abs_path,
|
||||||
task_snapshot,
|
task_snapshot,
|
||||||
|
@ -404,18 +405,17 @@ impl Worktree {
|
||||||
fs,
|
fs,
|
||||||
visible,
|
visible,
|
||||||
})
|
})
|
||||||
}))
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// abcdefghi
|
|
||||||
     pub fn remote(
         project_remote_id: u64,
         replica_id: ReplicaId,
         worktree: proto::WorktreeMetadata,
         client: Arc<Client>,
         cx: &mut AppContext,
-    ) -> ModelHandle<Self> {
-        cx.add_model(|cx: &mut ModelContext<Self>| {
+    ) -> Model<Self> {
+        cx.new_model(|cx: &mut ModelContext<Self>| {
             let snapshot = Snapshot {
                 id: WorktreeId(worktree.id as usize),
                 abs_path: Arc::from(PathBuf::from(worktree.abs_path)),
@@ -436,7 +436,7 @@ impl Worktree {
             let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
             let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();

-            cx.background()
+            cx.background_executor()
                 .spawn({
                     let background_snapshot = background_snapshot.clone();
                     async move {
@@ -452,27 +452,24 @@ impl Worktree {
                 })
                 .detach();

-            cx.spawn_weak(|this, mut cx| async move {
+            cx.spawn(|this, mut cx| async move {
                 while (snapshot_updated_rx.recv().await).is_some() {
-                    if let Some(this) = this.upgrade(&cx) {
-                        this.update(&mut cx, |this, cx| {
-                            let this = this.as_remote_mut().unwrap();
-                            this.snapshot = this.background_snapshot.lock().clone();
-                            cx.emit(Event::UpdatedEntries(Arc::from([])));
-                            cx.notify();
-                            while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
-                                if this.observed_snapshot(*scan_id) {
-                                    let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
-                                    let _ = tx.send(());
-                                } else {
-                                    break;
-                                }
-                            }
-                        });
-                    } else {
-                        break;
-                    }
+                    this.update(&mut cx, |this, cx| {
+                        let this = this.as_remote_mut().unwrap();
+                        this.snapshot = this.background_snapshot.lock().clone();
+                        cx.emit(Event::UpdatedEntries(Arc::from([])));
+                        cx.notify();
+                        while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
+                            if this.observed_snapshot(*scan_id) {
+                                let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
+                                let _ = tx.send(());
+                            } else {
+                                break;
+                            }
+                        }
+                    })?;
                 }
+                anyhow::Ok(())
             })
             .detach();

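The hunk above also shows how async tasks talk to the model after the migration: `cx.spawn_weak` plus an explicit `upgrade(&cx)` is replaced by `cx.spawn`, whose `update` call is fallible and errors out once the model has been dropped. A minimal sketch of that shape (only the calls visible above; the surrounding names are illustrative):

    cx.spawn(|this, mut cx| async move {
        while snapshot_updated_rx.recv().await.is_some() {
            // `update` now returns a Result; `?` ends the task when the
            // worktree is gone, replacing the old upgrade-or-break dance.
            this.update(&mut cx, |this, cx| {
                // ...apply the latest background snapshot...
                cx.notify();
            })?;
        }
        anyhow::Ok(())
    })
    .detach();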
@@ -604,9 +601,9 @@ fn start_background_scan_tasks(
     cx: &mut ModelContext<'_, Worktree>,
 ) -> Vec<Task<()>> {
     let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
-    let background_scanner = cx.background().spawn({
+    let background_scanner = cx.background_executor().spawn({
         let abs_path = abs_path.to_path_buf();
-        let background = cx.background().clone();
+        let background = cx.background_executor().clone();
         async move {
             let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
             BackgroundScanner::new(
@@ -622,8 +619,8 @@ fn start_background_scan_tasks(
                 .await;
         }
     });
-    let scan_state_updater = cx.spawn_weak(|this, mut cx| async move {
-        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
+    let scan_state_updater = cx.spawn(|this, mut cx| async move {
+        while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade()) {
             this.update(&mut cx, |this, cx| {
                 let this = this.as_local_mut().unwrap();
                 match state {
@@ -642,7 +639,8 @@ fn start_background_scan_tasks(
                     }
                 }
                 cx.notify();
-            });
+            })
+            .ok();
         }
     });
     vec![background_scanner, scan_state_updater]
@@ -674,17 +672,17 @@ impl LocalWorktree {
         id: u64,
         path: &Path,
         cx: &mut ModelContext<Worktree>,
-    ) -> Task<Result<ModelHandle<Buffer>>> {
+    ) -> Task<Result<Model<Buffer>>> {
         let path = Arc::from(path);
         cx.spawn(move |this, mut cx| async move {
             let (file, contents, diff_base) = this
-                .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
+                .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))?
                 .await?;
             let text_buffer = cx
-                .background()
+                .background_executor()
                 .spawn(async move { text::Buffer::new(0, id, contents) })
                 .await;
-            Ok(cx.add_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file)))))
+            cx.new_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))
         })
     }

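As the hunk above shows, model construction changes shape as well: `cx.add_model` handed back a `ModelHandle<T>` that was wrapped in `Ok(...)`, while `cx.new_model` produces a `Model<T>` and, as the tail expression here, already supplies the task's `Result` value, so the return type becomes `Task<Result<Model<Buffer>>>`. A one-line sketch taken from the new-side line above (`cx` being the async context captured by `cx.spawn`):

    // No extra Ok(...) wrapper is needed around the new call.
    cx.new_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file))))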
|
@ -958,24 +956,17 @@ impl LocalWorktree {
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
let entry = self.refresh_entry(path.clone(), None, cx);
|
let entry = self.refresh_entry(path.clone(), None, cx);
|
||||||
|
|
||||||
cx.spawn(|this, cx| async move {
|
cx.spawn(|this, mut cx| async move {
|
||||||
let text = fs.load(&abs_path).await?;
|
let text = fs.load(&abs_path).await?;
|
||||||
let mut index_task = None;
|
let mut index_task = None;
|
||||||
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
|
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
|
||||||
if let Some(repo) = snapshot.repository_for_path(&path) {
|
if let Some(repo) = snapshot.repository_for_path(&path) {
|
||||||
if let Some(repo_path) = repo.work_directory.relativize(&snapshot, &path) {
|
let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap();
|
||||||
if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
|
if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
|
||||||
let repo = repo.repo_ptr.clone();
|
let repo = repo.repo_ptr.clone();
|
||||||
index_task = Some(
|
index_task = Some(
|
||||||
cx.background()
|
cx.background_executor()
|
||||||
.spawn(async move { repo.lock().load_index_text(&repo_path) }),
|
.spawn(async move { repo.lock().load_index_text(&repo_path) }),
|
||||||
);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
log::warn!(
|
|
||||||
"Skipping loading index text from path {:?} is not in repository {:?}",
|
|
||||||
path,
|
|
||||||
repo.work_directory,
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -986,11 +977,14 @@ impl LocalWorktree {
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let worktree = this
|
||||||
|
.upgrade()
|
||||||
|
.ok_or_else(|| anyhow!("worktree was dropped"))?;
|
||||||
match entry.await? {
|
match entry.await? {
|
||||||
Some(entry) => Ok((
|
Some(entry) => Ok((
|
||||||
File {
|
File {
|
||||||
entry_id: Some(entry.id),
|
entry_id: Some(entry.id),
|
||||||
worktree: this,
|
worktree,
|
||||||
path: entry.path,
|
path: entry.path,
|
||||||
mtime: entry.mtime,
|
mtime: entry.mtime,
|
||||||
is_local: true,
|
is_local: true,
|
||||||
|
@ -1012,7 +1006,7 @@ impl LocalWorktree {
|
||||||
Ok((
|
Ok((
|
||||||
File {
|
File {
|
||||||
entry_id: None,
|
entry_id: None,
|
||||||
worktree: this,
|
worktree,
|
||||||
path,
|
path,
|
||||||
mtime: metadata.mtime,
|
mtime: metadata.mtime,
|
||||||
is_local: true,
|
is_local: true,
|
||||||
|
@ -1028,12 +1022,11 @@ impl LocalWorktree {
|
||||||
|
|
||||||
pub fn save_buffer(
|
pub fn save_buffer(
|
||||||
&self,
|
&self,
|
||||||
buffer_handle: ModelHandle<Buffer>,
|
buffer_handle: Model<Buffer>,
|
||||||
path: Arc<Path>,
|
path: Arc<Path>,
|
||||||
has_changed_file: bool,
|
has_changed_file: bool,
|
||||||
cx: &mut ModelContext<Worktree>,
|
cx: &mut ModelContext<Worktree>,
|
||||||
) -> Task<Result<()>> {
|
) -> Task<Result<()>> {
|
||||||
let handle = cx.handle();
|
|
||||||
let buffer = buffer_handle.read(cx);
|
let buffer = buffer_handle.read(cx);
|
||||||
|
|
||||||
let rpc = self.client.clone();
|
let rpc = self.client.clone();
|
||||||
|
@ -1047,8 +1040,9 @@ impl LocalWorktree {
|
||||||
let fs = Arc::clone(&self.fs);
|
let fs = Arc::clone(&self.fs);
|
||||||
let abs_path = self.absolutize(&path);
|
let abs_path = self.absolutize(&path);
|
||||||
|
|
||||||
cx.as_mut().spawn(|mut cx| async move {
|
cx.spawn(move |this, mut cx| async move {
|
||||||
let entry = save.await?;
|
let entry = save.await?;
|
||||||
|
let this = this.upgrade().context("worktree dropped")?;
|
||||||
|
|
||||||
let (entry_id, mtime, path) = match entry {
|
let (entry_id, mtime, path) = match entry {
|
||||||
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
|
Some(entry) => (Some(entry.id), entry.mtime, entry.path),
|
||||||
|
@ -1071,7 +1065,7 @@ impl LocalWorktree {
|
||||||
if has_changed_file {
|
if has_changed_file {
|
||||||
let new_file = Arc::new(File {
|
let new_file = Arc::new(File {
|
||||||
entry_id,
|
entry_id,
|
||||||
worktree: handle,
|
worktree: this,
|
||||||
path,
|
path,
|
||||||
mtime,
|
mtime,
|
||||||
is_local: true,
|
is_local: true,
|
||||||
|
@ -1091,7 +1085,7 @@ impl LocalWorktree {
|
||||||
if has_changed_file {
|
if has_changed_file {
|
||||||
buffer.file_updated(new_file, cx);
|
buffer.file_updated(new_file, cx);
|
||||||
}
|
}
|
||||||
});
|
})?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(project_id) = project_id {
|
if let Some(project_id) = project_id {
|
||||||
|
@ -1106,7 +1100,7 @@ impl LocalWorktree {
|
||||||
|
|
||||||
buffer_handle.update(&mut cx, |buffer, cx| {
|
buffer_handle.update(&mut cx, |buffer, cx| {
|
||||||
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
||||||
});
|
})?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
@ -1135,7 +1129,7 @@ impl LocalWorktree {
|
||||||
let lowest_ancestor = self.lowest_ancestor(&path);
|
let lowest_ancestor = self.lowest_ancestor(&path);
|
||||||
let abs_path = self.absolutize(&path);
|
let abs_path = self.absolutize(&path);
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
let write = cx.background().spawn(async move {
|
let write = cx.background_executor().spawn(async move {
|
||||||
if is_dir {
|
if is_dir {
|
||||||
fs.create_dir(&abs_path).await
|
fs.create_dir(&abs_path).await
|
||||||
} else {
|
} else {
|
||||||
|
@ -1165,7 +1159,7 @@ impl LocalWorktree {
|
||||||
this.as_local_mut().unwrap().refresh_entry(path, None, cx),
|
this.as_local_mut().unwrap().refresh_entry(path, None, cx),
|
||||||
refreshes,
|
refreshes,
|
||||||
)
|
)
|
||||||
});
|
})?;
|
||||||
for refresh in refreshes {
|
for refresh in refreshes {
|
||||||
refresh.await.log_err();
|
refresh.await.log_err();
|
||||||
}
|
}
|
||||||
|
@ -1185,14 +1179,14 @@ impl LocalWorktree {
|
||||||
let abs_path = self.absolutize(&path);
|
let abs_path = self.absolutize(&path);
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
let write = cx
|
let write = cx
|
||||||
.background()
|
.background_executor()
|
||||||
.spawn(async move { fs.save(&abs_path, &text, line_ending).await });
|
.spawn(async move { fs.save(&abs_path, &text, line_ending).await });
|
||||||
|
|
||||||
cx.spawn(|this, mut cx| async move {
|
cx.spawn(|this, mut cx| async move {
|
||||||
write.await?;
|
write.await?;
|
||||||
this.update(&mut cx, |this, cx| {
|
this.update(&mut cx, |this, cx| {
|
||||||
this.as_local_mut().unwrap().refresh_entry(path, None, cx)
|
this.as_local_mut().unwrap().refresh_entry(path, None, cx)
|
||||||
})
|
})?
|
||||||
.await
|
.await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1206,7 +1200,7 @@ impl LocalWorktree {
|
||||||
let abs_path = self.absolutize(&entry.path);
|
let abs_path = self.absolutize(&entry.path);
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
|
|
||||||
let delete = cx.background().spawn(async move {
|
let delete = cx.background_executor().spawn(async move {
|
||||||
if entry.is_file() {
|
if entry.is_file() {
|
||||||
fs.remove_file(&abs_path, Default::default()).await?;
|
fs.remove_file(&abs_path, Default::default()).await?;
|
||||||
} else {
|
} else {
|
||||||
|
@ -1228,7 +1222,7 @@ impl LocalWorktree {
|
||||||
this.as_local_mut()
|
this.as_local_mut()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.refresh_entries_for_paths(vec![path])
|
.refresh_entries_for_paths(vec![path])
|
||||||
})
|
})?
|
||||||
.recv()
|
.recv()
|
||||||
.await;
|
.await;
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -1249,7 +1243,7 @@ impl LocalWorktree {
|
||||||
let abs_old_path = self.absolutize(&old_path);
|
let abs_old_path = self.absolutize(&old_path);
|
||||||
let abs_new_path = self.absolutize(&new_path);
|
let abs_new_path = self.absolutize(&new_path);
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
let rename = cx.background().spawn(async move {
|
let rename = cx.background_executor().spawn(async move {
|
||||||
fs.rename(&abs_old_path, &abs_new_path, Default::default())
|
fs.rename(&abs_old_path, &abs_new_path, Default::default())
|
||||||
.await
|
.await
|
||||||
});
|
});
|
||||||
|
@ -1260,7 +1254,7 @@ impl LocalWorktree {
|
||||||
this.as_local_mut()
|
this.as_local_mut()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.refresh_entry(new_path.clone(), Some(old_path), cx)
|
.refresh_entry(new_path.clone(), Some(old_path), cx)
|
||||||
})
|
})?
|
||||||
.await
|
.await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1279,7 +1273,7 @@ impl LocalWorktree {
|
||||||
let abs_old_path = self.absolutize(&old_path);
|
let abs_old_path = self.absolutize(&old_path);
|
||||||
let abs_new_path = self.absolutize(&new_path);
|
let abs_new_path = self.absolutize(&new_path);
|
||||||
let fs = self.fs.clone();
|
let fs = self.fs.clone();
|
||||||
let copy = cx.background().spawn(async move {
|
let copy = cx.background_executor().spawn(async move {
|
||||||
copy_recursive(
|
copy_recursive(
|
||||||
fs.as_ref(),
|
fs.as_ref(),
|
||||||
&abs_old_path,
|
&abs_old_path,
|
||||||
|
@ -1295,7 +1289,7 @@ impl LocalWorktree {
|
||||||
this.as_local_mut()
|
this.as_local_mut()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.refresh_entry(new_path.clone(), None, cx)
|
.refresh_entry(new_path.clone(), None, cx)
|
||||||
})
|
})?
|
||||||
.await
|
.await
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1307,7 +1301,7 @@ impl LocalWorktree {
|
||||||
) -> Option<Task<Result<()>>> {
|
) -> Option<Task<Result<()>>> {
|
||||||
let path = self.entry_for_id(entry_id)?.path.clone();
|
let path = self.entry_for_id(entry_id)?.path.clone();
|
||||||
let mut refresh = self.refresh_entries_for_paths(vec![path]);
|
let mut refresh = self.refresh_entries_for_paths(vec![path]);
|
||||||
Some(cx.background().spawn(async move {
|
Some(cx.background_executor().spawn(async move {
|
||||||
refresh.next().await;
|
refresh.next().await;
|
||||||
Ok(())
|
Ok(())
|
||||||
}))
|
}))
|
||||||
|
@ -1343,16 +1337,13 @@ impl LocalWorktree {
|
||||||
vec![path.clone()]
|
vec![path.clone()]
|
||||||
};
|
};
|
||||||
let mut refresh = self.refresh_entries_for_paths(paths);
|
let mut refresh = self.refresh_entries_for_paths(paths);
|
||||||
cx.spawn_weak(move |this, mut cx| async move {
|
cx.spawn(move |this, mut cx| async move {
|
||||||
refresh.recv().await;
|
refresh.recv().await;
|
||||||
let new_entry = this
|
let new_entry = this.update(&mut cx, |this, _| {
|
||||||
.upgrade(&cx)
|
this.entry_for_path(path)
|
||||||
.ok_or_else(|| anyhow!("worktree was dropped"))?
|
.cloned()
|
||||||
.update(&mut cx, |this, _| {
|
.ok_or_else(|| anyhow!("failed to read path after update"))
|
||||||
this.entry_for_path(path)
|
})??;
|
||||||
.cloned()
|
|
||||||
.ok_or_else(|| anyhow!("failed to read path after update"))
|
|
||||||
})?;
|
|
||||||
Ok(Some(new_entry))
|
Ok(Some(new_entry))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1387,8 +1378,8 @@ impl LocalWorktree {
|
||||||
.unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
|
.unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
|
||||||
.ok();
|
.ok();
|
||||||
|
|
||||||
let worktree_id = cx.model_id() as u64;
|
let worktree_id = cx.entity_id().as_u64();
|
||||||
let _maintain_remote_snapshot = cx.background().spawn(async move {
|
let _maintain_remote_snapshot = cx.background_executor().spawn(async move {
|
||||||
let mut is_first = true;
|
let mut is_first = true;
|
||||||
while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
|
while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
|
||||||
let update;
|
let update;
|
||||||
|
@ -1435,7 +1426,7 @@ impl LocalWorktree {
|
||||||
for (&server_id, summary) in summaries {
|
for (&server_id, summary) in summaries {
|
||||||
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
|
if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
|
||||||
project_id,
|
project_id,
|
||||||
worktree_id: cx.model_id() as u64,
|
worktree_id: cx.entity_id().as_u64(),
|
||||||
summary: Some(summary.to_proto(server_id, &path)),
|
summary: Some(summary.to_proto(server_id, &path)),
|
||||||
}) {
|
}) {
|
||||||
return Task::ready(Err(e));
|
return Task::ready(Err(e));
|
||||||
|
@ -1446,7 +1437,7 @@ impl LocalWorktree {
|
||||||
let rx = self.observe_updates(project_id, cx, move |update| {
|
let rx = self.observe_updates(project_id, cx, move |update| {
|
||||||
client.request(update).map(|result| result.is_ok())
|
client.request(update).map(|result| result.is_ok())
|
||||||
});
|
});
|
||||||
cx.foreground()
|
cx.background_executor()
|
||||||
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
|
.spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1472,7 +1463,7 @@ impl RemoteWorktree {
|
||||||
|
|
||||||
pub fn save_buffer(
|
pub fn save_buffer(
|
||||||
&self,
|
&self,
|
||||||
buffer_handle: ModelHandle<Buffer>,
|
buffer_handle: Model<Buffer>,
|
||||||
cx: &mut ModelContext<Worktree>,
|
cx: &mut ModelContext<Worktree>,
|
||||||
) -> Task<Result<()>> {
|
) -> Task<Result<()>> {
|
||||||
let buffer = buffer_handle.read(cx);
|
let buffer = buffer_handle.read(cx);
|
||||||
|
@ -1480,7 +1471,7 @@ impl RemoteWorktree {
|
||||||
let version = buffer.version();
|
let version = buffer.version();
|
||||||
let rpc = self.client.clone();
|
let rpc = self.client.clone();
|
||||||
let project_id = self.project_id;
|
let project_id = self.project_id;
|
||||||
cx.as_mut().spawn(|mut cx| async move {
|
cx.spawn(move |_, mut cx| async move {
|
||||||
let response = rpc
|
let response = rpc
|
||||||
.request(proto::SaveBuffer {
|
.request(proto::SaveBuffer {
|
||||||
project_id,
|
project_id,
|
||||||
|
@ -1497,7 +1488,7 @@ impl RemoteWorktree {
|
||||||
|
|
||||||
buffer_handle.update(&mut cx, |buffer, cx| {
|
buffer_handle.update(&mut cx, |buffer, cx| {
|
||||||
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
buffer.did_save(version.clone(), fingerprint, mtime, cx);
|
||||||
});
|
})?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
@ -1577,7 +1568,7 @@ impl RemoteWorktree {
|
||||||
let entry = snapshot.insert_entry(entry);
|
let entry = snapshot.insert_entry(entry);
|
||||||
worktree.snapshot = snapshot.clone();
|
worktree.snapshot = snapshot.clone();
|
||||||
entry
|
entry
|
||||||
})
|
})?
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1588,14 +1579,14 @@ impl RemoteWorktree {
|
||||||
cx: &mut ModelContext<Worktree>,
|
cx: &mut ModelContext<Worktree>,
|
||||||
) -> Task<Result<()>> {
|
) -> Task<Result<()>> {
|
||||||
let wait_for_snapshot = self.wait_for_snapshot(scan_id);
|
let wait_for_snapshot = self.wait_for_snapshot(scan_id);
|
||||||
cx.spawn(|this, mut cx| async move {
|
cx.spawn(move |this, mut cx| async move {
|
||||||
wait_for_snapshot.await?;
|
wait_for_snapshot.await?;
|
||||||
this.update(&mut cx, |worktree, _| {
|
this.update(&mut cx, |worktree, _| {
|
||||||
let worktree = worktree.as_remote_mut().unwrap();
|
let worktree = worktree.as_remote_mut().unwrap();
|
||||||
let mut snapshot = worktree.background_snapshot.lock();
|
let mut snapshot = worktree.background_snapshot.lock();
|
||||||
snapshot.delete_entry(id);
|
snapshot.delete_entry(id);
|
||||||
worktree.snapshot = snapshot.clone();
|
worktree.snapshot = snapshot.clone();
|
||||||
});
|
})?;
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -2168,16 +2159,11 @@ impl LocalSnapshot {

     fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
         let mut new_ignores = Vec::new();
-        for (index, ancestor) in abs_path.ancestors().enumerate() {
-            if index > 0 {
-                if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
-                    new_ignores.push((ancestor, Some(ignore.clone())));
-                } else {
-                    new_ignores.push((ancestor, None));
-                }
-            }
-            if ancestor.join(&*DOT_GIT).is_dir() {
-                break;
+        for ancestor in abs_path.ancestors().skip(1) {
+            if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
+                new_ignores.push((ancestor, Some(ignore.clone())));
+            } else {
+                new_ignores.push((ancestor, None));
             }
         }

@@ -2194,6 +2180,7 @@ impl LocalSnapshot {
         if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
             ignore_stack = IgnoreStack::all();
         }

         ignore_stack
     }

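The rewritten loop above drops the explicit index check and the early break at the repository root: `abs_path.ancestors().skip(1)` visits every parent directory, starting from the immediate parent and walking up to the filesystem root. A small standalone illustration of that iterator (plain std; the path is hypothetical):

    use std::path::Path;

    fn main() {
        let abs_path = Path::new("/repo/crates/project/src/worktree.rs");
        // skip(1) skips the path itself, so only parent directories are visited.
        for ancestor in abs_path.ancestors().skip(1) {
            println!("{}", ancestor.display());
        }
        // Prints: /repo/crates/project/src, /repo/crates/project, /repo/crates, /repo, /
    }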
|
@ -2471,6 +2458,7 @@ impl BackgroundScannerState {
|
||||||
|
|
||||||
fn reload_repositories(&mut self, dot_git_dirs_to_reload: &HashSet<PathBuf>, fs: &dyn Fs) {
|
fn reload_repositories(&mut self, dot_git_dirs_to_reload: &HashSet<PathBuf>, fs: &dyn Fs) {
|
||||||
let scan_id = self.snapshot.scan_id;
|
let scan_id = self.snapshot.scan_id;
|
||||||
|
|
||||||
for dot_git_dir in dot_git_dirs_to_reload {
|
for dot_git_dir in dot_git_dirs_to_reload {
|
||||||
// If there is already a repository for this .git directory, reload
|
// If there is already a repository for this .git directory, reload
|
||||||
// the status for all of its files.
|
// the status for all of its files.
|
||||||
@@ -2732,7 +2720,7 @@ impl fmt::Debug for Snapshot {

 #[derive(Clone, PartialEq)]
 pub struct File {
-    pub worktree: ModelHandle<Worktree>,
+    pub worktree: Model<Worktree>,
     pub path: Arc<Path>,
     pub mtime: SystemTime,
     pub(crate) entry_id: Option<ProjectEntryId>,
@@ -2790,7 +2778,7 @@ impl language::File for File {
     }

     fn worktree_id(&self) -> usize {
-        self.worktree.id()
+        self.worktree.entity_id().as_u64() as usize
     }

     fn is_deleted(&self) -> bool {
@@ -2803,7 +2791,7 @@ impl language::File for File {

     fn to_proto(&self) -> rpc::proto::File {
         rpc::proto::File {
-            worktree_id: self.worktree.id() as u64,
+            worktree_id: self.worktree.entity_id().as_u64(),
             entry_id: self.entry_id.map(|id| id.to_proto()),
             path: self.path.to_string_lossy().into(),
             mtime: Some(self.mtime.into()),
@@ -2826,7 +2814,7 @@ impl language::LocalFile for File {
         let worktree = self.worktree.read(cx).as_local().unwrap();
         let abs_path = worktree.absolutize(&self.path);
         let fs = worktree.fs.clone();
-        cx.background()
+        cx.background_executor()
             .spawn(async move { fs.load(&abs_path).await })
     }

@@ -2857,7 +2845,7 @@ impl language::LocalFile for File {
     }

 impl File {
-    pub fn for_entry(entry: Entry, worktree: ModelHandle<Worktree>) -> Arc<Self> {
+    pub fn for_entry(entry: Entry, worktree: Model<Worktree>) -> Arc<Self> {
         Arc::new(Self {
             worktree,
             path: entry.path.clone(),
@@ -2870,7 +2858,7 @@ impl File {

     pub fn from_proto(
         proto: rpc::proto::File,
-        worktree: ModelHandle<Worktree>,
+        worktree: Model<Worktree>,
         cx: &AppContext,
     ) -> Result<Self> {
         let worktree_id = worktree
@@ -3168,7 +3156,7 @@ struct BackgroundScanner {
     state: Mutex<BackgroundScannerState>,
     fs: Arc<dyn Fs>,
     status_updates_tx: UnboundedSender<ScanState>,
-    executor: Arc<executor::Background>,
+    executor: BackgroundExecutor,
     scan_requests_rx: channel::Receiver<ScanRequest>,
     path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     next_entry_id: Arc<AtomicUsize>,
@@ -3188,7 +3176,7 @@ impl BackgroundScanner {
         next_entry_id: Arc<AtomicUsize>,
         fs: Arc<dyn Fs>,
         status_updates_tx: UnboundedSender<ScanState>,
-        executor: Arc<executor::Background>,
+        executor: BackgroundExecutor,
         scan_requests_rx: channel::Receiver<ScanRequest>,
         path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
     ) -> Self {
@@ -3220,21 +3208,14 @@ impl BackgroundScanner {

         // Populate ignores above the root.
         let root_abs_path = self.state.lock().snapshot.abs_path.clone();
-        for (index, ancestor) in root_abs_path.ancestors().enumerate() {
-            if index != 0 {
-                if let Ok(ignore) =
-                    build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
-                {
-                    self.state
-                        .lock()
-                        .snapshot
-                        .ignores_by_parent_abs_path
-                        .insert(ancestor.into(), (ignore.into(), false));
-                }
-            }
-            if ancestor.join(&*DOT_GIT).is_dir() {
-                // Reached root of git repository.
-                break;
+        for ancestor in root_abs_path.ancestors().skip(1) {
+            if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
+            {
+                self.state
+                    .lock()
+                    .snapshot
+                    .ignores_by_parent_abs_path
+                    .insert(ancestor.into(), (ignore.into(), false));
             }
         }

|
@ -3397,6 +3378,7 @@ impl BackgroundScanner {
|
||||||
);
|
);
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
|
|
||||||
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
|
||||||
snapshot
|
snapshot
|
||||||
.entry_for_path(parent)
|
.entry_for_path(parent)
|
||||||
|
@ -3662,8 +3644,8 @@ impl BackgroundScanner {
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
let mut state = self.state.lock();
|
|
||||||
let relative_path = job.path.join(child_name);
|
let relative_path = job.path.join(child_name);
|
||||||
|
let mut state = self.state.lock();
|
||||||
if state.snapshot.is_path_excluded(relative_path.clone()) {
|
if state.snapshot.is_path_excluded(relative_path.clone()) {
|
||||||
log::debug!("skipping excluded child entry {relative_path:?}");
|
log::debug!("skipping excluded child entry {relative_path:?}");
|
||||||
state.remove_path(&relative_path);
|
state.remove_path(&relative_path);
|
||||||
|
@ -4240,11 +4222,11 @@ pub trait WorktreeModelHandle {
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
fn flush_fs_events<'a>(
|
fn flush_fs_events<'a>(
|
||||||
&self,
|
&self,
|
||||||
cx: &'a gpui::TestAppContext,
|
cx: &'a mut gpui::TestAppContext,
|
||||||
) -> futures::future::LocalBoxFuture<'a, ()>;
|
) -> futures::future::LocalBoxFuture<'a, ()>;
|
||||||
}
|
}
|
||||||
|
|
||||||
impl WorktreeModelHandle for ModelHandle<Worktree> {
|
impl WorktreeModelHandle for Model<Worktree> {
|
||||||
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
|
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
|
||||||
// occurred before the worktree was constructed. These events can cause the worktree to perform
|
// occurred before the worktree was constructed. These events can cause the worktree to perform
|
||||||
// extra directory scans, and emit extra scan-state notifications.
|
// extra directory scans, and emit extra scan-state notifications.
|
||||||
|
@ -4254,29 +4236,31 @@ impl WorktreeModelHandle for ModelHandle<Worktree> {
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
fn flush_fs_events<'a>(
|
fn flush_fs_events<'a>(
|
||||||
&self,
|
&self,
|
||||||
cx: &'a gpui::TestAppContext,
|
cx: &'a mut gpui::TestAppContext,
|
||||||
) -> futures::future::LocalBoxFuture<'a, ()> {
|
) -> futures::future::LocalBoxFuture<'a, ()> {
|
||||||
let filename = "fs-event-sentinel";
|
let file_name = "fs-event-sentinel";
|
||||||
|
|
||||||
let tree = self.clone();
|
let tree = self.clone();
|
||||||
let (fs, root_path) = self.read_with(cx, |tree, _| {
|
let (fs, root_path) = self.update(cx, |tree, _| {
|
||||||
let tree = tree.as_local().unwrap();
|
let tree = tree.as_local().unwrap();
|
||||||
(tree.fs.clone(), tree.abs_path().clone())
|
(tree.fs.clone(), tree.abs_path().clone())
|
||||||
});
|
});
|
||||||
|
|
||||||
async move {
|
async move {
|
||||||
fs.create_file(&root_path.join(filename), Default::default())
|
fs.create_file(&root_path.join(file_name), Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
|
|
||||||
|
cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_some())
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
fs.remove_file(&root_path.join(filename), Default::default())
|
fs.remove_file(&root_path.join(file_name), Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
|
cx.condition(&tree, |tree, _| tree.entry_for_path(file_name).is_none())
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||||
.await;
|
.await;
|
||||||
}
|
}
|
||||||
.boxed_local()
|
.boxed_local()
|
||||||
|

@@ -7,7 +7,7 @@ use anyhow::Result;
 use client::Client;
 use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
 use git::GITIGNORE;
-use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
+use gpui::{ModelContext, Task, TestAppContext};
 use parking_lot::Mutex;
 use postage::stream::Stream;
 use pretty_assertions::assert_eq;
@@ -26,7 +26,7 @@ use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
 #[gpui::test]
 async fn test_traversal(cx: &mut TestAppContext) {
     init_test(cx);
-    let fs = FakeFs::new(cx.background());
+    let fs = FakeFs::new(cx.background_executor.clone());
     fs.insert_tree(
         "/root",
         json!({
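The test hunks that follow repeat one setup change: `FakeFs` is now handed a clone of the test context's background executor rather than the old `cx.background()` handle, and the tests park on `cx.executor().run_until_parked()` instead of `cx.foreground().run_until_parked()`. A schematic sketch of the updated setup (it mirrors the new-side lines above; the tree contents are illustrative):

    // Inside an async #[gpui::test] with `cx: &mut TestAppContext`.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree("/root", json!({ "a.txt": "" })).await;
    cx.executor().run_until_parked();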
|
@ -82,7 +82,7 @@ async fn test_traversal(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_descendent_entries(cx: &mut TestAppContext) {
|
async fn test_descendent_entries(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -188,9 +188,9 @@ async fn test_descendent_entries(cx: &mut TestAppContext) {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[gpui::test(iterations = 10)]
|
#[gpui::test(iterations = 10)]
|
||||||
async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
|
async fn test_circular_symlinks(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -247,7 +247,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
executor.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree.read_with(cx, |tree, _| {
|
tree.read_with(cx, |tree, _| {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tree.entries(false)
|
tree.entries(false)
|
||||||
|
@ -270,7 +270,7 @@ async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppCo
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
|
async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -446,7 +446,7 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -597,7 +597,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
||||||
fs.create_dir("/root/one/node_modules/c/lib".as_ref())
|
fs.create_dir("/root/one/node_modules/c/lib".as_ref())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
|
fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
|
||||||
0
|
0
|
||||||
|
@ -607,7 +607,7 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -693,7 +693,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
||||||
fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
|
fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
// All of the directories that are no longer ignored are now loaded.
|
// All of the directories that are no longer ignored are now loaded.
|
||||||
tree.read_with(cx, |tree, _| {
|
tree.read_with(cx, |tree, _| {
|
||||||
|
@ -732,13 +732,13 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
|
||||||
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
cx.update(|cx| {
|
cx.update(|cx| {
|
||||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||||
project_settings.file_scan_exclusions = Some(Vec::new());
|
project_settings.file_scan_exclusions = Some(Vec::new());
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -818,7 +818,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
cx.read(|cx| {
|
cx.read(|cx| {
|
||||||
let tree = tree.read(cx);
|
let tree = tree.read(cx);
|
||||||
assert!(
|
assert!(
|
||||||
|
@ -844,6 +844,7 @@ async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_write_file(cx: &mut TestAppContext) {
|
async fn test_write_file(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
|
cx.executor().allow_parking();
|
||||||
let dir = temp_tree(json!({
|
let dir = temp_tree(json!({
|
||||||
".git": {},
|
".git": {},
|
||||||
".gitignore": "ignored-dir\n",
|
".gitignore": "ignored-dir\n",
|
||||||
|
@ -897,6 +898,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
|
cx.executor().allow_parking();
|
||||||
let dir = temp_tree(json!({
|
let dir = temp_tree(json!({
|
||||||
".gitignore": "**/target\n/node_modules\n",
|
".gitignore": "**/target\n/node_modules\n",
|
||||||
"target": {
|
"target": {
|
||||||
|
@ -922,7 +924,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
||||||
".DS_Store": "",
|
".DS_Store": "",
|
||||||
}));
|
}));
|
||||||
cx.update(|cx| {
|
cx.update(|cx| {
|
||||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||||
project_settings.file_scan_exclusions =
|
project_settings.file_scan_exclusions =
|
||||||
Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
|
Some(vec!["**/foo/**".to_string(), "**/.DS_Store".to_string()]);
|
||||||
|
@ -959,7 +961,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
||||||
});
|
});
|
||||||
|
|
||||||
cx.update(|cx| {
|
cx.update(|cx| {
|
||||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||||
project_settings.file_scan_exclusions =
|
project_settings.file_scan_exclusions =
|
||||||
Some(vec!["**/node_modules/**".to_string()]);
|
Some(vec!["**/node_modules/**".to_string()]);
|
||||||
|
@ -967,7 +969,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree.read_with(cx, |tree, _| {
|
tree.read_with(cx, |tree, _| {
|
||||||
check_worktree_entries(
|
check_worktree_entries(
|
||||||
tree,
|
tree,
|
||||||
|
@ -993,6 +995,7 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
|
cx.executor().allow_parking();
|
||||||
let dir = temp_tree(json!({
|
let dir = temp_tree(json!({
|
||||||
".git": {
|
".git": {
|
||||||
"HEAD": "ref: refs/heads/main\n",
|
"HEAD": "ref: refs/heads/main\n",
|
||||||
|
@ -1022,7 +1025,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||||
".DS_Store": "",
|
".DS_Store": "",
|
||||||
}));
|
}));
|
||||||
cx.update(|cx| {
|
cx.update(|cx| {
|
||||||
cx.update_global::<SettingsStore, _, _>(|store, cx| {
|
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||||
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
|
||||||
project_settings.file_scan_exclusions = Some(vec![
|
project_settings.file_scan_exclusions = Some(vec![
|
||||||
"**/.git".to_string(),
|
"**/.git".to_string(),
|
||||||
|
@ -1134,7 +1137,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
|
||||||
#[gpui::test(iterations = 30)]
|
#[gpui::test(iterations = 30)]
|
||||||
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -1180,7 +1183,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(entry.is_dir());
|
assert!(entry.is_dir());
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree.read_with(cx, |tree, _| {
|
tree.read_with(cx, |tree, _| {
|
||||||
assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
|
assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
|
||||||
});
|
});
|
||||||
|
@ -1195,9 +1198,10 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
|
cx.executor().allow_parking();
|
||||||
|
let client_fake = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
|
||||||
|
|
||||||
let fs_fake = FakeFs::new(cx.background());
|
let fs_fake = FakeFs::new(cx.background_executor.clone());
|
||||||
fs_fake
|
fs_fake
|
||||||
.insert_tree(
|
.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
|
@ -1229,14 +1233,14 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(entry.is_file());
|
assert!(entry.is_file());
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree_fake.read_with(cx, |tree, _| {
|
tree_fake.read_with(cx, |tree, _| {
|
||||||
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
|
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
|
||||||
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
|
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
|
||||||
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
|
assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
|
||||||
});
|
});
|
||||||
|
|
||||||
let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
|
let client_real = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
|
||||||
|
|
||||||
let fs_real = Arc::new(RealFs);
|
let fs_real = Arc::new(RealFs);
|
||||||
let temp_root = temp_tree(json!({
|
let temp_root = temp_tree(json!({
|
||||||
|
@ -1265,7 +1269,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(entry.is_file());
|
assert!(entry.is_file());
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree_real.read_with(cx, |tree, _| {
|
tree_real.read_with(cx, |tree, _| {
|
||||||
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
|
assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
|
||||||
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
|
assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
|
||||||
|
@ -1284,7 +1288,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(entry.is_file());
|
assert!(entry.is_file());
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree_real.read_with(cx, |tree, _| {
|
tree_real.read_with(cx, |tree, _| {
|
||||||
assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
|
assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
|
||||||
});
|
});
|
||||||
|
@ -1301,7 +1305,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
assert!(entry.is_file());
|
assert!(entry.is_file());
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
tree_real.read_with(cx, |tree, _| {
|
tree_real.read_with(cx, |tree, _| {
|
||||||
assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
|
assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
|
||||||
assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
|
assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
|
||||||
|
@ -1324,7 +1328,7 @@ async fn test_random_worktree_operations_during_initial_scan(
|
||||||
.unwrap_or(20);
|
.unwrap_or(20);
|
||||||
|
|
||||||
let root_dir = Path::new("/test");
|
let root_dir = Path::new("/test");
|
||||||
let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
|
let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
|
||||||
fs.as_fake().insert_tree(root_dir, json!({})).await;
|
fs.as_fake().insert_tree(root_dir, json!({})).await;
|
||||||
for _ in 0..initial_entries {
|
for _ in 0..initial_entries {
|
||||||
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
|
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
|
||||||
|
@ -1376,7 +1380,7 @@ async fn test_random_worktree_operations_during_initial_scan(
|
||||||
.update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
|
.update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
let final_snapshot = worktree.read_with(cx, |tree, _| {
|
let final_snapshot = worktree.read_with(cx, |tree, _| {
|
||||||
let tree = tree.as_local().unwrap();
|
let tree = tree.as_local().unwrap();
|
||||||
|
@ -1414,7 +1418,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
|
||||||
.unwrap_or(20);
|
.unwrap_or(20);
|
||||||
|
|
||||||
let root_dir = Path::new("/test");
|
let root_dir = Path::new("/test");
|
||||||
let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
|
let fs = FakeFs::new(cx.background_executor.clone()) as Arc<dyn Fs>;
|
||||||
fs.as_fake().insert_tree(root_dir, json!({})).await;
|
fs.as_fake().insert_tree(root_dir, json!({})).await;
|
||||||
for _ in 0..initial_entries {
|
for _ in 0..initial_entries {
|
||||||
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
|
randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
|
||||||
|
@ -1474,7 +1478,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
|
||||||
mutations_len -= 1;
|
mutations_len -= 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
if rng.gen_bool(0.2) {
|
if rng.gen_bool(0.2) {
|
||||||
log::info!("storing snapshot {}", snapshots.len());
|
log::info!("storing snapshot {}", snapshots.len());
|
||||||
let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
|
let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
|
||||||
|
@ -1484,7 +1488,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
|
||||||
|
|
||||||
log::info!("quiescing");
|
log::info!("quiescing");
|
||||||
fs.as_fake().flush_events(usize::MAX);
|
fs.as_fake().flush_events(usize::MAX);
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
|
let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
|
||||||
snapshot.check_invariants(true);
|
snapshot.check_invariants(true);
|
||||||
|
@ -1624,7 +1628,7 @@ fn randomly_mutate_worktree(
|
||||||
new_path
|
new_path
|
||||||
);
|
);
|
||||||
let task = worktree.rename_entry(entry.id, new_path, cx);
|
let task = worktree.rename_entry(entry.id, new_path, cx);
|
||||||
cx.foreground().spawn(async move {
|
cx.background_executor().spawn(async move {
|
||||||
task.await?.unwrap();
|
task.await?.unwrap();
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
@ -1639,7 +1643,7 @@ fn randomly_mutate_worktree(
|
||||||
child_path,
|
child_path,
|
||||||
);
|
);
|
||||||
let task = worktree.create_entry(child_path, is_dir, cx);
|
let task = worktree.create_entry(child_path, is_dir, cx);
|
||||||
cx.foreground().spawn(async move {
|
cx.background_executor().spawn(async move {
|
||||||
task.await?;
|
task.await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
@ -1647,7 +1651,7 @@ fn randomly_mutate_worktree(
|
||||||
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
|
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
|
||||||
let task =
|
let task =
|
||||||
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
|
worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
|
||||||
cx.foreground().spawn(async move {
|
cx.background_executor().spawn(async move {
|
||||||
task.await?;
|
task.await?;
|
||||||
Ok(())
|
Ok(())
|
||||||
})
|
})
|
||||||
|
@ -1826,6 +1830,7 @@ fn random_filename(rng: &mut impl Rng) -> String {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_rename_work_directory(cx: &mut TestAppContext) {
|
async fn test_rename_work_directory(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
|
cx.executor().allow_parking();
|
||||||
let root = temp_tree(json!({
|
let root = temp_tree(json!({
|
||||||
"projects": {
|
"projects": {
|
||||||
"project1": {
|
"project1": {
|
||||||
|
@ -1897,6 +1902,7 @@ async fn test_rename_work_directory(cx: &mut TestAppContext) {
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
|
async fn test_git_repository_for_path(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
|
cx.executor().allow_parking();
|
||||||
let root = temp_tree(json!({
|
let root = temp_tree(json!({
|
||||||
"c.txt": "",
|
"c.txt": "",
|
||||||
"dir1": {
|
"dir1": {
|
||||||
@@ -2016,16 +2022,9 @@ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
 }

 #[gpui::test]
-async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+async fn test_git_status(cx: &mut TestAppContext) {
     init_test(cx);
-    cx.update(|cx| {
-        cx.update_global::<SettingsStore, _, _>(|store, cx| {
-            store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
-                project_settings.file_scan_exclusions =
-                    Some(vec!["**/.git".to_string(), "**/.gitignore".to_string()]);
-            });
-        });
-    });
+    cx.executor().allow_parking();
     const IGNORE_RULE: &'static str = "**/target";

     let root = temp_tree(json!({
|
@ -2077,7 +2076,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||||
.await;
|
.await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
// Check that the right git state is observed on startup
|
// Check that the right git state is observed on startup
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
|
@ -2099,7 +2098,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
// Modify a file in the working copy.
|
// Modify a file in the working copy.
|
||||||
std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
|
std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
// The worktree detects that the file's git status has changed.
|
// The worktree detects that the file's git status has changed.
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
|
@ -2115,7 +2114,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
git_add(B_TXT, &repo);
|
git_add(B_TXT, &repo);
|
||||||
git_commit("Committing modified and added", &repo);
|
git_commit("Committing modified and added", &repo);
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
// The worktree detects that the files' git status have changed.
|
// The worktree detects that the files' git status have changed.
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
|
@ -2135,7 +2134,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
|
std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
|
||||||
std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
|
std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
// Check that more complex repo changes are tracked
|
// Check that more complex repo changes are tracked
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
|
@ -2164,7 +2163,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
git_commit("Committing modified git ignore", &repo);
|
git_commit("Committing modified git ignore", &repo);
|
||||||
|
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
let mut renamed_dir_name = "first_directory/second_directory";
|
let mut renamed_dir_name = "first_directory/second_directory";
|
||||||
const RENAMED_FILE: &'static str = "rf.txt";
|
const RENAMED_FILE: &'static str = "rf.txt";
|
||||||
|
@ -2177,7 +2176,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
let snapshot = tree.snapshot();
|
let snapshot = tree.snapshot();
|
||||||
|
@ -2196,7 +2195,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
tree.flush_fs_events(cx).await;
|
tree.flush_fs_events(cx).await;
|
||||||
deterministic.run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
|
|
||||||
tree.read_with(cx, |tree, _cx| {
|
tree.read_with(cx, |tree, _cx| {
|
||||||
let snapshot = tree.snapshot();
|
let snapshot = tree.snapshot();
|
||||||
|
@ -2215,7 +2214,7 @@ async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppCont
|
||||||
#[gpui::test]
|
#[gpui::test]
|
||||||
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
|
async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
|
||||||
init_test(cx);
|
init_test(cx);
|
||||||
let fs = FakeFs::new(cx.background());
|
let fs = FakeFs::new(cx.background_executor.clone());
|
||||||
fs.insert_tree(
|
fs.insert_tree(
|
||||||
"/root",
|
"/root",
|
||||||
json!({
|
json!({
|
||||||
|
@ -2266,7 +2265,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
|
||||||
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
cx.foreground().run_until_parked();
|
cx.executor().run_until_parked();
|
||||||
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
|
let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
|
||||||
|
|
||||||
check_propagated_statuses(
|
check_propagated_statuses(
|
||||||
|
@ -2334,7 +2333,7 @@ async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
|
||||||
|
|
||||||
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
|
fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
|
||||||
let http_client = FakeHttpClient::with_404_response();
|
let http_client = FakeHttpClient::with_404_response();
|
||||||
cx.read(|cx| Client::new(http_client, cx))
|
cx.update(|cx| Client::new(http_client, cx))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
|
@ -2456,7 +2455,8 @@ fn check_worktree_entries(
|
||||||
|
|
||||||
fn init_test(cx: &mut gpui::TestAppContext) {
|
fn init_test(cx: &mut gpui::TestAppContext) {
|
||||||
cx.update(|cx| {
|
cx.update(|cx| {
|
||||||
cx.set_global(SettingsStore::test(cx));
|
let settings_store = SettingsStore::test(cx);
|
||||||
|
cx.set_global(settings_store);
|
||||||
Project::init_settings(cx);
|
Project::init_settings(cx);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
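Aside: the recurring change in the hunks above is mechanical — the injected Arc<Deterministic> handle and the cx.foreground()/cx.background() accessors are replaced by the executor hanging off the test context. A minimal sketch of the migrated test shape follows, using only calls that appear in this diff (gpui2's TestAppContext plus the crate-local init_test and FakeFs helpers); it assumes the zed test harness and is not compilable on its own.

// Sketch only: assumes the zed/gpui2 test harness (init_test, FakeFs, #[gpui::test]).
use gpui::TestAppContext;

#[gpui::test]
async fn test_example(cx: &mut TestAppContext) {
    init_test(cx);
    // Parking and scheduling now go through the context's executor,
    // instead of a separately injected Arc<Deterministic>.
    cx.executor().allow_parking();
    let fs = FakeFs::new(cx.background_executor.clone());
    let _ = fs; // ... set up a worktree on `fs`, then wait for background work to settle:
    cx.executor().run_until_parked();
}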
@@ -1,85 +0,0 @@
[package]
name = "project2"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
path = "src/project2.rs"
doctest = false

[features]
test-support = [
    "client/test-support",
    "db/test-support",
    "language/test-support",
    "settings/test-support",
    "text/test-support",
    "prettier/test-support",
    "gpui/test-support",
]

[dependencies]
text = { package = "text2", path = "../text2" }
copilot = { path = "../copilot" }
client = { package = "client2", path = "../client2" }
clock = { path = "../clock" }
collections = { path = "../collections" }
db = { package = "db2", path = "../db2" }
fs = { package = "fs2", path = "../fs2" }
fsevent = { path = "../fsevent" }
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
lsp = { package = "lsp2", path = "../lsp2" }
node_runtime = { path = "../node_runtime" }
prettier = { package = "prettier2", path = "../prettier2" }
rpc = { package = "rpc2", path = "../rpc2" }
settings = { package = "settings2", path = "../settings2" }
sum_tree = { path = "../sum_tree" }
terminal = { package = "terminal2", path = "../terminal2" }
util = { path = "../util" }

aho-corasick = "1.1"
anyhow.workspace = true
async-trait.workspace = true
backtrace = "0.3"
futures.workspace = true
globset.workspace = true
ignore = "0.4"
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
regex.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
sha2 = "0.10"
similar = "1.3"
smol.workspace = true
thiserror.workspace = true
toml.workspace = true
itertools = "0.10"

[dev-dependencies]
ctor.workspace = true
env_logger.workspace = true
pretty_assertions.workspace = true
client = { package = "client2", path = "../client2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
db = { package = "db2", path = "../db2", features = ["test-support"] }
fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
prettier = { package = "prettier2", path = "../prettier2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
git2.workspace = true
tempdir.workspace = true
unindent.workspace = true
@@ -1,53 +0,0 @@
use ignore::gitignore::Gitignore;
use std::{ffi::OsStr, path::Path, sync::Arc};

pub enum IgnoreStack {
    None,
    Some {
        abs_base_path: Arc<Path>,
        ignore: Arc<Gitignore>,
        parent: Arc<IgnoreStack>,
    },
    All,
}

impl IgnoreStack {
    pub fn none() -> Arc<Self> {
        Arc::new(Self::None)
    }

    pub fn all() -> Arc<Self> {
        Arc::new(Self::All)
    }

    pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
        match self.as_ref() {
            IgnoreStack::All => self,
            _ => Arc::new(Self::Some {
                abs_base_path,
                ignore,
                parent: self,
            }),
        }
    }

    pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool {
        if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) {
            return true;
        }

        match self {
            Self::None => false,
            Self::All => true,
            Self::Some {
                abs_base_path,
                ignore,
                parent: prev,
            } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) {
                ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir),
                ignore::Match::Ignore(_) => true,
                ignore::Match::Whitelist(_) => false,
            },
        }
    }
}
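For context, a self-contained sketch of the layered gitignore check that IgnoreStack implements, written directly against the ignore crate used above; the repository path and rules here are made up for illustration.

use ignore::gitignore::GitignoreBuilder;
use std::path::Path;

fn main() -> Result<(), ignore::Error> {
    let root = Path::new("/repo");
    let mut builder = GitignoreBuilder::new(root);
    builder.add_line(None, "*.log")?;
    builder.add_line(None, "!important.log")?;
    let gitignore = builder.build()?;

    // Mirrors IgnoreStack::is_abs_path_ignored: strip the base path before
    // matching, and treat a whitelist match as "not ignored".
    for (path, is_dir) in [("/repo/debug.log", false), ("/repo/important.log", false)] {
        let rel = Path::new(path).strip_prefix(root).unwrap();
        match gitignore.matched(rel, is_dir) {
            ignore::Match::Ignore(_) => println!("{path}: ignored"),
            ignore::Match::Whitelist(_) => println!("{path}: whitelisted"),
            ignore::Match::None => println!("{path}: no rule matched"),
        }
    }
    Ok(())
}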
File diff suppressed because it is too large
@@ -1,137 +0,0 @@
use std::{path::Path, sync::Arc};

use anyhow::Context;
use async_trait::async_trait;
use gpui::{AppContext, AsyncAppContext, Model};
use language::{point_to_lsp, proto::deserialize_anchor, Buffer};
use lsp::{LanguageServer, LanguageServerId};
use rpc::proto::{self, PeerId};
use serde::{Deserialize, Serialize};
use text::{PointUtf16, ToPointUtf16};

use crate::{lsp_command::LspCommand, Project};

pub enum LspExpandMacro {}

impl lsp::request::Request for LspExpandMacro {
    type Params = ExpandMacroParams;
    type Result = Option<ExpandedMacro>;
    const METHOD: &'static str = "rust-analyzer/expandMacro";
}

#[derive(Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExpandMacroParams {
    pub text_document: lsp::TextDocumentIdentifier,
    pub position: lsp::Position,
}

#[derive(Default, Deserialize, Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ExpandedMacro {
    pub name: String,
    pub expansion: String,
}

impl ExpandedMacro {
    pub fn is_empty(&self) -> bool {
        self.name.is_empty() && self.expansion.is_empty()
    }
}

pub struct ExpandMacro {
    pub position: PointUtf16,
}

#[async_trait(?Send)]
impl LspCommand for ExpandMacro {
    type Response = ExpandedMacro;
    type LspRequest = LspExpandMacro;
    type ProtoRequest = proto::LspExtExpandMacro;

    fn to_lsp(
        &self,
        path: &Path,
        _: &Buffer,
        _: &Arc<LanguageServer>,
        _: &AppContext,
    ) -> ExpandMacroParams {
        ExpandMacroParams {
            text_document: lsp::TextDocumentIdentifier {
                uri: lsp::Url::from_file_path(path).unwrap(),
            },
            position: point_to_lsp(self.position),
        }
    }

    async fn response_from_lsp(
        self,
        message: Option<ExpandedMacro>,
        _: Model<Project>,
        _: Model<Buffer>,
        _: LanguageServerId,
        _: AsyncAppContext,
    ) -> anyhow::Result<ExpandedMacro> {
        Ok(message
            .map(|message| ExpandedMacro {
                name: message.name,
                expansion: message.expansion,
            })
            .unwrap_or_default())
    }

    fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::LspExtExpandMacro {
        proto::LspExtExpandMacro {
            project_id,
            buffer_id: buffer.remote_id(),
            position: Some(language::proto::serialize_anchor(
                &buffer.anchor_before(self.position),
            )),
        }
    }

    async fn from_proto(
        message: Self::ProtoRequest,
        _: Model<Project>,
        buffer: Model<Buffer>,
        mut cx: AsyncAppContext,
    ) -> anyhow::Result<Self> {
        let position = message
            .position
            .and_then(deserialize_anchor)
            .context("invalid position")?;
        Ok(Self {
            position: buffer.update(&mut cx, |buffer, _| position.to_point_utf16(buffer))?,
        })
    }

    fn response_to_proto(
        response: ExpandedMacro,
        _: &mut Project,
        _: PeerId,
        _: &clock::Global,
        _: &mut AppContext,
    ) -> proto::LspExtExpandMacroResponse {
        proto::LspExtExpandMacroResponse {
            name: response.name,
            expansion: response.expansion,
        }
    }

    async fn response_from_proto(
        self,
        message: proto::LspExtExpandMacroResponse,
        _: Model<Project>,
        _: Model<Buffer>,
        _: AsyncAppContext,
    ) -> anyhow::Result<ExpandedMacro> {
        Ok(ExpandedMacro {
            name: message.name,
            expansion: message.expansion,
        })
    }

    fn buffer_id_from_proto(message: &proto::LspExtExpandMacro) -> u64 {
        message.buffer_id
    }
}
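For reference, a standalone sketch of the rust-analyzer/expandMacro wire format that ExpandMacroParams and ExpandedMacro model above. The structs below are simplified stand-ins (plain strings instead of the lsp crate's TextDocumentIdentifier/Position re-exports), so only the camelCase JSON shape is meant to match; the file path and position are made up.

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ExpandMacroParams {
    text_document: TextDocumentIdentifier,
    position: Position,
}

#[derive(Serialize, Deserialize, Debug)]
struct TextDocumentIdentifier {
    uri: String,
}

#[derive(Serialize, Deserialize, Debug)]
struct Position {
    line: u32,
    character: u32,
}

fn main() {
    let params = ExpandMacroParams {
        text_document: TextDocumentIdentifier {
            uri: "file:///tmp/main.rs".into(),
        },
        position: Position { line: 3, character: 8 },
    };
    // Serializes as {"textDocument":{"uri":...},"position":{...}} — the payload
    // sent with the custom "rust-analyzer/expandMacro" LSP method.
    println!("{}", serde_json::to_string_pretty(&params).unwrap());
}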
@@ -1,772 +0,0 @@
use std::{
    ops::ControlFlow,
    path::{Path, PathBuf},
    sync::Arc,
};

use anyhow::Context;
use collections::HashSet;
use fs::Fs;
use futures::{
    future::{self, Shared},
    FutureExt,
};
use gpui::{AsyncAppContext, Model, ModelContext, Task, WeakModel};
use language::{
    language_settings::{Formatter, LanguageSettings},
    Buffer, Language, LanguageServerName, LocalFile,
};
use lsp::LanguageServerId;
use node_runtime::NodeRuntime;
use prettier::Prettier;
use util::{paths::DEFAULT_PRETTIER_DIR, ResultExt, TryFutureExt};

use crate::{
    Event, File, FormatOperation, PathChange, Project, ProjectEntryId, Worktree, WorktreeId,
};

pub fn prettier_plugins_for_language(
    language: &Language,
    language_settings: &LanguageSettings,
) -> Option<HashSet<&'static str>> {
    match &language_settings.formatter {
        Formatter::Prettier { .. } | Formatter::Auto => {}
        Formatter::LanguageServer | Formatter::External { .. } => return None,
    };
    let mut prettier_plugins = None;
    if language.prettier_parser_name().is_some() {
        prettier_plugins
            .get_or_insert_with(|| HashSet::default())
            .extend(
                language
                    .lsp_adapters()
                    .iter()
                    .flat_map(|adapter| adapter.prettier_plugins()),
            )
    }

    prettier_plugins
}

pub(super) async fn format_with_prettier(
    project: &WeakModel<Project>,
    buffer: &Model<Buffer>,
    cx: &mut AsyncAppContext,
) -> Option<FormatOperation> {
    if let Some((prettier_path, prettier_task)) = project
        .update(cx, |project, cx| {
            project.prettier_instance_for_buffer(buffer, cx)
        })
        .ok()?
        .await
    {
        match prettier_task.await {
            Ok(prettier) => {
                let buffer_path = buffer
                    .update(cx, |buffer, cx| {
                        File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
                    })
                    .ok()?;
                match prettier.format(buffer, buffer_path, cx).await {
                    Ok(new_diff) => return Some(FormatOperation::Prettier(new_diff)),
                    Err(e) => {
                        log::error!(
                            "Prettier instance from {prettier_path:?} failed to format a buffer: {e:#}"
                        );
                    }
                }
            }
            Err(e) => project
                .update(cx, |project, _| {
                    let instance_to_update = match prettier_path {
                        Some(prettier_path) => {
                            log::error!(
                                "Prettier instance from path {prettier_path:?} failed to spawn: {e:#}"
                            );
                            project.prettier_instances.get_mut(&prettier_path)
                        }
                        None => {
                            log::error!("Default prettier instance failed to spawn: {e:#}");
                            match &mut project.default_prettier.prettier {
                                PrettierInstallation::NotInstalled { .. } => None,
                                PrettierInstallation::Installed(instance) => Some(instance),
                            }
                        }
                    };

                    if let Some(instance) = instance_to_update {
                        instance.attempt += 1;
                        instance.prettier = None;
                    }
                })
                .ok()?,
        }
    }

    None
}

pub struct DefaultPrettier {
    prettier: PrettierInstallation,
    installed_plugins: HashSet<&'static str>,
}

pub enum PrettierInstallation {
    NotInstalled {
        attempts: usize,
        installation_task: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
        not_installed_plugins: HashSet<&'static str>,
    },
    Installed(PrettierInstance),
}

pub type PrettierTask = Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>;

#[derive(Clone)]
pub struct PrettierInstance {
    attempt: usize,
    prettier: Option<PrettierTask>,
}

impl Default for DefaultPrettier {
    fn default() -> Self {
        Self {
            prettier: PrettierInstallation::NotInstalled {
                attempts: 0,
                installation_task: None,
                not_installed_plugins: HashSet::default(),
            },
            installed_plugins: HashSet::default(),
        }
    }
}

impl DefaultPrettier {
    pub fn instance(&self) -> Option<&PrettierInstance> {
        if let PrettierInstallation::Installed(instance) = &self.prettier {
            Some(instance)
        } else {
            None
        }
    }

    pub fn prettier_task(
        &mut self,
        node: &Arc<dyn NodeRuntime>,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<'_, Project>,
    ) -> Option<Task<anyhow::Result<PrettierTask>>> {
        match &mut self.prettier {
            PrettierInstallation::NotInstalled { .. } => {
                Some(start_default_prettier(Arc::clone(node), worktree_id, cx))
            }
            PrettierInstallation::Installed(existing_instance) => {
                existing_instance.prettier_task(node, None, worktree_id, cx)
            }
        }
    }
}

impl PrettierInstance {
    pub fn prettier_task(
        &mut self,
        node: &Arc<dyn NodeRuntime>,
        prettier_dir: Option<&Path>,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<'_, Project>,
    ) -> Option<Task<anyhow::Result<PrettierTask>>> {
        if self.attempt > prettier::FAIL_THRESHOLD {
            match prettier_dir {
                Some(prettier_dir) => log::warn!(
                    "Prettier from path {prettier_dir:?} exceeded launch threshold, not starting"
                ),
                None => log::warn!("Default prettier exceeded launch threshold, not starting"),
            }
            return None;
        }
        Some(match &self.prettier {
            Some(prettier_task) => Task::ready(Ok(prettier_task.clone())),
            None => match prettier_dir {
                Some(prettier_dir) => {
                    let new_task = start_prettier(
                        Arc::clone(node),
                        prettier_dir.to_path_buf(),
                        worktree_id,
                        cx,
                    );
                    self.attempt += 1;
                    self.prettier = Some(new_task.clone());
                    Task::ready(Ok(new_task))
                }
                None => {
                    self.attempt += 1;
                    let node = Arc::clone(node);
                    cx.spawn(|project, mut cx| async move {
                        project
                            .update(&mut cx, |_, cx| {
                                start_default_prettier(node, worktree_id, cx)
                            })?
                            .await
                    })
                }
            },
        })
    }
}

fn start_default_prettier(
    node: Arc<dyn NodeRuntime>,
    worktree_id: Option<WorktreeId>,
    cx: &mut ModelContext<'_, Project>,
) -> Task<anyhow::Result<PrettierTask>> {
    cx.spawn(|project, mut cx| async move {
        loop {
            let installation_task = project.update(&mut cx, |project, _| {
                match &project.default_prettier.prettier {
                    PrettierInstallation::NotInstalled {
                        installation_task, ..
                    } => ControlFlow::Continue(installation_task.clone()),
                    PrettierInstallation::Installed(default_prettier) => {
                        ControlFlow::Break(default_prettier.clone())
                    }
                }
            })?;
            match installation_task {
                ControlFlow::Continue(None) => {
                    anyhow::bail!("Default prettier is not installed and cannot be started")
                }
                ControlFlow::Continue(Some(installation_task)) => {
                    log::info!("Waiting for default prettier to install");
                    if let Err(e) = installation_task.await {
                        project.update(&mut cx, |project, _| {
                            if let PrettierInstallation::NotInstalled {
                                installation_task,
                                attempts,
                                ..
                            } = &mut project.default_prettier.prettier
                            {
                                *installation_task = None;
                                *attempts += 1;
                            }
                        })?;
                        anyhow::bail!(
                            "Cannot start default prettier due to its installation failure: {e:#}"
                        );
                    }
                    let new_default_prettier = project.update(&mut cx, |project, cx| {
                        let new_default_prettier =
                            start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
                        project.default_prettier.prettier =
                            PrettierInstallation::Installed(PrettierInstance {
                                attempt: 0,
                                prettier: Some(new_default_prettier.clone()),
                            });
                        new_default_prettier
                    })?;
                    return Ok(new_default_prettier);
                }
                ControlFlow::Break(instance) => match instance.prettier {
                    Some(instance) => return Ok(instance),
                    None => {
                        let new_default_prettier = project.update(&mut cx, |project, cx| {
                            let new_default_prettier =
                                start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
                            project.default_prettier.prettier =
                                PrettierInstallation::Installed(PrettierInstance {
                                    attempt: instance.attempt + 1,
                                    prettier: Some(new_default_prettier.clone()),
                                });
                            new_default_prettier
                        })?;
                        return Ok(new_default_prettier);
                    }
                },
            }
        }
    })
}

fn start_prettier(
    node: Arc<dyn NodeRuntime>,
    prettier_dir: PathBuf,
    worktree_id: Option<WorktreeId>,
    cx: &mut ModelContext<'_, Project>,
) -> PrettierTask {
    cx.spawn(|project, mut cx| async move {
        log::info!("Starting prettier at path {prettier_dir:?}");
        let new_server_id = project.update(&mut cx, |project, _| {
            project.languages.next_language_server_id()
        })?;

        let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
            .await
            .context("default prettier spawn")
            .map(Arc::new)
            .map_err(Arc::new)?;
        register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
        Ok(new_prettier)
    })
    .shared()
}

fn register_new_prettier(
    project: &WeakModel<Project>,
    prettier: &Prettier,
    worktree_id: Option<WorktreeId>,
    new_server_id: LanguageServerId,
    cx: &mut AsyncAppContext,
) {
    let prettier_dir = prettier.prettier_dir();
    let is_default = prettier.is_default();
    if is_default {
        log::info!("Started default prettier in {prettier_dir:?}");
    } else {
        log::info!("Started prettier in {prettier_dir:?}");
    }
    if let Some(prettier_server) = prettier.server() {
        project
            .update(cx, |project, cx| {
                let name = if is_default {
                    LanguageServerName(Arc::from("prettier (default)"))
                } else {
                    let worktree_path = worktree_id
                        .and_then(|id| project.worktree_for_id(id, cx))
                        .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
                    let name = match worktree_path {
                        Some(worktree_path) => {
                            if prettier_dir == worktree_path.as_ref() {
                                let name = prettier_dir
                                    .file_name()
                                    .and_then(|name| name.to_str())
                                    .unwrap_or_default();
                                format!("prettier ({name})")
                            } else {
                                let dir_to_display = prettier_dir
                                    .strip_prefix(worktree_path.as_ref())
                                    .ok()
                                    .unwrap_or(prettier_dir);
                                format!("prettier ({})", dir_to_display.display())
                            }
                        }
                        None => format!("prettier ({})", prettier_dir.display()),
                    };
                    LanguageServerName(Arc::from(name))
                };
                project
                    .supplementary_language_servers
                    .insert(new_server_id, (name, Arc::clone(prettier_server)));
                cx.emit(Event::LanguageServerAdded(new_server_id));
            })
            .ok();
    }
}

async fn install_prettier_packages(
    plugins_to_install: HashSet<&'static str>,
    node: Arc<dyn NodeRuntime>,
) -> anyhow::Result<()> {
    let packages_to_versions =
        future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
            |package_name| async {
                let returned_package_name = package_name.to_string();
                let latest_version = node
                    .npm_package_latest_version(package_name)
                    .await
                    .with_context(|| {
                        format!("fetching latest npm version for package {returned_package_name}")
                    })?;
                anyhow::Ok((returned_package_name, latest_version))
            },
        ))
        .await
        .context("fetching latest npm versions")?;

    log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
    let borrowed_packages = packages_to_versions
        .iter()
        .map(|(package, version)| (package.as_str(), version.as_str()))
        .collect::<Vec<_>>();
    node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
        .await
        .context("fetching formatter packages")?;
    anyhow::Ok(())
}

async fn save_prettier_server_file(fs: &dyn Fs) -> Result<(), anyhow::Error> {
    let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
    fs.save(
        &prettier_wrapper_path,
        &text::Rope::from(prettier::PRETTIER_SERVER_JS),
        text::LineEnding::Unix,
    )
    .await
    .with_context(|| {
        format!(
            "writing {} file at {prettier_wrapper_path:?}",
            prettier::PRETTIER_SERVER_FILE
        )
    })?;
    Ok(())
}

impl Project {
    pub fn update_prettier_settings(
        &self,
        worktree: &Model<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<'_, Project>,
    ) {
        let prettier_config_files = Prettier::CONFIG_FILE_NAMES
            .iter()
            .map(Path::new)
            .collect::<HashSet<_>>();

        let prettier_config_file_changed = changes
            .iter()
            .filter(|(_, _, change)| !matches!(change, PathChange::Loaded))
            .filter(|(path, _, _)| {
                !path
                    .components()
                    .any(|component| component.as_os_str().to_string_lossy() == "node_modules")
            })
            .find(|(path, _, _)| prettier_config_files.contains(path.as_ref()));
        let current_worktree_id = worktree.read(cx).id();
        if let Some((config_path, _, _)) = prettier_config_file_changed {
            log::info!(
                "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
            );
            let prettiers_to_reload =
                self.prettiers_per_worktree
                    .get(&current_worktree_id)
                    .iter()
                    .flat_map(|prettier_paths| prettier_paths.iter())
                    .flatten()
                    .filter_map(|prettier_path| {
                        Some((
                            current_worktree_id,
                            Some(prettier_path.clone()),
                            self.prettier_instances.get(prettier_path)?.clone(),
                        ))
                    })
                    .chain(self.default_prettier.instance().map(|default_prettier| {
                        (current_worktree_id, None, default_prettier.clone())
                    }))
                    .collect::<Vec<_>>();

            cx.background_executor()
                .spawn(async move {
                    let _: Vec<()> = future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_instance)| {
                        async move {
                            if let Some(instance) = prettier_instance.prettier {
                                match instance.await {
                                    Ok(prettier) => {
                                        prettier.clear_cache().log_err().await;
                                    },
                                    Err(e) => {
                                        match prettier_path {
                                            Some(prettier_path) => log::error!(
                                                "Failed to clear prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
                                            ),
                                            None => log::error!(
                                                "Failed to clear default prettier cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
                                            ),
                                        }
                                    },
                                }
                            }
                        }
                    }))
                    .await;
                })
                .detach();
        }
    }

    fn prettier_instance_for_buffer(
        &mut self,
        buffer: &Model<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Option<(Option<PathBuf>, PrettierTask)>> {
        let buffer = buffer.read(cx);
        let buffer_file = buffer.file();
        let Some(buffer_language) = buffer.language() else {
            return Task::ready(None);
        };
        if buffer_language.prettier_parser_name().is_none() {
            return Task::ready(None);
        }

        if self.is_local() {
            let Some(node) = self.node.as_ref().map(Arc::clone) else {
                return Task::ready(None);
            };
            match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
            {
                Some((worktree_id, buffer_path)) => {
                    let fs = Arc::clone(&self.fs);
                    let installed_prettiers = self.prettier_instances.keys().cloned().collect();
                    return cx.spawn(|project, mut cx| async move {
                        match cx
                            .background_executor()
                            .spawn(async move {
                                Prettier::locate_prettier_installation(
                                    fs.as_ref(),
                                    &installed_prettiers,
                                    &buffer_path,
                                )
                                .await
                            })
                            .await
                        {
                            Ok(ControlFlow::Break(())) => {
                                return None;
                            }
                            Ok(ControlFlow::Continue(None)) => {
                                let default_instance = project
                                    .update(&mut cx, |project, cx| {
                                        project
                                            .prettiers_per_worktree
                                            .entry(worktree_id)
                                            .or_default()
                                            .insert(None);
                                        project.default_prettier.prettier_task(
                                            &node,
                                            Some(worktree_id),
                                            cx,
                                        )
                                    })
                                    .ok()?;
                                Some((None, default_instance?.log_err().await?))
                            }
                            Ok(ControlFlow::Continue(Some(prettier_dir))) => {
                                project
                                    .update(&mut cx, |project, _| {
                                        project
                                            .prettiers_per_worktree
                                            .entry(worktree_id)
                                            .or_default()
                                            .insert(Some(prettier_dir.clone()))
                                    })
                                    .ok()?;
                                if let Some(prettier_task) = project
                                    .update(&mut cx, |project, cx| {
                                        project.prettier_instances.get_mut(&prettier_dir).map(
                                            |existing_instance| {
                                                existing_instance.prettier_task(
                                                    &node,
                                                    Some(&prettier_dir),
                                                    Some(worktree_id),
                                                    cx,
                                                )
                                            },
                                        )
                                    })
                                    .ok()?
                                {
                                    log::debug!(
                                        "Found already started prettier in {prettier_dir:?}"
                                    );
                                    return Some((
                                        Some(prettier_dir),
                                        prettier_task?.await.log_err()?,
                                    ));
                                }

                                log::info!("Found prettier in {prettier_dir:?}, starting.");
                                let new_prettier_task = project
                                    .update(&mut cx, |project, cx| {
                                        let new_prettier_task = start_prettier(
                                            node,
                                            prettier_dir.clone(),
                                            Some(worktree_id),
                                            cx,
                                        );
                                        project.prettier_instances.insert(
                                            prettier_dir.clone(),
                                            PrettierInstance {
                                                attempt: 0,
                                                prettier: Some(new_prettier_task.clone()),
                                            },
                                        );
                                        new_prettier_task
                                    })
                                    .ok()?;
                                Some((Some(prettier_dir), new_prettier_task))
                            }
                            Err(e) => {
                                log::error!("Failed to determine prettier path for buffer: {e:#}");
                                return None;
                            }
                        }
                    });
                }
                None => {
                    let new_task = self.default_prettier.prettier_task(&node, None, cx);
                    return cx
                        .spawn(|_, _| async move { Some((None, new_task?.log_err().await?)) });
                }
            }
        } else {
            return Task::ready(None);
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn install_default_prettier(
        &mut self,
        _worktree: Option<WorktreeId>,
        plugins: HashSet<&'static str>,
        _cx: &mut ModelContext<Self>,
    ) {
        // suppress unused code warnings
        let _ = install_prettier_packages;
        let _ = save_prettier_server_file;

        self.default_prettier.installed_plugins.extend(plugins);
        self.default_prettier.prettier = PrettierInstallation::Installed(PrettierInstance {
            attempt: 0,
            prettier: None,
        });
    }

    #[cfg(not(any(test, feature = "test-support")))]
    pub fn install_default_prettier(
        &mut self,
        worktree: Option<WorktreeId>,
        mut new_plugins: HashSet<&'static str>,
        cx: &mut ModelContext<Self>,
    ) {
        let Some(node) = self.node.as_ref().cloned() else {
            return;
        };
        log::info!("Initializing default prettier with plugins {new_plugins:?}");
        let fs = Arc::clone(&self.fs);
        let locate_prettier_installation = match worktree.and_then(|worktree_id| {
            self.worktree_for_id(worktree_id, cx)
                .map(|worktree| worktree.read(cx).abs_path())
        }) {
            Some(locate_from) => {
                let installed_prettiers = self.prettier_instances.keys().cloned().collect();
                cx.background_executor().spawn(async move {
                    Prettier::locate_prettier_installation(
                        fs.as_ref(),
                        &installed_prettiers,
                        locate_from.as_ref(),
                    )
                    .await
                })
            }
            None => Task::ready(Ok(ControlFlow::Continue(None))),
        };
        new_plugins.retain(|plugin| !self.default_prettier.installed_plugins.contains(plugin));
        let mut installation_attempt = 0;
        let previous_installation_task = match &mut self.default_prettier.prettier {
            PrettierInstallation::NotInstalled {
                installation_task,
                attempts,
                not_installed_plugins,
            } => {
                installation_attempt = *attempts;
                if installation_attempt > prettier::FAIL_THRESHOLD {
                    *installation_task = None;
                    log::warn!(
                        "Default prettier installation had failed {installation_attempt} times, not attempting again",
                    );
                    return;
                }
                new_plugins.extend(not_installed_plugins.iter());
                installation_task.clone()
            }
            PrettierInstallation::Installed { .. } => {
                if new_plugins.is_empty() {
                    return;
                }
                None
            }
        };

        let plugins_to_install = new_plugins.clone();
        let fs = Arc::clone(&self.fs);
        let new_installation_task = cx
            .spawn(|project, mut cx| async move {
                match locate_prettier_installation
                    .await
                    .context("locate prettier installation")
                    .map_err(Arc::new)?
                {
                    ControlFlow::Break(()) => return Ok(()),
                    ControlFlow::Continue(prettier_path) => {
                        if prettier_path.is_some() {
                            new_plugins.clear();
                        }
                        let mut needs_install = false;
                        if let Some(previous_installation_task) = previous_installation_task {
                            if let Err(e) = previous_installation_task.await {
                                log::error!("Failed to install default prettier: {e:#}");
                                project.update(&mut cx, |project, _| {
                                    if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut project.default_prettier.prettier {
                                        *attempts += 1;
                                        new_plugins.extend(not_installed_plugins.iter());
                                        installation_attempt = *attempts;
                                        needs_install = true;
                                    };
                                })?;
                            }
                        };
                        if installation_attempt > prettier::FAIL_THRESHOLD {
                            project.update(&mut cx, |project, _| {
                                if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut project.default_prettier.prettier {
                                    *installation_task = None;
                                };
                            })?;
                            log::warn!(
                                "Default prettier installation had failed {installation_attempt} times, not attempting again",
                            );
                            return Ok(());
                        }
                        project.update(&mut cx, |project, _| {
                            new_plugins.retain(|plugin| {
                                !project.default_prettier.installed_plugins.contains(plugin)
                            });
                            if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut project.default_prettier.prettier {
                                not_installed_plugins.retain(|plugin| {
                                    !project.default_prettier.installed_plugins.contains(plugin)
                                });
                                not_installed_plugins.extend(new_plugins.iter());
                            }
                            needs_install |= !new_plugins.is_empty();
                        })?;
                        if needs_install {
                            let installed_plugins = new_plugins.clone();
                            cx.background_executor()
                                .spawn(async move {
                                    save_prettier_server_file(fs.as_ref()).await?;
                                    install_prettier_packages(new_plugins, node).await
                                })
                                .await
                                .context("prettier & plugins install")
                                .map_err(Arc::new)?;
                            log::info!("Initialized prettier with plugins: {installed_plugins:?}");
                            project.update(&mut cx, |project, _| {
                                project.default_prettier.prettier =
                                    PrettierInstallation::Installed(PrettierInstance {
                                        attempt: 0,
                                        prettier: None,
                                    });
                                project.default_prettier
                                    .installed_plugins
                                    .extend(installed_plugins);
                            })?;
                        }
                    }
                }
                Ok(())
            })
            .shared();
        self.default_prettier.prettier = PrettierInstallation::NotInstalled {
            attempts: installation_attempt,
            installation_task: Some(new_installation_task),
            not_installed_plugins: plugins_to_install,
        };
    }
}
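As a side note, the launch guard in PrettierInstance::prettier_task above boils down to a bounded-retry pattern. A minimal, self-contained sketch of that guard follows, with FAIL_THRESHOLD standing in for the constant exported by the prettier crate.

// Sketch of the bounded-retry guard: once `attempt` passes the threshold,
// no further launches are attempted.
const FAIL_THRESHOLD: usize = 5;

struct Instance {
    attempt: usize,
}

impl Instance {
    fn try_launch(&mut self) -> Option<&'static str> {
        if self.attempt > FAIL_THRESHOLD {
            eprintln!("exceeded launch threshold, not starting");
            return None;
        }
        self.attempt += 1;
        Some("started")
    }
}

fn main() {
    let mut instance = Instance { attempt: 0 };
    while let Some(status) = instance.try_launch() {
        println!("{status} (attempt {})", instance.attempt);
    }
}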
File diff suppressed because it is too large
@@ -1,50 +0,0 @@
use collections::HashMap;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ProjectSettings {
    #[serde(default)]
    pub lsp: HashMap<Arc<str>, LspSettings>,
    #[serde(default)]
    pub git: GitSettings,
    #[serde(default)]
    pub file_scan_exclusions: Option<Vec<String>>,
}

#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct GitSettings {
    pub git_gutter: Option<GitGutterSetting>,
    pub gutter_debounce: Option<u64>,
}

#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum GitGutterSetting {
    #[default]
    TrackedFiles,
    Hide,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub struct LspSettings {
    pub initialization_options: Option<serde_json::Value>,
}

impl Settings for ProjectSettings {
    const KEY: Option<&'static str> = None;

    type FileContent = Self;

    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &mut AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}
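For reference, a standalone sketch of how a settings blob deserializes into this shape. The struct below is a trimmed stand-in (no schemars derive or Settings impl), and the JSON values are made up for illustration.

use serde::Deserialize;
use std::collections::HashMap;

#[derive(Debug, Default, Deserialize)]
struct ProjectSettings {
    #[serde(default)]
    lsp: HashMap<String, serde_json::Value>,
    #[serde(default)]
    file_scan_exclusions: Option<Vec<String>>,
}

fn main() {
    let json = r#"{
        "file_scan_exclusions": ["**/.git", "**/node_modules"],
        "lsp": { "rust-analyzer": { "initialization_options": { "checkOnSave": false } } }
    }"#;
    // Missing fields fall back to their defaults, matching the #[serde(default)]
    // attributes on the original ProjectSettings.
    let settings: ProjectSettings = serde_json::from_str(json).unwrap();
    println!("{settings:?}");
}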
File diff suppressed because it is too large
@ -1,463 +0,0 @@
|
||||||
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
|
|
||||||
use anyhow::{Context, Result};
|
|
||||||
use client::proto;
|
|
||||||
use itertools::Itertools;
|
|
||||||
use language::{char_kind, BufferSnapshot};
|
|
||||||
use regex::{Regex, RegexBuilder};
|
|
||||||
use smol::future::yield_now;
|
|
||||||
use std::{
|
|
||||||
borrow::Cow,
|
|
||||||
io::{BufRead, BufReader, Read},
|
|
||||||
ops::Range,
|
|
||||||
path::Path,
|
|
||||||
sync::Arc,
|
|
||||||
};
|
|
||||||
use util::paths::PathMatcher;
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct SearchInputs {
|
|
||||||
query: Arc<str>,
|
|
||||||
files_to_include: Vec<PathMatcher>,
|
|
||||||
files_to_exclude: Vec<PathMatcher>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SearchInputs {
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self.query.as_ref()
|
|
||||||
}
|
|
||||||
pub fn files_to_include(&self) -> &[PathMatcher] {
|
|
||||||
&self.files_to_include
|
|
||||||
}
|
|
||||||
pub fn files_to_exclude(&self) -> &[PathMatcher] {
|
|
||||||
&self.files_to_exclude
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub enum SearchQuery {
|
|
||||||
Text {
|
|
||||||
search: Arc<AhoCorasick>,
|
|
||||||
replacement: Option<String>,
|
|
||||||
whole_word: bool,
|
|
||||||
case_sensitive: bool,
|
|
||||||
include_ignored: bool,
|
|
||||||
inner: SearchInputs,
|
|
||||||
},
|
|
||||||
|
|
||||||
Regex {
|
|
||||||
regex: Regex,
|
|
||||||
replacement: Option<String>,
|
|
||||||
multiline: bool,
|
|
||||||
whole_word: bool,
|
|
||||||
case_sensitive: bool,
|
|
||||||
include_ignored: bool,
|
|
||||||
inner: SearchInputs,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SearchQuery {
|
|
||||||
pub fn text(
|
|
||||||
query: impl ToString,
|
|
||||||
whole_word: bool,
|
|
||||||
case_sensitive: bool,
|
|
||||||
include_ignored: bool,
|
|
||||||
files_to_include: Vec<PathMatcher>,
|
|
||||||
files_to_exclude: Vec<PathMatcher>,
|
|
||||||
) -> Result<Self> {
|
|
||||||
let query = query.to_string();
|
|
||||||
let search = AhoCorasickBuilder::new()
|
|
||||||
.ascii_case_insensitive(!case_sensitive)
|
|
||||||
.build(&[&query])?;
|
|
||||||
let inner = SearchInputs {
|
|
||||||
query: query.into(),
|
|
||||||
files_to_exclude,
|
|
||||||
files_to_include,
|
|
||||||
};
|
|
||||||
Ok(Self::Text {
|
|
||||||
search: Arc::new(search),
|
|
||||||
replacement: None,
|
|
||||||
whole_word,
|
|
||||||
case_sensitive,
|
|
||||||
include_ignored,
|
|
||||||
inner,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn regex(
|
|
||||||
query: impl ToString,
|
|
||||||
whole_word: bool,
|
|
||||||
case_sensitive: bool,
|
|
||||||
include_ignored: bool,
|
|
||||||
files_to_include: Vec<PathMatcher>,
|
|
||||||
files_to_exclude: Vec<PathMatcher>,
|
|
||||||
) -> Result<Self> {
|
|
||||||
let mut query = query.to_string();
|
|
||||||
let initial_query = Arc::from(query.as_str());
|
|
||||||
if whole_word {
|
|
||||||
let mut word_query = String::new();
|
|
||||||
word_query.push_str("\\b");
|
|
||||||
word_query.push_str(&query);
|
|
||||||
word_query.push_str("\\b");
|
|
||||||
query = word_query
|
|
||||||
}
|
|
||||||
|
|
||||||
let multiline = query.contains('\n') || query.contains("\\n");
|
|
||||||
let regex = RegexBuilder::new(&query)
|
|
||||||
.case_insensitive(!case_sensitive)
|
|
||||||
.multi_line(multiline)
|
|
||||||
.build()?;
|
|
||||||
let inner = SearchInputs {
|
|
||||||
query: initial_query,
|
|
||||||
files_to_exclude,
|
|
||||||
files_to_include,
|
|
||||||
};
|
|
||||||
Ok(Self::Regex {
|
|
||||||
regex,
|
|
||||||
replacement: None,
|
|
||||||
multiline,
|
|
||||||
whole_word,
|
|
||||||
case_sensitive,
|
|
||||||
include_ignored,
|
|
||||||
inner,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_proto(message: proto::SearchProject) -> Result<Self> {
|
|
||||||
if message.regex {
|
|
||||||
Self::regex(
|
|
||||||
message.query,
|
|
||||||
message.whole_word,
|
|
||||||
message.case_sensitive,
|
|
||||||
message.include_ignored,
|
|
||||||
deserialize_path_matches(&message.files_to_include)?,
|
|
||||||
deserialize_path_matches(&message.files_to_exclude)?,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
Self::text(
|
|
||||||
message.query,
|
|
||||||
message.whole_word,
|
|
||||||
message.case_sensitive,
|
|
||||||
message.include_ignored,
|
|
||||||
deserialize_path_matches(&message.files_to_include)?,
|
|
||||||
deserialize_path_matches(&message.files_to_exclude)?,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn with_replacement(mut self, new_replacement: String) -> Self {
|
|
||||||
match self {
|
|
||||||
Self::Text {
|
|
||||||
ref mut replacement,
|
|
||||||
..
|
|
||||||
}
|
|
||||||
| Self::Regex {
|
|
||||||
ref mut replacement,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
*replacement = Some(new_replacement);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn to_proto(&self, project_id: u64) -> proto::SearchProject {
|
|
||||||
proto::SearchProject {
|
|
||||||
project_id,
|
|
||||||
query: self.as_str().to_string(),
|
|
||||||
regex: self.is_regex(),
|
|
||||||
whole_word: self.whole_word(),
|
|
||||||
case_sensitive: self.case_sensitive(),
|
|
||||||
include_ignored: self.include_ignored(),
|
|
||||||
files_to_include: self
|
|
||||||
.files_to_include()
|
|
||||||
.iter()
|
|
||||||
.map(|matcher| matcher.to_string())
|
|
||||||
.join(","),
|
|
||||||
files_to_exclude: self
|
|
||||||
.files_to_exclude()
|
|
||||||
.iter()
|
|
||||||
.map(|matcher| matcher.to_string())
|
|
||||||
.join(","),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
|
|
||||||
if self.as_str().is_empty() {
|
|
||||||
return Ok(false);
|
|
||||||
}
|
|
||||||
|
|
||||||
match self {
|
|
||||||
Self::Text { search, .. } => {
|
|
||||||
let mat = search.stream_find_iter(stream).next();
|
|
||||||
match mat {
|
|
||||||
Some(Ok(_)) => Ok(true),
|
|
||||||
Some(Err(err)) => Err(err.into()),
|
|
||||||
None => Ok(false),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Self::Regex {
|
|
||||||
regex, multiline, ..
|
|
||||||
} => {
|
|
||||||
let mut reader = BufReader::new(stream);
|
|
||||||
if *multiline {
|
|
||||||
let mut text = String::new();
|
|
||||||
if let Err(err) = reader.read_to_string(&mut text) {
|
|
||||||
Err(err.into())
|
|
||||||
} else {
|
|
||||||
Ok(regex.find(&text).is_some())
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
for line in reader.lines() {
|
|
||||||
let line = line?;
|
|
||||||
if regex.find(&line).is_some() {
|
|
||||||
return Ok(true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(false)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Returns the replacement text for this `SearchQuery`.
|
|
||||||
pub fn replacement(&self) -> Option<&str> {
|
|
||||||
match self {
|
|
||||||
SearchQuery::Text { replacement, .. } | SearchQuery::Regex { replacement, .. } => {
|
|
||||||
replacement.as_deref()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Replaces search hits if replacement is set. `text` is assumed to be a string that matches this `SearchQuery` exactly, without any leftovers on either side.
|
|
||||||
pub fn replacement_for<'a>(&self, text: &'a str) -> Option<Cow<'a, str>> {
|
|
||||||
match self {
|
|
||||||
SearchQuery::Text { replacement, .. } => replacement.clone().map(Cow::from),
|
|
||||||
SearchQuery::Regex {
|
|
||||||
regex, replacement, ..
|
|
||||||
} => {
|
|
||||||
if let Some(replacement) = replacement {
|
|
||||||
Some(regex.replace(text, replacement))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub async fn search(
|
|
||||||
&self,
|
|
||||||
buffer: &BufferSnapshot,
|
|
||||||
subrange: Option<Range<usize>>,
|
|
||||||
) -> Vec<Range<usize>> {
|
|
||||||
const YIELD_INTERVAL: usize = 20000;
|
|
||||||
|
|
||||||
if self.as_str().is_empty() {
|
|
||||||
return Default::default();
|
|
||||||
}
|
|
||||||
|
|
||||||
let range_offset = subrange.as_ref().map(|r| r.start).unwrap_or(0);
|
|
||||||
let rope = if let Some(range) = subrange {
|
|
||||||
buffer.as_rope().slice(range)
|
|
||||||
} else {
|
|
||||||
buffer.as_rope().clone()
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut matches = Vec::new();
|
|
||||||
match self {
|
|
||||||
Self::Text {
|
|
||||||
search, whole_word, ..
|
|
||||||
} => {
|
|
||||||
for (ix, mat) in search
|
|
||||||
.stream_find_iter(rope.bytes_in_range(0..rope.len()))
|
|
||||||
.enumerate()
|
|
||||||
{
|
|
||||||
if (ix + 1) % YIELD_INTERVAL == 0 {
|
|
||||||
yield_now().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mat = mat.unwrap();
|
|
||||||
if *whole_word {
|
|
||||||
let scope = buffer.language_scope_at(range_offset + mat.start());
|
|
||||||
let kind = |c| char_kind(&scope, c);
|
|
||||||
|
|
||||||
let prev_kind = rope.reversed_chars_at(mat.start()).next().map(kind);
|
|
||||||
let start_kind = kind(rope.chars_at(mat.start()).next().unwrap());
|
|
||||||
let end_kind = kind(rope.reversed_chars_at(mat.end()).next().unwrap());
|
|
||||||
let next_kind = rope.chars_at(mat.end()).next().map(kind);
|
|
||||||
if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
matches.push(mat.start()..mat.end())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Self::Regex {
|
|
||||||
regex, multiline, ..
|
|
||||||
} => {
|
|
||||||
if *multiline {
|
|
||||||
let text = rope.to_string();
|
|
||||||
for (ix, mat) in regex.find_iter(&text).enumerate() {
|
|
||||||
if (ix + 1) % YIELD_INTERVAL == 0 {
|
|
||||||
yield_now().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
matches.push(mat.start()..mat.end());
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
let mut line = String::new();
|
|
||||||
let mut line_offset = 0;
|
|
||||||
for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
|
|
||||||
if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
|
|
||||||
yield_now().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (newline_ix, text) in chunk.split('\n').enumerate() {
|
|
||||||
if newline_ix > 0 {
|
|
||||||
for mat in regex.find_iter(&line) {
|
|
||||||
let start = line_offset + mat.start();
|
|
||||||
let end = line_offset + mat.end();
|
|
||||||
matches.push(start..end);
|
|
||||||
}
|
|
||||||
|
|
||||||
line_offset += line.len() + 1;
|
|
||||||
line.clear();
|
|
||||||
}
|
|
||||||
line.push_str(text);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
matches
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_str(&self) -> &str {
|
|
||||||
self.as_inner().as_str()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn whole_word(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Text { whole_word, .. } => *whole_word,
|
|
||||||
Self::Regex { whole_word, .. } => *whole_word,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn case_sensitive(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Text { case_sensitive, .. } => *case_sensitive,
|
|
||||||
Self::Regex { case_sensitive, .. } => *case_sensitive,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn include_ignored(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Text {
|
|
||||||
include_ignored, ..
|
|
||||||
} => *include_ignored,
|
|
||||||
Self::Regex {
|
|
||||||
include_ignored, ..
|
|
||||||
} => *include_ignored,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_regex(&self) -> bool {
|
|
||||||
matches!(self, Self::Regex { .. })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn files_to_include(&self) -> &[PathMatcher] {
|
|
||||||
self.as_inner().files_to_include()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn files_to_exclude(&self) -> &[PathMatcher] {
|
|
||||||
self.as_inner().files_to_exclude()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
|
|
||||||
match file_path {
|
|
||||||
Some(file_path) => {
|
|
||||||
let mut path = file_path.to_path_buf();
|
|
||||||
loop {
|
|
||||||
if self
|
|
||||||
.files_to_exclude()
|
|
||||||
.iter()
|
|
||||||
.any(|exclude_glob| exclude_glob.is_match(&path))
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
} else if self.files_to_include().is_empty()
|
|
||||||
|| self
|
|
||||||
.files_to_include()
|
|
||||||
.iter()
|
|
||||||
.any(|include_glob| include_glob.is_match(&path))
|
|
||||||
{
|
|
||||||
return true;
|
|
||||||
} else if !path.pop() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => self.files_to_include().is_empty(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn as_inner(&self) -> &SearchInputs {
|
|
||||||
match self {
|
|
||||||
Self::Regex { inner, .. } | Self::Text { inner, .. } => inner,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
    glob_set
        .split(',')
        .map(str::trim)
        .filter(|glob_str| !glob_str.is_empty())
        .map(|glob_str| {
            PathMatcher::new(glob_str)
                .with_context(|| format!("deserializing path match glob {glob_str}"))
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn path_matcher_creation_for_valid_paths() {
        for valid_path in [
            "file",
            "Cargo.toml",
            ".DS_Store",
            "~/dir/another_dir/",
            "./dir/file",
            "dir/[a-z].txt",
            "../dir/filé",
        ] {
            let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
                panic!("Valid path {valid_path} should be accepted, but got: {e}")
            });
            assert!(
                path_matcher.is_match(valid_path),
                "Path matcher for valid path {valid_path} should match itself"
            )
        }
    }

    #[test]
    fn path_matcher_creation_for_globs() {
        for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
            match PathMatcher::new(invalid_glob) {
                Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
                Err(_expected) => {}
            }
        }

        for valid_glob in [
            "dir/?ile",
            "dir/*.txt",
            "dir/**/file",
            "dir/[a-z].txt",
            "{dir,file}",
        ] {
            match PathMatcher::new(valid_glob) {
                Ok(_expected) => {}
                Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
            }
        }
    }
}

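The `file_matches` check above walks from the candidate path up through its ancestors: an exclusion glob rejects immediately, an inclusion glob (or an empty inclusion list) accepts, and running out of ancestors rejects. A minimal standalone sketch of that walk, using a hypothetical `matches_glob` suffix check as a stand-in for `PathMatcher::is_match`:

use std::path::Path;

// Hypothetical stand-in for `PathMatcher::is_match`: a "glob" matches if the path
// ends with it. The real matcher applies proper glob semantics.
fn matches_glob(glob: &str, path: &Path) -> bool {
    path.to_string_lossy().ends_with(glob)
}

// Exclusions win immediately; inclusions (or an empty inclusion list) accept;
// popping past the root rejects.
fn file_matches(includes: &[&str], excludes: &[&str], file_path: &Path) -> bool {
    let mut path = file_path.to_path_buf();
    loop {
        if excludes.iter().any(|glob| matches_glob(glob, &path)) {
            return false;
        } else if includes.is_empty() || includes.iter().any(|glob| matches_glob(glob, &path)) {
            return true;
        } else if !path.pop() {
            return false;
        }
    }
}

fn main() {
    // Matches via an ancestor: "docs/book/ch01.md" -> "docs/book" -> "docs".
    assert!(file_matches(&["docs"], &[], Path::new("docs/book/ch01.md")));
    // An exclusion rejects even when no inclusions are configured.
    assert!(!file_matches(&[], &["ch01.md"], Path::new("docs/book/ch01.md")));
}
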
@@ -1,128 +0,0 @@
use crate::Project;
use gpui::{AnyWindowHandle, Context, Entity, Model, ModelContext, WeakModel};
use settings::Settings;
use std::path::{Path, PathBuf};
use terminal::{
    terminal_settings::{self, TerminalSettings, VenvSettingsContent},
    Terminal, TerminalBuilder,
};

#[cfg(target_os = "macos")]
use std::os::unix::ffi::OsStrExt;

pub struct Terminals {
    pub(crate) local_handles: Vec<WeakModel<terminal::Terminal>>,
}

impl Project {
    pub fn create_terminal(
        &mut self,
        working_directory: Option<PathBuf>,
        window: AnyWindowHandle,
        cx: &mut ModelContext<Self>,
    ) -> anyhow::Result<Model<Terminal>> {
        if self.is_remote() {
            return Err(anyhow::anyhow!(
                "creating terminals as a guest is not supported yet"
            ));
        } else {
            let settings = TerminalSettings::get_global(cx);
            let python_settings = settings.detect_venv.clone();
            let shell = settings.shell.clone();

            let terminal = TerminalBuilder::new(
                working_directory.clone(),
                shell.clone(),
                settings.env.clone(),
                Some(settings.blinking.clone()),
                settings.alternate_scroll,
                window,
            )
            .map(|builder| {
                let terminal_handle = cx.new_model(|cx| builder.subscribe(cx));

                self.terminals
                    .local_handles
                    .push(terminal_handle.downgrade());

                let id = terminal_handle.entity_id();
                cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
                    let handles = &mut project.terminals.local_handles;

                    if let Some(index) = handles
                        .iter()
                        .position(|terminal| terminal.entity_id() == id)
                    {
                        handles.remove(index);
                        cx.notify();
                    }
                })
                .detach();

                if let Some(python_settings) = &python_settings.as_option() {
                    let activate_script_path =
                        self.find_activate_script_path(&python_settings, working_directory);
                    self.activate_python_virtual_environment(
                        activate_script_path,
                        &terminal_handle,
                        cx,
                    );
                }
                terminal_handle
            });

            terminal
        }
    }

    pub fn find_activate_script_path(
        &mut self,
        settings: &VenvSettingsContent,
        working_directory: Option<PathBuf>,
    ) -> Option<PathBuf> {
        // When we are unable to resolve the working directory, the terminal builder
        // defaults to '/'. We should probably encode this directly somewhere, but for
        // now, let's just hard code it here.
        let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
        let activate_script_name = match settings.activate_script {
            terminal_settings::ActivateScript::Default => "activate",
            terminal_settings::ActivateScript::Csh => "activate.csh",
            terminal_settings::ActivateScript::Fish => "activate.fish",
            terminal_settings::ActivateScript::Nushell => "activate.nu",
        };

        for virtual_environment_name in settings.directories {
            let mut path = working_directory.join(virtual_environment_name);
            path.push("bin/");
            path.push(activate_script_name);

            if path.exists() {
                return Some(path);
            }
        }

        None
    }

    fn activate_python_virtual_environment(
        &mut self,
        activate_script: Option<PathBuf>,
        terminal_handle: &Model<Terminal>,
        cx: &mut ModelContext<Project>,
    ) {
        if let Some(activate_script) = activate_script {
            // Paths are not strings so we need to jump through some hoops to format the command without `format!`
            let mut command = Vec::from("source ".as_bytes());
            command.extend_from_slice(activate_script.as_os_str().as_bytes());
            command.push(b'\n');

            terminal_handle.update(cx, |this, _| this.input_bytes(command));
        }
    }

    pub fn local_terminal_handles(&self) -> &Vec<WeakModel<terminal::Terminal>> {
        &self.terminals.local_handles
    }
}

// TODO: Add a few tests for adding and removing terminal tabs

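The command assembled in `activate_python_virtual_environment` above is built as raw bytes because an `OsStr` path need not be valid UTF-8 and therefore cannot go through `format!`. A small unix-only sketch of that construction (the `source_command` helper name is hypothetical):

use std::os::unix::ffi::OsStrExt;
use std::path::Path;

// Build `source <path>\n` byte by byte instead of with `format!`, since the path
// may contain non-UTF-8 bytes.
fn source_command(activate_script: &Path) -> Vec<u8> {
    let mut command = Vec::from("source ".as_bytes());
    command.extend_from_slice(activate_script.as_os_str().as_bytes());
    command.push(b'\n');
    command
}

fn main() {
    let cmd = source_command(Path::new(".venv/bin/activate"));
    assert_eq!(cmd, b"source .venv/bin/activate\n");
}
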
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -14,7 +14,7 @@ db = { path = "../db2", package = "db2" }
 editor = { path = "../editor" }
 gpui = { path = "../gpui2", package = "gpui2" }
 menu = { path = "../menu2", package = "menu2" }
-project = { path = "../project2", package = "project2" }
+project = { path = "../project" }
 search = { path = "../search" }
 settings = { path = "../settings2", package = "settings2" }
 theme = { path = "../theme2", package = "theme2" }

@@ -13,7 +13,7 @@ editor = { path = "../editor" }
 fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
 gpui = { package = "gpui2", path = "../gpui2" }
 picker = { path = "../picker" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 text = { package = "text2", path = "../text2" }
 settings = { package = "settings2", path = "../settings2" }
 workspace = { path = "../workspace" }

@@ -32,6 +32,6 @@ settings = { package = "settings2", path = "../settings2", features = ["test-sup
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 language = { package = "language2", path = "../language2", features = ["test-support"] }
 lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
 workspace = { path = "../workspace", features = ["test-support"] }

@@ -15,7 +15,7 @@ editor = { path = "../editor" }
 gpui = { package = "gpui2", path = "../gpui2" }
 language = { package = "language2", path = "../language2" }
 menu = { package = "menu2", path = "../menu2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }
 util = { path = "../util" }

@@ -13,7 +13,7 @@ ai = { path = "../ai" }
 collections = { path = "../collections" }
 gpui = { package = "gpui2", path = "../gpui2" }
 language = { package = "language2", path = "../language2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 workspace = { path = "../workspace" }
 util = { path = "../util" }
 rpc = { package = "rpc2", path = "../rpc2" }

@@ -43,7 +43,7 @@ ai = { path = "../ai", features = ["test-support"] }
 collections = { path = "../collections", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 language = { package = "language2", path = "../language2", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
 workspace = { path = "../workspace", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2", features = ["test-support"]}

@@ -12,7 +12,7 @@ doctest = false
 editor = { path = "../editor" }
 language = { package = "language2", path = "../language2" }
 gpui = { package = "gpui2", path = "../gpui2" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 # search = { path = "../search" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }

@@ -41,6 +41,6 @@ serde_derive.workspace = true
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 client = { package = "client2", path = "../client2", features = ["test-support"]}
-project = { package = "project2", path = "../project2", features = ["test-support"]}
+project = { path = "../project", features = ["test-support"]}
 workspace = { path = "../workspace", features = ["test-support"] }
 rand.workspace = true

@@ -45,7 +45,7 @@ futures.workspace = true
 editor = { path = "../editor", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 language = { package = "language2", path = "../language2", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 util = { path = "../util", features = ["test-support"] }
 settings = { package = "settings2", path = "../settings2" }
 workspace = { path = "../workspace", features = ["test-support"] }

@@ -19,7 +19,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
 ui = { package = "ui2", path = "../ui2" }
 db = { package = "db2", path = "../db2" }
 install_cli = { path = "../install_cli" }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 settings = { package = "settings2", path = "../settings2" }
 theme = { package = "theme2", path = "../theme2" }
 theme_selector = { path = "../theme_selector" }

@@ -30,7 +30,7 @@ install_cli = { path = "../install_cli" }
 language = { path = "../language2", package = "language2" }
 #menu = { path = "../menu" }
 node_runtime = { path = "../node_runtime" }
-project = { path = "../project2", package = "project2" }
+project = { path = "../project" }
 settings = { path = "../settings2", package = "settings2" }
 terminal = { path = "../terminal2", package = "terminal2" }
 theme = { path = "../theme2", package = "theme2" }

@@ -57,7 +57,7 @@ uuid.workspace = true
 call = { path = "../call2", package = "call2", features = ["test-support"] }
 client = { path = "../client2", package = "client2", features = ["test-support"] }
 gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
-project = { path = "../project2", package = "project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 settings = { path = "../settings2", package = "settings2", features = ["test-support"] }
 fs = { path = "../fs2", package = "fs2", features = ["test-support"] }
 db = { path = "../db2", package = "db2", features = ["test-support"] }

@@ -53,7 +53,7 @@ notifications = { package = "notifications2", path = "../notifications2" }
 assistant = { path = "../assistant" }
 outline = { path = "../outline" }
 # plugin_runtime = { path = "../plugin_runtime",optional = true }
-project = { package = "project2", path = "../project2" }
+project = { path = "../project" }
 project_panel = { path = "../project_panel" }
 project_symbols = { path = "../project_symbols" }
 quick_action_bar = { path = "../quick_action_bar" }

@@ -151,7 +151,7 @@ call = { package = "call2", path = "../call2", features = ["test-support"] }
 gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
 language = { package = "language2", path = "../language2", features = ["test-support"] }
 # lsp = { path = "../lsp", features = ["test-support"] }
-project = { package = "project2", path = "../project2", features = ["test-support"] }
+project = { path = "../project", features = ["test-support"] }
 # rpc = { path = "../rpc", features = ["test-support"] }
 # settings = { path = "../settings", features = ["test-support"] }
 text = { package = "text2", path = "../text2", features = ["test-support"] }