project: Fine-grained language server management (#23805)
Closes #ISSUE https://github.com/zed-industries/zed/pull/23804

Release Notes:

- Improved detection of project roots for use by language servers.
This commit is contained in: parent b62812c49e, commit e662e819fe

28 changed files with 2227 additions and 936 deletions
Cargo.lock (generated)

@@ -9985,6 +9985,7 @@ dependencies = [
 "log",
 "lsp",
 "node_runtime",
+"once_cell",
 "parking_lot",
 "pathdiff",
 "paths",
@@ -376,7 +376,7 @@ async-tungstenite = "0.28"
 async-watch = "0.3.1"
 async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
 base64 = "0.22"
-bitflags = "2.6.0"
+bitflags = "2.8.0"
 blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
 blade-macros = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
 blade-util = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }

@@ -426,12 +426,13 @@ libc = "0.2"
 libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
 linkify = "0.10.0"
 livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev="060964da10574cd9bf06463a53bf6e0769c5c45e", features = ["dispatcher", "services-dispatcher", "rustls-tls-native-roots"], default-features = false }
-log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
+log = { version = "0.4.25", features = ["kv_unstable_serde", "serde"] }
 markup5ever_rcdom = "0.3.0"
 nanoid = "0.4"
 nbformat = { version = "0.10.0" }
 nix = "0.29"
 num-format = "0.4.4"
+once_cell = "1.20"
 ordered-float = "2.1.1"
 palette = { version = "0.7.5", default-features = false, features = ["std"] }
 parking_lot = "0.12.1"

@@ -514,7 +515,7 @@ tree-sitter = { version = "0.23", features = ["wasm"] }
 tree-sitter-bash = "0.23"
 tree-sitter-c = "0.23"
 tree-sitter-cpp = "0.23"
-tree-sitter-css = "0.23"
+tree-sitter-css = "0.23.2"
 tree-sitter-elixir = "0.3"
 tree-sitter-embedded-template = "0.23.0"
 tree-sitter-go = "0.23"
@@ -53,7 +53,7 @@ reqwest_client.workspace = true
 rpc.workspace = true
 rustc-demangle.workspace = true
 scrypt = "0.11"
-sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
+sea-orm = { version = "1.1.4", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
 semantic_version.workspace = true
 semver.workspace = true
 serde.workspace = true

@@ -116,7 +116,7 @@ release_channel.workspace = true
 remote = { workspace = true, features = ["test-support"] }
 remote_server.workspace = true
 rpc = { workspace = true, features = ["test-support"] }
-sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] }
+sea-orm = { version = "1.1.4", features = ["sqlx-sqlite"] }
 serde_json.workspace = true
 session = { workspace = true, features = ["test-support"] }
 settings = { workspace = true, features = ["test-support"] }
@@ -458,12 +458,14 @@ impl Copilot {
 .on_notification::<StatusNotification, _>(|_, _| { /* Silence the notification */ })
 .detach();

-let initialize_params = None;
 let configuration = lsp::DidChangeConfigurationParams {
 settings: Default::default(),
 };
 let server = cx
-.update(|cx| server.initialize(initialize_params, configuration.into(), cx))?
+.update(|cx| {
+let params = server.default_initialize_params(cx);
+server.initialize(params, configuration.into(), cx)
+})?
 .await?;

 let status = server
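For reference, a minimal sketch of the new initialization shape, assuming an async gpui context (so `cx.update(...)` returns a Result), a not-yet-initialized `lsp::LanguageServer` named `server`, and an `adapter` implementing `LspAdapter`; only the method names come from this diff, everything else is an assumption:

    // initialize() now takes InitializeParams directly instead of Option<InitializeParams>.
    let params = cx.update(|cx| server.default_initialize_params(cx))?;
    // Adapters may still adjust the parameters before they are sent (see prepare_initialize_params).
    let params = adapter.prepare_initialize_params(params)?;
    let configuration = lsp::DidChangeConfigurationParams {
        settings: Default::default(),
    };
    let server = cx
        .update(|cx| server.initialize(params, configuration.into(), cx))?
        .await?;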
@@ -13292,28 +13292,27 @@ impl Editor {
 cx.emit(SearchEvent::MatchesInvalidated);
 if *singleton_buffer_edited {
 if let Some(project) = &self.project {
-let project = project.read(cx);
 #[allow(clippy::mutable_key_type)]
-let languages_affected = multibuffer
-.read(cx)
+let languages_affected = multibuffer.update(cx, |multibuffer, cx| {
+multibuffer
 .all_buffers()
 .into_iter()
 .filter_map(|buffer| {
-let buffer = buffer.read(cx);
+buffer.update(cx, |buffer, cx| {
 let language = buffer.language()?;
-if project.is_local()
-&& project
-.language_servers_for_local_buffer(buffer, cx)
-.count()
-== 0
-{
-None
-} else {
-Some(language)
-}
+let should_discard = project.update(cx, |project, cx| {
+project.is_local()
+&& project.for_language_servers_for_local_buffer(
+buffer,
+|it| it.count() == 0,
+cx,
+)
+});
+should_discard.not().then_some(language.clone())
+})
 })
-.cloned()
-.collect::<HashSet<_>>();
+.collect::<HashSet<_>>()
+});
 if !languages_affected.is_empty() {
 self.refresh_inlay_hints(
 InlayHintRefreshReason::BufferEdited(languages_affected),
@@ -13903,15 +13902,18 @@ impl Editor {
 self.handle_input(text, window, cx);
 }

-pub fn supports_inlay_hints(&self, cx: &App) -> bool {
+pub fn supports_inlay_hints(&self, cx: &mut App) -> bool {
 let Some(provider) = self.semantics_provider.as_ref() else {
 return false;
 };

 let mut supports = false;
-self.buffer().read(cx).for_each_buffer(|buffer| {
-supports |= provider.supports_inlay_hints(buffer, cx);
+self.buffer().update(cx, |this, cx| {
+this.for_each_buffer(|buffer| {
+supports |= provider.supports_inlay_hints(buffer, cx);
+})
 });

 supports
 }
 pub fn is_focused(&self, window: &mut Window) -> bool {
@@ -14461,7 +14463,7 @@ pub trait SemanticsProvider {
 cx: &mut App,
 ) -> Option<Task<anyhow::Result<InlayHint>>>;

-fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool;
+fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;

 fn document_highlights(
 &self,
@@ -14852,17 +14854,25 @@ impl SemanticsProvider for Entity<Project> {
 }))
 }

-fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool {
+fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
 // TODO: make this work for remote projects
-self.read(cx)
-.language_servers_for_local_buffer(buffer.read(cx), cx)
-.any(
-|(_, server)| match server.capabilities().inlay_hint_provider {
-Some(lsp::OneOf::Left(enabled)) => enabled,
-Some(lsp::OneOf::Right(_)) => true,
-None => false,
-},
-)
+buffer.update(cx, |buffer, cx| {
+self.update(cx, |this, cx| {
+this.for_language_servers_for_local_buffer(
+buffer,
+|mut it| {
+it.any(
+|(_, server)| match server.capabilities().inlay_hint_provider {
+Some(lsp::OneOf::Left(enabled)) => enabled,
+Some(lsp::OneOf::Right(_)) => true,
+None => false,
+},
+)
+},
+cx,
+)
+})
+})
 }

 fn inlay_hints(
@@ -11111,7 +11111,6 @@ async fn test_language_server_restart_due_to_settings_change(cx: &mut gpui::TestAppContext) {
 0,
 "Should not restart LSP server on an unrelated LSP settings change"
 );

 update_test_project_settings(cx, |project_settings| {
 project_settings.lsp.insert(
 language_server_name.into(),
@@ -21,7 +21,6 @@ where
 let Some(project) = &editor.project else {
 return None;
 };
-let multibuffer = editor.buffer().read(cx);
 let mut language_servers_for = HashMap::default();
 editor
 .selections

@@ -29,29 +28,33 @@ where
 .iter()
 .filter(|selection| selection.start == selection.end)
 .filter_map(|selection| Some((selection.start.buffer_id?, selection.start)))
-.filter_map(|(buffer_id, trigger_anchor)| {
-let buffer = multibuffer.buffer(buffer_id)?;
+.find_map(|(buffer_id, trigger_anchor)| {
+let buffer = editor.buffer().read(cx).buffer(buffer_id)?;
 let server_id = *match language_servers_for.entry(buffer_id) {
 Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
 Entry::Vacant(vacant_entry) => {
-let language_server_id = project
-.read(cx)
-.language_servers_for_local_buffer(buffer.read(cx), cx)
-.find_map(|(adapter, server)| {
-if adapter.name.0.as_ref() == language_server_name {
-Some(server.server_id())
-} else {
-None
-}
-});
+let language_server_id = buffer.update(cx, |buffer, cx| {
+project.update(cx, |project, cx| {
+project.for_language_servers_for_local_buffer(
+buffer,
+|mut it| {
+it.find_map(|(adapter, server)| {
+if adapter.name.0.as_ref() == language_server_name {
+Some(server.server_id())
+} else {
+None
+}
+})
+},
+cx,
+)
+})
+});
 vacant_entry.insert(language_server_id)
 }
 }
 .as_ref()?;

-Some((buffer, trigger_anchor, server_id))
-})
-.find_map(|(buffer, trigger_anchor, server_id)| {
 let language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?;
 if !filter_language(&language) {
 return None;
@@ -472,7 +472,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
 self.0.resolve_inlay_hint(hint, buffer, server_id, cx)
 }

-fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool {
+fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
 if let Some(buffer) = self.to_base(&buffer, &[], cx) {
 self.0.supports_inlay_hints(&buffer, cx)
 } else {
@@ -14,7 +14,7 @@ proc-macro = true
 doctest = true

 [dependencies]
-proc-macro2 = "1.0.66"
+proc-macro2 = "1.0.93"
 quote = "1.0.9"
 syn = { version = "1.0.72", features = ["full", "extra-traits"] }

@@ -46,7 +46,6 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::Value;
 use settings::WorktreeId;
 use smol::future::FutureExt as _;
-use std::num::NonZeroU32;
 use std::{
 any::Any,
 ffi::OsStr,

@@ -62,6 +61,7 @@ use std::{
 Arc, LazyLock,
 },
 };
+use std::{num::NonZeroU32, sync::OnceLock};
 use syntax_map::{QueryCursorHandle, SyntaxSnapshot};
 use task::RunnableTag;
 pub use task_context::{ContextProvider, RunnableRange};
@@ -164,6 +164,7 @@ pub struct CachedLspAdapter {
 pub adapter: Arc<dyn LspAdapter>,
 pub reinstall_attempt_count: AtomicU64,
 cached_binary: futures::lock::Mutex<Option<LanguageServerBinary>>,
+attach_kind: OnceLock<Attach>,
 }

 impl Debug for CachedLspAdapter {

@@ -199,6 +200,7 @@ impl CachedLspAdapter {
 adapter,
 cached_binary: Default::default(),
 reinstall_attempt_count: AtomicU64::new(0),
+attach_kind: Default::default(),
 })
 }

@@ -260,6 +262,38 @@ impl CachedLspAdapter {
 .cloned()
 .unwrap_or_else(|| language_name.lsp_id())
 }
+pub fn find_project_root(
+&self,
+path: &Path,
+ancestor_depth: usize,
+delegate: &Arc<dyn LspAdapterDelegate>,
+) -> Option<Arc<Path>> {
+self.adapter
+.find_project_root(path, ancestor_depth, delegate)
+}
+pub fn attach_kind(&self) -> Attach {
+*self.attach_kind.get_or_init(|| self.adapter.attach_kind())
+}
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum Attach {
+/// Create a single language server instance per subproject root.
+InstancePerRoot,
+/// Use one shared language server instance for all subprojects within a project.
+Shared,
+}
+
+impl Attach {
+pub fn root_path(
+&self,
+root_subproject_path: (WorktreeId, Arc<Path>),
+) -> (WorktreeId, Arc<Path>) {
+match self {
+Attach::InstancePerRoot => root_subproject_path,
+Attach::Shared => (root_subproject_path.0, Arc::from(Path::new(""))),
+}
+}
 }

 /// [`LspAdapterDelegate`] allows [`LspAdapter]` implementations to interface with the application
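The new Attach enum decides whether one server instance is shared for a whole worktree or spawned per subproject root. A minimal sketch of the mapping it performs, assuming `Attach`, `WorktreeId`, `Path`, and `Arc` are in scope and `worktree_id` is a WorktreeId obtained elsewhere (the subproject path is made up):

    // Not standalone: illustrates Attach::root_path from this diff.
    let subproject: (WorktreeId, Arc<Path>) = (worktree_id, Arc::from(Path::new("backend/api")));

    // InstancePerRoot keeps the detected root, so each subproject gets its own server key.
    assert_eq!(
        &*Attach::InstancePerRoot.root_path(subproject.clone()).1,
        Path::new("backend/api")
    );
    // Shared collapses every root to "" (the worktree root), so one server serves them all.
    assert_eq!(&*Attach::Shared.root_path(subproject).1, Path::new(""));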
@@ -270,6 +304,7 @@ pub trait LspAdapterDelegate: Send + Sync {
 fn http_client(&self) -> Arc<dyn HttpClient>;
 fn worktree_id(&self) -> WorktreeId;
 fn worktree_root_path(&self) -> &Path;
+fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool;
 fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus);
 async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>>;

@@ -508,6 +543,19 @@ pub trait LspAdapter: 'static + Send + Sync {
 fn prepare_initialize_params(&self, original: InitializeParams) -> Result<InitializeParams> {
 Ok(original)
 }
+fn attach_kind(&self) -> Attach {
+Attach::Shared
+}
+fn find_project_root(
+&self,
+_path: &Path,
+_ancestor_depth: usize,
+_: &Arc<dyn LspAdapterDelegate>,
+) -> Option<Arc<Path>> {
+// By default all language servers are rooted at the root of the worktree.
+Some(Arc::from("".as_ref()))
+}
 }

 async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>(
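Adapters that want finer-grained roots override these defaults: attach_kind picks the instancing policy and find_project_root walks up from the queried path using the delegate's new exists() probe. A self-contained sketch of that ancestor walk, decoupled from the trait so it runs on its own; the `exists` closure stands in for LspAdapterDelegate::exists and "my-tool.toml" is a made-up marker file, not anything from this commit:

    use std::path::{Path, PathBuf};
    use std::sync::Arc;

    // Root the server at the nearest ancestor containing the marker file,
    // looking at most `ancestor_depth` levels up.
    fn find_project_root(
        path: &Path,
        ancestor_depth: usize,
        exists: &dyn Fn(&Path) -> bool,
    ) -> Option<Arc<Path>> {
        path.ancestors()
            .take(ancestor_depth)
            .find(|ancestor| exists(&ancestor.join("my-tool.toml")))
            .map(Arc::from)
    }

    fn main() {
        // Pretend only /repo/crates/app/my-tool.toml exists on disk.
        let marker = PathBuf::from("/repo/crates/app/my-tool.toml");
        let exists = |p: &Path| p == marker.as_path();
        let root = find_project_root(Path::new("/repo/crates/app/src"), 5, &exists);
        assert_eq!(root.as_deref(), Some(Path::new("/repo/crates/app")));
    }

Note this sketch prefers the nearest marker; the rust-analyzer adapter below deliberately keeps the outermost one instead.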
@@ -108,6 +108,7 @@ struct LanguageRegistryState {
 available_languages: Vec<AvailableLanguage>,
 grammars: HashMap<Arc<str>, AvailableGrammar>,
 lsp_adapters: HashMap<LanguageName, Vec<Arc<CachedLspAdapter>>>,
+all_lsp_adapters: HashMap<LanguageServerName, Arc<CachedLspAdapter>>,
 available_lsp_adapters:
 HashMap<LanguageServerName, Arc<dyn Fn() -> Arc<CachedLspAdapter> + 'static + Send + Sync>>,
 loading_languages: HashMap<LanguageId, Vec<oneshot::Sender<Result<Arc<Language>>>>>,
@@ -234,6 +235,7 @@ impl LanguageRegistry {
 language_settings: Default::default(),
 loading_languages: Default::default(),
 lsp_adapters: Default::default(),
+all_lsp_adapters: Default::default(),
 available_lsp_adapters: HashMap::default(),
 subscription: watch::channel(),
 theme: Default::default(),
@@ -356,12 +358,16 @@ impl LanguageRegistry {
 adapter: Arc<dyn LspAdapter>,
 ) -> Arc<CachedLspAdapter> {
 let cached = CachedLspAdapter::new(adapter);
-self.state
-.write()
+let mut state = self.state.write();
+state
 .lsp_adapters
 .entry(language_name)
 .or_default()
 .push(cached.clone());
+state
+.all_lsp_adapters
+.insert(cached.name.clone(), cached.clone());
+
 cached
 }

@@ -401,12 +407,17 @@ impl LanguageRegistry {
 let adapter_name = LanguageServerName(adapter.name.into());
 let capabilities = adapter.capabilities.clone();
 let initializer = adapter.initializer.take();
-self.state
-.write()
-.lsp_adapters
-.entry(language_name.clone())
-.or_default()
-.push(CachedLspAdapter::new(Arc::new(adapter)));
+let adapter = CachedLspAdapter::new(Arc::new(adapter));
+{
+let mut state = self.state.write();
+state
+.lsp_adapters
+.entry(language_name.clone())
+.or_default()
+.push(adapter.clone());
+state.all_lsp_adapters.insert(adapter.name(), adapter);
+}
+
 self.register_fake_language_server(adapter_name, capabilities, initializer)
 }

@@ -419,12 +430,16 @@ impl LanguageRegistry {
 adapter: crate::FakeLspAdapter,
 ) {
 let language_name = language_name.into();
-self.state
-.write()
+let mut state = self.state.write();
+let cached_adapter = CachedLspAdapter::new(Arc::new(adapter));
+state
 .lsp_adapters
 .entry(language_name.clone())
 .or_default()
-.push(CachedLspAdapter::new(Arc::new(adapter)));
+.push(cached_adapter.clone());
+state
+.all_lsp_adapters
+.insert(cached_adapter.name(), cached_adapter);
 }

 /// Register a fake language server (without the adapter)
@@ -892,6 +907,10 @@ impl LanguageRegistry {
 .unwrap_or_default()
 }

+pub fn adapter_for_name(&self, name: &LanguageServerName) -> Option<Arc<CachedLspAdapter>> {
+self.state.read().all_lsp_adapters.get(name).cloned()
+}
+
 pub fn update_lsp_status(
 &self,
 server_name: LanguageServerName,
@@ -735,7 +735,8 @@ impl LspLogView {

 * Binary: {BINARY:#?}

-* Running in project: {PATH:?}
+* Registered workspace folders:
+{WORKSPACE_FOLDERS}

 * Capabilities: {CAPABILITIES}

@@ -743,7 +744,15 @@
 NAME = server.name(),
 ID = server.server_id(),
 BINARY = server.binary(),
-PATH = server.root_path(),
+WORKSPACE_FOLDERS = server
+.workspace_folders()
+.iter()
+.filter_map(|path| path
+.to_file_path()
+.ok()
+.map(|path| path.to_string_lossy().into_owned()))
+.collect::<Vec<_>>()
+.join(", "),
 CAPABILITIES = serde_json::to_string_pretty(&server.capabilities())
 .unwrap_or_else(|e| format!("Failed to serialize capabilities: {e}")),
 CONFIGURATION = serde_json::to_string_pretty(server.configuration())
@@ -74,6 +74,23 @@ impl LspAdapter for RustLspAdapter {
 Self::SERVER_NAME.clone()
 }

+fn find_project_root(
+&self,
+path: &Path,
+ancestor_depth: usize,
+delegate: &Arc<dyn LspAdapterDelegate>,
+) -> Option<Arc<Path>> {
+let mut outermost_cargo_toml = None;
+for path in path.ancestors().take(ancestor_depth) {
+let p = path.join("Cargo.toml");
+if delegate.exists(&p, Some(false)) {
+outermost_cargo_toml = Some(Arc::from(path));
+}
+}
+
+outermost_cargo_toml
+}
+
 async fn check_if_user_installed(
 &self,
 delegate: &dyn LspAdapterDelegate,
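Because the loop keeps overwriting the result, the Rust adapter ends up with the outermost Cargo.toml inside the searched depth, so member crates of a Cargo workspace all resolve to the workspace root. A small runnable sketch of that behaviour; the workspace layout and the closure standing in for LspAdapterDelegate::exists are made up:

    use std::path::{Path, PathBuf};
    use std::sync::Arc;

    fn main() {
        // Hypothetical layout; only these two manifests "exist".
        let manifests = [
            PathBuf::from("/ws/Cargo.toml"),
            PathBuf::from("/ws/crates/editor/Cargo.toml"),
        ];
        let exists = |p: &Path| manifests.iter().any(|m| m.as_path() == p);

        // Same loop shape as RustLspAdapter::find_project_root above: later (outer) hits win.
        let mut outermost: Option<Arc<Path>> = None;
        for ancestor in Path::new("/ws/crates/editor/src/lib.rs").ancestors().take(6) {
            if exists(&ancestor.join("Cargo.toml")) {
                outermost = Some(Arc::from(ancestor));
            }
        }
        // The Cargo workspace root wins over the member crate's own root.
        assert_eq!(outermost.as_deref(), Some(Path::new("/ws")));
    }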
@@ -4,9 +4,10 @@ pub use lsp_types::request::*;
 pub use lsp_types::*;

 use anyhow::{anyhow, Context as _, Result};
-use collections::HashMap;
+use collections::{HashMap, HashSet};
 use futures::{channel::oneshot, io::BufWriter, select, AsyncRead, AsyncWrite, Future, FutureExt};
 use gpui::{App, AsyncApp, BackgroundExecutor, SharedString, Task};
+use notification::DidChangeWorkspaceFolders;
 use parking_lot::{Mutex, RwLock};
 use postage::{barrier, prelude::Stream};
 use schemars::{

@@ -23,10 +24,11 @@ use smol::{
 };

 use std::{
+collections::BTreeSet,
 ffi::{OsStr, OsString},
 fmt,
 io::Write,
-ops::DerefMut,
+ops::{Deref, DerefMut},
 path::PathBuf,
 pin::Pin,
 sync::{
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||||
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
||||||
root_path: PathBuf,
|
|
||||||
working_dir: PathBuf,
|
|
||||||
server: Arc<Mutex<Option<Child>>>,
|
server: Arc<Mutex<Option<Child>>>,
|
||||||
|
workspace_folders: Arc<Mutex<BTreeSet<Url>>>,
|
||||||
|
registered_buffers: Arc<Mutex<HashSet<Url>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Identifies a running language server.
|
/// Identifies a running language server.
|
||||||
|
@@ -376,8 +378,6 @@ impl LanguageServer {
 Some(stderr),
 stderr_capture,
 Some(server),
-root_path,
-working_dir,
 code_action_kinds,
 binary,
 cx,

@@ -403,8 +403,6 @@ impl LanguageServer {
 stderr: Option<Stderr>,
 stderr_capture: Arc<Mutex<Option<String>>>,
 server: Option<Child>,
-root_path: &Path,
-working_dir: &Path,
 code_action_kinds: Option<Vec<CodeActionKind>>,
 binary: LanguageServerBinary,
 cx: AsyncApp,
@@ -488,9 +486,9 @@ impl LanguageServer {
 executor: cx.background_executor().clone(),
 io_tasks: Mutex::new(Some((input_task, output_task))),
 output_done_rx: Mutex::new(Some(output_done_rx)),
-root_path: root_path.to_path_buf(),
-working_dir: working_dir.to_path_buf(),
 server: Arc::new(Mutex::new(server)),
+workspace_folders: Default::default(),
+registered_buffers: Default::default(),
 }
 }

@@ -615,12 +613,11 @@ impl LanguageServer {
 }

 pub fn default_initialize_params(&self, cx: &App) -> InitializeParams {
-let root_uri = Url::from_file_path(&self.working_dir).unwrap();
 #[allow(deprecated)]
 InitializeParams {
 process_id: None,
 root_path: None,
-root_uri: Some(root_uri.clone()),
+root_uri: None,
 initialization_options: None,
 capabilities: ClientCapabilities {
 general: Some(GeneralClientCapabilities {
|
||||||
}),
|
}),
|
||||||
},
|
},
|
||||||
trace: None,
|
trace: None,
|
||||||
workspace_folders: Some(vec![WorkspaceFolder {
|
workspace_folders: None,
|
||||||
uri: root_uri,
|
|
||||||
name: Default::default(),
|
|
||||||
}]),
|
|
||||||
client_info: release_channel::ReleaseChannel::try_global(cx).map(|release_channel| {
|
client_info: release_channel::ReleaseChannel::try_global(cx).map(|release_channel| {
|
||||||
ClientInfo {
|
ClientInfo {
|
||||||
name: release_channel.display_name().to_string(),
|
name: release_channel.display_name().to_string(),
|
||||||
|
@ -809,16 +803,10 @@ impl LanguageServer {
|
||||||
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize)
|
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize)
|
||||||
pub fn initialize(
|
pub fn initialize(
|
||||||
mut self,
|
mut self,
|
||||||
initialize_params: Option<InitializeParams>,
|
params: InitializeParams,
|
||||||
configuration: Arc<DidChangeConfigurationParams>,
|
configuration: Arc<DidChangeConfigurationParams>,
|
||||||
cx: &App,
|
cx: &App,
|
||||||
) -> Task<Result<Arc<Self>>> {
|
) -> Task<Result<Arc<Self>>> {
|
||||||
let params = if let Some(params) = initialize_params {
|
|
||||||
params
|
|
||||||
} else {
|
|
||||||
self.default_initialize_params(cx)
|
|
||||||
};
|
|
||||||
|
|
||||||
cx.spawn(|_| async move {
|
cx.spawn(|_| async move {
|
||||||
let response = self.request::<request::Initialize>(params).await?;
|
let response = self.request::<request::Initialize>(params).await?;
|
||||||
if let Some(info) = response.server_info {
|
if let Some(info) = response.server_info {
|
||||||
|
@@ -1070,16 +1058,10 @@ impl LanguageServer {
 self.server_id
 }

-/// Get the root path of the project the language server is running against.
-pub fn root_path(&self) -> &PathBuf {
-&self.root_path
-}
-
 /// Language server's binary information.
 pub fn binary(&self) -> &LanguageServerBinary {
 &self.binary
 }

 /// Sends a RPC request to the language server.
 ///
 /// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
@@ -1207,6 +1189,125 @@ impl LanguageServer {
 outbound_tx.try_send(message)?;
 Ok(())
 }

+/// Add new workspace folder to the list.
+pub fn add_workspace_folder(&self, uri: Url) {
+if self
+.capabilities()
+.workspace
+.and_then(|ws| {
+ws.workspace_folders.and_then(|folders| {
+folders
+.change_notifications
+.map(|caps| matches!(caps, OneOf::Left(false)))
+})
+})
+.unwrap_or(true)
+{
+return;
+}
+
+let is_new_folder = self.workspace_folders.lock().insert(uri.clone());
+if is_new_folder {
+let params = DidChangeWorkspaceFoldersParams {
+event: WorkspaceFoldersChangeEvent {
+added: vec![WorkspaceFolder {
+uri,
+name: String::default(),
+}],
+removed: vec![],
+},
+};
+self.notify::<DidChangeWorkspaceFolders>(&params).log_err();
+}
+}
+/// Add new workspace folder to the list.
+pub fn remove_workspace_folder(&self, uri: Url) {
+if self
+.capabilities()
+.workspace
+.and_then(|ws| {
+ws.workspace_folders.and_then(|folders| {
+folders
+.change_notifications
+.map(|caps| !matches!(caps, OneOf::Left(false)))
+})
+})
+.unwrap_or(true)
+{
+return;
+}
+let was_removed = self.workspace_folders.lock().remove(&uri);
+if was_removed {
+let params = DidChangeWorkspaceFoldersParams {
+event: WorkspaceFoldersChangeEvent {
+added: vec![],
+removed: vec![WorkspaceFolder {
+uri,
+name: String::default(),
+}],
+},
+};
+self.notify::<DidChangeWorkspaceFolders>(&params).log_err();
+}
+}
+pub fn set_workspace_folders(&self, folders: BTreeSet<Url>) {
+let mut workspace_folders = self.workspace_folders.lock();
+let added: Vec<_> = folders
+.iter()
+.map(|uri| WorkspaceFolder {
+uri: uri.clone(),
+name: String::default(),
+})
+.collect();
+
+let removed: Vec<_> = std::mem::replace(&mut *workspace_folders, folders)
+.into_iter()
+.map(|uri| WorkspaceFolder {
+uri: uri.clone(),
+name: String::default(),
+})
+.collect();
+let should_notify = !added.is_empty() || !removed.is_empty();
+
+if should_notify {
+let params = DidChangeWorkspaceFoldersParams {
+event: WorkspaceFoldersChangeEvent { added, removed },
+};
+self.notify::<DidChangeWorkspaceFolders>(&params).log_err();
+}
+}
+
+pub fn workspace_folders(&self) -> impl Deref<Target = BTreeSet<Url>> + '_ {
+self.workspace_folders.lock()
+}
+
+pub fn register_buffer(
+&self,
+uri: Url,
+language_id: String,
+version: i32,
+initial_text: String,
+) {
+let was_already_registered = self.registered_buffers.lock().insert(uri.clone());
+
+if was_already_registered {
+self.notify::<notification::DidOpenTextDocument>(&DidOpenTextDocumentParams {
+text_document: TextDocumentItem::new(uri, language_id, version, initial_text),
+})
+.log_err();
+}
+}
+
+pub fn unregister_buffer(&self, uri: &Url) {
+let was_removed = self.registered_buffers.lock().remove(uri);
+if was_removed {
+self.notify::<notification::DidCloseTextDocument>(&DidCloseTextDocumentParams {
+text_document: TextDocumentIdentifier::new(uri.clone()),
+})
+.log_err();
+}
+}
 }

 impl Drop for LanguageServer {
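A hedged sketch of how a caller might drive the new workspace-folder API; `server` is assumed to be an initialized Arc<LanguageServer> whose capabilities advertise workspace-folder change notifications (otherwise the calls return early), and the folder paths are made up:

    let backend = lsp::Url::from_file_path("/repo/backend").unwrap();
    let frontend = lsp::Url::from_file_path("/repo/frontend").unwrap();

    // Each change sends workspace/didChangeWorkspaceFolders only when the set actually changes.
    server.add_workspace_folder(backend.clone());
    server.add_workspace_folder(frontend.clone());
    assert!(server.workspace_folders().contains(&backend));

    // Replace the whole set at once; added and removed folders are diffed internally.
    server.set_workspace_folders(std::collections::BTreeSet::from([frontend]));
    server.remove_workspace_folder(backend); // already absent after the reset, so no notification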
@@ -1288,8 +1389,6 @@ impl FakeLanguageServer {
 let (stdout_writer, stdout_reader) = async_pipe::pipe();
 let (notifications_tx, notifications_rx) = channel::unbounded();

-let root = Self::root_path();
-
 let server_name = LanguageServerName(name.clone().into());
 let process_name = Arc::from(name.as_str());
 let mut server = LanguageServer::new_internal(

@@ -1300,8 +1399,6 @@ impl FakeLanguageServer {
 None::<async_pipe::PipeReader>,
 Arc::new(Mutex::new(None)),
 None,
-root,
-root,
 None,
 binary.clone(),
 cx.clone(),

@@ -1319,8 +1416,6 @@ impl FakeLanguageServer {
 None::<async_pipe::PipeReader>,
 Arc::new(Mutex::new(None)),
 None,
-root,
-root,
 None,
 binary,
 cx.clone(),

@@ -1357,16 +1452,6 @@ impl FakeLanguageServer {

 (server, fake)
 }
-
-#[cfg(target_os = "windows")]
-fn root_path() -> &'static Path {
-Path::new("C:\\")
-}
-
-#[cfg(not(target_os = "windows"))]
-fn root_path() -> &'static Path {
-Path::new("/")
-}
 }

 #[cfg(any(test, feature = "test-support"))]
@@ -1554,12 +1639,14 @@ mod tests {
 })
 .detach();

-let initialize_params = None;
-let configuration = DidChangeConfigurationParams {
-settings: Default::default(),
-};
 let server = cx
-.update(|cx| server.initialize(initialize_params, configuration.into(), cx))
+.update(|cx| {
+let params = server.default_initialize_params(cx);
+let configuration = DidChangeConfigurationParams {
+settings: Default::default(),
+};
+server.initialize(params, configuration.into(), cx)
+})
 .await
 .unwrap();
 server
@@ -283,13 +283,13 @@ impl Prettier {
 )
 .context("prettier server creation")?;

-let initialize_params = None;
-let configuration = lsp::DidChangeConfigurationParams {
-settings: Default::default(),
-};
 let server = cx
 .update(|cx| {
-executor.spawn(server.initialize(initialize_params, configuration.into(), cx))
+let params = server.default_initialize_params(cx);
+let configuration = lsp::DidChangeConfigurationParams {
+settings: Default::default(),
+};
+executor.spawn(server.initialize(params, configuration.into(), cx))
 })?
 .await
 .context("prettier server initialization")?;
@@ -43,6 +43,7 @@ log.workspace = true
 lsp.workspace = true
 node_runtime.workspace = true
 image.workspace = true
+once_cell.workspace = true
 parking_lot.workspace = true
 pathdiff.workspace = true
 paths.workspace = true
@@ -942,9 +942,11 @@ fn language_server_for_buffer(
 ) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
 lsp_store
 .update(cx, |lsp_store, cx| {
-lsp_store
-.language_server_for_local_buffer(buffer.read(cx), server_id, cx)
-.map(|(adapter, server)| (adapter.clone(), server.clone()))
+buffer.update(cx, |buffer, cx| {
+lsp_store
+.language_server_for_local_buffer(buffer, server_id, cx)
+.map(|(adapter, server)| (adapter.clone(), server.clone()))
+})
 })?
 .ok_or_else(|| anyhow!("no language server found for buffer"))
 }

(File diff suppressed because it is too large.)
@@ -40,7 +40,7 @@ pub struct PrettierStore {
 prettier_instances: HashMap<PathBuf, PrettierInstance>,
 }

-pub enum PrettierStoreEvent {
+pub(crate) enum PrettierStoreEvent {
 LanguageServerRemoved(LanguageServerId),
 LanguageServerAdded {
 new_server_id: LanguageServerId,
@@ -9,6 +9,7 @@ pub mod lsp_ext_command;
 pub mod lsp_store;
 pub mod prettier_store;
 pub mod project_settings;
+mod project_tree;
 pub mod search;
 mod task_inventory;
 pub mod task_store;

@@ -475,6 +476,7 @@ pub struct DocumentHighlight {
 pub struct Symbol {
 pub language_server_name: LanguageServerName,
 pub source_worktree_id: WorktreeId,
+pub source_language_server_id: LanguageServerId,
 pub path: ProjectPath,
 pub label: CodeLabel,
 pub name: String,
@@ -1906,7 +1908,7 @@ impl Project {
 pub fn open_buffer(
 &mut self,
 path: impl Into<ProjectPath>,
-cx: &mut Context<Self>,
+cx: &mut App,
 ) -> Task<Result<Entity<Buffer>>> {
 if self.is_disconnected(cx) {
 return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));

@@ -1921,11 +1923,11 @@ impl Project {
 pub fn open_buffer_with_lsp(
 &mut self,
 path: impl Into<ProjectPath>,
-cx: &mut Context<Self>,
+cx: &mut App,
 ) -> Task<Result<(Entity<Buffer>, lsp_store::OpenLspBufferHandle)>> {
 let buffer = self.open_buffer(path, cx);
 let lsp_store = self.lsp_store().clone();
-cx.spawn(|_, mut cx| async move {
+cx.spawn(|mut cx| async move {
 let buffer = buffer.await?;
 let handle = lsp_store.update(&mut cx, |lsp_store, cx| {
 lsp_store.register_buffer_with_language_servers(&buffer, cx)
@@ -4131,14 +4133,25 @@ impl Project {
 self.lsp_store.read(cx).supplementary_language_servers()
 }

-pub fn language_servers_for_local_buffer<'a>(
-&'a self,
-buffer: &'a Buffer,
-cx: &'a App,
-) -> impl Iterator<Item = (&'a Arc<CachedLspAdapter>, &'a Arc<LanguageServer>)> {
-self.lsp_store
-.read(cx)
-.language_servers_for_local_buffer(buffer, cx)
+pub fn language_server_for_id(
+&self,
+id: LanguageServerId,
+cx: &App,
+) -> Option<Arc<LanguageServer>> {
+self.lsp_store.read(cx).language_server_for_id(id)
+}
+
+pub fn for_language_servers_for_local_buffer<R: 'static>(
+&self,
+buffer: &Buffer,
+callback: impl FnOnce(
+Box<dyn Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> + '_>,
+) -> R,
+cx: &mut App,
+) -> R {
+self.lsp_store.update(cx, |this, cx| {
+callback(Box::new(this.language_servers_for_local_buffer(buffer, cx)))
+})
 }

 pub fn buffer_store(&self) -> &Entity<BufferStore> {
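The iterator-returning Project::language_servers_for_local_buffer is replaced by a callback-style accessor, so the server list is only borrowed inside a closure while the LspStore is updated. A migration sketch for a hypothetical call site; `project` (Entity<Project>), `buffer` (Entity<Buffer>), and a `&mut App` context `cx` are assumed to be in scope, mirroring the nesting used by the editor hunk above:

    // Before: let count = project.read(cx)
    //     .language_servers_for_local_buffer(buffer.read(cx), cx)
    //     .count();
    // After: the iterator lives only inside the callback.
    let count = buffer.update(cx, |buffer, cx| {
        project.update(cx, |project, cx| {
            project.for_language_servers_for_local_buffer(buffer, |servers| servers.count(), cx)
        })
    });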
@@ -1749,6 +1749,12 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
 });
 })
 });
+let _rs_buffer = project
+.update(cx, |project, cx| {
+project.open_local_buffer_with_lsp("/dir/a.rs", cx)
+})
+.await
+.unwrap();
 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
 assert_eq!(
 fake_rust_server_2
@@ -2573,25 +2579,28 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
 fs.insert_tree(
 "/dir",
 json!({
-"a.rs": "const fn a() { A }",
 "b.rs": "const y: i32 = crate::a()",
 }),
 )
 .await;
+fs.insert_tree(
+"/another_dir",
+json!({
+"a.rs": "const fn a() { A }"}),
+)
+.await;

-let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
+let project = Project::test(fs, ["/dir".as_ref()], cx).await;

 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 language_registry.add(rust_lang());
 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

 let (buffer, _handle) = project
 .update(cx, |project, cx| {
 project.open_local_buffer_with_lsp("/dir/b.rs", cx)
 })
 .await
 .unwrap();

 let fake_server = fake_servers.next().await.unwrap();
 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
 let params = params.text_document_position_params;

@@ -2603,12 +2612,11 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {

 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
 lsp::Location::new(
-lsp::Url::from_file_path("/dir/a.rs").unwrap(),
+lsp::Url::from_file_path("/another_dir/a.rs").unwrap(),
 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 ),
 )))
 });

 let mut definitions = project
 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
 .await

@@ -2629,18 +2637,21 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
 .as_local()
 .unwrap()
 .abs_path(cx),
-Path::new("/dir/a.rs"),
+Path::new("/another_dir/a.rs"),
 );
 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
 assert_eq!(
 list_worktrees(&project, cx),
-[("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
+[
+("/another_dir/a.rs".as_ref(), false),
+("/dir".as_ref(), true)
+],
 );

 drop(definition);
 });
 cx.update(|cx| {
-assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
+assert_eq!(list_worktrees(&project, cx), [("/dir".as_ref(), true)]);
 });

 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
crates/project/src/project_tree.rs (new file, 243 lines)
@@ -0,0 +1,243 @@
+//! This module defines a Project Tree.
+//!
+//! A Project Tree is responsible for determining where the roots of subprojects are located in a project.
+
+mod path_trie;
+mod server_tree;
+
+use std::{
+borrow::Borrow,
+collections::{hash_map::Entry, BTreeMap},
+ops::ControlFlow,
+sync::Arc,
+};
+
+use collections::HashMap;
+use gpui::{App, AppContext, Context, Entity, EventEmitter, Subscription};
+use language::{CachedLspAdapter, LspAdapterDelegate};
+use lsp::LanguageServerName;
+use path_trie::{LabelPresence, RootPathTrie, TriePath};
+use settings::{SettingsStore, WorktreeId};
+use worktree::{Event as WorktreeEvent, Worktree};
+
+use crate::{
+worktree_store::{WorktreeStore, WorktreeStoreEvent},
+ProjectPath,
+};
+
+pub(crate) use server_tree::{AdapterQuery, LanguageServerTree, LaunchDisposition};
+
+struct WorktreeRoots {
+roots: RootPathTrie<LanguageServerName>,
+worktree_store: Entity<WorktreeStore>,
+_worktree_subscription: Subscription,
+}
+
+impl WorktreeRoots {
+fn new(
+worktree_store: Entity<WorktreeStore>,
+worktree: Entity<Worktree>,
+cx: &mut App,
+) -> Entity<Self> {
+cx.new(|cx| Self {
+roots: RootPathTrie::new(),
+worktree_store,
+_worktree_subscription: cx.subscribe(&worktree, |this: &mut Self, _, event, cx| {
+match event {
+WorktreeEvent::UpdatedEntries(changes) => {
+for (path, _, kind) in changes.iter() {
+match kind {
+worktree::PathChange::Removed => {
+let path = TriePath::from(path.as_ref());
+this.roots.remove(&path);
+}
+_ => {}
+}
+}
+}
+WorktreeEvent::UpdatedGitRepositories(_) => {}
+WorktreeEvent::DeletedEntry(entry_id) => {
+let Some(entry) = this.worktree_store.read(cx).entry_for_id(*entry_id, cx)
+else {
+return;
+};
+let path = TriePath::from(entry.path.as_ref());
+this.roots.remove(&path);
+}
+}
+}),
+})
+}
+}
+
+pub struct ProjectTree {
+root_points: HashMap<WorktreeId, Entity<WorktreeRoots>>,
+worktree_store: Entity<WorktreeStore>,
+_subscriptions: [Subscription; 2],
+}
+
+#[derive(Debug, Clone)]
+struct AdapterWrapper(Arc<CachedLspAdapter>);
+impl PartialEq for AdapterWrapper {
+fn eq(&self, other: &Self) -> bool {
+self.0.name.eq(&other.0.name)
+}
+}
+
+impl Eq for AdapterWrapper {}
+
+impl std::hash::Hash for AdapterWrapper {
+fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+self.0.name.hash(state);
+}
+}
+
+impl PartialOrd for AdapterWrapper {
+fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+Some(self.0.name.cmp(&other.0.name))
+}
+}
+
+impl Ord for AdapterWrapper {
+fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+self.0.name.cmp(&other.0.name)
+}
+}
+
+impl Borrow<LanguageServerName> for AdapterWrapper {
+fn borrow(&self) -> &LanguageServerName {
+&self.0.name
+}
+}
+
+#[derive(PartialEq)]
+pub(crate) enum ProjectTreeEvent {
+WorktreeRemoved(WorktreeId),
+Cleared,
+}
+
+impl EventEmitter<ProjectTreeEvent> for ProjectTree {}
+
+impl ProjectTree {
+pub(crate) fn new(worktree_store: Entity<WorktreeStore>, cx: &mut App) -> Entity<Self> {
+cx.new(|cx| Self {
+root_points: Default::default(),
+_subscriptions: [
+cx.subscribe(&worktree_store, Self::on_worktree_store_event),
+cx.observe_global::<SettingsStore>(|this, cx| {
+for (_, roots) in &mut this.root_points {
+roots.update(cx, |worktree_roots, _| {
+worktree_roots.roots = RootPathTrie::new();
+})
+}
+cx.emit(ProjectTreeEvent::Cleared);
+}),
+],
+worktree_store,
+})
+}
+#[allow(clippy::mutable_key_type)]
+fn root_for_path(
+&mut self,
+ProjectPath { worktree_id, path }: ProjectPath,
+adapters: Vec<Arc<CachedLspAdapter>>,
+delegate: Arc<dyn LspAdapterDelegate>,
+cx: &mut App,
+) -> BTreeMap<AdapterWrapper, ProjectPath> {
+debug_assert_eq!(delegate.worktree_id(), worktree_id);
+#[allow(clippy::mutable_key_type)]
+let mut roots = BTreeMap::from_iter(
+adapters
+.into_iter()
+.map(|adapter| (AdapterWrapper(adapter), (None, LabelPresence::KnownAbsent))),
+);
+let worktree_roots = match self.root_points.entry(worktree_id) {
+Entry::Occupied(occupied_entry) => occupied_entry.get().clone(),
+Entry::Vacant(vacant_entry) => {
+let Some(worktree) = self
+.worktree_store
+.read(cx)
+.worktree_for_id(worktree_id, cx)
+else {
+return Default::default();
+};
+let roots = WorktreeRoots::new(self.worktree_store.clone(), worktree, cx);
+vacant_entry.insert(roots).clone()
+}
+};
+
+let key = TriePath::from(&*path);
+worktree_roots.update(cx, |this, _| {
+this.roots.walk(&key, &mut |path, labels| {
+for (label, presence) in labels {
+if let Some((marked_path, current_presence)) = roots.get_mut(label) {
+if *current_presence > *presence {
+debug_assert!(false, "RootPathTrie precondition violation; while walking the tree label presence is only allowed to increase");
+}
+*marked_path = Some(ProjectPath {worktree_id, path: path.clone()});
+*current_presence = *presence;
+}
+
+}
+ControlFlow::Continue(())
+});
+});
+for (adapter, (root_path, presence)) in &mut roots {
+if *presence == LabelPresence::Present {
+continue;
+}
+
+let depth = root_path
+.as_ref()
+.map(|root_path| {
+path.strip_prefix(&root_path.path)
+.unwrap()
+.components()
+.count()
+})
+.unwrap_or_else(|| path.components().count() + 1);
+
+if depth > 0 {
+let root = adapter.0.find_project_root(&path, depth, &delegate);
+match root {
+Some(known_root) => worktree_roots.update(cx, |this, _| {
+let root = TriePath::from(&*known_root);
+this.roots
+.insert(&root, adapter.0.name(), LabelPresence::Present);
+*presence = LabelPresence::Present;
+*root_path = Some(ProjectPath {
+worktree_id,
+path: known_root,
+});
+}),
+None => worktree_roots.update(cx, |this, _| {
+this.roots
+.insert(&key, adapter.0.name(), LabelPresence::KnownAbsent);
+}),
+}
+}
+}
+
+roots
+.into_iter()
+.filter_map(|(k, (path, presence))| {
+let path = path?;
+presence.eq(&LabelPresence::Present).then(|| (k, path))
+})
+.collect()
+}
+fn on_worktree_store_event(
+&mut self,
+_: Entity<WorktreeStore>,
+evt: &WorktreeStoreEvent,
+cx: &mut Context<Self>,
+) {
+match evt {
|
WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) => {
|
||||||
|
self.root_points.remove(&worktree_id);
|
||||||
|
cx.emit(ProjectTreeEvent::WorktreeRemoved(*worktree_id));
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
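A side note on AdapterWrapper above (an editor's sketch, not part of the diff): because it implements Borrow<LanguageServerName> and orders itself by the adapter name, the BTreeMap returned by root_for_path can be queried with a bare server name. The helper below is hypothetical:

    // Sketch only: look up a discovered root by language server name.
    fn root_for_server<'a>(
        roots: &'a BTreeMap<AdapterWrapper, ProjectPath>,
        name: &LanguageServerName,
    ) -> Option<&'a ProjectPath> {
        // BTreeMap::get accepts &LanguageServerName here because
        // AdapterWrapper: Borrow<LanguageServerName> and both Ord
        // implementations compare by the adapter's name.
        roots.get(name)
    }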
241
crates/project/src/project_tree/path_trie.rs
Normal file

@@ -0,0 +1,241 @@
use std::{
    collections::{btree_map::Entry, BTreeMap},
    ffi::OsStr,
    ops::ControlFlow,
    path::{Path, PathBuf},
    sync::Arc,
};

/// [RootPathTrie] is a workhorse of [super::ProjectTree]. It is responsible for determining the closest known project root for a given path.
/// It also determines how much of a given path is unexplored, thus letting callers fill in that gap if needed.
/// Conceptually, it allows one to annotate Worktree entries with arbitrary extra metadata and run closest-ancestor searches.
///
/// A path is unexplored when the closest ancestor of a path is not the path itself; that means we have not yet run a scan on that path.
/// For example, if there's a project root at path `python/project` and we query for a path `python/project/subdir/another_subdir/file.py`, there is
/// a known root at `python/project` and the unexplored part is `subdir/another_subdir` - we need to run a scan on these 2 directories.
pub(super) struct RootPathTrie<Label> {
    worktree_relative_path: Arc<Path>,
    labels: BTreeMap<Label, LabelPresence>,
    children: BTreeMap<Arc<OsStr>, RootPathTrie<Label>>,
}

/// Label presence is a marker that allows us to optimize searches within [RootPathTrie]; a node label can be:
/// - Present; we know there's definitely a project root at this node and it is the only label of that kind on the path to the root of a worktree
///   (none of its ancestors or descendants can contain the same present label).
/// - Known Absent - we know there's definitely no project root at this node and none of its ancestors are Present (descendants can be present though!).
/// - Forbidden - we know there's definitely no project root at this node and none of its ancestors or descendants can be Present.
/// The distinction is there to optimize searching; when we encounter a node with unknown status, we don't need to look at its full path
/// to the root of the worktree; it's sufficient to explore only the path between the last node with a KnownAbsent state and the directory of a path, since we run searches
/// from the leaf up to the root of the worktree. When any of the ancestors is forbidden, we don't need to look at the node or its ancestors.
/// When there's a Present-labeled node on the path to the root, we don't need to ask the adapter to run the search at all.
///
/// In practical terms, it means that by storing label presence we don't need to do project discovery on a given folder more than once
/// (unless the node is invalidated, which can happen when FS entries are renamed/removed).
///
/// Storing project absence allows us to recognize which paths have already been scanned for a project root unsuccessfully. This way we don't need to run
/// such a scan more than once.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Ord, Eq)]
pub(super) enum LabelPresence {
    KnownAbsent,
    Present,
}

impl<Label: Ord + Clone> RootPathTrie<Label> {
    pub(super) fn new() -> Self {
        Self::new_with_key(Arc::from(Path::new("")))
    }

    fn new_with_key(worktree_relative_path: Arc<Path>) -> Self {
        RootPathTrie {
            worktree_relative_path,
            labels: Default::default(),
            children: Default::default(),
        }
    }

    // Internal implementation of `insert` that returns the node at the insertion point, allowing callers to visit its descendants.
    fn insert_inner(
        &mut self,
        path: &TriePath,
        value: Label,
        presence: LabelPresence,
    ) -> &mut Self {
        let mut current = self;

        let mut path_so_far = PathBuf::new();
        for key in path.0.iter() {
            path_so_far.push(Path::new(key));
            current = match current.children.entry(key.clone()) {
                Entry::Vacant(vacant_entry) => vacant_entry
                    .insert(RootPathTrie::new_with_key(Arc::from(path_so_far.as_path()))),
                Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
            };
        }
        let _previous_value = current.labels.insert(value, presence);
        debug_assert_eq!(_previous_value, None);
        current
    }

    pub(super) fn insert(&mut self, path: &TriePath, value: Label, presence: LabelPresence) {
        self.insert_inner(path, value, presence);
    }

    pub(super) fn walk<'a>(
        &'a self,
        path: &TriePath,
        callback: &mut dyn for<'b> FnMut(
            &'b Arc<Path>,
            &'a BTreeMap<Label, LabelPresence>,
        ) -> ControlFlow<()>,
    ) {
        let mut current = self;
        for key in path.0.iter() {
            if !current.labels.is_empty() {
                if (callback)(&current.worktree_relative_path, &current.labels).is_break() {
                    return;
                };
            }
            current = match current.children.get(key) {
                Some(child) => child,
                None => return,
            };
        }
        if !current.labels.is_empty() {
            (callback)(&current.worktree_relative_path, &current.labels);
        }
    }

    pub(super) fn remove(&mut self, path: &TriePath) {
        debug_assert_ne!(path.0.len(), 0);
        let mut current = self;
        for path in path.0.iter().take(path.0.len().saturating_sub(1)) {
            current = match current.children.get_mut(path) {
                Some(child) => child,
                None => return,
            };
        }
        if let Some(final_entry_name) = path.0.last() {
            current.children.remove(final_entry_name);
        }
    }
}

/// [TriePath] is a [Path] preprocessed for amortizing the cost of doing multiple lookups in distinct [RootPathTrie]s.
#[derive(Clone)]
pub(super) struct TriePath(Arc<[Arc<OsStr>]>);

impl From<&Path> for TriePath {
    fn from(value: &Path) -> Self {
        TriePath(value.components().map(|c| c.as_os_str().into()).collect())
    }
}

#[cfg(test)]
mod tests {
    use std::collections::BTreeSet;

    use super::*;

    #[test]
    fn test_insert_and_lookup() {
        let mut trie = RootPathTrie::<()>::new();
        trie.insert(
            &TriePath::from(Path::new("a/b/c")),
            (),
            LabelPresence::Present,
        );

        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
            assert_eq!(path.as_ref(), Path::new("a/b/c"));
            ControlFlow::Continue(())
        });
        // Now let's annotate a parent with a "known absent" label.
        trie.insert(
            &TriePath::from(Path::new("a")),
            (),
            LabelPresence::KnownAbsent,
        );

        // Ensure that we walk from the root to the leaf.
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            if path.as_ref() == Path::new("a/b/c") {
                assert_eq!(
                    visited_paths,
                    BTreeSet::from_iter([Arc::from(Path::new("a/"))])
                );
                assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
            } else if path.as_ref() == Path::new("a/") {
                assert!(visited_paths.is_empty());
                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
            } else {
                panic!("Unknown path");
            }
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });

        // One can also pass a path whose prefix is in the tree, but not that path itself.
        let mut visited_paths = BTreeSet::new();
        trie.walk(
            &TriePath::from(Path::new("a/b/c/d/e/f/g")),
            &mut |path, nodes| {
                if path.as_ref() == Path::new("a/b/c") {
                    assert_eq!(
                        visited_paths,
                        BTreeSet::from_iter([Arc::from(Path::new("a/"))])
                    );
                    assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
                } else if path.as_ref() == Path::new("a/") {
                    assert!(visited_paths.is_empty());
                    assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
                } else {
                    panic!("Unknown path");
                }
                // Assert that we only ever visit a path once.
                assert!(visited_paths.insert(path.clone()));
                ControlFlow::Continue(())
            },
        );

        // Test breaking from the tree-walk.
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            if path.as_ref() == Path::new("a/") {
                assert!(visited_paths.is_empty());
                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
            } else {
                panic!("Unknown path");
            }
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Break(())
        });
        assert_eq!(visited_paths.len(), 1);

        // Entry removal.
        trie.insert(
            &TriePath::from(Path::new("a/b")),
            (),
            LabelPresence::KnownAbsent,
        );
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, _nodes| {
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });
        assert_eq!(visited_paths.len(), 3);
        trie.remove(&TriePath::from(Path::new("a/b/")));
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, _nodes| {
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });
        assert_eq!(visited_paths.len(), 1);
        assert_eq!(
            visited_paths.into_iter().next().unwrap().as_ref(),
            Path::new("a/")
        );
    }
}
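To make the trie above concrete, here is an editor's sketch (not part of the diff) that mirrors the test setup: a TriePath is preprocessed once and can be reused across per-worktree tries, and walk visits labeled ancestors from the worktree root toward the leaf, stopping early on ControlFlow::Break.

    // Sketch only: mark a known project root, then resolve it for a nested file.
    let mut trie = RootPathTrie::<()>::new();
    trie.insert(
        &TriePath::from(Path::new("python/project")),
        (),
        LabelPresence::Present,
    );

    // Preprocess the query path once; the same TriePath could be reused
    // against other worktrees' tries without re-splitting the Path.
    let query = TriePath::from(Path::new("python/project/subdir/another_subdir/file.py"));
    let mut closest_root = None;
    trie.walk(&query, &mut |path, labels| {
        if labels.get(&()) == Some(&LabelPresence::Present) {
            // A Present label means project discovery already ran here.
            closest_root = Some(path.clone());
            return ControlFlow::Break(());
        }
        ControlFlow::Continue(())
    });
    assert_eq!(closest_root.as_deref(), Some(Path::new("python/project")));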
461
crates/project/src/project_tree/server_tree.rs
Normal file

@@ -0,0 +1,461 @@
//! This module defines an LSP Tree.
//!
//! An LSP Tree is responsible for determining which language servers apply to a given project path.
//!
//! ## RPC
//! The LSP Tree is transparent to RPC peers; when clients ask the host to spawn a new language server, the host will perform an LSP Tree lookup for the provided path; it may decide
//! to reuse an existing language server. The client maintains its own LSP Tree that is a subset of the host's LSP Tree. Done this way, the client does not need to
//! ask about a suitable language server for each path it interacts with; it can resolve most of the queries locally.
//! This module defines a Project Tree.

use std::{
    collections::{BTreeMap, BTreeSet},
    path::Path,
    sync::{Arc, Weak},
};

use collections::{HashMap, IndexMap};
use gpui::{App, AppContext, Entity, Subscription};
use itertools::Itertools;
use language::{
    language_settings::AllLanguageSettings, Attach, LanguageName, LanguageRegistry,
    LspAdapterDelegate,
};
use lsp::LanguageServerName;
use once_cell::sync::OnceCell;
use settings::{Settings, SettingsLocation, WorktreeId};
use util::maybe;

use crate::{project_settings::LspSettings, LanguageServerId, ProjectPath};

use super::{AdapterWrapper, ProjectTree, ProjectTreeEvent};

#[derive(Debug, Default)]
struct ServersForWorktree {
    roots: BTreeMap<
        Arc<Path>,
        BTreeMap<LanguageServerName, (Arc<InnerTreeNode>, BTreeSet<LanguageName>)>,
    >,
}

pub struct LanguageServerTree {
    project_tree: Entity<ProjectTree>,
    instances: BTreeMap<WorktreeId, ServersForWorktree>,
    attach_kind_cache: HashMap<LanguageServerName, Attach>,
    languages: Arc<LanguageRegistry>,
    _subscriptions: Subscription,
}

/// A node in the language server tree represents either:
/// - A language server that has already been initialized/updated for a given project
/// - A soon-to-be-initialized language server.
#[derive(Clone)]
pub(crate) struct LanguageServerTreeNode(Weak<InnerTreeNode>);

/// Describes a request to launch a language server.
#[derive(Debug)]
pub(crate) struct LaunchDisposition<'a> {
    pub(crate) server_name: &'a LanguageServerName,
    pub(crate) attach: Attach,
    pub(crate) path: ProjectPath,
    pub(crate) settings: Arc<LspSettings>,
}

impl<'a> From<&'a InnerTreeNode> for LaunchDisposition<'a> {
    fn from(value: &'a InnerTreeNode) -> Self {
        LaunchDisposition {
            server_name: &value.name,
            attach: value.attach,
            path: value.path.clone(),
            settings: value.settings.clone(),
        }
    }
}

impl LanguageServerTreeNode {
    /// Returns a language server ID for this node if there is one.
    /// Returns None if this node has not been initialized yet or it is no longer in the tree.
    pub(crate) fn server_id(&self) -> Option<LanguageServerId> {
        self.0.upgrade()?.id.get().copied()
    }

    /// Returns a language server ID for this node if it has already been initialized; otherwise runs the provided closure to initialize the language server node in the tree.
    /// May return None if the node no longer belongs to the server tree it was created in.
    pub(crate) fn server_id_or_init(
        &self,
        init: impl FnOnce(LaunchDisposition) -> LanguageServerId,
    ) -> Option<LanguageServerId> {
        self.server_id_or_try_init(|disposition| Ok(init(disposition)))
    }

    fn server_id_or_try_init(
        &self,
        init: impl FnOnce(LaunchDisposition) -> Result<LanguageServerId, ()>,
    ) -> Option<LanguageServerId> {
        let this = self.0.upgrade()?;
        this.id
            .get_or_try_init(|| init(LaunchDisposition::from(&*this)))
            .ok()
            .copied()
    }
}

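// Editor's sketch (not part of this change): the lazy-init contract above relies on
// once_cell's OnceCell::get_or_try_init: the closure runs only when no id has been
// stored yet, and returning Err(()) leaves the cell unset so a later caller can try
// again. A minimal standalone illustration:
//
//     use once_cell::sync::OnceCell;
//
//     let id: OnceCell<u64> = OnceCell::new();
//     // The first successful init stores the value...
//     assert_eq!(id.get_or_try_init(|| Ok::<_, ()>(7)), Ok(&7));
//     // ...and later calls return it without re-running the closure.
//     assert_eq!(id.get_or_try_init(|| Err(())), Ok(&7));
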
impl From<Weak<InnerTreeNode>> for LanguageServerTreeNode {
    fn from(weak: Weak<InnerTreeNode>) -> Self {
        LanguageServerTreeNode(weak)
    }
}

#[derive(Debug)]
struct InnerTreeNode {
    id: OnceCell<LanguageServerId>,
    name: LanguageServerName,
    attach: Attach,
    path: ProjectPath,
    settings: Arc<LspSettings>,
}

impl InnerTreeNode {
    fn new(
        name: LanguageServerName,
        attach: Attach,
        path: ProjectPath,
        settings: impl Into<Arc<LspSettings>>,
    ) -> Self {
        InnerTreeNode {
            id: Default::default(),
            name,
            attach,
            path,
            settings: settings.into(),
        }
    }
}

/// Determines how the list of adapters to query should be constructed.
pub(crate) enum AdapterQuery<'a> {
    /// Search for roots of all adapters associated with a given language name.
    Language(&'a LanguageName),
    /// Search for roots of the adapter with a given name.
    Adapter(&'a LanguageServerName),
}

impl LanguageServerTree {
    pub(crate) fn new(
        project_tree: Entity<ProjectTree>,
        languages: Arc<LanguageRegistry>,
        cx: &mut App,
    ) -> Entity<Self> {
        cx.new(|cx| Self {
            _subscriptions: cx.subscribe(
                &project_tree,
                |_: &mut Self, _, event, _| {
                    if event == &ProjectTreeEvent::Cleared {}
                },
            ),
            project_tree,
            instances: Default::default(),
            attach_kind_cache: Default::default(),
            languages,
        })
    }

    /// Memoize calls to attach_kind on LspAdapter (which might be a WASM extension, thus ~expensive to call).
    fn attach_kind(&mut self, adapter: &AdapterWrapper) -> Attach {
        *self
            .attach_kind_cache
            .entry(adapter.0.name.clone())
            .or_insert_with(|| adapter.0.attach_kind())
    }

    /// Get all language server root points for a given path and language; the language servers might already be initialized at a given path.
    pub(crate) fn get<'a>(
        &'a mut self,
        path: ProjectPath,
        query: AdapterQuery<'_>,
        delegate: Arc<dyn LspAdapterDelegate>,
        cx: &mut App,
    ) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
        let settings_location = SettingsLocation {
            worktree_id: path.worktree_id,
            path: &path.path,
        };
        let adapters = match query {
            AdapterQuery::Language(language_name) => {
                self.adapters_for_language(settings_location, language_name, cx)
            }
            AdapterQuery::Adapter(language_server_name) => IndexMap::from_iter(
                self.adapter_for_name(language_server_name)
                    .map(|adapter| (adapter, (LspSettings::default(), BTreeSet::new()))),
            ),
        };
        self.get_with_adapters(path, adapters, delegate, cx)
    }

    fn get_with_adapters<'a>(
        &'a mut self,
        path: ProjectPath,
        adapters: IndexMap<AdapterWrapper, (LspSettings, BTreeSet<LanguageName>)>,
        delegate: Arc<dyn LspAdapterDelegate>,
        cx: &mut App,
    ) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
        let worktree_id = path.worktree_id;
        #[allow(clippy::mutable_key_type)]
        let mut roots = self.project_tree.update(cx, |this, cx| {
            this.root_for_path(
                path,
                adapters
                    .iter()
                    .map(|(adapter, _)| adapter.0.clone())
                    .collect(),
                delegate,
                cx,
            )
        });
        let mut root_path = None;
        // Backwards compatibility: for any adapter whose project root was not detected, fall back to treating the root of the worktree as the project root.
        for (adapter, _) in adapters.iter() {
            roots.entry(adapter.clone()).or_insert_with(|| {
                root_path
                    .get_or_insert_with(|| ProjectPath {
                        worktree_id,
                        path: Arc::from("".as_ref()),
                    })
                    .clone()
            });
        }

        roots
            .into_iter()
            .filter_map(move |(adapter, root_path)| {
                let attach = self.attach_kind(&adapter);
                let (index, _, (settings, new_languages)) = adapters.get_full(&adapter)?;
                let inner_node = self
                    .instances
                    .entry(root_path.worktree_id)
                    .or_default()
                    .roots
                    .entry(root_path.path.clone())
                    .or_default()
                    .entry(adapter.0.name.clone());
                let (node, languages) = inner_node.or_insert_with(move || {
                    (
                        Arc::new(InnerTreeNode::new(
                            adapter.0.name(),
                            attach,
                            root_path,
                            settings.clone(),
                        )),
                        Default::default(),
                    )
                });
                languages.extend(new_languages.iter().cloned());
                Some((index, Arc::downgrade(&node).into()))
            })
            .sorted_by_key(|(index, _)| *index)
            .map(|(_, node)| node)
    }

    fn adapter_for_name(&self, name: &LanguageServerName) -> Option<AdapterWrapper> {
        self.languages.adapter_for_name(name).map(AdapterWrapper)
    }

    fn adapters_for_language(
        &self,
        settings_location: SettingsLocation,
        language_name: &LanguageName,
        cx: &App,
    ) -> IndexMap<AdapterWrapper, (LspSettings, BTreeSet<LanguageName>)> {
        let settings = AllLanguageSettings::get(Some(settings_location), cx).language(
            Some(settings_location),
            Some(language_name),
            cx,
        );
        if !settings.enable_language_server {
            return Default::default();
        }
        let available_lsp_adapters = self.languages.lsp_adapters(&language_name);
        let available_language_servers = available_lsp_adapters
            .iter()
            .map(|lsp_adapter| lsp_adapter.name.clone())
            .collect::<Vec<_>>();

        let desired_language_servers =
            settings.customized_language_servers(&available_language_servers);
        let adapters_with_settings = desired_language_servers
            .into_iter()
            .filter_map(|desired_adapter| {
                let adapter = if let Some(adapter) = available_lsp_adapters
                    .iter()
                    .find(|adapter| adapter.name == desired_adapter)
                {
                    Some(adapter.clone())
                } else if let Some(adapter) =
                    self.languages.load_available_lsp_adapter(&desired_adapter)
                {
                    self.languages
                        .register_lsp_adapter(language_name.clone(), adapter.adapter.clone());
                    Some(adapter)
                } else {
                    None
                }?;
                let adapter_settings = crate::lsp_store::language_server_settings_for(
                    settings_location,
                    &adapter.name,
                    cx,
                )
                .cloned()
                .unwrap_or_default();
                Some((
                    AdapterWrapper(adapter),
                    (
                        adapter_settings,
                        BTreeSet::from_iter([language_name.clone()]),
                    ),
                ))
            })
            .collect::<IndexMap<_, _>>();
        // After starting all the language servers, reorder them to reflect the desired order
        // based on the settings.
        //
        // This is done, in part, to ensure that language servers loaded at different points
        // (e.g., native vs extension) still end up in the right order at the end, rather than
        // it being based on which language server happened to be loaded in first.
        self.languages.reorder_language_servers(
            &language_name,
            adapters_with_settings
                .keys()
                .map(|wrapper| wrapper.0.clone())
                .collect(),
        );

        adapters_with_settings
    }

    pub(crate) fn on_settings_changed(
        &mut self,
        get_delegate: &mut dyn FnMut(WorktreeId, &mut App) -> Option<Arc<dyn LspAdapterDelegate>>,
        spawn_language_server: &mut dyn FnMut(LaunchDisposition, &mut App) -> LanguageServerId,
        on_language_server_removed: &mut dyn FnMut(LanguageServerId),
        cx: &mut App,
    ) {
        // Settings are checked at query time. Thus, to avoid messing with inference of applicable settings, we're just going to clear ourselves and let the next query repopulate.
        // We're going to optimistically re-run the queries and re-assign the same language server id when a language server still exists at a given tree node.
        let old_instances = std::mem::take(&mut self.instances);
        let old_attach_kinds = std::mem::take(&mut self.attach_kind_cache);

        let mut referenced_instances = BTreeSet::new();
        // Re-map the old tree onto a new one. In the process we'll get a list of servers we have to shut down.
        let mut all_instances = BTreeSet::new();

        for (worktree_id, servers) in &old_instances {
            // Record all initialized node ids.
            all_instances.extend(servers.roots.values().flat_map(|servers_at_node| {
                servers_at_node
                    .values()
                    .filter_map(|(server_node, _)| server_node.id.get().copied())
            }));
            let Some(delegate) = get_delegate(*worktree_id, cx) else {
                // If the worktree is no longer around, we're just going to shut down all of its language servers (since they've been added to all_instances).
                continue;
            };

            for (path, servers_for_path) in &servers.roots {
                for (server_name, (_, languages)) in servers_for_path {
                    let settings_location = SettingsLocation {
                        worktree_id: *worktree_id,
                        path: &path,
                    };
                    // Verify which of the previous languages still have this server enabled.
                    let mut adapter_with_settings = IndexMap::default();

                    for language_name in languages {
                        self.adapters_for_language(settings_location, language_name, cx)
                            .into_iter()
                            .for_each(|(lsp_adapter, lsp_settings)| {
                                if &lsp_adapter.0.name() != server_name {
                                    return;
                                }
                                adapter_with_settings
                                    .entry(lsp_adapter)
                                    .and_modify(|x: &mut (_, BTreeSet<LanguageName>)| {
                                        x.1.extend(lsp_settings.1.clone())
                                    })
                                    .or_insert(lsp_settings);
                            });
                    }

                    if adapter_with_settings.is_empty() {
                        // Since all languages that have had this server enabled are now disabled, we can remove the server entirely.
                        continue;
                    };

                    for new_node in self.get_with_adapters(
                        ProjectPath {
                            path: path.clone(),
                            worktree_id: *worktree_id,
                        },
                        adapter_with_settings,
                        delegate.clone(),
                        cx,
                    ) {
                        new_node.server_id_or_try_init(|disposition| {
                            let Some((existing_node, _)) = servers
                                .roots
                                .get(&disposition.path.path)
                                .and_then(|roots| roots.get(disposition.server_name))
                                .filter(|(old_node, _)| {
                                    old_attach_kinds.get(disposition.server_name).map_or(
                                        false,
                                        |old_attach| {
                                            disposition.attach == *old_attach
                                                && disposition.settings == old_node.settings
                                        },
                                    )
                                })
                            else {
                                return Ok(spawn_language_server(disposition, cx));
                            };
                            if let Some(id) = existing_node.id.get().copied() {
                                // If we have a node with an ID assigned (and its parameters match `disposition`), reuse the id.
                                referenced_instances.insert(id);
                                Ok(id)
                            } else {
                                // Otherwise, if we do have a node but it does not have an ID assigned, keep it that way.
                                Err(())
                            }
                        });
                    }
                }
            }
        }
        for server_to_remove in all_instances.difference(&referenced_instances) {
            on_language_server_removed(*server_to_remove);
        }
    }

    /// Updates nodes in the language server tree in place, changing the ID of initialized nodes.
    pub(crate) fn restart_language_servers(
        &mut self,
        worktree_id: WorktreeId,
        ids: BTreeSet<LanguageServerId>,
        restart_callback: &mut dyn FnMut(LanguageServerId, LaunchDisposition) -> LanguageServerId,
    ) {
        maybe! {{
            for (_, nodes) in &mut self.instances.get_mut(&worktree_id)?.roots {
                for (_, (node, _)) in nodes {
                    let Some(old_server_id) = node.id.get().copied() else {
                        continue;
                    };
                    if !ids.contains(&old_server_id) {
                        continue;
                    }

                    let new_id = restart_callback(old_server_id, LaunchDisposition::from(&**node));

                    *node = Arc::new(InnerTreeNode::new(node.name.clone(), node.attach, node.path.clone(), node.settings.clone()));
                    node.id.set(new_id).expect("The id to be unset after clearing the node.");
                }
            }
            Some(())
        }};
    }
}
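As a usage illustration for the tree above, here is an editor's sketch (not part of the diff); `tree`, `project_path`, `language_name`, `delegate`, and `cx` are assumed to be in scope, and `spawn_server` is a hypothetical callback that actually launches a server:

    // Sketch only: resolve the applicable server nodes for a buffer's path,
    // then lazily spawn any that do not have a server id assigned yet.
    let nodes: Vec<LanguageServerTreeNode> = tree.update(cx, |tree, cx| {
        tree.get(
            project_path.clone(),
            AdapterQuery::Language(&language_name),
            delegate.clone(),
            cx,
        )
        .collect()
    });
    for node in nodes {
        // The init closure runs at most once per node; later calls reuse the id.
        node.server_id_or_init(|disposition| spawn_server(disposition, cx));
    }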
@@ -775,6 +775,7 @@ message Symbol {
     PointUtf16 start = 7;
     PointUtf16 end = 8;
     bytes signature = 9;
+    uint64 language_server_id = 10;
 }

 message OpenBufferForSymbol {
@@ -16,4 +16,4 @@ doctest = false
 [dependencies]
 syn = "1.0.72"
 quote = "1.0.9"
-proc-macro2 = "1.0.66"
+proc-macro2 = "1.0.93"
@@ -13,7 +13,7 @@ path = "src/ui_macros.rs"
 proc-macro = true

 [dependencies]
-proc-macro2 = "1.0.66"
+proc-macro2 = "1.0.93"
 quote = "1.0.9"
 syn = { version = "1.0.72", features = ["full", "extra-traits"] }
 convert_case.workspace = true
@@ -95,26 +95,17 @@ impl Render for QuickActionBar {
             show_git_blame_gutter,
             auto_signature_help_enabled,
             inline_completions_enabled,
-        ) = {
-            let editor = editor.read(cx);
-            let selection_menu_enabled = editor.selection_menu_enabled(cx);
-            let inlay_hints_enabled = editor.inlay_hints_enabled();
-            let supports_inlay_hints = editor.supports_inlay_hints(cx);
-            let git_blame_inline_enabled = editor.git_blame_inline_enabled();
-            let show_git_blame_gutter = editor.show_git_blame_gutter();
-            let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx);
-            let inline_completions_enabled = editor.inline_completions_enabled(cx);
-
+        ) = editor.update(cx, |editor, cx| {
             (
-                selection_menu_enabled,
-                inlay_hints_enabled,
-                supports_inlay_hints,
-                git_blame_inline_enabled,
-                show_git_blame_gutter,
-                auto_signature_help_enabled,
-                inline_completions_enabled,
+                editor.selection_menu_enabled(cx),
+                editor.inlay_hints_enabled(),
+                editor.supports_inlay_hints(cx),
+                editor.git_blame_inline_enabled(),
+                editor.show_git_blame_gutter(),
+                editor.auto_signature_help_enabled(cx),
+                editor.inline_completions_enabled(cx),
             )
-        };
+        });

         let focus_handle = editor.read(cx).focus_handle(cx);
@@ -462,16 +453,19 @@ impl ToolbarItemView for QuickActionBar {

         if let Some(editor) = active_item.downcast::<Editor>() {
             let mut inlay_hints_enabled = editor.read(cx).inlay_hints_enabled();
-            let mut supports_inlay_hints = editor.read(cx).supports_inlay_hints(cx);
+            let mut supports_inlay_hints =
+                editor.update(cx, |this, cx| this.supports_inlay_hints(cx));
             self._inlay_hints_enabled_subscription =
                 Some(cx.observe(&editor, move |_, editor, cx| {
-                    let editor = editor.read(cx);
-                    let new_inlay_hints_enabled = editor.inlay_hints_enabled();
-                    let new_supports_inlay_hints = editor.supports_inlay_hints(cx);
-                    let should_notify = inlay_hints_enabled != new_inlay_hints_enabled
-                        || supports_inlay_hints != new_supports_inlay_hints;
-                    inlay_hints_enabled = new_inlay_hints_enabled;
-                    supports_inlay_hints = new_supports_inlay_hints;
+                    let mut should_notify = false;
+                    editor.update(cx, |editor, cx| {
+                        let new_inlay_hints_enabled = editor.inlay_hints_enabled();
+                        let new_supports_inlay_hints = editor.supports_inlay_hints(cx);
+                        should_notify = inlay_hints_enabled != new_inlay_hints_enabled
+                            || supports_inlay_hints != new_supports_inlay_hints;
+                        inlay_hints_enabled = new_inlay_hints_enabled;
+                        supports_inlay_hints = new_supports_inlay_hints;
+                    });
                     if should_notify {
                         cx.notify()
                     }