project: Fine-grained language server management (#24038)
Closes #ISSUE https://github.com/zed-industries/zed/pull/23804 Release Notes: - Improved detection of project roots for use by language servers. Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... --------- Co-authored-by: smit <0xtimsb@gmail.com> Co-authored-by: Henrikh Kantuni <henrikh.kantuni@gmail.com> Co-authored-by: Caleb! <48127194+kaf-lamed-beyt@users.noreply.github.com> Co-authored-by: Marshall Bowers <git@maxdeviant.com> Co-authored-by: Kirill Bulatov <kirill@zed.dev> Co-authored-by: Agus Zubiaga <agus@zed.dev> Co-authored-by: Danilo <danilo@zed.dev> Co-authored-by: Nate Butler <iamnbutler@gmail.com>
This commit is contained in:
parent
8d839fca06
commit
a618830aea
33 changed files with 2350 additions and 962 deletions
|
@ -442,6 +442,7 @@ nanoid = "0.4"
|
|||
nbformat = { version = "0.10.0" }
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
once_cell = "1.20"
|
||||
ordered-float = "2.1.1"
|
||||
palette = { version = "0.7.5", default-features = false, features = ["std"] }
|
||||
parking_lot = "0.12.1"
|
||||
|
|
|
@ -4438,15 +4438,14 @@ async fn test_formatting_buffer(
|
|||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.join_remote_project(project_id, cx_b).await;
|
||||
let lsp_store_b = project_b.update(cx_b, |p, _| p.lsp_store());
|
||||
|
||||
let buffer_b = project_b
|
||||
.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let _handle = lsp_store_b.update(cx_b, |lsp_store, cx| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer_b, cx)
|
||||
let _handle = project_b.update(cx_b, |project, cx| {
|
||||
project.register_buffer_with_language_servers(&buffer_b, cx)
|
||||
});
|
||||
let fake_language_server = fake_language_servers.next().await.unwrap();
|
||||
fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
|
||||
|
|
|
@ -15,7 +15,7 @@ path = "src/component.rs"
|
|||
collections.workspace = true
|
||||
gpui.workspace = true
|
||||
linkme.workspace = true
|
||||
once_cell = "1.20.3"
|
||||
once_cell.workspace = true
|
||||
parking_lot.workspace = true
|
||||
theme.workspace = true
|
||||
|
||||
|
|
|
@ -458,12 +458,14 @@ impl Copilot {
|
|||
.on_notification::<StatusNotification, _>(|_, _| { /* Silence the notification */ })
|
||||
.detach();
|
||||
|
||||
let initialize_params = None;
|
||||
let configuration = lsp::DidChangeConfigurationParams {
|
||||
settings: Default::default(),
|
||||
};
|
||||
let server = cx
|
||||
.update(|cx| server.initialize(initialize_params, configuration.into(), cx))?
|
||||
.update(|cx| {
|
||||
let params = server.default_initialize_params(cx);
|
||||
server.initialize(params, configuration.into(), cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let status = server
|
||||
|
|
|
@ -134,7 +134,7 @@ use project::{
|
|||
lsp_store::{FormatTrigger, LspFormatTarget, OpenLspBufferHandle},
|
||||
project_settings::{GitGutterSetting, ProjectSettings},
|
||||
CodeAction, Completion, CompletionIntent, DocumentHighlight, InlayHint, Location, LocationLink,
|
||||
LspStore, PrepareRenameResponse, Project, ProjectItem, ProjectTransaction, TaskSourceKind,
|
||||
PrepareRenameResponse, Project, ProjectItem, ProjectTransaction, TaskSourceKind,
|
||||
};
|
||||
use rand::prelude::*;
|
||||
use rpc::{proto::*, ErrorExt};
|
||||
|
@ -1492,9 +1492,8 @@ impl Editor {
|
|||
|
||||
if let Some(buffer) = buffer.read(cx).as_singleton() {
|
||||
if let Some(project) = this.project.as_ref() {
|
||||
let lsp_store = project.read(cx).lsp_store();
|
||||
let handle = lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx)
|
||||
let handle = project.update(cx, |project, cx| {
|
||||
project.register_buffer_with_language_servers(&buffer, cx)
|
||||
});
|
||||
this.registered_buffers
|
||||
.insert(buffer.read(cx).remote_id(), handle);
|
||||
|
@ -1891,16 +1890,14 @@ impl Editor {
|
|||
|
||||
fn register_buffers_with_language_servers(&mut self, cx: &mut Context<Self>) {
|
||||
let buffers = self.buffer.read(cx).all_buffers();
|
||||
let Some(lsp_store) = self.lsp_store(cx) else {
|
||||
let Some(project) = self.project.as_ref() else {
|
||||
return;
|
||||
};
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
for buffer in buffers {
|
||||
self.registered_buffers
|
||||
.entry(buffer.read(cx).remote_id())
|
||||
.or_insert_with(|| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx)
|
||||
});
|
||||
.or_insert_with(|| project.register_buffer_with_language_servers(&buffer, cx));
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -2100,14 +2097,14 @@ impl Editor {
|
|||
};
|
||||
if let Some(buffer_id) = new_cursor_position.buffer_id {
|
||||
if !self.registered_buffers.contains_key(&buffer_id) {
|
||||
if let Some(lsp_store) = self.lsp_store(cx) {
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
if let Some(project) = self.project.as_ref() {
|
||||
project.update(cx, |project, cx| {
|
||||
let Some(buffer) = self.buffer.read(cx).buffer(buffer_id) else {
|
||||
return;
|
||||
};
|
||||
self.registered_buffers.insert(
|
||||
buffer_id,
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx),
|
||||
project.register_buffer_with_language_servers(&buffer, cx),
|
||||
);
|
||||
})
|
||||
}
|
||||
|
@ -11638,7 +11635,10 @@ impl Editor {
|
|||
if let Some(project) = self.project.clone() {
|
||||
self.buffer.update(cx, |multi_buffer, cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers(multi_buffer.all_buffers(), cx);
|
||||
project.restart_language_servers_for_buffers(
|
||||
multi_buffer.all_buffers().into_iter().collect(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
})
|
||||
}
|
||||
|
@ -14051,12 +14051,6 @@ impl Editor {
|
|||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn lsp_store(&self, cx: &App) -> Option<Entity<LspStore>> {
|
||||
self.project
|
||||
.as_ref()
|
||||
.map(|project| project.read(cx).lsp_store())
|
||||
}
|
||||
|
||||
fn on_buffer_changed(&mut self, _: Entity<MultiBuffer>, cx: &mut Context<Self>) {
|
||||
cx.notify();
|
||||
}
|
||||
|
@ -14083,11 +14077,11 @@ impl Editor {
|
|||
if let Some(buffer) = buffer_edited {
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
if !self.registered_buffers.contains_key(&buffer_id) {
|
||||
if let Some(lsp_store) = self.lsp_store(cx) {
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
if let Some(project) = self.project.as_ref() {
|
||||
project.update(cx, |project, cx| {
|
||||
self.registered_buffers.insert(
|
||||
buffer_id,
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx),
|
||||
project.register_buffer_with_language_servers(&buffer, cx),
|
||||
);
|
||||
})
|
||||
}
|
||||
|
@ -14097,28 +14091,23 @@ impl Editor {
|
|||
cx.emit(SearchEvent::MatchesInvalidated);
|
||||
if *singleton_buffer_edited {
|
||||
if let Some(project) = &self.project {
|
||||
let project = project.read(cx);
|
||||
#[allow(clippy::mutable_key_type)]
|
||||
let languages_affected = multibuffer
|
||||
.read(cx)
|
||||
.all_buffers()
|
||||
.into_iter()
|
||||
.filter_map(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
let language = buffer.language()?;
|
||||
if project.is_local()
|
||||
&& project
|
||||
.language_servers_for_local_buffer(buffer, cx)
|
||||
.count()
|
||||
== 0
|
||||
{
|
||||
None
|
||||
} else {
|
||||
Some(language)
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
let languages_affected = multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer
|
||||
.all_buffers()
|
||||
.into_iter()
|
||||
.filter_map(|buffer| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let language = buffer.language()?;
|
||||
let should_discard = project.update(cx, |project, cx| {
|
||||
project.is_local()
|
||||
&& !project.has_language_servers_for(buffer, cx)
|
||||
});
|
||||
should_discard.not().then_some(language.clone())
|
||||
})
|
||||
})
|
||||
.collect::<HashSet<_>>()
|
||||
});
|
||||
if !languages_affected.is_empty() {
|
||||
self.refresh_inlay_hints(
|
||||
InlayHintRefreshReason::BufferEdited(languages_affected),
|
||||
|
@ -14711,15 +14700,18 @@ impl Editor {
|
|||
self.handle_input(text, window, cx);
|
||||
}
|
||||
|
||||
pub fn supports_inlay_hints(&self, cx: &App) -> bool {
|
||||
pub fn supports_inlay_hints(&self, cx: &mut App) -> bool {
|
||||
let Some(provider) = self.semantics_provider.as_ref() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let mut supports = false;
|
||||
self.buffer().read(cx).for_each_buffer(|buffer| {
|
||||
supports |= provider.supports_inlay_hints(buffer, cx);
|
||||
self.buffer().update(cx, |this, cx| {
|
||||
this.for_each_buffer(|buffer| {
|
||||
supports |= provider.supports_inlay_hints(buffer, cx);
|
||||
});
|
||||
});
|
||||
|
||||
supports
|
||||
}
|
||||
|
||||
|
@ -15267,7 +15259,7 @@ pub trait SemanticsProvider {
|
|||
cx: &mut App,
|
||||
) -> Option<Task<anyhow::Result<InlayHint>>>;
|
||||
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool;
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;
|
||||
|
||||
fn document_highlights(
|
||||
&self,
|
||||
|
@ -15661,17 +15653,13 @@ impl SemanticsProvider for Entity<Project> {
|
|||
}))
|
||||
}
|
||||
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool {
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
|
||||
// TODO: make this work for remote projects
|
||||
self.read(cx)
|
||||
.language_servers_for_local_buffer(buffer.read(cx), cx)
|
||||
.any(
|
||||
|(_, server)| match server.capabilities().inlay_hint_provider {
|
||||
Some(lsp::OneOf::Left(enabled)) => enabled,
|
||||
Some(lsp::OneOf::Right(_)) => true,
|
||||
None => false,
|
||||
},
|
||||
)
|
||||
self.update(cx, |this, cx| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
this.any_language_server_supports_inlay_hints(buffer, cx)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inlay_hints(
|
||||
|
|
|
@ -21,7 +21,6 @@ where
|
|||
let Some(project) = &editor.project else {
|
||||
return None;
|
||||
};
|
||||
let multibuffer = editor.buffer().read(cx);
|
||||
let mut language_servers_for = HashMap::default();
|
||||
editor
|
||||
.selections
|
||||
|
@ -29,29 +28,21 @@ where
|
|||
.iter()
|
||||
.filter(|selection| selection.start == selection.end)
|
||||
.filter_map(|selection| Some((selection.start.buffer_id?, selection.start)))
|
||||
.filter_map(|(buffer_id, trigger_anchor)| {
|
||||
let buffer = multibuffer.buffer(buffer_id)?;
|
||||
.find_map(|(buffer_id, trigger_anchor)| {
|
||||
let buffer = editor.buffer().read(cx).buffer(buffer_id)?;
|
||||
let server_id = *match language_servers_for.entry(buffer_id) {
|
||||
Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
|
||||
Entry::Vacant(vacant_entry) => {
|
||||
let language_server_id = project
|
||||
.read(cx)
|
||||
.language_servers_for_local_buffer(buffer.read(cx), cx)
|
||||
.find_map(|(adapter, server)| {
|
||||
if adapter.name.0.as_ref() == language_server_name {
|
||||
Some(server.server_id())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
let language_server_id = buffer.update(cx, |buffer, cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.language_server_id_for_name(buffer, language_server_name, cx)
|
||||
})
|
||||
});
|
||||
vacant_entry.insert(language_server_id)
|
||||
}
|
||||
}
|
||||
.as_ref()?;
|
||||
|
||||
Some((buffer, trigger_anchor, server_id))
|
||||
})
|
||||
.find_map(|(buffer, trigger_anchor, server_id)| {
|
||||
let language = buffer.read(cx).language_at(trigger_anchor.text_anchor)?;
|
||||
if !filter_language(&language) {
|
||||
return None;
|
||||
|
|
|
@ -467,7 +467,7 @@ impl SemanticsProvider for BranchBufferSemanticsProvider {
|
|||
self.0.resolve_inlay_hint(hint, buffer, server_id, cx)
|
||||
}
|
||||
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &App) -> bool {
|
||||
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
|
||||
if let Some(buffer) = self.to_base(&buffer, &[], cx) {
|
||||
self.0.supports_inlay_hints(&buffer, cx)
|
||||
} else {
|
||||
|
|
|
@ -731,8 +731,9 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
|||
|
||||
// Start a new instance of the language server.
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx)
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx)
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
// The extension has cached the binary path, and does not attempt
|
||||
// to reinstall it.
|
||||
|
@ -752,7 +753,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
|||
|
||||
cx.executor().run_until_parked();
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx)
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx)
|
||||
});
|
||||
|
||||
// The extension re-fetches the latest version of the language server.
|
||||
|
|
|
@ -23,7 +23,7 @@ async fn test_matching_paths(cx: &mut TestAppContext) {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a": {
|
||||
"banana": "",
|
||||
|
@ -33,7 +33,7 @@ async fn test_matching_paths(cx: &mut TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
|
@ -153,7 +153,7 @@ async fn test_complex_path(cx: &mut TestAppContext) {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"其他": {
|
||||
"S数据表格": {
|
||||
|
@ -164,7 +164,7 @@ async fn test_complex_path(cx: &mut TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
|
@ -194,7 +194,7 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/src",
|
||||
path!("/src"),
|
||||
json!({
|
||||
"test": {
|
||||
first_file_name: first_file_contents,
|
||||
|
@ -204,7 +204,7 @@ async fn test_row_column_numbers_query_inside_file(cx: &mut TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
|
@ -269,7 +269,7 @@ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/src",
|
||||
path!("/src"),
|
||||
json!({
|
||||
"test": {
|
||||
first_file_name: first_file_contents,
|
||||
|
@ -279,7 +279,7 @@ async fn test_row_column_numbers_query_outside_file(cx: &mut TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
|
@ -1777,7 +1777,7 @@ async fn test_opens_file_on_modifier_keys_release(cx: &mut gpui::TestAppContext)
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/test",
|
||||
path!("/test"),
|
||||
json!({
|
||||
"1.txt": "// One",
|
||||
"2.txt": "// Two",
|
||||
|
@ -1785,7 +1785,7 @@ async fn test_opens_file_on_modifier_keys_release(cx: &mut gpui::TestAppContext)
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
|
||||
|
||||
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
|
||||
|
|
|
@ -298,6 +298,7 @@ mod tests {
|
|||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use std::{num::NonZeroU32, sync::Arc, time::Duration};
|
||||
use util::path;
|
||||
use workspace::{AppState, Workspace};
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -305,7 +306,7 @@ mod tests {
|
|||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": indoc!{"
|
||||
struct SingleLine; // display line 0
|
||||
|
@ -326,7 +327,7 @@ mod tests {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let worktree_id = workspace.update(cx, |workspace, cx| {
|
||||
|
@ -335,7 +336,9 @@ mod tests {
|
|||
})
|
||||
});
|
||||
let _buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let editor = workspace
|
||||
|
@ -414,14 +417,14 @@ mod tests {
|
|||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": "ēlo"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
|
@ -437,7 +440,9 @@ mod tests {
|
|||
})
|
||||
});
|
||||
let _buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let editor = workspace
|
||||
|
@ -497,14 +502,14 @@ mod tests {
|
|||
let text = "ēlo你好";
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": text
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
|
@ -520,7 +525,9 @@ mod tests {
|
|||
})
|
||||
});
|
||||
let _buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let editor = workspace
|
||||
|
@ -573,14 +580,14 @@ mod tests {
|
|||
let text = "ēlo你好";
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": text
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
|
@ -596,7 +603,9 @@ mod tests {
|
|||
})
|
||||
});
|
||||
let _buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let editor = workspace
|
||||
|
|
|
@ -44,7 +44,6 @@ use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
|
|||
use serde_json::Value;
|
||||
use settings::WorktreeId;
|
||||
use smol::future::FutureExt as _;
|
||||
use std::num::NonZeroU32;
|
||||
use std::{
|
||||
any::Any,
|
||||
ffi::OsStr,
|
||||
|
@ -60,6 +59,7 @@ use std::{
|
|||
Arc, LazyLock,
|
||||
},
|
||||
};
|
||||
use std::{num::NonZeroU32, sync::OnceLock};
|
||||
use syntax_map::{QueryCursorHandle, SyntaxSnapshot};
|
||||
use task::RunnableTag;
|
||||
pub use task_context::{ContextProvider, RunnableRange};
|
||||
|
@ -162,6 +162,7 @@ pub struct CachedLspAdapter {
|
|||
pub adapter: Arc<dyn LspAdapter>,
|
||||
pub reinstall_attempt_count: AtomicU64,
|
||||
cached_binary: futures::lock::Mutex<Option<LanguageServerBinary>>,
|
||||
attach_kind: OnceLock<Attach>,
|
||||
}
|
||||
|
||||
impl Debug for CachedLspAdapter {
|
||||
|
@ -197,6 +198,7 @@ impl CachedLspAdapter {
|
|||
adapter,
|
||||
cached_binary: Default::default(),
|
||||
reinstall_attempt_count: AtomicU64::new(0),
|
||||
attach_kind: Default::default(),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -258,6 +260,38 @@ impl CachedLspAdapter {
|
|||
.cloned()
|
||||
.unwrap_or_else(|| language_name.lsp_id())
|
||||
}
|
||||
pub fn find_project_root(
|
||||
&self,
|
||||
path: &Path,
|
||||
ancestor_depth: usize,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Option<Arc<Path>> {
|
||||
self.adapter
|
||||
.find_project_root(path, ancestor_depth, delegate)
|
||||
}
|
||||
pub fn attach_kind(&self) -> Attach {
|
||||
*self.attach_kind.get_or_init(|| self.adapter.attach_kind())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
pub enum Attach {
|
||||
/// Create a single language server instance per subproject root.
|
||||
InstancePerRoot,
|
||||
/// Use one shared language server instance for all subprojects within a project.
|
||||
Shared,
|
||||
}
|
||||
|
||||
impl Attach {
|
||||
pub fn root_path(
|
||||
&self,
|
||||
root_subproject_path: (WorktreeId, Arc<Path>),
|
||||
) -> (WorktreeId, Arc<Path>) {
|
||||
match self {
|
||||
Attach::InstancePerRoot => root_subproject_path,
|
||||
Attach::Shared => (root_subproject_path.0, Arc::from(Path::new(""))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// [`LspAdapterDelegate`] allows [`LspAdapter]` implementations to interface with the application
|
||||
|
@ -268,6 +302,7 @@ pub trait LspAdapterDelegate: Send + Sync {
|
|||
fn http_client(&self) -> Arc<dyn HttpClient>;
|
||||
fn worktree_id(&self) -> WorktreeId;
|
||||
fn worktree_root_path(&self) -> &Path;
|
||||
fn exists(&self, path: &Path, is_dir: Option<bool>) -> bool;
|
||||
fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus);
|
||||
async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>>;
|
||||
|
||||
|
@ -506,6 +541,19 @@ pub trait LspAdapter: 'static + Send + Sync {
|
|||
fn prepare_initialize_params(&self, original: InitializeParams) -> Result<InitializeParams> {
|
||||
Ok(original)
|
||||
}
|
||||
fn attach_kind(&self) -> Attach {
|
||||
Attach::Shared
|
||||
}
|
||||
fn find_project_root(
|
||||
&self,
|
||||
|
||||
_path: &Path,
|
||||
_ancestor_depth: usize,
|
||||
_: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Option<Arc<Path>> {
|
||||
// By default all language servers are rooted at the root of the worktree.
|
||||
Some(Arc::from("".as_ref()))
|
||||
}
|
||||
}
|
||||
|
||||
async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>(
|
||||
|
|
|
@ -108,6 +108,7 @@ struct LanguageRegistryState {
|
|||
available_languages: Vec<AvailableLanguage>,
|
||||
grammars: HashMap<Arc<str>, AvailableGrammar>,
|
||||
lsp_adapters: HashMap<LanguageName, Vec<Arc<CachedLspAdapter>>>,
|
||||
all_lsp_adapters: HashMap<LanguageServerName, Arc<CachedLspAdapter>>,
|
||||
available_lsp_adapters:
|
||||
HashMap<LanguageServerName, Arc<dyn Fn() -> Arc<CachedLspAdapter> + 'static + Send + Sync>>,
|
||||
loading_languages: HashMap<LanguageId, Vec<oneshot::Sender<Result<Arc<Language>>>>>,
|
||||
|
@ -234,6 +235,7 @@ impl LanguageRegistry {
|
|||
language_settings: Default::default(),
|
||||
loading_languages: Default::default(),
|
||||
lsp_adapters: Default::default(),
|
||||
all_lsp_adapters: Default::default(),
|
||||
available_lsp_adapters: HashMap::default(),
|
||||
subscription: watch::channel(),
|
||||
theme: Default::default(),
|
||||
|
@ -356,12 +358,16 @@ impl LanguageRegistry {
|
|||
adapter: Arc<dyn LspAdapter>,
|
||||
) -> Arc<CachedLspAdapter> {
|
||||
let cached = CachedLspAdapter::new(adapter);
|
||||
self.state
|
||||
.write()
|
||||
let mut state = self.state.write();
|
||||
state
|
||||
.lsp_adapters
|
||||
.entry(language_name)
|
||||
.or_default()
|
||||
.push(cached.clone());
|
||||
state
|
||||
.all_lsp_adapters
|
||||
.insert(cached.name.clone(), cached.clone());
|
||||
|
||||
cached
|
||||
}
|
||||
|
||||
|
@ -401,12 +407,17 @@ impl LanguageRegistry {
|
|||
let adapter_name = LanguageServerName(adapter.name.into());
|
||||
let capabilities = adapter.capabilities.clone();
|
||||
let initializer = adapter.initializer.take();
|
||||
self.state
|
||||
.write()
|
||||
.lsp_adapters
|
||||
.entry(language_name.clone())
|
||||
.or_default()
|
||||
.push(CachedLspAdapter::new(Arc::new(adapter)));
|
||||
let adapter = CachedLspAdapter::new(Arc::new(adapter));
|
||||
{
|
||||
let mut state = self.state.write();
|
||||
state
|
||||
.lsp_adapters
|
||||
.entry(language_name.clone())
|
||||
.or_default()
|
||||
.push(adapter.clone());
|
||||
state.all_lsp_adapters.insert(adapter.name(), adapter);
|
||||
}
|
||||
|
||||
self.register_fake_language_server(adapter_name, capabilities, initializer)
|
||||
}
|
||||
|
||||
|
@ -419,12 +430,16 @@ impl LanguageRegistry {
|
|||
adapter: crate::FakeLspAdapter,
|
||||
) {
|
||||
let language_name = language_name.into();
|
||||
self.state
|
||||
.write()
|
||||
let mut state = self.state.write();
|
||||
let cached_adapter = CachedLspAdapter::new(Arc::new(adapter));
|
||||
state
|
||||
.lsp_adapters
|
||||
.entry(language_name.clone())
|
||||
.or_default()
|
||||
.push(CachedLspAdapter::new(Arc::new(adapter)));
|
||||
.push(cached_adapter.clone());
|
||||
state
|
||||
.all_lsp_adapters
|
||||
.insert(cached_adapter.name(), cached_adapter);
|
||||
}
|
||||
|
||||
/// Register a fake language server (without the adapter)
|
||||
|
@ -895,6 +910,10 @@ impl LanguageRegistry {
|
|||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn adapter_for_name(&self, name: &LanguageServerName) -> Option<Arc<CachedLspAdapter>> {
|
||||
self.state.read().all_lsp_adapters.get(name).cloned()
|
||||
}
|
||||
|
||||
pub fn update_lsp_status(
|
||||
&self,
|
||||
server_name: LanguageServerName,
|
||||
|
|
|
@ -735,7 +735,8 @@ impl LspLogView {
|
|||
|
||||
* Binary: {BINARY:#?}
|
||||
|
||||
* Running in project: {PATH:?}
|
||||
* Registered workspace folders:
|
||||
{WORKSPACE_FOLDERS}
|
||||
|
||||
* Capabilities: {CAPABILITIES}
|
||||
|
||||
|
@ -743,7 +744,15 @@ impl LspLogView {
|
|||
NAME = server.name(),
|
||||
ID = server.server_id(),
|
||||
BINARY = server.binary(),
|
||||
PATH = server.root_path(),
|
||||
WORKSPACE_FOLDERS = server
|
||||
.workspace_folders()
|
||||
.iter()
|
||||
.filter_map(|path| path
|
||||
.to_file_path()
|
||||
.ok()
|
||||
.map(|path| path.to_string_lossy().into_owned()))
|
||||
.collect::<Vec<_>>()
|
||||
.join(", "),
|
||||
CAPABILITIES = serde_json::to_string_pretty(&server.capabilities())
|
||||
.unwrap_or_else(|e| format!("Failed to serialize capabilities: {e}")),
|
||||
CONFIGURATION = serde_json::to_string_pretty(server.configuration())
|
||||
|
|
|
@ -74,6 +74,23 @@ impl LspAdapter for RustLspAdapter {
|
|||
Self::SERVER_NAME.clone()
|
||||
}
|
||||
|
||||
fn find_project_root(
|
||||
&self,
|
||||
path: &Path,
|
||||
ancestor_depth: usize,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Option<Arc<Path>> {
|
||||
let mut outermost_cargo_toml = None;
|
||||
for path in path.ancestors().take(ancestor_depth) {
|
||||
let p = path.join("Cargo.toml");
|
||||
if delegate.exists(&p, Some(false)) {
|
||||
outermost_cargo_toml = Some(Arc::from(path));
|
||||
}
|
||||
}
|
||||
|
||||
outermost_cargo_toml
|
||||
}
|
||||
|
||||
async fn check_if_user_installed(
|
||||
&self,
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
|
|
|
@ -7,6 +7,7 @@ use anyhow::{anyhow, Context as _, Result};
|
|||
use collections::HashMap;
|
||||
use futures::{channel::oneshot, io::BufWriter, select, AsyncRead, AsyncWrite, Future, FutureExt};
|
||||
use gpui::{App, AsyncApp, BackgroundExecutor, SharedString, Task};
|
||||
use notification::DidChangeWorkspaceFolders;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use postage::{barrier, prelude::Stream};
|
||||
use schemars::{
|
||||
|
@ -23,10 +24,11 @@ use smol::{
|
|||
};
|
||||
|
||||
use std::{
|
||||
collections::BTreeSet,
|
||||
ffi::{OsStr, OsString},
|
||||
fmt,
|
||||
io::Write,
|
||||
ops::DerefMut,
|
||||
ops::{Deref, DerefMut},
|
||||
path::PathBuf,
|
||||
pin::Pin,
|
||||
sync::{
|
||||
|
@ -96,9 +98,8 @@ pub struct LanguageServer {
|
|||
#[allow(clippy::type_complexity)]
|
||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
||||
root_path: PathBuf,
|
||||
working_dir: PathBuf,
|
||||
server: Arc<Mutex<Option<Child>>>,
|
||||
workspace_folders: Arc<Mutex<BTreeSet<Url>>>,
|
||||
}
|
||||
|
||||
/// Identifies a running language server.
|
||||
|
@ -376,8 +377,6 @@ impl LanguageServer {
|
|||
Some(stderr),
|
||||
stderr_capture,
|
||||
Some(server),
|
||||
root_path,
|
||||
working_dir,
|
||||
code_action_kinds,
|
||||
binary,
|
||||
cx,
|
||||
|
@ -403,8 +402,6 @@ impl LanguageServer {
|
|||
stderr: Option<Stderr>,
|
||||
stderr_capture: Arc<Mutex<Option<String>>>,
|
||||
server: Option<Child>,
|
||||
root_path: &Path,
|
||||
working_dir: &Path,
|
||||
code_action_kinds: Option<Vec<CodeActionKind>>,
|
||||
binary: LanguageServerBinary,
|
||||
cx: AsyncApp,
|
||||
|
@ -488,9 +485,8 @@ impl LanguageServer {
|
|||
executor: cx.background_executor().clone(),
|
||||
io_tasks: Mutex::new(Some((input_task, output_task))),
|
||||
output_done_rx: Mutex::new(Some(output_done_rx)),
|
||||
root_path: root_path.to_path_buf(),
|
||||
working_dir: working_dir.to_path_buf(),
|
||||
server: Arc::new(Mutex::new(server)),
|
||||
workspace_folders: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -615,12 +611,11 @@ impl LanguageServer {
|
|||
}
|
||||
|
||||
pub fn default_initialize_params(&self, cx: &App) -> InitializeParams {
|
||||
let root_uri = Url::from_file_path(&self.working_dir).unwrap();
|
||||
#[allow(deprecated)]
|
||||
InitializeParams {
|
||||
process_id: None,
|
||||
root_path: None,
|
||||
root_uri: Some(root_uri.clone()),
|
||||
root_uri: None,
|
||||
initialization_options: None,
|
||||
capabilities: ClientCapabilities {
|
||||
general: Some(GeneralClientCapabilities {
|
||||
|
@ -790,10 +785,7 @@ impl LanguageServer {
|
|||
}),
|
||||
},
|
||||
trace: None,
|
||||
workspace_folders: Some(vec![WorkspaceFolder {
|
||||
uri: root_uri,
|
||||
name: Default::default(),
|
||||
}]),
|
||||
workspace_folders: None,
|
||||
client_info: release_channel::ReleaseChannel::try_global(cx).map(|release_channel| {
|
||||
ClientInfo {
|
||||
name: release_channel.display_name().to_string(),
|
||||
|
@ -812,16 +804,10 @@ impl LanguageServer {
|
|||
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize)
|
||||
pub fn initialize(
|
||||
mut self,
|
||||
initialize_params: Option<InitializeParams>,
|
||||
params: InitializeParams,
|
||||
configuration: Arc<DidChangeConfigurationParams>,
|
||||
cx: &App,
|
||||
) -> Task<Result<Arc<Self>>> {
|
||||
let params = if let Some(params) = initialize_params {
|
||||
params
|
||||
} else {
|
||||
self.default_initialize_params(cx)
|
||||
};
|
||||
|
||||
cx.spawn(|_| async move {
|
||||
let response = self.request::<request::Initialize>(params).await?;
|
||||
if let Some(info) = response.server_info {
|
||||
|
@ -1073,16 +1059,10 @@ impl LanguageServer {
|
|||
self.server_id
|
||||
}
|
||||
|
||||
/// Get the root path of the project the language server is running against.
|
||||
pub fn root_path(&self) -> &PathBuf {
|
||||
&self.root_path
|
||||
}
|
||||
|
||||
/// Language server's binary information.
|
||||
pub fn binary(&self) -> &LanguageServerBinary {
|
||||
&self.binary
|
||||
}
|
||||
|
||||
/// Sends a RPC request to the language server.
|
||||
///
|
||||
/// [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage)
|
||||
|
@ -1210,6 +1190,118 @@ impl LanguageServer {
|
|||
outbound_tx.try_send(message)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Add new workspace folder to the list.
|
||||
pub fn add_workspace_folder(&self, uri: Url) {
|
||||
if self
|
||||
.capabilities()
|
||||
.workspace
|
||||
.and_then(|ws| {
|
||||
ws.workspace_folders.and_then(|folders| {
|
||||
folders
|
||||
.change_notifications
|
||||
.map(|caps| matches!(caps, OneOf::Left(false)))
|
||||
})
|
||||
})
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
let is_new_folder = self.workspace_folders.lock().insert(uri.clone());
|
||||
if is_new_folder {
|
||||
let params = DidChangeWorkspaceFoldersParams {
|
||||
event: WorkspaceFoldersChangeEvent {
|
||||
added: vec![WorkspaceFolder {
|
||||
uri,
|
||||
name: String::default(),
|
||||
}],
|
||||
removed: vec![],
|
||||
},
|
||||
};
|
||||
self.notify::<DidChangeWorkspaceFolders>(¶ms).log_err();
|
||||
}
|
||||
}
|
||||
/// Add new workspace folder to the list.
|
||||
pub fn remove_workspace_folder(&self, uri: Url) {
|
||||
if self
|
||||
.capabilities()
|
||||
.workspace
|
||||
.and_then(|ws| {
|
||||
ws.workspace_folders.and_then(|folders| {
|
||||
folders
|
||||
.change_notifications
|
||||
.map(|caps| !matches!(caps, OneOf::Left(false)))
|
||||
})
|
||||
})
|
||||
.unwrap_or(true)
|
||||
{
|
||||
return;
|
||||
}
|
||||
let was_removed = self.workspace_folders.lock().remove(&uri);
|
||||
if was_removed {
|
||||
let params = DidChangeWorkspaceFoldersParams {
|
||||
event: WorkspaceFoldersChangeEvent {
|
||||
added: vec![],
|
||||
removed: vec![WorkspaceFolder {
|
||||
uri,
|
||||
name: String::default(),
|
||||
}],
|
||||
},
|
||||
};
|
||||
self.notify::<DidChangeWorkspaceFolders>(¶ms).log_err();
|
||||
}
|
||||
}
|
||||
pub fn set_workspace_folders(&self, folders: BTreeSet<Url>) {
|
||||
let mut workspace_folders = self.workspace_folders.lock();
|
||||
let added: Vec<_> = folders
|
||||
.iter()
|
||||
.map(|uri| WorkspaceFolder {
|
||||
uri: uri.clone(),
|
||||
name: String::default(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let removed: Vec<_> = std::mem::replace(&mut *workspace_folders, folders)
|
||||
.into_iter()
|
||||
.map(|uri| WorkspaceFolder {
|
||||
uri: uri.clone(),
|
||||
name: String::default(),
|
||||
})
|
||||
.collect();
|
||||
let should_notify = !added.is_empty() || !removed.is_empty();
|
||||
|
||||
if should_notify {
|
||||
let params = DidChangeWorkspaceFoldersParams {
|
||||
event: WorkspaceFoldersChangeEvent { added, removed },
|
||||
};
|
||||
self.notify::<DidChangeWorkspaceFolders>(¶ms).log_err();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn workspace_folders(&self) -> impl Deref<Target = BTreeSet<Url>> + '_ {
|
||||
self.workspace_folders.lock()
|
||||
}
|
||||
|
||||
pub fn register_buffer(
|
||||
&self,
|
||||
uri: Url,
|
||||
language_id: String,
|
||||
version: i32,
|
||||
initial_text: String,
|
||||
) {
|
||||
self.notify::<notification::DidOpenTextDocument>(&DidOpenTextDocumentParams {
|
||||
text_document: TextDocumentItem::new(uri, language_id, version, initial_text),
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
pub fn unregister_buffer(&self, uri: Url) {
|
||||
self.notify::<notification::DidCloseTextDocument>(&DidCloseTextDocumentParams {
|
||||
text_document: TextDocumentIdentifier::new(uri),
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for LanguageServer {
|
||||
|
@ -1291,8 +1383,6 @@ impl FakeLanguageServer {
|
|||
let (stdout_writer, stdout_reader) = async_pipe::pipe();
|
||||
let (notifications_tx, notifications_rx) = channel::unbounded();
|
||||
|
||||
let root = Self::root_path();
|
||||
|
||||
let server_name = LanguageServerName(name.clone().into());
|
||||
let process_name = Arc::from(name.as_str());
|
||||
let mut server = LanguageServer::new_internal(
|
||||
|
@ -1303,8 +1393,6 @@ impl FakeLanguageServer {
|
|||
None::<async_pipe::PipeReader>,
|
||||
Arc::new(Mutex::new(None)),
|
||||
None,
|
||||
root,
|
||||
root,
|
||||
None,
|
||||
binary.clone(),
|
||||
cx.clone(),
|
||||
|
@ -1322,8 +1410,6 @@ impl FakeLanguageServer {
|
|||
None::<async_pipe::PipeReader>,
|
||||
Arc::new(Mutex::new(None)),
|
||||
None,
|
||||
root,
|
||||
root,
|
||||
None,
|
||||
binary,
|
||||
cx.clone(),
|
||||
|
@ -1360,16 +1446,6 @@ impl FakeLanguageServer {
|
|||
|
||||
(server, fake)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("C:\\")
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn root_path() -> &'static Path {
|
||||
Path::new("/")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
|
@ -1557,12 +1633,14 @@ mod tests {
|
|||
})
|
||||
.detach();
|
||||
|
||||
let initialize_params = None;
|
||||
let configuration = DidChangeConfigurationParams {
|
||||
settings: Default::default(),
|
||||
};
|
||||
let server = cx
|
||||
.update(|cx| server.initialize(initialize_params, configuration.into(), cx))
|
||||
.update(|cx| {
|
||||
let params = server.default_initialize_params(cx);
|
||||
let configuration = DidChangeConfigurationParams {
|
||||
settings: Default::default(),
|
||||
};
|
||||
server.initialize(params, configuration.into(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
server
|
||||
|
|
|
@ -370,6 +370,7 @@ mod tests {
|
|||
use language::{Language, LanguageConfig, LanguageMatcher};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use util::path;
|
||||
use workspace::{AppState, Workspace};
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -377,7 +378,7 @@ mod tests {
|
|||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.rs": indoc!{"
|
||||
struct SingleLine; // display line 0
|
||||
|
@ -391,7 +392,7 @@ mod tests {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/dir".as_ref()], cx).await;
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
project.read_with(cx, |project, _| project.languages().add(rust_lang()));
|
||||
|
||||
let (workspace, cx) =
|
||||
|
@ -402,7 +403,9 @@ mod tests {
|
|||
})
|
||||
});
|
||||
let _buffer = project
|
||||
.update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(path!("/dir/a.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let editor = workspace
|
||||
|
|
|
@ -5169,6 +5169,7 @@ mod tests {
|
|||
use project::FakeFs;
|
||||
use search::project_search::{self, perform_project_search};
|
||||
use serde_json::json;
|
||||
use util::path;
|
||||
use workspace::OpenVisible;
|
||||
|
||||
use super::*;
|
||||
|
@ -5546,8 +5547,8 @@ mod tests {
|
|||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
populate_with_test_ra_project(&fs, "/rust-analyzer").await;
|
||||
let project = Project::test(fs.clone(), ["/rust-analyzer".as_ref()], cx).await;
|
||||
populate_with_test_ra_project(&fs, path!("/rust-analyzer")).await;
|
||||
let project = Project::test(fs.clone(), [path!("/rust-analyzer").as_ref()], cx).await;
|
||||
project.read_with(cx, |project, _| {
|
||||
project.languages().add(Arc::new(rust_lang()))
|
||||
});
|
||||
|
@ -5591,15 +5592,17 @@ mod tests {
|
|||
);
|
||||
});
|
||||
});
|
||||
let all_matches = r#"/rust-analyzer/
|
||||
let root_path = format!("{}/", path!("/rust-analyzer"));
|
||||
let all_matches = format!(
|
||||
r#"{root_path}
|
||||
crates/
|
||||
ide/src/
|
||||
inlay_hints/
|
||||
fn_lifetime_fn.rs
|
||||
search: match config.param_names_for_lifetime_elision_hints {
|
||||
search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
|
||||
search: Some(it) if config.param_names_for_lifetime_elision_hints => {
|
||||
search: InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
|
||||
search: match config.param_names_for_lifetime_elision_hints {{
|
||||
search: allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {{
|
||||
search: Some(it) if config.param_names_for_lifetime_elision_hints => {{
|
||||
search: InlayHintsConfig {{ param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }},
|
||||
inlay_hints.rs
|
||||
search: pub param_names_for_lifetime_elision_hints: bool,
|
||||
search: param_names_for_lifetime_elision_hints: self
|
||||
|
@ -5610,7 +5613,8 @@ mod tests {
|
|||
analysis_stats.rs
|
||||
search: param_names_for_lifetime_elision_hints: true,
|
||||
config.rs
|
||||
search: param_names_for_lifetime_elision_hints: self"#;
|
||||
search: param_names_for_lifetime_elision_hints: self"#
|
||||
);
|
||||
let select_first_in_all_matches = |line_to_select: &str| {
|
||||
assert!(all_matches.contains(line_to_select));
|
||||
all_matches.replacen(
|
||||
|
@ -5921,7 +5925,7 @@ mod tests {
|
|||
async fn test_navigating_in_singleton(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let root = "/root";
|
||||
let root = path!("/root");
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
root,
|
||||
|
@ -5968,7 +5972,7 @@ struct OutlineEntryExcerpt {
|
|||
|
||||
let _editor = workspace
|
||||
.update(cx, |workspace, window, cx| {
|
||||
workspace.open_abs_path(PathBuf::from("/root/src/lib.rs"), true, window, cx)
|
||||
workspace.open_abs_path(PathBuf::from(path!("/root/src/lib.rs")), true, window, cx)
|
||||
})
|
||||
.unwrap()
|
||||
.await
|
||||
|
|
|
@ -283,13 +283,13 @@ impl Prettier {
|
|||
)
|
||||
.context("prettier server creation")?;
|
||||
|
||||
let initialize_params = None;
|
||||
let configuration = lsp::DidChangeConfigurationParams {
|
||||
settings: Default::default(),
|
||||
};
|
||||
let server = cx
|
||||
.update(|cx| {
|
||||
executor.spawn(server.initialize(initialize_params, configuration.into(), cx))
|
||||
let params = server.default_initialize_params(cx);
|
||||
let configuration = lsp::DidChangeConfigurationParams {
|
||||
settings: Default::default(),
|
||||
};
|
||||
executor.spawn(server.initialize(params, configuration.into(), cx))
|
||||
})?
|
||||
.await
|
||||
.context("prettier server initialization")?;
|
||||
|
|
|
@ -941,9 +941,11 @@ fn language_server_for_buffer(
|
|||
) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
|
||||
lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store
|
||||
.language_server_for_local_buffer(buffer.read(cx), server_id, cx)
|
||||
.map(|(adapter, server)| (adapter.clone(), server.clone()))
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
lsp_store
|
||||
.language_server_for_local_buffer(buffer, server_id, cx)
|
||||
.map(|(adapter, server)| (adapter.clone(), server.clone()))
|
||||
})
|
||||
})?
|
||||
.ok_or_else(|| anyhow!("no language server found for buffer"))
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -40,7 +40,7 @@ pub struct PrettierStore {
|
|||
prettier_instances: HashMap<PathBuf, PrettierInstance>,
|
||||
}
|
||||
|
||||
pub enum PrettierStoreEvent {
|
||||
pub(crate) enum PrettierStoreEvent {
|
||||
LanguageServerRemoved(LanguageServerId),
|
||||
LanguageServerAdded {
|
||||
new_server_id: LanguageServerId,
|
||||
|
|
|
@ -9,6 +9,7 @@ pub mod lsp_ext_command;
|
|||
pub mod lsp_store;
|
||||
pub mod prettier_store;
|
||||
pub mod project_settings;
|
||||
mod project_tree;
|
||||
pub mod search;
|
||||
mod task_inventory;
|
||||
pub mod task_store;
|
||||
|
@ -56,17 +57,16 @@ use gpui::{
|
|||
};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent,
|
||||
CachedLspAdapter, Capability, CodeLabel, CompletionDocumentation, File as _, Language,
|
||||
LanguageName, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList,
|
||||
Transaction, Unclipped,
|
||||
language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent, Capability,
|
||||
CodeLabel, CompletionDocumentation, File as _, Language, LanguageName, LanguageRegistry,
|
||||
PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList, Transaction, Unclipped,
|
||||
};
|
||||
use lsp::{
|
||||
CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServer,
|
||||
LanguageServerId, LanguageServerName, MessageActionItem,
|
||||
CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, LanguageServerId,
|
||||
LanguageServerName, MessageActionItem,
|
||||
};
|
||||
use lsp_command::*;
|
||||
use lsp_store::LspFormatTarget;
|
||||
use lsp_store::{LspFormatTarget, OpenLspBufferHandle};
|
||||
use node_runtime::NodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
pub use prettier_store::PrettierStore;
|
||||
|
@ -481,6 +481,7 @@ pub struct DocumentHighlight {
|
|||
pub struct Symbol {
|
||||
pub language_server_name: LanguageServerName,
|
||||
pub source_worktree_id: WorktreeId,
|
||||
pub source_language_server_id: LanguageServerId,
|
||||
pub path: ProjectPath,
|
||||
pub label: CodeLabel,
|
||||
pub name: String,
|
||||
|
@ -1970,7 +1971,7 @@ impl Project {
|
|||
pub fn open_buffer(
|
||||
&mut self,
|
||||
path: impl Into<ProjectPath>,
|
||||
cx: &mut Context<Self>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Entity<Buffer>>> {
|
||||
if self.is_disconnected(cx) {
|
||||
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
|
||||
|
@ -1988,16 +1989,25 @@ impl Project {
|
|||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<(Entity<Buffer>, lsp_store::OpenLspBufferHandle)>> {
|
||||
let buffer = self.open_buffer(path, cx);
|
||||
let lsp_store = self.lsp_store().clone();
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let buffer = buffer.await?;
|
||||
let handle = lsp_store.update(&mut cx, |lsp_store, cx| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx)
|
||||
let handle = this.update(&mut cx, |project, cx| {
|
||||
project.register_buffer_with_language_servers(&buffer, cx)
|
||||
})?;
|
||||
Ok((buffer, handle))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn register_buffer_with_language_servers(
|
||||
&self,
|
||||
buffer: &Entity<Buffer>,
|
||||
cx: &mut App,
|
||||
) -> OpenLspBufferHandle {
|
||||
self.lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, false, cx)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_unstaged_diff(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
|
@ -2616,7 +2626,7 @@ impl Project {
|
|||
|
||||
pub fn restart_language_servers_for_buffers(
|
||||
&mut self,
|
||||
buffers: impl IntoIterator<Item = Entity<Buffer>>,
|
||||
buffers: Vec<Entity<Buffer>>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.lsp_store.update(cx, |lsp_store, cx| {
|
||||
|
@ -4228,14 +4238,43 @@ impl Project {
|
|||
self.lsp_store.read(cx).supplementary_language_servers()
|
||||
}
|
||||
|
||||
pub fn language_servers_for_local_buffer<'a>(
|
||||
&'a self,
|
||||
buffer: &'a Buffer,
|
||||
cx: &'a App,
|
||||
) -> impl Iterator<Item = (&'a Arc<CachedLspAdapter>, &'a Arc<LanguageServer>)> {
|
||||
self.lsp_store
|
||||
.read(cx)
|
||||
.language_servers_for_local_buffer(buffer, cx)
|
||||
pub fn any_language_server_supports_inlay_hints(&self, buffer: &Buffer, cx: &mut App) -> bool {
|
||||
self.lsp_store.update(cx, |this, cx| {
|
||||
this.language_servers_for_local_buffer(buffer, cx)
|
||||
.any(
|
||||
|(_, server)| match server.capabilities().inlay_hint_provider {
|
||||
Some(lsp::OneOf::Left(enabled)) => enabled,
|
||||
Some(lsp::OneOf::Right(_)) => true,
|
||||
None => false,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn language_server_id_for_name(
|
||||
&self,
|
||||
buffer: &Buffer,
|
||||
name: &str,
|
||||
cx: &mut App,
|
||||
) -> Option<LanguageServerId> {
|
||||
self.lsp_store.update(cx, |this, cx| {
|
||||
this.language_servers_for_local_buffer(buffer, cx)
|
||||
.find_map(|(adapter, server)| {
|
||||
if adapter.name.0 == name {
|
||||
Some(server.server_id())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn has_language_servers_for(&self, buffer: &Buffer, cx: &mut App) -> bool {
|
||||
self.lsp_store.update(cx, |this, cx| {
|
||||
this.language_servers_for_local_buffer(buffer, cx)
|
||||
.next()
|
||||
.is_some()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn buffer_store(&self) -> &Entity<BufferStore> {
|
||||
|
|
|
@ -1406,14 +1406,13 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC
|
|||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Simulate diagnostics starting to update.
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
fake_server.start_progress(progress_token).await;
|
||||
|
||||
// Restart the server before the diagnostics finish updating.
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer], cx);
|
||||
project.restart_language_servers_for_buffers(vec![buffer], cx);
|
||||
});
|
||||
let mut events = cx.events(&project);
|
||||
|
||||
|
@ -1518,7 +1517,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp
|
|||
});
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx);
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
|
||||
});
|
||||
|
||||
// The diagnostics are cleared.
|
||||
|
@ -1572,10 +1571,10 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T
|
|||
diagnostics: Vec::new(),
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx);
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
|
||||
});
|
||||
|
||||
let mut fake_server = fake_servers.next().await.unwrap();
|
||||
let notification = fake_server
|
||||
.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
|
@ -1782,7 +1781,6 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
|
||||
|
||||
let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
|
||||
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
|
||||
language_registry.add(rust_lang());
|
||||
|
@ -1801,8 +1799,8 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
|
|||
.await
|
||||
.unwrap();
|
||||
|
||||
let _handle = lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.register_buffer_with_language_servers(&buffer, cx)
|
||||
let _handle = project.update(cx, |project, cx| {
|
||||
project.register_buffer_with_language_servers(&buffer, cx)
|
||||
});
|
||||
|
||||
let mut fake_server = fake_servers.next().await.unwrap();
|
||||
|
@ -2617,7 +2615,6 @@ async fn test_definition(cx: &mut gpui::TestAppContext) {
|
|||
),
|
||||
)))
|
||||
});
|
||||
|
||||
let mut definitions = project
|
||||
.update(cx, |project, cx| project.definition(&buffer, 22, cx))
|
||||
.await
|
||||
|
@ -3381,7 +3378,7 @@ async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
|
|||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a": {
|
||||
"file1": "",
|
||||
|
@ -3390,7 +3387,7 @@ async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [Path::new("/dir")], cx).await;
|
||||
let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
|
||||
let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let tree_id = tree.update(cx, |tree, _| tree.id());
|
||||
|
||||
|
|
243
crates/project/src/project_tree.rs
Normal file
243
crates/project/src/project_tree.rs
Normal file
|
@ -0,0 +1,243 @@
|
|||
//! This module defines a Project Tree.
|
||||
//!
|
||||
//! A Project Tree is responsible for determining where the roots of subprojects are located in a project.
|
||||
|
||||
mod path_trie;
|
||||
mod server_tree;
|
||||
|
||||
use std::{
|
||||
borrow::Borrow,
|
||||
collections::{hash_map::Entry, BTreeMap},
|
||||
ops::ControlFlow,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AppContext, Context, Entity, EventEmitter, Subscription};
|
||||
use language::{CachedLspAdapter, LspAdapterDelegate};
|
||||
use lsp::LanguageServerName;
|
||||
use path_trie::{LabelPresence, RootPathTrie, TriePath};
|
||||
use settings::{SettingsStore, WorktreeId};
|
||||
use worktree::{Event as WorktreeEvent, Worktree};
|
||||
|
||||
use crate::{
|
||||
worktree_store::{WorktreeStore, WorktreeStoreEvent},
|
||||
ProjectPath,
|
||||
};
|
||||
|
||||
pub(crate) use server_tree::{AdapterQuery, LanguageServerTree, LaunchDisposition};
|
||||
|
||||
struct WorktreeRoots {
|
||||
roots: RootPathTrie<LanguageServerName>,
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
_worktree_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl WorktreeRoots {
|
||||
fn new(
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
worktree: Entity<Worktree>,
|
||||
cx: &mut App,
|
||||
) -> Entity<Self> {
|
||||
cx.new(|cx| Self {
|
||||
roots: RootPathTrie::new(),
|
||||
worktree_store,
|
||||
_worktree_subscription: cx.subscribe(&worktree, |this: &mut Self, _, event, cx| {
|
||||
match event {
|
||||
WorktreeEvent::UpdatedEntries(changes) => {
|
||||
for (path, _, kind) in changes.iter() {
|
||||
match kind {
|
||||
worktree::PathChange::Removed => {
|
||||
let path = TriePath::from(path.as_ref());
|
||||
this.roots.remove(&path);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
WorktreeEvent::UpdatedGitRepositories(_) => {}
|
||||
WorktreeEvent::DeletedEntry(entry_id) => {
|
||||
let Some(entry) = this.worktree_store.read(cx).entry_for_id(*entry_id, cx)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let path = TriePath::from(entry.path.as_ref());
|
||||
this.roots.remove(&path);
|
||||
}
|
||||
}
|
||||
}),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ProjectTree {
|
||||
root_points: HashMap<WorktreeId, Entity<WorktreeRoots>>,
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
_subscriptions: [Subscription; 2],
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct AdapterWrapper(Arc<CachedLspAdapter>);
|
||||
impl PartialEq for AdapterWrapper {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0.name.eq(&other.0.name)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AdapterWrapper {}
|
||||
|
||||
impl std::hash::Hash for AdapterWrapper {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
self.0.name.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for AdapterWrapper {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.0.name.cmp(&other.0.name))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for AdapterWrapper {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.0.name.cmp(&other.0.name)
|
||||
}
|
||||
}
|
||||
|
||||
impl Borrow<LanguageServerName> for AdapterWrapper {
|
||||
fn borrow(&self) -> &LanguageServerName {
|
||||
&self.0.name
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq)]
|
||||
pub(crate) enum ProjectTreeEvent {
|
||||
WorktreeRemoved(WorktreeId),
|
||||
Cleared,
|
||||
}
|
||||
|
||||
impl EventEmitter<ProjectTreeEvent> for ProjectTree {}
|
||||
|
||||
impl ProjectTree {
|
||||
pub(crate) fn new(worktree_store: Entity<WorktreeStore>, cx: &mut App) -> Entity<Self> {
|
||||
cx.new(|cx| Self {
|
||||
root_points: Default::default(),
|
||||
_subscriptions: [
|
||||
cx.subscribe(&worktree_store, Self::on_worktree_store_event),
|
||||
cx.observe_global::<SettingsStore>(|this, cx| {
|
||||
for (_, roots) in &mut this.root_points {
|
||||
roots.update(cx, |worktree_roots, _| {
|
||||
worktree_roots.roots = RootPathTrie::new();
|
||||
})
|
||||
}
|
||||
cx.emit(ProjectTreeEvent::Cleared);
|
||||
}),
|
||||
],
|
||||
worktree_store,
|
||||
})
|
||||
}
|
||||
#[allow(clippy::mutable_key_type)]
|
||||
fn root_for_path(
|
||||
&mut self,
|
||||
ProjectPath { worktree_id, path }: ProjectPath,
|
||||
adapters: Vec<Arc<CachedLspAdapter>>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
cx: &mut App,
|
||||
) -> BTreeMap<AdapterWrapper, ProjectPath> {
|
||||
debug_assert_eq!(delegate.worktree_id(), worktree_id);
|
||||
#[allow(clippy::mutable_key_type)]
|
||||
let mut roots = BTreeMap::from_iter(
|
||||
adapters
|
||||
.into_iter()
|
||||
.map(|adapter| (AdapterWrapper(adapter), (None, LabelPresence::KnownAbsent))),
|
||||
);
|
||||
let worktree_roots = match self.root_points.entry(worktree_id) {
|
||||
Entry::Occupied(occupied_entry) => occupied_entry.get().clone(),
|
||||
Entry::Vacant(vacant_entry) => {
|
||||
let Some(worktree) = self
|
||||
.worktree_store
|
||||
.read(cx)
|
||||
.worktree_for_id(worktree_id, cx)
|
||||
else {
|
||||
return Default::default();
|
||||
};
|
||||
let roots = WorktreeRoots::new(self.worktree_store.clone(), worktree, cx);
|
||||
vacant_entry.insert(roots).clone()
|
||||
}
|
||||
};
|
||||
|
||||
let key = TriePath::from(&*path);
|
||||
worktree_roots.update(cx, |this, _| {
|
||||
this.roots.walk(&key, &mut |path, labels| {
|
||||
for (label, presence) in labels {
|
||||
if let Some((marked_path, current_presence)) = roots.get_mut(label) {
|
||||
if *current_presence > *presence {
|
||||
debug_assert!(false, "RootPathTrie precondition violation; while walking the tree label presence is only allowed to increase");
|
||||
}
|
||||
*marked_path = Some(ProjectPath {worktree_id, path: path.clone()});
|
||||
*current_presence = *presence;
|
||||
}
|
||||
|
||||
}
|
||||
ControlFlow::Continue(())
|
||||
});
|
||||
});
|
||||
for (adapter, (root_path, presence)) in &mut roots {
|
||||
if *presence == LabelPresence::Present {
|
||||
continue;
|
||||
}
|
||||
|
||||
let depth = root_path
|
||||
.as_ref()
|
||||
.map(|root_path| {
|
||||
path.strip_prefix(&root_path.path)
|
||||
.unwrap()
|
||||
.components()
|
||||
.count()
|
||||
})
|
||||
.unwrap_or_else(|| path.components().count() + 1);
|
||||
|
||||
if depth > 0 {
|
||||
let root = adapter.0.find_project_root(&path, depth, &delegate);
|
||||
match root {
|
||||
Some(known_root) => worktree_roots.update(cx, |this, _| {
|
||||
let root = TriePath::from(&*known_root);
|
||||
this.roots
|
||||
.insert(&root, adapter.0.name(), LabelPresence::Present);
|
||||
*presence = LabelPresence::Present;
|
||||
*root_path = Some(ProjectPath {
|
||||
worktree_id,
|
||||
path: known_root,
|
||||
});
|
||||
}),
|
||||
None => worktree_roots.update(cx, |this, _| {
|
||||
this.roots
|
||||
.insert(&key, adapter.0.name(), LabelPresence::KnownAbsent);
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
roots
|
||||
.into_iter()
|
||||
.filter_map(|(k, (path, presence))| {
|
||||
let path = path?;
|
||||
presence.eq(&LabelPresence::Present).then(|| (k, path))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
fn on_worktree_store_event(
|
||||
&mut self,
|
||||
_: Entity<WorktreeStore>,
|
||||
evt: &WorktreeStoreEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match evt {
|
||||
WorktreeStoreEvent::WorktreeRemoved(_, worktree_id) => {
|
||||
self.root_points.remove(&worktree_id);
|
||||
cx.emit(ProjectTreeEvent::WorktreeRemoved(*worktree_id));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
240
crates/project/src/project_tree/path_trie.rs
Normal file
240
crates/project/src/project_tree/path_trie.rs
Normal file
|
@ -0,0 +1,240 @@
|
|||
use std::{
|
||||
collections::{btree_map::Entry, BTreeMap},
|
||||
ffi::OsStr,
|
||||
ops::ControlFlow,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
/// [RootPathTrie] is a workhorse of [super::ProjectTree]. It is responsible for determining the closest known project root for a given path.
|
||||
/// It also determines how much of a given path is unexplored, thus letting callers fill in that gap if needed.
|
||||
/// Conceptually, it allows one to annotate Worktree entries with arbitrary extra metadata and run closest-ancestor searches.
|
||||
///
|
||||
/// A path is unexplored when the closest ancestor of a path is not the path itself; that means that we have not yet ran the scan on that path.
|
||||
/// For example, if there's a project root at path `python/project` and we query for a path `python/project/subdir/another_subdir/file.py`, there is
|
||||
/// a known root at `python/project` and the unexplored part is `subdir/another_subdir` - we need to run a scan on these 2 directories.
|
||||
pub(super) struct RootPathTrie<Label> {
|
||||
worktree_relative_path: Arc<Path>,
|
||||
labels: BTreeMap<Label, LabelPresence>,
|
||||
children: BTreeMap<Arc<OsStr>, RootPathTrie<Label>>,
|
||||
}
|
||||
|
||||
/// Label presence is a marker that allows to optimize searches within [RootPathTrie]; a node label can be:
/// - Present; we know there's definitely a project root at this node and it is the only label of that kind on the path to the root of a worktree
///   (none of its ancestors or descendants can contain the same present label)
/// - Known Absent - we know there's definitely no project root at this node and none of its ancestors are Present (descendants can be present though!).
///
/// The distinction is there to optimize searching; when we encounter a node with unknown status, we don't need to look at its full path
/// to the root of the worktree; it's sufficient to explore only the path between the last node with a KnownAbsent state and the directory of a path, since we run searches
/// from the leaf up to the root of the worktree.
/// When there's a Present-labeled node on the path to the root, we don't need to ask the adapter to run the search at all.
///
/// In practical terms, it means that by storing label presence we don't need to do a project discovery on a given folder more than once
/// (unless the node is invalidated, which can happen when FS entries are renamed/removed).
///
/// Storing project absence allows us to recognize which paths have already been scanned for a project root unsuccessfully. This way we don't need to run
/// such scan more than once.
#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Ord, Eq)]
pub(super) enum LabelPresence {
    /// A scan ran at this path and found no project root for this label.
    /// Ordered before `Present` so that `Ord` comparisons prefer known roots.
    KnownAbsent,
    /// A project root for this label is known to exist exactly at this path.
    Present,
}
|
||||
|
||||
impl<Label: Ord + Clone> RootPathTrie<Label> {
    /// Creates an empty trie whose root corresponds to the worktree root (empty relative path).
    pub(super) fn new() -> Self {
        Self::new_with_key(Arc::from(Path::new("")))
    }
    /// Creates a node for the given worktree-relative path, with no labels or children.
    fn new_with_key(worktree_relative_path: Arc<Path>) -> Self {
        RootPathTrie {
            worktree_relative_path,
            labels: Default::default(),
            children: Default::default(),
        }
    }
    // Internal implementation of `insert` that also returns the node at the insertion point,
    // which allows callers to inspect/visit descendants of that node.
    fn insert_inner(
        &mut self,
        path: &TriePath,
        value: Label,
        presence: LabelPresence,
    ) -> &mut Self {
        let mut current = self;

        // Accumulates the worktree-relative path for each freshly created intermediate node.
        let mut path_so_far = PathBuf::new();
        for key in path.0.iter() {
            path_so_far.push(Path::new(key));
            current = match current.children.entry(key.clone()) {
                Entry::Vacant(vacant_entry) => vacant_entry
                    .insert(RootPathTrie::new_with_key(Arc::from(path_so_far.as_path()))),
                Entry::Occupied(occupied_entry) => occupied_entry.into_mut(),
            };
        }
        // Each (path, label) pair is expected to be inserted at most once; a double insert
        // indicates a caller bug (debug builds only).
        let _previous_value = current.labels.insert(value, presence);
        debug_assert_eq!(_previous_value, None);
        current
    }
    /// Records `presence` for `value` at `path`, creating intermediate nodes as needed.
    pub(super) fn insert(&mut self, path: &TriePath, value: Label, presence: LabelPresence) {
        self.insert_inner(path, value, presence);
    }

    /// Walks the trie from the root towards `path`, invoking `callback` on every node along
    /// the way that carries at least one label. Returning `ControlFlow::Break` stops the walk.
    /// The walk ends early if `path` leaves the trie.
    pub(super) fn walk<'a>(
        &'a self,
        path: &TriePath,
        callback: &mut dyn for<'b> FnMut(
            &'b Arc<Path>,
            &'a BTreeMap<Label, LabelPresence>,
        ) -> ControlFlow<()>,
    ) {
        let mut current = self;
        for key in path.0.iter() {
            if !current.labels.is_empty() {
                if (callback)(&current.worktree_relative_path, &current.labels).is_break() {
                    return;
                };
            }
            current = match current.children.get(key) {
                Some(child) => child,
                None => return,
            };
        }
        // Visit the final node (the deepest node matching `path`) as well.
        if !current.labels.is_empty() {
            (callback)(&current.worktree_relative_path, &current.labels);
        }
    }

    /// Removes the entire subtree rooted at the final component of `path`
    /// (labels on ancestor nodes are left untouched). No-op if `path` is not in the trie.
    pub(super) fn remove(&mut self, path: &TriePath) {
        let mut current = self;
        // Descend to the parent of the node to remove.
        for path in path.0.iter().take(path.0.len().saturating_sub(1)) {
            current = match current.children.get_mut(path) {
                Some(child) => child,
                None => return,
            };
        }
        if let Some(final_entry_name) = path.0.last() {
            current.children.remove(final_entry_name);
        }
    }
}
|
||||
|
||||
/// [TriePath] is a [Path] preprocessed for amortizing the cost of doing multiple lookups in distinct [RootPathTrie]s.
|
||||
#[derive(Clone)]
|
||||
pub(super) struct TriePath(Arc<[Arc<OsStr>]>);
|
||||
|
||||
impl From<&Path> for TriePath {
|
||||
fn from(value: &Path) -> Self {
|
||||
TriePath(value.components().map(|c| c.as_os_str().into()).collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::collections::BTreeSet;

    use super::*;

    /// Exercises insertion, root-to-leaf walking order, early break, and subtree removal.
    #[test]
    fn test_insert_and_lookup() {
        let mut trie = RootPathTrie::<()>::new();
        trie.insert(
            &TriePath::from(Path::new("a/b/c")),
            (),
            LabelPresence::Present,
        );

        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
            assert_eq!(path.as_ref(), Path::new("a/b/c"));
            ControlFlow::Continue(())
        });
        // Now let's annotate a parent with "Known missing" node.
        trie.insert(
            &TriePath::from(Path::new("a")),
            (),
            LabelPresence::KnownAbsent,
        );

        // Ensure that we walk from the root to the leaf.
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            if path.as_ref() == Path::new("a/b/c") {
                // The leaf is visited last, after its KnownAbsent ancestor.
                assert_eq!(
                    visited_paths,
                    BTreeSet::from_iter([Arc::from(Path::new("a/"))])
                );
                assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
            } else if path.as_ref() == Path::new("a/") {
                assert!(visited_paths.is_empty());
                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
            } else {
                panic!("Unknown path");
            }
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });

        // One can also pass a path whose prefix is in the tree, but not that path itself.
        let mut visited_paths = BTreeSet::new();
        trie.walk(
            &TriePath::from(Path::new("a/b/c/d/e/f/g")),
            &mut |path, nodes| {
                if path.as_ref() == Path::new("a/b/c") {
                    assert_eq!(
                        visited_paths,
                        BTreeSet::from_iter([Arc::from(Path::new("a/"))])
                    );
                    assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
                } else if path.as_ref() == Path::new("a/") {
                    assert!(visited_paths.is_empty());
                    assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
                } else {
                    panic!("Unknown path");
                }
                // Assert that we only ever visit a path once.
                assert!(visited_paths.insert(path.clone()));
                ControlFlow::Continue(())
            },
        );

        // Test breaking from the tree-walk.
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, nodes| {
            if path.as_ref() == Path::new("a/") {
                assert!(visited_paths.is_empty());
                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
            } else {
                panic!("Unknown path");
            }
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Break(())
        });
        assert_eq!(visited_paths.len(), 1);

        // Entry removal.
        trie.insert(
            &TriePath::from(Path::new("a/b")),
            (),
            LabelPresence::KnownAbsent,
        );
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, _nodes| {
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });
        assert_eq!(visited_paths.len(), 3);
        // Removing "a/b" prunes the whole subtree, including "a/b/c".
        trie.remove(&TriePath::from(Path::new("a/b/")));
        let mut visited_paths = BTreeSet::new();
        trie.walk(&TriePath::from(Path::new("a/b/c")), &mut |path, _nodes| {
            // Assert that we only ever visit a path once.
            assert!(visited_paths.insert(path.clone()));
            ControlFlow::Continue(())
        });
        assert_eq!(visited_paths.len(), 1);
        assert_eq!(
            visited_paths.into_iter().next().unwrap().as_ref(),
            Path::new("a/")
        );
    }
}
|
440
crates/project/src/project_tree/server_tree.rs
Normal file
440
crates/project/src/project_tree/server_tree.rs
Normal file
|
@ -0,0 +1,440 @@
|
|||
//! This module defines an LSP Tree.
//!
//! An LSP Tree is responsible for determining which language servers apply to a given project path.
//!
//! ## RPC
//! LSP Tree is transparent to RPC peers; when clients ask the host to spawn a new language server, the host will perform an LSP Tree lookup for the provided path; it may decide
//! to reuse an existing language server. The client maintains its own LSP Tree that is a subset of the host's LSP Tree. Done this way, the client does not need to
//! ask about a suitable language server for each path it interacts with; it can resolve most of the queries locally.
|
||||
|
||||
use std::{
|
||||
collections::{BTreeMap, BTreeSet},
|
||||
path::Path,
|
||||
sync::{Arc, Weak},
|
||||
};
|
||||
|
||||
use collections::{HashMap, IndexMap};
|
||||
use gpui::{App, AppContext, Entity, Subscription};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
language_settings::AllLanguageSettings, Attach, LanguageName, LanguageRegistry,
|
||||
LspAdapterDelegate,
|
||||
};
|
||||
use lsp::LanguageServerName;
|
||||
use settings::{Settings, SettingsLocation, WorktreeId};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use crate::{project_settings::LspSettings, LanguageServerId, ProjectPath};
|
||||
|
||||
use super::{AdapterWrapper, ProjectTree, ProjectTreeEvent};
|
||||
|
||||
/// Per-worktree bookkeeping: for each project-root path inside the worktree, the set of
/// language server nodes rooted there, along with the languages each server was enabled for.
#[derive(Debug, Default)]
struct ServersForWorktree {
    roots: BTreeMap<
        Arc<Path>,
        BTreeMap<LanguageServerName, (Arc<InnerTreeNode>, BTreeSet<LanguageName>)>,
    >,
}
||||
|
||||
/// Maps project paths to the language servers that apply there, backed by [ProjectTree]
/// for project-root discovery.
pub struct LanguageServerTree {
    // Source of project-root information.
    project_tree: Entity<ProjectTree>,
    // Initialized (or pending) server nodes, grouped per worktree.
    instances: BTreeMap<WorktreeId, ServersForWorktree>,
    // Memoized Attach kinds per server name (adapter calls may be expensive, e.g. WASM).
    attach_kind_cache: HashMap<LanguageServerName, Attach>,
    languages: Arc<LanguageRegistry>,
    // Keeps the ProjectTree subscription alive for the lifetime of this tree.
    _subscriptions: Subscription,
}
|
||||
|
||||
/// A node in language server tree represents either:
/// - A language server that has already been initialized/updated for a given project
/// - A soon-to-be-initialized language server.
///
/// Holds a weak reference, so a node may outlive its place in the tree; accessors
/// return `None` once the underlying entry is gone.
#[derive(Clone)]
pub(crate) struct LanguageServerTreeNode(Weak<InnerTreeNode>);
||||
|
||||
/// Describes a request to launch a language server.
#[derive(Debug)]
pub(crate) struct LaunchDisposition<'a> {
    pub(crate) server_name: &'a LanguageServerName,
    // Whether the server attaches per-worktree or otherwise (adapter-defined).
    pub(crate) attach: Attach,
    // Project root the server should be launched at.
    pub(crate) path: ProjectPath,
    pub(crate) settings: Arc<LspSettings>,
}
|
||||
|
||||
impl<'a> From<&'a InnerTreeNode> for LaunchDisposition<'a> {
|
||||
fn from(value: &'a InnerTreeNode) -> Self {
|
||||
LaunchDisposition {
|
||||
server_name: &value.name,
|
||||
attach: value.attach,
|
||||
path: value.path.clone(),
|
||||
settings: value.settings.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl LanguageServerTreeNode {
|
||||
/// Returns a language server ID for this node if there is one.
|
||||
/// Returns None if this node has not been initialized yet or it is no longer in the tree.
|
||||
pub(crate) fn server_id(&self) -> Option<LanguageServerId> {
|
||||
self.0.upgrade()?.id.get().copied()
|
||||
}
|
||||
/// Returns a language server ID for this node if it has already been initialized; otherwise runs the provided closure to initialize the language server node in a tree.
|
||||
/// May return None if the node no longer belongs to the server tree it was created in.
|
||||
pub(crate) fn server_id_or_init(
|
||||
&self,
|
||||
init: impl FnOnce(LaunchDisposition) -> LanguageServerId,
|
||||
) -> Option<LanguageServerId> {
|
||||
let this = self.0.upgrade()?;
|
||||
Some(
|
||||
*this
|
||||
.id
|
||||
.get_or_init(|| init(LaunchDisposition::from(&*this))),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Weak<InnerTreeNode>> for LanguageServerTreeNode {
|
||||
fn from(weak: Weak<InnerTreeNode>) -> Self {
|
||||
LanguageServerTreeNode(weak)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shared state of a tree node: the server's identity, launch location/settings, and a
/// lazily-assigned server ID (set once the server is actually started).
#[derive(Debug)]
struct InnerTreeNode {
    // Unset until the server is launched; written at most once.
    id: OnceLock<LanguageServerId>,
    name: LanguageServerName,
    attach: Attach,
    // Project root this server is associated with.
    path: ProjectPath,
    settings: Arc<LspSettings>,
}
|
||||
|
||||
impl InnerTreeNode {
|
||||
fn new(
|
||||
name: LanguageServerName,
|
||||
attach: Attach,
|
||||
path: ProjectPath,
|
||||
settings: impl Into<Arc<LspSettings>>,
|
||||
) -> Self {
|
||||
InnerTreeNode {
|
||||
id: Default::default(),
|
||||
name,
|
||||
attach,
|
||||
path,
|
||||
settings: settings.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Determines how the list of adapters to query should be constructed.
pub(crate) enum AdapterQuery<'a> {
    /// Search for roots of all adapters associated with a given language name
    /// (respecting per-location language settings).
    Language(&'a LanguageName),
    /// Search for roots of adapter with a given name.
    Adapter(&'a LanguageServerName),
}
|
||||
|
||||
impl LanguageServerTree {
    /// Creates the tree and subscribes to `project_tree` events.
    pub(crate) fn new(
        project_tree: Entity<ProjectTree>,
        languages: Arc<LanguageRegistry>,
        cx: &mut App,
    ) -> Entity<Self> {
        cx.new(|cx| Self {
            _subscriptions: cx.subscribe(
                &project_tree,
                |_: &mut Self, _, event, _| {
                    // NOTE(review): currently a no-op; presumably a placeholder for reacting
                    // to ProjectTreeEvent::Cleared — confirm intended behavior.
                    if event == &ProjectTreeEvent::Cleared {}
                },
            ),
            project_tree,
            instances: Default::default(),
            attach_kind_cache: Default::default(),
            languages,
        })
    }
    /// Memoize calls to attach_kind on LspAdapter (which might be a WASM extension, thus ~expensive to call).
    fn attach_kind(&mut self, adapter: &AdapterWrapper) -> Attach {
        *self
            .attach_kind_cache
            .entry(adapter.0.name.clone())
            .or_insert_with(|| adapter.0.attach_kind())
    }

    /// Get all language server root points for a given path and language; the language servers might already be initialized at a given path.
    pub(crate) fn get<'a>(
        &'a mut self,
        path: ProjectPath,
        query: AdapterQuery<'_>,
        delegate: Arc<dyn LspAdapterDelegate>,
        cx: &mut App,
    ) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
        let settings_location = SettingsLocation {
            worktree_id: path.worktree_id,
            path: &path.path,
        };
        // Resolve the candidate adapter set either from the language's settings or from a
        // single explicitly-named adapter.
        let adapters = match query {
            AdapterQuery::Language(language_name) => {
                self.adapters_for_language(settings_location, language_name, cx)
            }
            AdapterQuery::Adapter(language_server_name) => IndexMap::from_iter(
                self.adapter_for_name(language_server_name)
                    .map(|adapter| (adapter, (LspSettings::default(), BTreeSet::new()))),
            ),
        };
        self.get_with_adapters(path, adapters, delegate, cx)
    }

    /// Shared implementation of `get`: resolves a project root per adapter (falling back to the
    /// worktree root) and returns the per-root tree nodes, preserving adapter order.
    fn get_with_adapters<'a>(
        &'a mut self,
        path: ProjectPath,
        adapters: IndexMap<AdapterWrapper, (LspSettings, BTreeSet<LanguageName>)>,
        delegate: Arc<dyn LspAdapterDelegate>,
        cx: &mut App,
    ) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
        let worktree_id = path.worktree_id;
        #[allow(clippy::mutable_key_type)]
        let mut roots = self.project_tree.update(cx, |this, cx| {
            this.root_for_path(
                path,
                adapters
                    .iter()
                    .map(|(adapter, _)| adapter.0.clone())
                    .collect(),
                delegate,
                cx,
            )
        });
        let mut root_path = None;
        // Backwards-compat: Fill in any adapters for which we did not detect the root as having the project root at the root of a worktree.
        for (adapter, _) in adapters.iter() {
            roots.entry(adapter.clone()).or_insert_with(|| {
                root_path
                    .get_or_insert_with(|| ProjectPath {
                        worktree_id,
                        path: Arc::from("".as_ref()),
                    })
                    .clone()
            });
        }

        roots
            .into_iter()
            .filter_map(move |(adapter, root_path)| {
                let attach = self.attach_kind(&adapter);
                // `index` preserves the adapter ordering requested by settings.
                let (index, _, (settings, new_languages)) = adapters.get_full(&adapter)?;
                let inner_node = self
                    .instances
                    .entry(root_path.worktree_id)
                    .or_default()
                    .roots
                    .entry(root_path.path.clone())
                    .or_default()
                    .entry(adapter.0.name.clone());
                // Reuse an existing node for this (root, server) pair or create a fresh,
                // uninitialized one.
                let (node, languages) = inner_node.or_insert_with(move || {
                    (
                        Arc::new(InnerTreeNode::new(
                            adapter.0.name(),
                            attach,
                            root_path,
                            settings.clone(),
                        )),
                        Default::default(),
                    )
                });
                languages.extend(new_languages.iter().cloned());
                Some((index, Arc::downgrade(&node).into()))
            })
            .sorted_by_key(|(index, _)| *index)
            .map(|(_, node)| node)
    }

    /// Looks up a registered adapter by server name.
    fn adapter_for_name(&self, name: &LanguageServerName) -> Option<AdapterWrapper> {
        self.languages.adapter_for_name(name).map(AdapterWrapper)
    }

    /// Resolves the (settings-ordered) set of adapters enabled for `language_name` at
    /// `settings_location`, loading/registering lazily-available adapters on demand.
    /// Returns empty when language servers are disabled for the language.
    fn adapters_for_language(
        &self,
        settings_location: SettingsLocation,
        language_name: &LanguageName,
        cx: &App,
    ) -> IndexMap<AdapterWrapper, (LspSettings, BTreeSet<LanguageName>)> {
        let settings = AllLanguageSettings::get(Some(settings_location), cx).language(
            Some(settings_location),
            Some(language_name),
            cx,
        );
        if !settings.enable_language_server {
            return Default::default();
        }
        let available_lsp_adapters = self.languages.lsp_adapters(&language_name);
        let available_language_servers = available_lsp_adapters
            .iter()
            .map(|lsp_adapter| lsp_adapter.name.clone())
            .collect::<Vec<_>>();

        let desired_language_servers =
            settings.customized_language_servers(&available_language_servers);
        let adapters_with_settings = desired_language_servers
            .into_iter()
            .filter_map(|desired_adapter| {
                // Prefer an already-registered adapter; otherwise try to load one that is
                // available but not yet registered for this language.
                let adapter = if let Some(adapter) = available_lsp_adapters
                    .iter()
                    .find(|adapter| adapter.name == desired_adapter)
                {
                    Some(adapter.clone())
                } else if let Some(adapter) =
                    self.languages.load_available_lsp_adapter(&desired_adapter)
                {
                    self.languages
                        .register_lsp_adapter(language_name.clone(), adapter.adapter.clone());
                    Some(adapter)
                } else {
                    None
                }?;
                let adapter_settings = crate::lsp_store::language_server_settings_for(
                    settings_location,
                    &adapter.name,
                    cx,
                )
                .cloned()
                .unwrap_or_default();
                Some((
                    AdapterWrapper(adapter),
                    (
                        adapter_settings,
                        BTreeSet::from_iter([language_name.clone()]),
                    ),
                ))
            })
            .collect::<IndexMap<_, _>>();
        // After starting all the language servers, reorder them to reflect the desired order
        // based on the settings.
        //
        // This is done, in part, to ensure that language servers loaded at different points
        // (e.g., native vs extension) still end up in the right order at the end, rather than
        // it being based on which language server happened to be loaded in first.
        self.languages.reorder_language_servers(
            &language_name,
            adapters_with_settings
                .keys()
                .map(|wrapper| wrapper.0.clone())
                .collect(),
        );

        adapters_with_settings
    }

    // Rebasing a tree:
    // - Clears it out
    // - Provides you with the indirect access to the old tree while you're reinitializing a new one (by querying it).
    pub(crate) fn rebase(&mut self) -> ServerTreeRebase<'_> {
        ServerTreeRebase::new(self)
    }

    /// Remove nodes with a given ID from the tree.
    /// Nodes that were never initialized (no ID yet) are kept.
    pub(crate) fn remove_nodes(&mut self, ids: &BTreeSet<LanguageServerId>) {
        for (_, servers) in &mut self.instances {
            for (_, nodes) in &mut servers.roots {
                nodes.retain(|_, (node, _)| node.id.get().map_or(true, |id| !ids.contains(&id)));
            }
        }
    }
}
|
||||
|
||||
/// In-progress rebase of a [LanguageServerTree]: the tree has been emptied, and queries
/// against this type repopulate it while reusing matching servers from the old contents.
pub(crate) struct ServerTreeRebase<'a> {
    // Contents of the tree as they were before the rebase started.
    old_contents: BTreeMap<WorktreeId, ServersForWorktree>,
    new_tree: &'a mut LanguageServerTree,
    /// All server IDs seen in the old tree.
    all_server_ids: BTreeMap<LanguageServerId, LanguageServerName>,
    /// Server IDs we've preserved for a new iteration of the tree. `all_server_ids - rebased_server_ids` is the
    /// set of server IDs that can be shut down.
    rebased_server_ids: BTreeSet<LanguageServerId>,
}
|
||||
|
||||
impl<'tree> ServerTreeRebase<'tree> {
    /// Takes the current contents out of `new_tree` (leaving it empty) and records every
    /// initialized server ID so unreferenced servers can later be reported by `finish`.
    fn new(new_tree: &'tree mut LanguageServerTree) -> Self {
        let old_contents = std::mem::take(&mut new_tree.instances);
        // Adapter attach kinds may change across a rebase (e.g. extension reload).
        new_tree.attach_kind_cache.clear();
        let all_server_ids = old_contents
            .values()
            .flat_map(|nodes| {
                nodes.roots.values().flat_map(|servers| {
                    servers.values().filter_map(|server| {
                        server
                            .0
                            .id
                            .get()
                            .copied()
                            .map(|id| (id, server.0.name.clone()))
                    })
                })
            })
            .collect();
        Self {
            old_contents,
            new_tree,
            all_server_ids,
            rebased_server_ids: BTreeSet::new(),
        }
    }

    /// Like [LanguageServerTree::get], but additionally carries over server IDs from the
    /// pre-rebase tree when the old node at the same root has identical attach kind and
    /// settings — avoiding an unnecessary server restart.
    pub(crate) fn get<'a>(
        &'a mut self,
        path: ProjectPath,
        query: AdapterQuery<'_>,
        delegate: Arc<dyn LspAdapterDelegate>,
        cx: &mut App,
    ) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
        let settings_location = SettingsLocation {
            worktree_id: path.worktree_id,
            path: &path.path,
        };
        let adapters = match query {
            AdapterQuery::Language(language_name) => {
                self.new_tree
                    .adapters_for_language(settings_location, language_name, cx)
            }
            AdapterQuery::Adapter(language_server_name) => IndexMap::from_iter(
                self.new_tree
                    .adapter_for_name(language_server_name)
                    .map(|adapter| (adapter, (LspSettings::default(), BTreeSet::new()))),
            ),
        };

        self.new_tree
            .get_with_adapters(path, adapters, delegate, cx)
            .filter_map(|node| {
                // Inspect result of the query and initialize it ourselves before
                // handing it off to the caller.
                let disposition = node.0.upgrade()?;

                if disposition.id.get().is_some() {
                    return Some(node);
                }
                // Look for a matching node in the old tree; reuse only when the launch
                // parameters (attach kind and settings) are unchanged.
                let Some((existing_node, _)) = self
                    .old_contents
                    .get(&disposition.path.worktree_id)
                    .and_then(|worktree_nodes| worktree_nodes.roots.get(&disposition.path.path))
                    .and_then(|roots| roots.get(&disposition.name))
                    .filter(|(old_node, _)| {
                        disposition.attach == old_node.attach
                            && disposition.settings == old_node.settings
                    })
                else {
                    return Some(node);
                };
                if let Some(existing_id) = existing_node.id.get() {
                    self.rebased_server_ids.insert(*existing_id);
                    // `.set` can only fail if the ID was set concurrently; ignore in that case.
                    disposition.id.set(*existing_id).ok();
                }

                Some(node)
            })
    }

    /// Returns IDs of servers that are no longer referenced (and can be shut down).
    pub(crate) fn finish(self) -> BTreeMap<LanguageServerId, LanguageServerName> {
        self.all_server_ids
            .into_iter()
            .filter(|(id, _)| !self.rebased_server_ids.contains(id))
            .collect()
    }
}
|
|
@ -7234,8 +7234,8 @@ mod tests {
|
|||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor().clone());
|
||||
fs.as_fake().insert_tree("/root", json!({})).await;
|
||||
let project = Project::test(fs, ["/root".as_ref()], cx).await;
|
||||
fs.as_fake().insert_tree(path!("/root"), json!({})).await;
|
||||
let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
|
||||
let workspace =
|
||||
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
@ -7258,7 +7258,7 @@ mod tests {
|
|||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/new")));
|
||||
cx.simulate_new_path_selection(|_| Some(PathBuf::from(path!("/root/new"))));
|
||||
save_task.await.unwrap();
|
||||
|
||||
// Rename the file
|
||||
|
|
|
@ -789,6 +789,7 @@ message Symbol {
|
|||
PointUtf16 start = 7;
|
||||
PointUtf16 end = 8;
|
||||
bytes signature = 9;
|
||||
uint64 language_server_id = 10;
|
||||
}
|
||||
|
||||
message OpenBufferForSymbol {
|
||||
|
|
|
@ -671,7 +671,7 @@ mod tests {
|
|||
}];
|
||||
delegate.set_workspaces(vec![(
|
||||
WorkspaceId::default(),
|
||||
SerializedWorkspaceLocation::from_local_paths(vec!["/test/path/"]),
|
||||
SerializedWorkspaceLocation::from_local_paths(vec![path!("/test/path/")]),
|
||||
)]);
|
||||
});
|
||||
})
|
||||
|
|
|
@ -117,7 +117,7 @@ async fn test_open_item_on_modifiers_release(cx: &mut gpui::TestAppContext) {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"1.txt": "First file",
|
||||
"2.txt": "Second file",
|
||||
|
@ -125,7 +125,7 @@ async fn test_open_item_on_modifiers_release(cx: &mut gpui::TestAppContext) {
|
|||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
|
|
|
@ -1020,7 +1020,7 @@ mod tests {
|
|||
|
||||
let _rs_file = workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.open_abs_path(PathBuf::from("/dir/b.rs"), true, window, cx)
|
||||
workspace.open_abs_path(PathBuf::from(path!("/dir/b.rs")), true, window, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
|
|
@ -1711,7 +1711,7 @@ mod tests {
|
|||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a": {
|
||||
},
|
||||
|
@ -1721,7 +1721,7 @@ mod tests {
|
|||
|
||||
cx.update(|cx| {
|
||||
open_paths(
|
||||
&[PathBuf::from("/root/a/new")],
|
||||
&[PathBuf::from(path!("/root/a/new"))],
|
||||
app_state.clone(),
|
||||
workspace::OpenOptions::default(),
|
||||
cx,
|
||||
|
|
|
@ -97,10 +97,11 @@ impl Render for QuickActionBar {
|
|||
show_inline_completions,
|
||||
inline_completion_enabled,
|
||||
) = {
|
||||
let supports_inlay_hints =
|
||||
editor.update(cx, |editor, cx| editor.supports_inlay_hints(cx));
|
||||
let editor = editor.read(cx);
|
||||
let selection_menu_enabled = editor.selection_menu_enabled(cx);
|
||||
let inlay_hints_enabled = editor.inlay_hints_enabled();
|
||||
let supports_inlay_hints = editor.supports_inlay_hints(cx);
|
||||
let git_blame_inline_enabled = editor.git_blame_inline_enabled();
|
||||
let show_git_blame_gutter = editor.show_git_blame_gutter();
|
||||
let auto_signature_help_enabled = editor.auto_signature_help_enabled(cx);
|
||||
|
@ -472,13 +473,22 @@ impl ToolbarItemView for QuickActionBar {
|
|||
self._inlay_hints_enabled_subscription.take();
|
||||
|
||||
if let Some(editor) = active_item.downcast::<Editor>() {
|
||||
let mut inlay_hints_enabled = editor.read(cx).inlay_hints_enabled();
|
||||
let mut supports_inlay_hints = editor.read(cx).supports_inlay_hints(cx);
|
||||
let (mut inlay_hints_enabled, mut supports_inlay_hints) =
|
||||
editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.inlay_hints_enabled(),
|
||||
editor.supports_inlay_hints(cx),
|
||||
)
|
||||
});
|
||||
self._inlay_hints_enabled_subscription =
|
||||
Some(cx.observe(&editor, move |_, editor, cx| {
|
||||
let editor = editor.read(cx);
|
||||
let new_inlay_hints_enabled = editor.inlay_hints_enabled();
|
||||
let new_supports_inlay_hints = editor.supports_inlay_hints(cx);
|
||||
let (new_inlay_hints_enabled, new_supports_inlay_hints) =
|
||||
editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.inlay_hints_enabled(),
|
||||
editor.supports_inlay_hints(cx),
|
||||
)
|
||||
});
|
||||
let should_notify = inlay_hints_enabled != new_inlay_hints_enabled
|
||||
|| supports_inlay_hints != new_supports_inlay_hints;
|
||||
inlay_hints_enabled = new_inlay_hints_enabled;
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue