Merge branch 'main' into prompt-on-close
commit fe27a27cb6
50 changed files with 2443 additions and 1640 deletions

Cargo.lock (generated): 11 changes
@@ -5428,6 +5428,16 @@ dependencies = [
 "tree-sitter",
 ]

+[[package]]
+name = "tree-sitter-typescript"
+version = "0.20.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e8ed0ecb931cdff13c6a13f45ccd615156e2779d9ffb0395864e05505e6e86d"
+dependencies = [
+"cc",
+"tree-sitter",
+]
+
 [[package]]
 name = "ttf-parser"
 version = "0.9.0"
@@ -6039,6 +6049,7 @@ dependencies = [
 "tree-sitter-json",
 "tree-sitter-markdown",
 "tree-sitter-rust",
+"tree-sitter-typescript",
 "unindent",
 "url",
 "util",
@@ -2593,6 +2593,8 @@ impl Editor {
 }
 }
 }
+} else {
+return Ok(());
 }

 let mut ranges_to_highlight = Vec::new();
@@ -6451,13 +6453,12 @@ pub fn styled_runs_for_code_label<'a>(

 #[cfg(test)]
 mod tests {

 use super::*;
 use gpui::{
 geometry::rect::RectF,
 platform::{WindowBounds, WindowOptions},
 };
-use language::{LanguageConfig, LanguageServerConfig};
+use language::{FakeLspAdapter, LanguageConfig};
 use lsp::FakeLanguageServer;
 use project::FakeFs;
 use smol::stream::StreamExt;
@@ -8893,26 +8894,27 @@ mod tests {
 cx.foreground().forbid_parking();
 cx.update(populate_settings);

-let (mut language_server_config, mut fake_servers) = LanguageServerConfig::fake();
-language_server_config.set_fake_capabilities(lsp::ServerCapabilities {
-document_formatting_provider: Some(lsp::OneOf::Left(true)),
-..Default::default()
-});
-let language = Arc::new(Language::new(
+let mut language = Language::new(
 LanguageConfig {
 name: "Rust".into(),
 path_suffixes: vec!["rs".to_string()],
-language_server: Some(language_server_config),
 ..Default::default()
 },
 Some(tree_sitter_rust::language()),
-));
+);
+let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
+capabilities: lsp::ServerCapabilities {
+document_formatting_provider: Some(lsp::OneOf::Left(true)),
+..Default::default()
+},
+..Default::default()
+});

 let fs = FakeFs::new(cx.background().clone());
 fs.insert_file("/file.rs", Default::default()).await;

 let project = Project::test(fs, cx);
-project.update(cx, |project, _| project.languages().add(language));
+project.update(cx, |project, _| project.languages().add(Arc::new(language)));

 let worktree_id = project
 .update(cx, |project, cx| {
@@ -8926,7 +8928,9 @@ mod tests {
 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
 .await
 .unwrap();
-let mut fake_server = fake_servers.next().await.unwrap();
+cx.foreground().start_waiting();
+let fake_server = fake_servers.next().await.unwrap();

 let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
 let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
@@ -8940,13 +8944,14 @@ mod tests {
 params.text_document.uri,
 lsp::Url::from_file_path("/file.rs").unwrap()
 );
-Some(vec![lsp::TextEdit::new(
+Ok(Some(vec![lsp::TextEdit::new(
 lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)),
 ", ".to_string(),
-)])
+)]))
 })
 .next()
 .await;
+cx.foreground().start_waiting();
 save.await.unwrap();
 assert_eq!(
 editor.read_with(cx, |editor, cx| editor.text(cx)),
@@ -8968,6 +8973,7 @@ mod tests {
 });
 let save = cx.update(|cx| editor.save(project.clone(), cx));
 cx.foreground().advance_clock(items::FORMAT_TIMEOUT);
+cx.foreground().start_waiting();
 save.await.unwrap();
 assert_eq!(
 editor.read_with(cx, |editor, cx| editor.text(cx)),
@@ -8980,23 +8986,24 @@ mod tests {
 async fn test_completion(cx: &mut gpui::TestAppContext) {
 cx.update(populate_settings);

-let (mut language_server_config, mut fake_servers) = LanguageServerConfig::fake();
-language_server_config.set_fake_capabilities(lsp::ServerCapabilities {
-completion_provider: Some(lsp::CompletionOptions {
-trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
-..Default::default()
-}),
-..Default::default()
-});
-let language = Arc::new(Language::new(
+let mut language = Language::new(
 LanguageConfig {
 name: "Rust".into(),
 path_suffixes: vec!["rs".to_string()],
-language_server: Some(language_server_config),
 ..Default::default()
 },
 Some(tree_sitter_rust::language()),
-));
+);
+let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
+capabilities: lsp::ServerCapabilities {
+completion_provider: Some(lsp::CompletionOptions {
+trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
+..Default::default()
+}),
+..Default::default()
+},
+..Default::default()
+});

 let text = "
 one
@@ -9009,7 +9016,7 @@ mod tests {
 fs.insert_file("/file.rs", text).await;

 let project = Project::test(fs, cx);
-project.update(cx, |project, _| project.languages().add(language));
+project.update(cx, |project, _| project.languages().add(Arc::new(language)));

 let worktree_id = project
 .update(cx, |project, cx| {
@@ -9168,7 +9175,7 @@ mod tests {
 params.text_document_position.position,
 lsp::Position::new(position.row, position.column)
 );
-Some(lsp::CompletionResponse::Array(
+Ok(Some(lsp::CompletionResponse::Array(
 completions
 .iter()
 .map(|(range, new_text)| lsp::CompletionItem {
@@ -9183,7 +9190,7 @@ mod tests {
 ..Default::default()
 })
 .collect(),
-))
+)))
 }
 })
 .next()
@@ -9197,7 +9204,7 @@ mod tests {
 fake.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _| {
 let edit = edit.clone();
 async move {
-lsp::CompletionItem {
+Ok(lsp::CompletionItem {
 additional_text_edits: edit.map(|(range, new_text)| {
 vec![lsp::TextEdit::new(
 lsp::Range::new(
@@ -9208,7 +9215,7 @@ mod tests {
 )]
 }),
 ..Default::default()
-}
+})
 }
 })
 .next()
@@ -1,8 +1,7 @@
 pub use crate::{
 diagnostic_set::DiagnosticSet,
 highlight_map::{HighlightId, HighlightMap},
-proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
-PLAIN_TEXT,
+proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
 };
 use crate::{
 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
@@ -34,6 +34,23 @@ pub struct Summary {
 count: usize,
 }

+impl<T> DiagnosticEntry<T> {
+// Used to provide diagnostic context to lsp codeAction request
+pub fn to_lsp_diagnostic_stub(&self) -> lsp::Diagnostic {
+let code = self
+.diagnostic
+.code
+.clone()
+.map(lsp::NumberOrString::String);
+
+lsp::Diagnostic {
+code,
+severity: Some(self.diagnostic.severity),
+..Default::default()
+}
+}
+}
+
 impl DiagnosticSet {
 pub fn from_sorted_entries<I>(iter: I, buffer: &text::BufferSnapshot) -> Self
 where
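The new to_lsp_diagnostic_stub helper exists so that diagnostics can be attached to the context of a textDocument/codeAction request. A minimal sketch of such a call site, using the lsp_types shapes re-exported by the lsp crate; the helper name and signature below are illustrative, not part of this commit:

// Illustrative only: build a code-action request whose context carries the
// diagnostics overlapping the selection, converted with the new stub helper.
fn code_action_params(
    uri: lsp::Url,
    range: lsp::Range,
    entries: &[DiagnosticEntry<PointUtf16>],
) -> lsp::CodeActionParams {
    lsp::CodeActionParams {
        text_document: lsp::TextDocumentIdentifier::new(uri),
        range,
        context: lsp::CodeActionContext {
            // Each entry is reduced to the code/severity stub the server needs.
            diagnostics: entries.iter().map(|e| e.to_lsp_diagnostic_stub()).collect(),
            ..Default::default()
        },
        work_done_progress_params: Default::default(),
        partial_result_params: Default::default(),
    }
}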
@@ -7,8 +7,8 @@ pub mod proto;
 mod tests;

 use anyhow::{anyhow, Context, Result};
-use client::http::{self, HttpClient};
-use collections::HashSet;
+use client::http::HttpClient;
+use collections::HashMap;
 use futures::{
 future::{BoxFuture, Shared},
 FutureExt, TryFutureExt,
@@ -20,6 +20,7 @@ use parking_lot::{Mutex, RwLock};
 use serde::Deserialize;
 use serde_json::Value;
 use std::{
+any::Any,
 cell::RefCell,
 ops::Range,
 path::{Path, PathBuf},
@@ -51,7 +52,6 @@ lazy_static! {
 brackets: Default::default(),
 autoclose_before: Default::default(),
 line_comment: None,
-language_server: None,
 },
 None,
 ));
@@ -61,20 +61,18 @@ pub trait ToLspPosition {
 fn to_lsp_position(self) -> lsp::Position;
 }

-pub struct LspBinaryVersion {
-pub name: String,
-pub url: Option<http::Url>,
-}
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub struct LanguageServerName(pub Arc<str>);

 pub trait LspAdapter: 'static + Send + Sync {
-fn name(&self) -> &'static str;
+fn name(&self) -> LanguageServerName;
 fn fetch_latest_server_version(
 &self,
 http: Arc<dyn HttpClient>,
-) -> BoxFuture<'static, Result<LspBinaryVersion>>;
+) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>>;
 fn fetch_server_binary(
 &self,
-version: LspBinaryVersion,
+version: Box<dyn 'static + Send + Any>,
 http: Arc<dyn HttpClient>,
 container_dir: PathBuf,
 ) -> BoxFuture<'static, Result<PathBuf>>;
@@ -96,6 +94,14 @@ pub trait LspAdapter: 'static + Send + Sync {
 fn initialization_options(&self) -> Option<Value> {
 None
 }
+
+fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
+Default::default()
+}
+
+fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
+None
+}
 }

 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -113,7 +119,6 @@ pub struct LanguageConfig {
 #[serde(default)]
 pub autoclose_before: String,
 pub line_comment: Option<String>,
-pub language_server: Option<LanguageServerConfig>,
 }

 impl Default for LanguageConfig {
@@ -124,25 +129,17 @@ impl Default for LanguageConfig {
 brackets: Default::default(),
 autoclose_before: Default::default(),
 line_comment: Default::default(),
-language_server: Default::default(),
 }
 }
 }

-#[derive(Default, Deserialize)]
-pub struct LanguageServerConfig {
-pub disk_based_diagnostic_sources: HashSet<String>,
-pub disk_based_diagnostics_progress_token: Option<String>,
-#[cfg(any(test, feature = "test-support"))]
-#[serde(skip)]
-fake_config: Option<FakeLanguageServerConfig>,
-}
-
 #[cfg(any(test, feature = "test-support"))]
-struct FakeLanguageServerConfig {
-servers_tx: mpsc::UnboundedSender<lsp::FakeLanguageServer>,
-capabilities: lsp::ServerCapabilities,
-initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
+pub struct FakeLspAdapter {
+pub name: &'static str,
+pub capabilities: lsp::ServerCapabilities,
+pub initializer: Option<Box<dyn 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer)>>,
+pub disk_based_diagnostics_progress_token: Option<&'static str>,
+pub disk_based_diagnostics_sources: &'static [&'static str],
 }

 #[derive(Clone, Debug, Deserialize)]
@@ -157,7 +154,12 @@ pub struct Language {
 pub(crate) config: LanguageConfig,
 pub(crate) grammar: Option<Arc<Grammar>>,
 pub(crate) adapter: Option<Arc<dyn LspAdapter>>,
-lsp_binary_path: Mutex<Option<Shared<BoxFuture<'static, Result<PathBuf, Arc<anyhow::Error>>>>>>,
+
+#[cfg(any(test, feature = "test-support"))]
+fake_adapter: Option<(
+mpsc::UnboundedSender<lsp::FakeLanguageServer>,
+Arc<FakeLspAdapter>,
+)>,
 }

 pub struct Grammar {
@@ -184,6 +186,12 @@ pub struct LanguageRegistry {
 lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
 lsp_binary_statuses_rx: async_broadcast::Receiver<(Arc<Language>, LanguageServerBinaryStatus)>,
 login_shell_env_loaded: Shared<Task<()>>,
+lsp_binary_paths: Mutex<
+HashMap<
+LanguageServerName,
+Shared<BoxFuture<'static, Result<PathBuf, Arc<anyhow::Error>>>>,
+>,
+>,
 }

 impl LanguageRegistry {
@@ -195,6 +203,7 @@ impl LanguageRegistry {
 lsp_binary_statuses_tx,
 lsp_binary_statuses_rx,
 login_shell_env_loaded: login_shell_env_loaded.shared(),
+lsp_binary_paths: Default::default(),
 }
 }

@@ -244,7 +253,7 @@ impl LanguageRegistry {
 }

 pub fn start_language_server(
-&self,
+self: &Arc<Self>,
 server_id: usize,
 language: Arc<Language>,
 root_path: Arc<Path>,
@@ -252,34 +261,20 @@ impl LanguageRegistry {
 cx: &mut MutableAppContext,
 ) -> Option<Task<Result<lsp::LanguageServer>>> {
 #[cfg(any(test, feature = "test-support"))]
-if language
-.config
-.language_server
-.as_ref()
-.and_then(|config| config.fake_config.as_ref())
-.is_some()
-{
+if language.fake_adapter.is_some() {
 let language = language.clone();
-return Some(cx.spawn(|mut cx| async move {
-let fake_config = language
-.config
-.language_server
-.as_ref()
-.unwrap()
-.fake_config
-.as_ref()
-.unwrap();
-let (server, mut fake_server) = cx.update(|cx| {
-lsp::LanguageServer::fake_with_capabilities(
-fake_config.capabilities.clone(),
-cx,
-)
-});
-if let Some(initializer) = &fake_config.initializer {
+return Some(cx.spawn(|cx| async move {
+let (servers_tx, fake_adapter) = language.fake_adapter.as_ref().unwrap();
+let (server, mut fake_server) = lsp::LanguageServer::fake_with_capabilities(
+fake_adapter.capabilities.clone(),
+cx.clone(),
+);
+
+if let Some(initializer) = &fake_adapter.initializer {
 initializer(&mut fake_server);
 }

-let servers_tx = fake_config.servers_tx.clone();
+let servers_tx = servers_tx.clone();
 cx.background()
 .spawn(async move {
 fake_server
@@ -298,16 +293,17 @@ impl LanguageRegistry {
 .ok_or_else(|| anyhow!("language server download directory has not been assigned"))
 .log_err()?;

+let this = self.clone();
 let adapter = language.adapter.clone()?;
-let background = cx.background().clone();
 let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
 let login_shell_env_loaded = self.login_shell_env_loaded.clone();
-Some(cx.background().spawn(async move {
+Some(cx.spawn(|cx| async move {
 login_shell_env_loaded.await;
-let server_binary_path = language
-.lsp_binary_path
+let server_binary_path = this
+.lsp_binary_paths
 .lock()
-.get_or_insert_with(|| {
+.entry(adapter.name())
+.or_insert_with(|| {
 get_server_binary_path(
 adapter.clone(),
 language.clone(),
@@ -329,8 +325,7 @@ impl LanguageRegistry {
 &server_binary_path,
 server_args,
 &root_path,
-adapter.initialization_options(),
-background,
+cx,
 )?;
 Ok(server)
 }))
@@ -350,7 +345,7 @@ async fn get_server_binary_path(
 download_dir: Arc<Path>,
 statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
 ) -> Result<PathBuf> {
-let container_dir = download_dir.join(adapter.name());
+let container_dir = download_dir.join(adapter.name().0.as_ref());
 if !container_dir.exists() {
 smol::fs::create_dir_all(&container_dir)
 .await
@@ -423,10 +418,16 @@ impl Language {
 })
 }),
 adapter: None,
-lsp_binary_path: Default::default(),
+
+#[cfg(any(test, feature = "test-support"))]
+fake_adapter: None,
 }
 }

+pub fn lsp_adapter(&self) -> Option<Arc<dyn LspAdapter>> {
+self.adapter.clone()
+}
+
 pub fn with_highlights_query(mut self, source: &str) -> Result<Self> {
 let grammar = self
 .grammar
@@ -467,11 +468,23 @@ impl Language {
 Ok(self)
 }

-pub fn with_lsp_adapter(mut self, lsp_adapter: impl LspAdapter) -> Self {
-self.adapter = Some(Arc::new(lsp_adapter));
+pub fn with_lsp_adapter(mut self, lsp_adapter: Arc<dyn LspAdapter>) -> Self {
+self.adapter = Some(lsp_adapter);
 self
 }

+#[cfg(any(test, feature = "test-support"))]
+pub fn set_fake_lsp_adapter(
+&mut self,
+fake_lsp_adapter: FakeLspAdapter,
+) -> mpsc::UnboundedReceiver<lsp::FakeLanguageServer> {
+let (servers_tx, servers_rx) = mpsc::unbounded();
+let adapter = Arc::new(fake_lsp_adapter);
+self.fake_adapter = Some((servers_tx, adapter.clone()));
+self.adapter = Some(adapter);
+servers_rx
+}
+
 pub fn name(&self) -> Arc<str> {
 self.config.name.clone()
 }
@@ -480,18 +493,16 @@ impl Language {
 self.config.line_comment.as_deref()
 }

-pub fn disk_based_diagnostic_sources(&self) -> Option<&HashSet<String>> {
-self.config
-.language_server
-.as_ref()
-.map(|config| &config.disk_based_diagnostic_sources)
+pub fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
+self.adapter.as_ref().map_or(&[] as &[_], |adapter| {
+adapter.disk_based_diagnostic_sources()
+})
 }

-pub fn disk_based_diagnostics_progress_token(&self) -> Option<&String> {
-self.config
-.language_server
+pub fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
+self.adapter
 .as_ref()
-.and_then(|config| config.disk_based_diagnostics_progress_token.as_ref())
+.and_then(|adapter| adapter.disk_based_diagnostics_progress_token())
 }

 pub fn process_diagnostics(&self, diagnostics: &mut lsp::PublishDiagnosticsParams) {
@@ -598,47 +609,70 @@ impl CodeLabel {
 }

 #[cfg(any(test, feature = "test-support"))]
-impl LanguageServerConfig {
-pub fn fake() -> (Self, mpsc::UnboundedReceiver<lsp::FakeLanguageServer>) {
-let (servers_tx, servers_rx) = mpsc::unbounded();
-(
-Self {
-fake_config: Some(FakeLanguageServerConfig {
-servers_tx,
-capabilities: lsp::LanguageServer::full_capabilities(),
-initializer: None,
-}),
-disk_based_diagnostics_progress_token: Some("fakeServer/check".to_string()),
-..Default::default()
-},
-servers_rx,
-)
-}
-
-pub fn set_fake_capabilities(&mut self, capabilities: lsp::ServerCapabilities) {
-self.fake_config.as_mut().unwrap().capabilities = capabilities;
-}
-
-pub fn set_fake_initializer(
-&mut self,
-initializer: impl 'static + Send + Sync + Fn(&mut lsp::FakeLanguageServer),
-) {
-self.fake_config.as_mut().unwrap().initializer = Some(Box::new(initializer));
+impl Default for FakeLspAdapter {
+fn default() -> Self {
+Self {
+name: "the-fake-language-server",
+capabilities: lsp::LanguageServer::full_capabilities(),
+initializer: None,
+disk_based_diagnostics_progress_token: None,
+disk_based_diagnostics_sources: &[],
+}
 }
 }

-impl ToLspPosition for PointUtf16 {
-fn to_lsp_position(self) -> lsp::Position {
-lsp::Position::new(self.row, self.column)
+#[cfg(any(test, feature = "test-support"))]
+impl LspAdapter for FakeLspAdapter {
+fn name(&self) -> LanguageServerName {
+LanguageServerName(self.name.into())
 }

+fn fetch_latest_server_version(
+&self,
+_: Arc<dyn HttpClient>,
+) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
+unreachable!();
+}
+
+fn fetch_server_binary(
+&self,
+_: Box<dyn 'static + Send + Any>,
+_: Arc<dyn HttpClient>,
+_: PathBuf,
+) -> BoxFuture<'static, Result<PathBuf>> {
+unreachable!();
+}
+
+fn cached_server_binary(&self, _: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
+unreachable!();
+}
+
+fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
+
+fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
+self.disk_based_diagnostics_sources
+}
+
+fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
+self.disk_based_diagnostics_progress_token
+}
+}
+
+pub fn point_to_lsp(point: PointUtf16) -> lsp::Position {
+lsp::Position::new(point.row, point.column)
 }

 pub fn point_from_lsp(point: lsp::Position) -> PointUtf16 {
 PointUtf16::new(point.line, point.character)
 }

-pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
-let start = PointUtf16::new(range.start.line, range.start.character);
-let end = PointUtf16::new(range.end.line, range.end.character);
-start..end
+pub fn range_to_lsp(range: Range<PointUtf16>) -> lsp::Range {
+lsp::Range {
+start: point_to_lsp(range.start),
+end: point_to_lsp(range.end),
+}
+}
+
+pub fn range_from_lsp(range: lsp::Range) -> Range<PointUtf16> {
+point_from_lsp(range.start)..point_from_lsp(range.end)
 }
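The reworked LspAdapter trait identifies servers with LanguageServerName and passes the fetched version between fetch_latest_server_version and fetch_server_binary as a type-erased Box<dyn Any>. A minimal sketch of that handoff, with a hypothetical MyVersion type that is not part of this commit, assuming each adapter downcasts back to whatever it produced earlier:

use std::any::Any;

// Hypothetical version payload an adapter might return from
// fetch_latest_server_version, boxed as Box<dyn 'static + Send + Any>.
struct MyVersion {
    tag: String,
}

// Inside fetch_server_binary the adapter recovers the concrete type again.
fn version_tag(version: Box<dyn 'static + Send + Any>) -> Option<String> {
    version.downcast::<MyVersion>().ok().map(|v| v.tag)
}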
@@ -931,7 +931,6 @@ fn rust_lang() -> Language {
 LanguageConfig {
 name: "Rust".into(),
 path_suffixes: vec!["rs".to_string()],
-language_server: None,
 ..Default::default()
 },
 Some(tree_sitter_rust::language()),
@ -1,15 +1,17 @@
|
||||||
|
pub use lsp_types::*;
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite};
|
use futures::{channel::oneshot, io::BufWriter, AsyncRead, AsyncWrite};
|
||||||
use gpui::{executor, Task};
|
use gpui::{executor, AsyncAppContext, Task};
|
||||||
use parking_lot::{Mutex, RwLock};
|
use parking_lot::Mutex;
|
||||||
use postage::{barrier, prelude::Stream};
|
use postage::{barrier, prelude::Stream};
|
||||||
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
use serde::{de::DeserializeOwned, Deserialize, Serialize};
|
||||||
use serde_json::{json, value::RawValue, Value};
|
use serde_json::{json, value::RawValue, Value};
|
||||||
use smol::{
|
use smol::{
|
||||||
channel,
|
channel,
|
||||||
io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader},
|
io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader},
|
||||||
process::Command,
|
process,
|
||||||
};
|
};
|
||||||
use std::{
|
use std::{
|
||||||
future::Future,
|
future::Future,
|
||||||
|
@ -22,15 +24,12 @@ use std::{
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
use std::{path::Path, process::Stdio};
|
use std::{path::Path, process::Stdio};
|
||||||
use util::TryFutureExt;
|
use util::{ResultExt, TryFutureExt};
|
||||||
|
|
||||||
pub use lsp_types::*;
|
|
||||||
|
|
||||||
const JSON_RPC_VERSION: &'static str = "2.0";
|
const JSON_RPC_VERSION: &'static str = "2.0";
|
||||||
const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
|
const CONTENT_LEN_HEADER: &'static str = "Content-Length: ";
|
||||||
|
|
||||||
type NotificationHandler =
|
type NotificationHandler = Box<dyn Send + FnMut(Option<usize>, &str, AsyncAppContext)>;
|
||||||
Box<dyn Send + Sync + FnMut(Option<usize>, &str, &mut channel::Sender<Vec<u8>>) -> Result<()>>;
|
|
||||||
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
|
type ResponseHandler = Box<dyn Send + FnOnce(Result<&str, Error>)>;
|
||||||
|
|
||||||
pub struct LanguageServer {
|
pub struct LanguageServer {
|
||||||
|
@ -39,18 +38,17 @@ pub struct LanguageServer {
|
||||||
outbound_tx: channel::Sender<Vec<u8>>,
|
outbound_tx: channel::Sender<Vec<u8>>,
|
||||||
name: String,
|
name: String,
|
||||||
capabilities: ServerCapabilities,
|
capabilities: ServerCapabilities,
|
||||||
notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
|
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||||
response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
|
response_handlers: Arc<Mutex<HashMap<usize, ResponseHandler>>>,
|
||||||
executor: Arc<executor::Background>,
|
executor: Arc<executor::Background>,
|
||||||
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
io_tasks: Mutex<Option<(Task<Option<()>>, Task<Option<()>>)>>,
|
||||||
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
output_done_rx: Mutex<Option<barrier::Receiver>>,
|
||||||
root_path: PathBuf,
|
root_path: PathBuf,
|
||||||
options: Option<Value>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Subscription {
|
pub struct Subscription {
|
||||||
method: &'static str,
|
method: &'static str,
|
||||||
notification_handlers: Arc<RwLock<HashMap<&'static str, NotificationHandler>>>,
|
notification_handlers: Arc<Mutex<HashMap<&'static str, NotificationHandler>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
|
@ -61,18 +59,6 @@ struct Request<'a, T> {
|
||||||
params: T,
|
params: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct AnyRequest<'a> {
|
|
||||||
id: usize,
|
|
||||||
#[serde(borrow)]
|
|
||||||
jsonrpc: &'a str,
|
|
||||||
#[serde(borrow)]
|
|
||||||
method: &'a str,
|
|
||||||
#[serde(borrow)]
|
|
||||||
params: &'a RawValue,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
struct AnyResponse<'a> {
|
struct AnyResponse<'a> {
|
||||||
id: usize,
|
id: usize,
|
||||||
|
@ -85,7 +71,8 @@ struct AnyResponse<'a> {
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
struct Response<T> {
|
struct Response<T> {
|
||||||
id: usize,
|
id: usize,
|
||||||
result: T,
|
result: Option<T>,
|
||||||
|
error: Option<Error>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize)]
|
#[derive(Serialize, Deserialize)]
|
||||||
|
@ -118,15 +105,14 @@ impl LanguageServer {
|
||||||
binary_path: &Path,
|
binary_path: &Path,
|
||||||
args: &[&str],
|
args: &[&str],
|
||||||
root_path: &Path,
|
root_path: &Path,
|
||||||
options: Option<Value>,
|
cx: AsyncAppContext,
|
||||||
background: Arc<executor::Background>,
|
|
||||||
) -> Result<Self> {
|
) -> Result<Self> {
|
||||||
let working_dir = if root_path.is_dir() {
|
let working_dir = if root_path.is_dir() {
|
||||||
root_path
|
root_path
|
||||||
} else {
|
} else {
|
||||||
root_path.parent().unwrap_or(Path::new("/"))
|
root_path.parent().unwrap_or(Path::new("/"))
|
||||||
};
|
};
|
||||||
let mut server = Command::new(binary_path)
|
let mut server = process::Command::new(binary_path)
|
||||||
.current_dir(working_dir)
|
.current_dir(working_dir)
|
||||||
.args(args)
|
.args(args)
|
||||||
.stdin(Stdio::piped())
|
.stdin(Stdio::piped())
|
||||||
|
@ -136,99 +122,97 @@ impl LanguageServer {
|
||||||
let stdin = server.stdin.take().unwrap();
|
let stdin = server.stdin.take().unwrap();
|
||||||
let stdout = server.stdout.take().unwrap();
|
let stdout = server.stdout.take().unwrap();
|
||||||
let mut server =
|
let mut server =
|
||||||
Self::new_internal(server_id, stdin, stdout, root_path, options, background);
|
Self::new_internal(server_id, stdin, stdout, root_path, cx, |notification| {
|
||||||
|
log::info!(
|
||||||
|
"unhandled notification {}:\n{}",
|
||||||
|
notification.method,
|
||||||
|
serde_json::to_string_pretty(
|
||||||
|
&Value::from_str(notification.params.get()).unwrap()
|
||||||
|
)
|
||||||
|
.unwrap()
|
||||||
|
);
|
||||||
|
});
|
||||||
if let Some(name) = binary_path.file_name() {
|
if let Some(name) = binary_path.file_name() {
|
||||||
server.name = name.to_string_lossy().to_string();
|
server.name = name.to_string_lossy().to_string();
|
||||||
}
|
}
|
||||||
Ok(server)
|
Ok(server)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new_internal<Stdin, Stdout>(
|
fn new_internal<Stdin, Stdout, F>(
|
||||||
server_id: usize,
|
server_id: usize,
|
||||||
stdin: Stdin,
|
stdin: Stdin,
|
||||||
stdout: Stdout,
|
stdout: Stdout,
|
||||||
root_path: &Path,
|
root_path: &Path,
|
||||||
options: Option<Value>,
|
cx: AsyncAppContext,
|
||||||
executor: Arc<executor::Background>,
|
mut on_unhandled_notification: F,
|
||||||
) -> Self
|
) -> Self
|
||||||
where
|
where
|
||||||
Stdin: AsyncWrite + Unpin + Send + 'static,
|
Stdin: AsyncWrite + Unpin + Send + 'static,
|
||||||
Stdout: AsyncRead + Unpin + Send + 'static,
|
Stdout: AsyncRead + Unpin + Send + 'static,
|
||||||
|
F: FnMut(AnyNotification) + 'static + Send,
|
||||||
{
|
{
|
||||||
let mut stdin = BufWriter::new(stdin);
|
let mut stdin = BufWriter::new(stdin);
|
||||||
let mut stdout = BufReader::new(stdout);
|
let mut stdout = BufReader::new(stdout);
|
||||||
let (outbound_tx, outbound_rx) = channel::unbounded::<Vec<u8>>();
|
let (outbound_tx, outbound_rx) = channel::unbounded::<Vec<u8>>();
|
||||||
let notification_handlers =
|
let notification_handlers =
|
||||||
Arc::new(RwLock::new(HashMap::<_, NotificationHandler>::default()));
|
Arc::new(Mutex::new(HashMap::<_, NotificationHandler>::default()));
|
||||||
let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::default()));
|
let response_handlers = Arc::new(Mutex::new(HashMap::<_, ResponseHandler>::default()));
|
||||||
let input_task = executor.spawn(
|
let input_task = cx.spawn(|cx| {
|
||||||
{
|
let notification_handlers = notification_handlers.clone();
|
||||||
let notification_handlers = notification_handlers.clone();
|
let response_handlers = response_handlers.clone();
|
||||||
let response_handlers = response_handlers.clone();
|
async move {
|
||||||
let mut outbound_tx = outbound_tx.clone();
|
let _clear_response_handlers = ClearResponseHandlers(response_handlers.clone());
|
||||||
async move {
|
let mut buffer = Vec::new();
|
||||||
let _clear_response_handlers = ClearResponseHandlers(response_handlers.clone());
|
loop {
|
||||||
let mut buffer = Vec::new();
|
buffer.clear();
|
||||||
loop {
|
stdout.read_until(b'\n', &mut buffer).await?;
|
||||||
buffer.clear();
|
stdout.read_until(b'\n', &mut buffer).await?;
|
||||||
stdout.read_until(b'\n', &mut buffer).await?;
|
let message_len: usize = std::str::from_utf8(&buffer)?
|
||||||
stdout.read_until(b'\n', &mut buffer).await?;
|
.strip_prefix(CONTENT_LEN_HEADER)
|
||||||
let message_len: usize = std::str::from_utf8(&buffer)?
|
.ok_or_else(|| anyhow!("invalid header"))?
|
||||||
.strip_prefix(CONTENT_LEN_HEADER)
|
.trim_end()
|
||||||
.ok_or_else(|| anyhow!("invalid header"))?
|
.parse()?;
|
||||||
.trim_end()
|
|
||||||
.parse()?;
|
|
||||||
|
|
||||||
buffer.resize(message_len, 0);
|
buffer.resize(message_len, 0);
|
||||||
stdout.read_exact(&mut buffer).await?;
|
stdout.read_exact(&mut buffer).await?;
|
||||||
|
log::trace!("incoming message:{}", String::from_utf8_lossy(&buffer));
|
||||||
|
|
||||||
if let Ok(AnyNotification { id, method, params }) =
|
if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
|
||||||
serde_json::from_slice(&buffer)
|
if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
|
||||||
{
|
handler(msg.id, msg.params.get(), cx.clone());
|
||||||
if let Some(handler) = notification_handlers.write().get_mut(method) {
|
|
||||||
if let Err(e) = handler(id, params.get(), &mut outbound_tx) {
|
|
||||||
log::error!("error handling {} message: {:?}", method, e);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
log::info!(
|
|
||||||
"unhandled notification {}:\n{}",
|
|
||||||
method,
|
|
||||||
serde_json::to_string_pretty(
|
|
||||||
&Value::from_str(params.get()).unwrap()
|
|
||||||
)
|
|
||||||
.unwrap()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} else if let Ok(AnyResponse { id, error, result }) =
|
|
||||||
serde_json::from_slice(&buffer)
|
|
||||||
{
|
|
||||||
if let Some(handler) = response_handlers.lock().remove(&id) {
|
|
||||||
if let Some(error) = error {
|
|
||||||
handler(Err(error));
|
|
||||||
} else if let Some(result) = result {
|
|
||||||
handler(Ok(result.get()));
|
|
||||||
} else {
|
|
||||||
handler(Ok("null"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
return Err(anyhow!(
|
on_unhandled_notification(msg);
|
||||||
"failed to deserialize message:\n{}",
|
|
||||||
std::str::from_utf8(&buffer)?
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
} else if let Ok(AnyResponse { id, error, result }) =
|
||||||
|
serde_json::from_slice(&buffer)
|
||||||
|
{
|
||||||
|
if let Some(handler) = response_handlers.lock().remove(&id) {
|
||||||
|
if let Some(error) = error {
|
||||||
|
handler(Err(error));
|
||||||
|
} else if let Some(result) = result {
|
||||||
|
handler(Ok(result.get()));
|
||||||
|
} else {
|
||||||
|
handler(Ok("null"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"failed to deserialize message:\n{}",
|
||||||
|
std::str::from_utf8(&buffer)?
|
||||||
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
.log_err(),
|
.log_err()
|
||||||
);
|
});
|
||||||
let (output_done_tx, output_done_rx) = barrier::channel();
|
let (output_done_tx, output_done_rx) = barrier::channel();
|
||||||
let output_task = executor.spawn({
|
let output_task = cx.background().spawn({
|
||||||
let response_handlers = response_handlers.clone();
|
let response_handlers = response_handlers.clone();
|
||||||
async move {
|
async move {
|
||||||
let _clear_response_handlers = ClearResponseHandlers(response_handlers);
|
let _clear_response_handlers = ClearResponseHandlers(response_handlers);
|
||||||
let mut content_len_buffer = Vec::new();
|
let mut content_len_buffer = Vec::new();
|
||||||
while let Ok(message) = outbound_rx.recv().await {
|
while let Ok(message) = outbound_rx.recv().await {
|
||||||
|
log::trace!("outgoing message:{}", String::from_utf8_lossy(&message));
|
||||||
content_len_buffer.clear();
|
content_len_buffer.clear();
|
||||||
write!(content_len_buffer, "{}", message.len()).unwrap();
|
write!(content_len_buffer, "{}", message.len()).unwrap();
|
||||||
stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
|
stdin.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
|
||||||
|
@ -251,18 +235,15 @@ impl LanguageServer {
|
||||||
capabilities: Default::default(),
|
capabilities: Default::default(),
|
||||||
next_id: Default::default(),
|
next_id: Default::default(),
|
||||||
outbound_tx,
|
outbound_tx,
|
||||||
executor: executor.clone(),
|
executor: cx.background().clone(),
|
||||||
io_tasks: Mutex::new(Some((input_task, output_task))),
|
io_tasks: Mutex::new(Some((input_task, output_task))),
|
||||||
output_done_rx: Mutex::new(Some(output_done_rx)),
|
output_done_rx: Mutex::new(Some(output_done_rx)),
|
||||||
root_path: root_path.to_path_buf(),
|
root_path: root_path.to_path_buf(),
|
||||||
options,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn initialize(mut self) -> Result<Arc<Self>> {
|
pub async fn initialize(mut self, options: Option<Value>) -> Result<Arc<Self>> {
|
||||||
let options = self.options.take();
|
let root_uri = Url::from_file_path(&self.root_path).unwrap();
|
||||||
let mut this = Arc::new(self);
|
|
||||||
let root_uri = Url::from_file_path(&this.root_path).unwrap();
|
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let params = InitializeParams {
|
let params = InitializeParams {
|
||||||
process_id: Default::default(),
|
process_id: Default::default(),
|
||||||
|
@ -288,12 +269,13 @@ impl LanguageServer {
|
||||||
value_set: vec![
|
value_set: vec![
|
||||||
CodeActionKind::REFACTOR.as_str().into(),
|
CodeActionKind::REFACTOR.as_str().into(),
|
||||||
CodeActionKind::QUICKFIX.as_str().into(),
|
CodeActionKind::QUICKFIX.as_str().into(),
|
||||||
|
CodeActionKind::SOURCE.as_str().into(),
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
data_support: Some(true),
|
data_support: Some(true),
|
||||||
resolve_support: Some(CodeActionCapabilityResolveSupport {
|
resolve_support: Some(CodeActionCapabilityResolveSupport {
|
||||||
properties: vec!["edit".to_string()],
|
properties: vec!["edit".to_string(), "command".to_string()],
|
||||||
}),
|
}),
|
||||||
..Default::default()
|
..Default::default()
|
||||||
}),
|
}),
|
||||||
|
@ -324,16 +306,14 @@ impl LanguageServer {
|
||||||
locale: Default::default(),
|
locale: Default::default(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let response = this.request::<request::Initialize>(params).await?;
|
let response = self.request::<request::Initialize>(params).await?;
|
||||||
{
|
if let Some(info) = response.server_info {
|
||||||
let this = Arc::get_mut(&mut this).unwrap();
|
self.name = info.name;
|
||||||
if let Some(info) = response.server_info {
|
|
||||||
this.name = info.name;
|
|
||||||
}
|
|
||||||
this.capabilities = response.capabilities;
|
|
||||||
}
|
}
|
||||||
this.notify::<notification::Initialized>(InitializedParams {})?;
|
self.capabilities = response.capabilities;
|
||||||
Ok(this)
|
|
||||||
|
self.notify::<notification::Initialized>(InitializedParams {})?;
|
||||||
|
Ok(Arc::new(self))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Option<()>>> {
|
pub fn shutdown(&self) -> Option<impl 'static + Send + Future<Output = Option<()>>> {
|
||||||
|
@ -368,37 +348,42 @@ impl LanguageServer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn on_notification<T, F>(&mut self, f: F) -> Subscription
|
#[must_use]
|
||||||
|
pub fn on_notification<T, F>(&self, f: F) -> Subscription
|
||||||
where
|
where
|
||||||
T: notification::Notification,
|
T: notification::Notification,
|
||||||
F: 'static + Send + Sync + FnMut(T::Params),
|
F: 'static + Send + FnMut(T::Params, AsyncAppContext),
|
||||||
{
|
{
|
||||||
self.on_custom_notification(T::METHOD, f)
|
self.on_custom_notification(T::METHOD, f)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn on_request<T, F>(&mut self, f: F) -> Subscription
|
#[must_use]
|
||||||
|
pub fn on_request<T, F, Fut>(&self, f: F) -> Subscription
|
||||||
where
|
where
|
||||||
T: request::Request,
|
T: request::Request,
|
||||||
F: 'static + Send + Sync + FnMut(T::Params) -> Result<T::Result>,
|
T::Params: 'static + Send,
|
||||||
|
F: 'static + Send + FnMut(T::Params, AsyncAppContext) -> Fut,
|
||||||
|
Fut: 'static + Future<Output = Result<T::Result>>,
|
||||||
{
|
{
|
||||||
self.on_custom_request(T::METHOD, f)
|
self.on_custom_request(T::METHOD, f)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn on_custom_notification<Params, F>(
|
pub fn remove_request_handler<T: request::Request>(&self) {
|
||||||
&mut self,
|
self.notification_handlers.lock().remove(T::METHOD);
|
||||||
method: &'static str,
|
}
|
||||||
mut f: F,
|
|
||||||
) -> Subscription
|
#[must_use]
|
||||||
|
pub fn on_custom_notification<Params, F>(&self, method: &'static str, mut f: F) -> Subscription
|
||||||
where
|
where
|
||||||
F: 'static + Send + Sync + FnMut(Params),
|
F: 'static + Send + FnMut(Params, AsyncAppContext),
|
||||||
Params: DeserializeOwned,
|
Params: DeserializeOwned,
|
||||||
{
|
{
|
||||||
let prev_handler = self.notification_handlers.write().insert(
|
let prev_handler = self.notification_handlers.lock().insert(
|
||||||
method,
|
method,
|
||||||
Box::new(move |_, params, _| {
|
Box::new(move |_, params, cx| {
|
||||||
let params = serde_json::from_str(params)?;
|
if let Some(params) = serde_json::from_str(params).log_err() {
|
||||||
f(params);
|
f(params, cx);
|
||||||
Ok(())
|
}
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
assert!(
|
assert!(
|
||||||
|
@ -411,26 +396,52 @@ impl LanguageServer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn on_custom_request<Params, Res, F>(
|
#[must_use]
|
||||||
&mut self,
|
pub fn on_custom_request<Params, Res, Fut, F>(
|
||||||
|
&self,
|
||||||
method: &'static str,
|
method: &'static str,
|
||||||
mut f: F,
|
mut f: F,
|
||||||
) -> Subscription
|
) -> Subscription
|
||||||
where
|
where
|
||||||
F: 'static + Send + Sync + FnMut(Params) -> Result<Res>,
|
F: 'static + Send + FnMut(Params, AsyncAppContext) -> Fut,
|
||||||
Params: DeserializeOwned,
|
Fut: 'static + Future<Output = Result<Res>>,
|
||||||
|
Params: DeserializeOwned + Send + 'static,
|
||||||
Res: Serialize,
|
Res: Serialize,
|
||||||
{
|
{
|
||||||
let prev_handler = self.notification_handlers.write().insert(
|
let outbound_tx = self.outbound_tx.clone();
|
||||||
|
let prev_handler = self.notification_handlers.lock().insert(
|
||||||
method,
|
method,
|
||||||
Box::new(move |id, params, tx| {
|
Box::new(move |id, params, cx| {
|
||||||
if let Some(id) = id {
|
if let Some(id) = id {
|
||||||
let params = serde_json::from_str(params)?;
|
if let Some(params) = serde_json::from_str(params).log_err() {
|
||||||
let result = f(params)?;
|
let response = f(params, cx.clone());
|
||||||
let response = serde_json::to_vec(&Response { id, result })?;
|
cx.foreground()
|
||||||
tx.try_send(response)?;
|
.spawn({
|
||||||
|
let outbound_tx = outbound_tx.clone();
|
||||||
|
async move {
|
||||||
|
let response = match response.await {
|
||||||
|
Ok(result) => Response {
|
||||||
|
id,
|
||||||
|
result: Some(result),
|
||||||
|
error: None,
|
||||||
|
},
|
||||||
|
Err(error) => Response {
|
||||||
|
id,
|
||||||
|
result: None,
|
||||||
|
error: Some(Error {
|
||||||
|
message: error.to_string(),
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
};
|
||||||
|
if let Some(response) = serde_json::to_vec(&response).log_err()
|
||||||
|
{
|
||||||
|
outbound_tx.try_send(response).ok();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.detach();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
Ok(())
|
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
assert!(
|
assert!(
|
||||||
|
@ -456,7 +467,7 @@ impl LanguageServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn request<T: request::Request>(
|
pub fn request<T: request::Request>(
|
||||||
self: &Arc<Self>,
|
&self,
|
||||||
params: T::Params,
|
params: T::Params,
|
||||||
) -> impl Future<Output = Result<T::Result>>
|
) -> impl Future<Output = Result<T::Result>>
|
||||||
where
|
where
|
||||||
|
@ -547,36 +558,17 @@ impl Subscription {
|
||||||
|
|
||||||
impl Drop for Subscription {
|
impl Drop for Subscription {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.notification_handlers.write().remove(self.method);
|
self.notification_handlers.lock().remove(self.method);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
|
#[derive(Clone)]
|
||||||
pub struct FakeLanguageServer {
|
pub struct FakeLanguageServer {
|
||||||
handlers: FakeLanguageServerHandlers,
|
pub server: Arc<LanguageServer>,
|
||||||
outgoing_tx: futures::channel::mpsc::UnboundedSender<Vec<u8>>,
|
notifications_rx: channel::Receiver<(String, String)>,
|
||||||
incoming_rx: futures::channel::mpsc::UnboundedReceiver<Vec<u8>>,
|
|
||||||
_input_task: Task<Result<()>>,
|
|
||||||
_output_task: Task<Result<()>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
|
||||||
type FakeLanguageServerHandlers = Arc<
|
|
||||||
Mutex<
|
|
||||||
HashMap<
|
|
||||||
&'static str,
|
|
||||||
Box<
|
|
||||||
dyn Send
|
|
||||||
+ FnMut(
|
|
||||||
usize,
|
|
||||||
&[u8],
|
|
||||||
gpui::AsyncAppContext,
|
|
||||||
) -> futures::future::BoxFuture<'static, Vec<u8>>,
|
|
||||||
>,
|
|
||||||
>,
|
|
||||||
>,
|
|
||||||
>;
|
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
impl LanguageServer {
|
impl LanguageServer {
|
||||||
pub fn full_capabilities() -> ServerCapabilities {
|
pub fn full_capabilities() -> ServerCapabilities {
|
||||||
|
@ -589,177 +581,101 @@ impl LanguageServer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fake(cx: &mut gpui::MutableAppContext) -> (Self, FakeLanguageServer) {
|
pub fn fake(cx: AsyncAppContext) -> (Self, FakeLanguageServer) {
|
||||||
Self::fake_with_capabilities(Self::full_capabilities(), cx)
|
Self::fake_with_capabilities(Self::full_capabilities(), cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fake_with_capabilities(
|
pub fn fake_with_capabilities(
|
||||||
capabilities: ServerCapabilities,
|
capabilities: ServerCapabilities,
|
||||||
cx: &mut gpui::MutableAppContext,
|
cx: AsyncAppContext,
|
||||||
) -> (Self, FakeLanguageServer) {
|
) -> (Self, FakeLanguageServer) {
|
||||||
let (stdin_writer, stdin_reader) = async_pipe::pipe();
|
let (stdin_writer, stdin_reader) = async_pipe::pipe();
|
||||||
let (stdout_writer, stdout_reader) = async_pipe::pipe();
|
let (stdout_writer, stdout_reader) = async_pipe::pipe();
|
||||||
|
let (notifications_tx, notifications_rx) = channel::unbounded();
|
||||||
|
|
||||||
let mut fake = FakeLanguageServer::new(stdin_reader, stdout_writer, cx);
|
|
||||||
fake.handle_request::<request::Initialize, _, _>({
|
|
||||||
let capabilities = capabilities.clone();
|
|
||||||
move |_, _| {
|
|
||||||
let capabilities = capabilities.clone();
|
|
||||||
async move {
|
|
||||||
InitializeResult {
|
|
||||||
capabilities,
|
|
||||||
..Default::default()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
let executor = cx.background().clone();
|
|
||||||
let server = Self::new_internal(
|
let server = Self::new_internal(
|
||||||
0,
|
0,
|
||||||
stdin_writer,
|
stdin_writer,
|
||||||
stdout_reader,
|
stdout_reader,
|
||||||
Path::new("/"),
|
Path::new("/"),
|
||||||
None,
|
cx.clone(),
|
||||||
executor,
|
|_| {},
|
||||||
);
|
);
|
||||||
|
let fake = FakeLanguageServer {
|
||||||
|
server: Arc::new(Self::new_internal(
|
||||||
|
0,
|
||||||
|
stdout_writer,
|
||||||
|
stdin_reader,
|
||||||
|
Path::new("/"),
|
||||||
|
cx.clone(),
|
||||||
|
move |msg| {
|
||||||
|
notifications_tx
|
||||||
|
.try_send((msg.method.to_string(), msg.params.get().to_string()))
|
||||||
|
.ok();
|
||||||
|
},
|
||||||
|
)),
|
||||||
|
notifications_rx,
|
||||||
|
};
|
||||||
|
fake.handle_request::<request::Initialize, _, _>({
|
||||||
|
let capabilities = capabilities.clone();
|
||||||
|
move |_, _| {
|
||||||
|
let capabilities = capabilities.clone();
|
||||||
|
async move {
|
||||||
|
Ok(InitializeResult {
|
||||||
|
capabilities,
|
||||||
|
..Default::default()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
(server, fake)
|
(server, fake)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||

#[cfg(any(test, feature = "test-support"))]
impl FakeLanguageServer {
-    fn new(
-        stdin: async_pipe::PipeReader,
-        stdout: async_pipe::PipeWriter,
-        cx: &mut gpui::MutableAppContext,
-    ) -> Self {
-        use futures::StreamExt as _;
-
-        let (incoming_tx, incoming_rx) = futures::channel::mpsc::unbounded();
-        let (outgoing_tx, mut outgoing_rx) = futures::channel::mpsc::unbounded();
-        let handlers = FakeLanguageServerHandlers::default();
-
-        let input_task = cx.spawn(|cx| {
-            let handlers = handlers.clone();
-            let outgoing_tx = outgoing_tx.clone();
-            async move {
-                let mut buffer = Vec::new();
-                let mut stdin = smol::io::BufReader::new(stdin);
-                while Self::receive(&mut stdin, &mut buffer).await.is_ok() {
-                    cx.background().simulate_random_delay().await;
-
-                    if let Ok(request) = serde_json::from_slice::<AnyRequest>(&buffer) {
-                        assert_eq!(request.jsonrpc, JSON_RPC_VERSION);
-
-                        let response;
-                        if let Some(handler) = handlers.lock().get_mut(request.method) {
-                            response =
-                                handler(request.id, request.params.get().as_bytes(), cx.clone())
-                                    .await;
-                            log::debug!("handled lsp request. method:{}", request.method);
-                        } else {
-                            response = serde_json::to_vec(&AnyResponse {
-                                id: request.id,
-                                error: Some(Error {
-                                    message: "no handler".to_string(),
-                                }),
-                                result: None,
-                            })
-                            .unwrap();
-                            log::debug!("unhandled lsp request. method:{}", request.method);
-                        }
-                        outgoing_tx.unbounded_send(response)?;
-                    } else {
-                        incoming_tx.unbounded_send(buffer.clone())?;
-                    }
-                }
-                Ok::<_, anyhow::Error>(())
-            }
-        });
-
-        let output_task = cx.background().spawn(async move {
-            let mut stdout = smol::io::BufWriter::new(stdout);
-            while let Some(message) = outgoing_rx.next().await {
-                stdout.write_all(CONTENT_LEN_HEADER.as_bytes()).await?;
-                stdout
-                    .write_all((format!("{}", message.len())).as_bytes())
-                    .await?;
-                stdout.write_all("\r\n\r\n".as_bytes()).await?;
-                stdout.write_all(&message).await?;
-                stdout.flush().await?;
-            }
-            Ok(())
-        });
-
-        Self {
-            outgoing_tx,
-            incoming_rx,
-            handlers,
-            _input_task: input_task,
-            _output_task: output_task,
-        }
-    }
-
-    pub fn notify<T: notification::Notification>(&mut self, params: T::Params) {
-        let message = serde_json::to_vec(&Notification {
-            jsonrpc: JSON_RPC_VERSION,
-            method: T::METHOD,
-            params,
-        })
-        .unwrap();
-        self.outgoing_tx.unbounded_send(message).unwrap();
+    pub fn notify<T: notification::Notification>(&self, params: T::Params) {
+        self.server.notify::<T>(params).ok();
    }

    pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
        use futures::StreamExt as _;

        loop {
-            let bytes = self.incoming_rx.next().await.unwrap();
-            if let Ok(notification) = serde_json::from_slice::<Notification<T::Params>>(&bytes) {
-                assert_eq!(notification.method, T::METHOD);
-                return notification.params;
+            let (method, params) = self.notifications_rx.next().await.unwrap();
+            if &method == T::METHOD {
+                return serde_json::from_str::<T::Params>(&params).unwrap();
            } else {
-                log::info!(
-                    "skipping message in fake language server {:?}",
-                    std::str::from_utf8(&bytes)
-                );
+                log::info!("skipping message in fake language server {:?}", params);
            }
        }
    }

    pub fn handle_request<T, F, Fut>(
-        &mut self,
+        &self,
        mut handler: F,
    ) -> futures::channel::mpsc::UnboundedReceiver<()>
    where
        T: 'static + request::Request,
+        T::Params: 'static + Send,
        F: 'static + Send + FnMut(T::Params, gpui::AsyncAppContext) -> Fut,
-        Fut: 'static + Send + Future<Output = T::Result>,
+        Fut: 'static + Send + Future<Output = Result<T::Result>>,
    {
-        use futures::FutureExt as _;
-
        let (responded_tx, responded_rx) = futures::channel::mpsc::unbounded();
-        self.handlers.lock().insert(
-            T::METHOD,
-            Box::new(move |id, params, cx| {
-                let result = handler(serde_json::from_slice::<T::Params>(params).unwrap(), cx);
+        self.server.remove_request_handler::<T>();
+        self.server
+            .on_request::<T, _, _>(move |params, cx| {
+                let result = handler(params, cx.clone());
                let responded_tx = responded_tx.clone();
                async move {
+                    cx.background().simulate_random_delay().await;
                    let result = result.await;
-                    let result = serde_json::to_string(&result).unwrap();
-                    let result = serde_json::from_str::<&RawValue>(&result).unwrap();
-                    let response = AnyResponse {
-                        id,
-                        error: None,
-                        result: Some(result),
-                    };
                    responded_tx.unbounded_send(()).ok();
-                    serde_json::to_vec(&response).unwrap()
+                    result
                }
-                .boxed()
-            }),
-        );
+            })
+            .detach();
        responded_rx
    }

@@ -767,7 +683,7 @@ impl FakeLanguageServer {
    where
        T: 'static + request::Request,
    {
-        self.handlers.lock().remove(T::METHOD);
+        self.server.remove_request_handler::<T>();
    }

    pub async fn start_progress(&mut self, token: impl Into<String>) {
@@ -783,25 +699,6 @@ impl FakeLanguageServer {
            value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(Default::default())),
        });
    }
-
-    async fn receive(
-        stdin: &mut smol::io::BufReader<async_pipe::PipeReader>,
-        buffer: &mut Vec<u8>,
-    ) -> Result<()> {
-        buffer.clear();
-        stdin.read_until(b'\n', buffer).await?;
-        stdin.read_until(b'\n', buffer).await?;
-        let message_len: usize = std::str::from_utf8(buffer)
-            .unwrap()
-            .strip_prefix(CONTENT_LEN_HEADER)
-            .ok_or_else(|| anyhow!("invalid content length header"))?
-            .trim_end()
-            .parse()
-            .unwrap();
-        buffer.resize(message_len, 0);
-        stdin.read_exact(buffer).await?;
-        Ok(())
-    }
}

struct ClearResponseHandlers(Arc<Mutex<HashMap<usize, ResponseHandler>>>);

@@ -826,22 +723,22 @@ mod tests {
    #[gpui::test]
    async fn test_fake(cx: &mut TestAppContext) {
-        let (mut server, mut fake) = cx.update(LanguageServer::fake);
+        let (server, mut fake) = LanguageServer::fake(cx.to_async());

        let (message_tx, message_rx) = channel::unbounded();
        let (diagnostics_tx, diagnostics_rx) = channel::unbounded();
        server
-            .on_notification::<notification::ShowMessage, _>(move |params| {
+            .on_notification::<notification::ShowMessage, _>(move |params, _| {
                message_tx.try_send(params).unwrap()
            })
            .detach();
        server
-            .on_notification::<notification::PublishDiagnostics, _>(move |params| {
+            .on_notification::<notification::PublishDiagnostics, _>(move |params, _| {
                diagnostics_tx.try_send(params).unwrap()
            })
            .detach();

-        let server = server.initialize().await.unwrap();
+        let server = server.initialize(None).await.unwrap();
        server
            .notify::<notification::DidOpenTextDocument>(DidOpenTextDocumentParams {
                text_document: TextDocumentItem::new(
@@ -876,7 +773,7 @@ mod tests {
            "file://b/c"
        );

-        fake.handle_request::<request::Shutdown, _, _>(|_, _| async move {});
+        fake.handle_request::<request::Shutdown, _, _>(|_, _| async move { Ok(()) });

        drop(server);
        fake.receive_notification::<notification::Exit>().await;
@@ -4,9 +4,9 @@ use async_trait::async_trait;
use client::{proto, PeerId};
use gpui::{AppContext, AsyncAppContext, ModelHandle};
use language::{
-    point_from_lsp,
+    point_from_lsp, point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
-    range_from_lsp, Anchor, Bias, Buffer, PointUtf16, ToLspPosition, ToPointUtf16,
+    range_from_lsp, Anchor, Bias, Buffer, PointUtf16, ToPointUtf16,
};
use lsp::{DocumentHighlightKind, ServerCapabilities};
use std::{cmp::Reverse, ops::Range, path::Path};
@@ -91,7 +91,7 @@ impl LspCommand for PrepareRename {
            text_document: lsp::TextDocumentIdentifier {
                uri: lsp::Url::from_file_path(path).unwrap(),
            },
-            position: self.position.to_lsp_position(),
+            position: point_to_lsp(self.position),
        }
    }
}

@@ -208,7 +208,7 @@ impl LspCommand for PerformRename {
                text_document: lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(path).unwrap(),
                },
-                position: self.position.to_lsp_position(),
+                position: point_to_lsp(self.position),
            },
            new_name: self.new_name.clone(),
            work_done_progress_params: Default::default(),
@@ -223,22 +223,19 @@ impl LspCommand for PerformRename {
        mut cx: AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        if let Some(edit) = message {
-            let language_server = project
+            let (lsp_adapter, lsp_server) = project
                .read_with(&cx, |project, cx| {
                    project
                        .language_server_for_buffer(buffer.read(cx), cx)
                        .cloned()
                })
                .ok_or_else(|| anyhow!("no language server found for buffer"))?;
-            let language = buffer
-                .read_with(&cx, |buffer, _| buffer.language().cloned())
-                .ok_or_else(|| anyhow!("no language for buffer"))?;
            Project::deserialize_workspace_edit(
                project,
                edit,
                self.push_to_history,
-                language.name(),
-                language_server,
+                lsp_adapter,
+                lsp_server,
                &mut cx,
            )
            .await
@@ -328,7 +325,7 @@ impl LspCommand for GetDefinition {
                text_document: lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(path).unwrap(),
                },
-                position: self.position.to_lsp_position(),
+                position: point_to_lsp(self.position),
            },
            work_done_progress_params: Default::default(),
            partial_result_params: Default::default(),
@@ -343,16 +340,13 @@ impl LspCommand for GetDefinition {
        mut cx: AsyncAppContext,
    ) -> Result<Vec<Location>> {
        let mut definitions = Vec::new();
-        let language_server = project
+        let (lsp_adapter, language_server) = project
            .read_with(&cx, |project, cx| {
                project
                    .language_server_for_buffer(buffer.read(cx), cx)
                    .cloned()
            })
            .ok_or_else(|| anyhow!("no language server found for buffer"))?;
-        let language = buffer
-            .read_with(&cx, |buffer, _| buffer.language().cloned())
-            .ok_or_else(|| anyhow!("no language for buffer"))?;

        if let Some(message) = message {
            let mut unresolved_locations = Vec::new();
@@ -377,7 +371,7 @@ impl LspCommand for GetDefinition {
                .update(&mut cx, |this, cx| {
                    this.open_local_buffer_via_lsp(
                        target_uri,
-                        language.name(),
+                        lsp_adapter.clone(),
                        language_server.clone(),
                        cx,
                    )
@@ -503,7 +497,7 @@ impl LspCommand for GetReferences {
                text_document: lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(path).unwrap(),
                },
-                position: self.position.to_lsp_position(),
+                position: point_to_lsp(self.position),
            },
            work_done_progress_params: Default::default(),
            partial_result_params: Default::default(),
@@ -521,16 +515,13 @@ impl LspCommand for GetReferences {
        mut cx: AsyncAppContext,
    ) -> Result<Vec<Location>> {
        let mut references = Vec::new();
-        let language_server = project
+        let (lsp_adapter, language_server) = project
            .read_with(&cx, |project, cx| {
                project
                    .language_server_for_buffer(buffer.read(cx), cx)
                    .cloned()
            })
            .ok_or_else(|| anyhow!("no language server found for buffer"))?;
-        let language = buffer
-            .read_with(&cx, |buffer, _| buffer.language().cloned())
-            .ok_or_else(|| anyhow!("no language for buffer"))?;

        if let Some(locations) = locations {
            for lsp_location in locations {
@@ -538,7 +529,7 @@ impl LspCommand for GetReferences {
                .update(&mut cx, |this, cx| {
                    this.open_local_buffer_via_lsp(
                        lsp_location.uri,
-                        language.name(),
+                        lsp_adapter.clone(),
                        language_server.clone(),
                        cx,
                    )
@@ -668,7 +659,7 @@ impl LspCommand for GetDocumentHighlights {
                text_document: lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(path).unwrap(),
                },
-                position: self.position.to_lsp_position(),
+                position: point_to_lsp(self.position),
            },
            work_done_progress_params: Default::default(),
            partial_result_params: Default::default(),
File diff suppressed because it is too large
@@ -231,7 +231,7 @@ message GetProjectSymbolsResponse {
message Symbol {
    uint64 source_worktree_id = 1;
    uint64 worktree_id = 2;
-    string language_name = 3;
+    string language_server_name = 3;
    string name = 4;
    int32 kind = 5;
    string path = 6;
File diff suppressed because it is too large
@@ -164,6 +164,55 @@ fn test_line_len() {
    assert_eq!(buffer.line_len(5), 0);
}

+#[test]
+fn test_common_prefix_at_position() {
+    let text = "a = str; b = δα";
+    let buffer = Buffer::new(0, 0, History::new(text.into()));
+
+    let offset1 = offset_after(text, "str");
+    let offset2 = offset_after(text, "δα");
+
+    // the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "string"),
+        range_of(text, "str"),
+    );
+    // a suffix of the preceding word is a prefix of the suggestion
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "tree"),
+        range_of(text, "tr"),
+    );
+    // the preceding word is a substring of the suggestion, but not a prefix
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "astro"),
+        empty_range_after(text, "str"),
+    );
+
+    // prefix matching is case-insensitive
+    assert_eq!(
+        buffer.common_prefix_at(offset1, "Strαngε"),
+        range_of(text, "str"),
+    );
+    assert_eq!(
+        buffer.common_prefix_at(offset2, "ΔΑΜΝ"),
+        range_of(text, "δα"),
+    );
+
+    fn offset_after(text: &str, part: &str) -> usize {
+        text.find(part).unwrap() + part.len()
+    }
+
+    fn empty_range_after(text: &str, part: &str) -> Range<usize> {
+        let offset = offset_after(text, part);
+        offset..offset
+    }
+
+    fn range_of(text: &str, part: &str) -> Range<usize> {
+        let start = text.find(part).unwrap();
+        start..start + part.len()
+    }
+}
+
#[test]
fn test_text_summary_for_range() {
    let buffer = Buffer::new(0, 0, History::new("ab\nefg\nhklm\nnopqrs\ntuvwxyz".into()));
@@ -1508,6 +1508,30 @@ impl BufferSnapshot {
            .eq(needle.bytes())
    }

+    pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
+    where
+        T: ToOffset + TextDimension,
+    {
+        let offset = position.to_offset(self);
+        let common_prefix_len = needle
+            .char_indices()
+            .map(|(index, _)| index)
+            .chain([needle.len()])
+            .take_while(|&len| len <= offset)
+            .filter(|&len| {
+                let left = self
+                    .chars_for_range(offset - len..offset)
+                    .flat_map(|c| char::to_lowercase(c));
+                let right = needle[..len].chars().flat_map(|c| char::to_lowercase(c));
+                left.eq(right)
+            })
+            .last()
+            .unwrap_or(0);
+        let start_offset = offset - common_prefix_len;
+        let start = self.text_summary_for_range(0..start_offset);
+        start..position
+    }
+
    pub fn text(&self) -> String {
        self.visible_text.to_string()
    }
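The `common_prefix_at` method added above returns the range of buffer text immediately before `position` that case-insensitively matches a prefix of `needle`, which is what lets a completion replace the partially typed word. Below is a minimal standalone sketch of the same matching rule, written against plain strings rather than the editor's `BufferSnapshot`; the `common_prefix_len` helper and its char-boundary guard are illustrative only, not part of this change.

// Standalone sketch of the prefix-matching rule used by `common_prefix_at`,
// operating on plain strings instead of a buffer snapshot.
fn common_prefix_len(preceding: &str, needle: &str) -> usize {
    needle
        .char_indices()
        .map(|(index, _)| index)
        .chain([needle.len()])
        .take_while(|&len| len <= preceding.len())
        .filter(|&len| {
            // Compare the last `len` bytes of the preceding text with the
            // first `len` bytes of the suggestion, ignoring case.
            preceding.is_char_boundary(preceding.len() - len)
                && preceding[preceding.len() - len..]
                    .chars()
                    .flat_map(char::to_lowercase)
                    .eq(needle[..len].chars().flat_map(char::to_lowercase))
        })
        .last()
        .unwrap_or(0)
}

fn main() {
    assert_eq!(common_prefix_len("a = str", "string"), 3); // "str" is a prefix of "string"
    assert_eq!(common_prefix_len("a = str", "tree"), 2); // only "tr" carries over
    assert_eq!(common_prefix_len("a = str", "astro"), 0); // substring, but not a prefix
}

Scanning candidate lengths only at `needle`'s character boundaries keeps the comparison valid for multi-byte text, which is exactly what the `δα` cases in the new test exercise.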
@@ -99,6 +99,7 @@ tree-sitter-c = "0.20.1"
tree-sitter-json = "0.19.0"
tree-sitter-rust = "0.20.1"
tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown", rev = "330ecab87a3e3a7211ac69bbadc19eabecdb1cca" }
+tree-sitter-typescript = "0.20.1"
url = "2.2"

[dev-dependencies]
112
crates/zed/src/languages.rs
Normal file
@@ -0,0 +1,112 @@
use gpui::Task;
pub use language::*;
use rust_embed::RustEmbed;
use std::{borrow::Cow, str, sync::Arc};

mod c;
mod installation;
mod json;
mod rust;
mod typescript;

#[derive(RustEmbed)]
#[folder = "src/languages"]
#[exclude = "*.rs"]
struct LanguageDir;

pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegistry {
    let languages = LanguageRegistry::new(login_shell_env_loaded);
    for (name, grammar, lsp_adapter) in [
        (
            "c",
            tree_sitter_c::language(),
            Some(Arc::new(c::CLspAdapter) as Arc<dyn LspAdapter>),
        ),
        (
            "json",
            tree_sitter_json::language(),
            Some(Arc::new(json::JsonLspAdapter)),
        ),
        (
            "markdown",
            tree_sitter_markdown::language(),
            None, //
        ),
        (
            "rust",
            tree_sitter_rust::language(),
            Some(Arc::new(rust::RustLspAdapter)),
        ),
        (
            "tsx",
            tree_sitter_typescript::language_tsx(),
            Some(Arc::new(typescript::TypeScriptLspAdapter)),
        ),
        (
            "typescript",
            tree_sitter_typescript::language_typescript(),
            Some(Arc::new(typescript::TypeScriptLspAdapter)),
        ),
    ] {
        languages.add(Arc::new(language(name, grammar, lsp_adapter)));
    }
    languages
}

fn language(
    name: &str,
    grammar: tree_sitter::Language,
    lsp_adapter: Option<Arc<dyn LspAdapter>>,
) -> Language {
    let config = toml::from_slice(
        &LanguageDir::get(&format!("{}/config.toml", name))
            .unwrap()
            .data,
    )
    .unwrap();
    let mut language = Language::new(config, Some(grammar));

    if let Some(query) = load_query(name, "/highlights") {
        language = language
            .with_highlights_query(query.as_ref())
            .expect("failed to evaluate highlights query");
    }
    if let Some(query) = load_query(name, "/brackets") {
        language = language
            .with_brackets_query(query.as_ref())
            .expect("failed to load brackets query");
    }
    if let Some(query) = load_query(name, "/indents") {
        language = language
            .with_indents_query(query.as_ref())
            .expect("failed to load indents query");
    }
    if let Some(query) = load_query(name, "/outline") {
        language = language
            .with_outline_query(query.as_ref())
            .expect("failed to load outline query");
    }
    if let Some(lsp_adapter) = lsp_adapter {
        language = language.with_lsp_adapter(lsp_adapter)
    }
    language
}

fn load_query(name: &str, filename_prefix: &str) -> Option<Cow<'static, str>> {
    let mut result = None;
    for path in LanguageDir::iter() {
        if let Some(remainder) = path.strip_prefix(name) {
            if remainder.starts_with(filename_prefix) {
                let contents = match LanguageDir::get(path.as_ref()).unwrap().data {
                    Cow::Borrowed(s) => Cow::Borrowed(str::from_utf8(s).unwrap()),
                    Cow::Owned(s) => Cow::Owned(String::from_utf8(s).unwrap()),
                };
                match &mut result {
                    None => result = Some(contents),
                    Some(r) => r.to_mut().push_str(contents.as_ref()),
                }
            }
        }
    }
    result
}
114
crates/zed/src/languages/c.rs
Normal file
@@ -0,0 +1,114 @@
use super::installation::{latest_github_release, GitHubLspBinaryVersion};
use anyhow::{anyhow, Result};
use client::http::{HttpClient, Method};
use futures::{future::BoxFuture, FutureExt, StreamExt};
pub use language::*;
use smol::fs::{self, File};
use std::{any::Any, path::PathBuf, sync::Arc};
use util::{ResultExt, TryFutureExt};

pub struct CLspAdapter;

impl super::LspAdapter for CLspAdapter {
    fn name(&self) -> LanguageServerName {
        LanguageServerName("clangd".into())
    }

    fn fetch_latest_server_version(
        &self,
        http: Arc<dyn HttpClient>,
    ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
        async move {
            let version = latest_github_release("clangd/clangd", http, |release_name| {
                format!("clangd-mac-{release_name}.zip")
            })
            .await?;
            Ok(Box::new(version) as Box<_>)
        }
        .boxed()
    }

    fn fetch_server_binary(
        &self,
        version: Box<dyn 'static + Send + Any>,
        http: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> BoxFuture<'static, Result<PathBuf>> {
        let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
        async move {
            let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
            let version_dir = container_dir.join(format!("clangd_{}", version.name));
            let binary_path = version_dir.join("bin/clangd");

            if fs::metadata(&binary_path).await.is_err() {
                let response = http
                    .send(
                        surf::RequestBuilder::new(Method::Get, version.url)
                            .middleware(surf::middleware::Redirect::default())
                            .build(),
                    )
                    .await
                    .map_err(|err| anyhow!("error downloading release: {}", err))?;
                let mut file = File::create(&zip_path).await?;
                if !response.status().is_success() {
                    Err(anyhow!(
                        "download failed with status {}",
                        response.status().to_string()
                    ))?;
                }
                futures::io::copy(response, &mut file).await?;

                let unzip_status = smol::process::Command::new("unzip")
                    .current_dir(&container_dir)
                    .arg(&zip_path)
                    .output()
                    .await?
                    .status;
                if !unzip_status.success() {
                    Err(anyhow!("failed to unzip clangd archive"))?;
                }

                if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
                    while let Some(entry) = entries.next().await {
                        if let Some(entry) = entry.log_err() {
                            let entry_path = entry.path();
                            if entry_path.as_path() != version_dir {
                                fs::remove_dir_all(&entry_path).await.log_err();
                            }
                        }
                    }
                }
            }

            Ok(binary_path)
        }
        .boxed()
    }

    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
        async move {
            let mut last_clangd_dir = None;
            let mut entries = fs::read_dir(&container_dir).await?;
            while let Some(entry) = entries.next().await {
                let entry = entry?;
                if entry.file_type().await?.is_dir() {
                    last_clangd_dir = Some(entry.path());
                }
            }
            let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
            let clangd_bin = clangd_dir.join("bin/clangd");
            if clangd_bin.exists() {
                Ok(clangd_bin)
            } else {
                Err(anyhow!(
                    "missing clangd binary in directory {:?}",
                    clangd_dir
                ))
            }
        }
        .log_err()
        .boxed()
    }

    fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
}
@@ -9,6 +9,3 @@ brackets = [
    { start = "\"", end = "\"", close = true, newline = false },
    { start = "/*", end = " */", close = true, newline = false },
]
-
-[language_server]
-disk_based_diagnostic_sources = []
111
crates/zed/src/languages/installation.rs
Normal file
@@ -0,0 +1,111 @@
use anyhow::{anyhow, Context, Result};
use client::http::{self, HttpClient, Method};
use serde::Deserialize;
use std::{path::Path, sync::Arc};

pub struct GitHubLspBinaryVersion {
    pub name: String,
    pub url: http::Url,
}

#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct NpmInfo {
    #[serde(default)]
    dist_tags: NpmInfoDistTags,
    versions: Vec<String>,
}

#[derive(Deserialize, Default)]
struct NpmInfoDistTags {
    latest: Option<String>,
}

#[derive(Deserialize)]
pub(crate) struct GithubRelease {
    name: String,
    assets: Vec<GithubReleaseAsset>,
}

#[derive(Deserialize)]
pub(crate) struct GithubReleaseAsset {
    name: String,
    browser_download_url: http::Url,
}

pub async fn npm_package_latest_version(name: &str) -> Result<String> {
    let output = smol::process::Command::new("npm")
        .args(["info", name, "--json"])
        .output()
        .await?;
    if !output.status.success() {
        Err(anyhow!(
            "failed to execute npm info: {:?}",
            String::from_utf8_lossy(&output.stderr)
        ))?;
    }
    let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?;
    info.dist_tags
        .latest
        .or_else(|| info.versions.pop())
        .ok_or_else(|| anyhow!("no version found for npm package {}", name))
}

pub async fn npm_install_packages(
    packages: impl IntoIterator<Item = (&str, &str)>,
    directory: &Path,
) -> Result<()> {
    let output = smol::process::Command::new("npm")
        .arg("install")
        .arg("--prefix")
        .arg(directory)
        .args(
            packages
                .into_iter()
                .map(|(name, version)| format!("{name}@{version}")),
        )
        .output()
        .await
        .context("failed to run npm install")?;
    if !output.status.success() {
        Err(anyhow!(
            "failed to execute npm install: {:?}",
            String::from_utf8_lossy(&output.stderr)
        ))?;
    }
    Ok(())
}

pub async fn latest_github_release(
    repo_name_with_owner: &str,
    http: Arc<dyn HttpClient>,
    asset_name: impl Fn(&str) -> String,
) -> Result<GitHubLspBinaryVersion> {
    let release = http
        .send(
            surf::RequestBuilder::new(
                Method::Get,
                http::Url::parse(&format!(
                    "https://api.github.com/repos/{repo_name_with_owner}/releases/latest"
                ))
                .unwrap(),
            )
            .middleware(surf::middleware::Redirect::default())
            .build(),
        )
        .await
        .map_err(|err| anyhow!("error fetching latest release: {}", err))?
        .body_json::<GithubRelease>()
        .await
        .map_err(|err| anyhow!("error parsing latest release: {}", err))?;
    let asset_name = asset_name(&release.name);
    let asset = release
        .assets
        .iter()
        .find(|asset| asset.name == asset_name)
        .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
    Ok(GitHubLspBinaryVersion {
        name: release.name,
        url: asset.browser_download_url.clone(),
    })
}
130
crates/zed/src/languages/json.rs
Normal file
@@ -0,0 +1,130 @@
use anyhow::{anyhow, Context, Result};
use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use language::{LanguageServerName, LspAdapter};
use serde::Deserialize;
use serde_json::json;
use smol::fs;
use std::{any::Any, path::PathBuf, sync::Arc};
use util::{ResultExt, TryFutureExt};

pub struct JsonLspAdapter;

impl JsonLspAdapter {
    const BIN_PATH: &'static str =
        "node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";
}

impl LspAdapter for JsonLspAdapter {
    fn name(&self) -> LanguageServerName {
        LanguageServerName("vscode-json-languageserver".into())
    }

    fn server_args(&self) -> &[&str] {
        &["--stdio"]
    }

    fn fetch_latest_server_version(
        &self,
        _: Arc<dyn HttpClient>,
    ) -> BoxFuture<'static, Result<Box<dyn 'static + Any + Send>>> {
        async move {
            #[derive(Deserialize)]
            struct NpmInfo {
                versions: Vec<String>,
            }

            let output = smol::process::Command::new("npm")
                .args(["info", "vscode-json-languageserver", "--json"])
                .output()
                .await?;
            if !output.status.success() {
                Err(anyhow!("failed to execute npm info"))?;
            }
            let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?;

            Ok(Box::new(
                info.versions
                    .pop()
                    .ok_or_else(|| anyhow!("no versions found in npm info"))?,
            ) as Box<_>)
        }
        .boxed()
    }

    fn fetch_server_binary(
        &self,
        version: Box<dyn 'static + Send + Any>,
        _: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> BoxFuture<'static, Result<PathBuf>> {
        let version = version.downcast::<String>().unwrap();
        async move {
            let version_dir = container_dir.join(version.as_str());
            fs::create_dir_all(&version_dir)
                .await
                .context("failed to create version directory")?;
            let binary_path = version_dir.join(Self::BIN_PATH);

            if fs::metadata(&binary_path).await.is_err() {
                let output = smol::process::Command::new("npm")
                    .current_dir(&version_dir)
                    .arg("install")
                    .arg(format!("vscode-json-languageserver@{}", version))
                    .output()
                    .await
                    .context("failed to run npm install")?;
                if !output.status.success() {
                    Err(anyhow!("failed to install vscode-json-languageserver"))?;
                }

                if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
                    while let Some(entry) = entries.next().await {
                        if let Some(entry) = entry.log_err() {
                            let entry_path = entry.path();
                            if entry_path.as_path() != version_dir {
                                fs::remove_dir_all(&entry_path).await.log_err();
                            }
                        }
                    }
                }
            }

            Ok(binary_path)
        }
        .boxed()
    }

    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
        async move {
            let mut last_version_dir = None;
            let mut entries = fs::read_dir(&container_dir).await?;
            while let Some(entry) = entries.next().await {
                let entry = entry?;
                if entry.file_type().await?.is_dir() {
                    last_version_dir = Some(entry.path());
                }
            }
            let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
            let bin_path = last_version_dir.join(Self::BIN_PATH);
            if bin_path.exists() {
                Ok(bin_path)
            } else {
                Err(anyhow!(
                    "missing executable in directory {:?}",
                    last_version_dir
                ))
            }
        }
        .log_err()
        .boxed()
    }

    fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}

    fn initialization_options(&self) -> Option<serde_json::Value> {
        Some(json!({
            "provideFormatter": true
        }))
    }
}
@@ -6,6 +6,3 @@ brackets = [
    { start = "[", end = "]", close = true, newline = true },
    { start = "\"", end = "\"", close = true, newline = false },
]
-
-[language_server]
-disk_based_diagnostic_sources = []
@@ -1,92 +1,50 @@
-use anyhow::{anyhow, Context, Result};
+use super::installation::{latest_github_release, GitHubLspBinaryVersion};
+use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder;
-use client::http::{self, HttpClient, Method};
+use client::http::{HttpClient, Method};
use futures::{future::BoxFuture, FutureExt, StreamExt};
-use gpui::Task;
pub use language::*;
use lazy_static::lazy_static;
use regex::Regex;
-use rust_embed::RustEmbed;
-use serde::Deserialize;
-use serde_json::json;
use smol::fs::{self, File};
-use std::{borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
+use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
use util::{ResultExt, TryFutureExt};

-#[derive(RustEmbed)]
-#[folder = "languages"]
-struct LanguageDir;
-
-struct RustLspAdapter;
-struct CLspAdapter;
-struct JsonLspAdapter;
-
-#[derive(Deserialize)]
-struct GithubRelease {
-    name: String,
-    assets: Vec<GithubReleaseAsset>,
-}
-
-#[derive(Deserialize)]
-struct GithubReleaseAsset {
-    name: String,
-    browser_download_url: http::Url,
-}
+pub struct RustLspAdapter;

impl LspAdapter for RustLspAdapter {
-    fn name(&self) -> &'static str {
-        "rust-analyzer"
+    fn name(&self) -> LanguageServerName {
+        LanguageServerName("rust-analyzer".into())
    }

    fn fetch_latest_server_version(
        &self,
        http: Arc<dyn HttpClient>,
-    ) -> BoxFuture<'static, Result<LspBinaryVersion>> {
+    ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
        async move {
-            let release = http
-                .send(
-                    surf::RequestBuilder::new(
-                        Method::Get,
-                        http::Url::parse(
-                            "https://api.github.com/repos/rust-analyzer/rust-analyzer/releases/latest",
-                        )
-                        .unwrap(),
-                    )
-                    .middleware(surf::middleware::Redirect::default())
-                    .build(),
-                )
-                .await
-                .map_err(|err| anyhow!("error fetching latest release: {}", err))?
-                .body_json::<GithubRelease>()
-                .await
-                .map_err(|err| anyhow!("error parsing latest release: {}", err))?;
-            let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
-            let asset = release
-                .assets
-                .iter()
-                .find(|asset| asset.name == asset_name)
-                .ok_or_else(|| anyhow!("no release found matching {:?}", asset_name))?;
-            Ok(LspBinaryVersion {
-                name: release.name,
-                url: Some(asset.browser_download_url.clone()),
+            let version = latest_github_release("rust-analyzer/rust-analyzer", http, |_| {
+                format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH)
            })
+            .await?;
+            Ok(Box::new(version) as Box<_>)
        }
        .boxed()
    }

    fn fetch_server_binary(
        &self,
-        version: LspBinaryVersion,
+        version: Box<dyn 'static + Send + Any>,
        http: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> BoxFuture<'static, Result<PathBuf>> {
        async move {
+            let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
            let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));

            if fs::metadata(&destination_path).await.is_err() {
                let response = http
                    .send(
-                        surf::RequestBuilder::new(Method::Get, version.url.unwrap())
+                        surf::RequestBuilder::new(Method::Get, version.url)
                            .middleware(surf::middleware::Redirect::default())
                            .build(),
                    )
@@ -131,6 +89,14 @@ impl LspAdapter for RustLspAdapter {
        .boxed()
    }

+    fn disk_based_diagnostic_sources(&self) -> &'static [&'static str] {
+        &["rustc"]
+    }
+
+    fn disk_based_diagnostics_progress_token(&self) -> Option<&'static str> {
+        Some("rustAnalyzer/cargo check")
+    }
+
    fn process_diagnostics(&self, params: &mut lsp::PublishDiagnosticsParams) {
        lazy_static! {
            static ref REGEX: Regex = Regex::new("(?m)`([^`]+)\n`$").unwrap();
@@ -287,325 +253,11 @@ impl LspAdapter for RustLspAdapter {
    }
}

-impl LspAdapter for CLspAdapter {
-    fn name(&self) -> &'static str {
-        "clangd"
-    }
-
-    fn fetch_latest_server_version(
-        &self,
-        http: Arc<dyn HttpClient>,
-    ) -> BoxFuture<'static, Result<LspBinaryVersion>> {
-        async move {
-            let release = http
-                .send(
-                    surf::RequestBuilder::new(
-                        Method::Get,
-                        http::Url::parse(
-                            "https://api.github.com/repos/clangd/clangd/releases/latest",
-                        )
-                        .unwrap(),
-                    )
-                    .middleware(surf::middleware::Redirect::default())
-                    .build(),
-                )
-                .await
-                .map_err(|err| anyhow!("error fetching latest release: {}", err))?
-                .body_json::<GithubRelease>()
-                .await
-                .map_err(|err| anyhow!("error parsing latest release: {}", err))?;
-            let asset_name = format!("clangd-mac-{}.zip", release.name);
-            let asset = release
-                .assets
-                .iter()
-                .find(|asset| asset.name == asset_name)
-                .ok_or_else(|| anyhow!("no release found matching {:?}", asset_name))?;
-            Ok(LspBinaryVersion {
-                name: release.name,
-                url: Some(asset.browser_download_url.clone()),
-            })
-        }
-        .boxed()
-    }
-
-    fn fetch_server_binary(
-        &self,
-        version: LspBinaryVersion,
-        http: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
-    ) -> BoxFuture<'static, Result<PathBuf>> {
-        async move {
-            let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
-            let version_dir = container_dir.join(format!("clangd_{}", version.name));
-            let binary_path = version_dir.join("bin/clangd");
-
-            if fs::metadata(&binary_path).await.is_err() {
-                let response = http
-                    .send(
-                        surf::RequestBuilder::new(Method::Get, version.url.unwrap())
-                            .middleware(surf::middleware::Redirect::default())
-                            .build(),
-                    )
-                    .await
-                    .map_err(|err| anyhow!("error downloading release: {}", err))?;
-                let mut file = File::create(&zip_path).await?;
-                if !response.status().is_success() {
-                    Err(anyhow!(
-                        "download failed with status {}",
-                        response.status().to_string()
-                    ))?;
-                }
-                futures::io::copy(response, &mut file).await?;
-
-                let unzip_status = smol::process::Command::new("unzip")
-                    .current_dir(&container_dir)
-                    .arg(&zip_path)
-                    .output()
-                    .await?
-                    .status;
-                if !unzip_status.success() {
-                    Err(anyhow!("failed to unzip clangd archive"))?;
-                }
-
-                if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
-                    while let Some(entry) = entries.next().await {
-                        if let Some(entry) = entry.log_err() {
-                            let entry_path = entry.path();
-                            if entry_path.as_path() != version_dir {
-                                fs::remove_dir_all(&entry_path).await.log_err();
-                            }
-                        }
-                    }
-                }
-            }
-
-            Ok(binary_path)
-        }
-        .boxed()
-    }
-
-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
-        async move {
-            let mut last_clangd_dir = None;
-            let mut entries = fs::read_dir(&container_dir).await?;
-            while let Some(entry) = entries.next().await {
-                let entry = entry?;
-                if entry.file_type().await?.is_dir() {
-                    last_clangd_dir = Some(entry.path());
-                }
-            }
-            let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
-            let clangd_bin = clangd_dir.join("bin/clangd");
-            if clangd_bin.exists() {
-                Ok(clangd_bin)
-            } else {
-                Err(anyhow!(
-                    "missing clangd binary in directory {:?}",
-                    clangd_dir
-                ))
-            }
-        }
-        .log_err()
-        .boxed()
-    }
-
-    fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
-}
-
-impl JsonLspAdapter {
-    const BIN_PATH: &'static str =
-        "node_modules/vscode-json-languageserver/bin/vscode-json-languageserver";
-}
-
-impl LspAdapter for JsonLspAdapter {
-    fn name(&self) -> &'static str {
-        "vscode-json-languageserver"
-    }
-
-    fn server_args(&self) -> &[&str] {
-        &["--stdio"]
-    }
-
-    fn fetch_latest_server_version(
-        &self,
-        _: Arc<dyn HttpClient>,
-    ) -> BoxFuture<'static, Result<LspBinaryVersion>> {
-        async move {
-            #[derive(Deserialize)]
-            struct NpmInfo {
-                versions: Vec<String>,
-            }
-
-            let output = smol::process::Command::new("npm")
-                .args(["info", "vscode-json-languageserver", "--json"])
-                .output()
-                .await?;
-            if !output.status.success() {
-                Err(anyhow!("failed to execute npm info"))?;
-            }
-            let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?;
-
-            Ok(LspBinaryVersion {
-                name: info
-                    .versions
-                    .pop()
-                    .ok_or_else(|| anyhow!("no versions found in npm info"))?,
-                url: Default::default(),
-            })
-        }
-        .boxed()
-    }
-
-    fn fetch_server_binary(
-        &self,
-        version: LspBinaryVersion,
-        _: Arc<dyn HttpClient>,
-        container_dir: PathBuf,
-    ) -> BoxFuture<'static, Result<PathBuf>> {
-        async move {
-            let version_dir = container_dir.join(&version.name);
-            fs::create_dir_all(&version_dir)
-                .await
-                .context("failed to create version directory")?;
-            let binary_path = version_dir.join(Self::BIN_PATH);
-
-            if fs::metadata(&binary_path).await.is_err() {
-                let output = smol::process::Command::new("npm")
-                    .current_dir(&version_dir)
-                    .arg("install")
-                    .arg(format!("vscode-json-languageserver@{}", version.name))
-                    .output()
-                    .await
-                    .context("failed to run npm install")?;
-                if !output.status.success() {
-                    Err(anyhow!("failed to install vscode-json-languageserver"))?;
-                }
-
-                if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
-                    while let Some(entry) = entries.next().await {
-                        if let Some(entry) = entry.log_err() {
-                            let entry_path = entry.path();
-                            if entry_path.as_path() != version_dir {
-                                fs::remove_dir_all(&entry_path).await.log_err();
-                            }
-                        }
-                    }
-                }
-            }
-
-            Ok(binary_path)
-        }
-        .boxed()
-    }
-
-    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
-        async move {
-            let mut last_version_dir = None;
-            let mut entries = fs::read_dir(&container_dir).await?;
-            while let Some(entry) = entries.next().await {
-                let entry = entry?;
-                if entry.file_type().await?.is_dir() {
-                    last_version_dir = Some(entry.path());
-                }
-            }
-            let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
-            let bin_path = last_version_dir.join(Self::BIN_PATH);
-            if bin_path.exists() {
-                Ok(bin_path)
-            } else {
-                Err(anyhow!(
-                    "missing executable in directory {:?}",
-                    last_version_dir
-                ))
-            }
-        }
-        .log_err()
-        .boxed()
-    }
-
-    fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
-
-    fn initialization_options(&self) -> Option<serde_json::Value> {
-        Some(json!({
-            "provideFormatter": true
-        }))
-    }
-}
-
-pub fn build_language_registry(login_shell_env_loaded: Task<()>) -> LanguageRegistry {
-    let languages = LanguageRegistry::new(login_shell_env_loaded);
-    languages.add(Arc::new(c()));
-    languages.add(Arc::new(json()));
-    languages.add(Arc::new(rust()));
-    languages.add(Arc::new(markdown()));
-    languages
-}
-
-fn rust() -> Language {
-    let grammar = tree_sitter_rust::language();
-    let config = toml::from_slice(&LanguageDir::get("rust/config.toml").unwrap().data).unwrap();
-    Language::new(config, Some(grammar))
-        .with_highlights_query(load_query("rust/highlights.scm").as_ref())
-        .unwrap()
-        .with_brackets_query(load_query("rust/brackets.scm").as_ref())
-        .unwrap()
-        .with_indents_query(load_query("rust/indents.scm").as_ref())
-        .unwrap()
-        .with_outline_query(load_query("rust/outline.scm").as_ref())
-        .unwrap()
-        .with_lsp_adapter(RustLspAdapter)
-}
-
-fn c() -> Language {
-    let grammar = tree_sitter_c::language();
-    let config = toml::from_slice(&LanguageDir::get("c/config.toml").unwrap().data).unwrap();
-    Language::new(config, Some(grammar))
-        .with_highlights_query(load_query("c/highlights.scm").as_ref())
-        .unwrap()
-        .with_brackets_query(load_query("c/brackets.scm").as_ref())
-        .unwrap()
-        .with_indents_query(load_query("c/indents.scm").as_ref())
-        .unwrap()
-        .with_outline_query(load_query("c/outline.scm").as_ref())
-        .unwrap()
-        .with_lsp_adapter(CLspAdapter)
-}
-
-fn json() -> Language {
-    let grammar = tree_sitter_json::language();
-    let config = toml::from_slice(&LanguageDir::get("json/config.toml").unwrap().data).unwrap();
-    Language::new(config, Some(grammar))
-        .with_highlights_query(load_query("json/highlights.scm").as_ref())
-        .unwrap()
-        .with_brackets_query(load_query("json/brackets.scm").as_ref())
-        .unwrap()
-        .with_indents_query(load_query("json/indents.scm").as_ref())
-        .unwrap()
-        .with_outline_query(load_query("json/outline.scm").as_ref())
-        .unwrap()
-        .with_lsp_adapter(JsonLspAdapter)
-}
-
-fn markdown() -> Language {
-    let grammar = tree_sitter_markdown::language();
-    let config = toml::from_slice(&LanguageDir::get("markdown/config.toml").unwrap().data).unwrap();
-    Language::new(config, Some(grammar))
-        .with_highlights_query(load_query("markdown/highlights.scm").as_ref())
-        .unwrap()
-}
-
-fn load_query(path: &str) -> Cow<'static, str> {
-    match LanguageDir::get(path).unwrap().data {
-        Cow::Borrowed(s) => Cow::Borrowed(str::from_utf8(s).unwrap()),
-        Cow::Owned(s) => Cow::Owned(String::from_utf8(s).unwrap()),
-    }
-}

#[cfg(test)]
mod tests {
    use super::*;
+    use crate::languages::{language, LspAdapter};
    use gpui::color::Color;
-    use language::LspAdapter;
    use theme::SyntaxTheme;

    #[test]
@@ -651,7 +303,11 @@ mod tests {

    #[test]
    fn test_rust_label_for_completion() {
-        let language = rust();
+        let language = language(
+            "rust",
+            tree_sitter_rust::language(),
+            Some(Arc::new(RustLspAdapter)),
+        );
        let grammar = language.grammar().unwrap();
        let theme = SyntaxTheme::new(vec![
            ("type".into(), Color::green().into()),
@@ -726,7 +382,11 @@ mod tests {

    #[test]
    fn test_rust_label_for_symbol() {
-        let language = rust();
+        let language = language(
+            "rust",
+            tree_sitter_rust::language(),
+            Some(Arc::new(RustLspAdapter)),
+        );
        let grammar = language.grammar().unwrap();
        let theme = SyntaxTheme::new(vec![
            ("type".into(), Color::green().into()),
@@ -10,7 +10,3 @@ brackets = [
     { start = "\"", end = "\"", close = true, newline = false },
     { start = "/*", end = " */", close = true, newline = false },
 ]
-
-[language_server]
-disk_based_diagnostic_sources = ["rustc"]
-disk_based_diagnostics_progress_token = "rustAnalyzer/cargo check"
1  crates/zed/src/languages/tsx/brackets.scm  Symbolic link
@@ -0,0 +1 @@
../typescript/brackets.scm

12  crates/zed/src/languages/tsx/config.toml  Normal file
@@ -0,0 +1,12 @@
name = "TSX"
path_suffixes = ["tsx", "js"]
line_comment = "// "
autoclose_before = ";:.,=}])>"
brackets = [
    { start = "{", end = "}", close = true, newline = true },
    { start = "[", end = "]", close = true, newline = true },
    { start = "(", end = ")", close = true, newline = true },
    { start = "<", end = ">", close = false, newline = true },
    { start = "\"", end = "\"", close = true, newline = false },
    { start = "/*", end = " */", close = true, newline = false },
]

0  crates/zed/src/languages/tsx/highlights-jsx.scm  Normal file

1  crates/zed/src/languages/tsx/highlights.scm  Symbolic link
@@ -0,0 +1 @@
../typescript/highlights.scm

1  crates/zed/src/languages/tsx/indents.scm  Symbolic link
@@ -0,0 +1 @@
../typescript/indents.scm

1  crates/zed/src/languages/tsx/outline.scm  Symbolic link
@@ -0,0 +1 @@
../typescript/outline.scm

146  crates/zed/src/languages/typescript.rs  Normal file
@@ -0,0 +1,146 @@
use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result};
use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use language::{LanguageServerName, LspAdapter};
use serde_json::json;
use smol::fs;
use std::{any::Any, path::PathBuf, sync::Arc};
use util::{ResultExt, TryFutureExt};

pub struct TypeScriptLspAdapter;

impl TypeScriptLspAdapter {
    const BIN_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.js";
}

struct Versions {
    typescript_version: String,
    server_version: String,
}

impl LspAdapter for TypeScriptLspAdapter {
    fn name(&self) -> LanguageServerName {
        LanguageServerName("typescript-language-server".into())
    }

    fn server_args(&self) -> &[&str] {
        &["--stdio", "--tsserver-path", "node_modules/typescript/lib"]
    }

    fn fetch_latest_server_version(
        &self,
        _: Arc<dyn HttpClient>,
    ) -> BoxFuture<'static, Result<Box<dyn 'static + Send + Any>>> {
        async move {
            Ok(Box::new(Versions {
                typescript_version: npm_package_latest_version("typescript").await?,
                server_version: npm_package_latest_version("typescript-language-server").await?,
            }) as Box<_>)
        }
        .boxed()
    }

    fn fetch_server_binary(
        &self,
        versions: Box<dyn 'static + Send + Any>,
        _: Arc<dyn HttpClient>,
        container_dir: PathBuf,
    ) -> BoxFuture<'static, Result<PathBuf>> {
        let versions = versions.downcast::<Versions>().unwrap();
        async move {
            let version_dir = container_dir.join(&format!(
                "typescript-{}:server-{}",
                versions.typescript_version, versions.server_version
            ));
            fs::create_dir_all(&version_dir)
                .await
                .context("failed to create version directory")?;
            let binary_path = version_dir.join(Self::BIN_PATH);

            if fs::metadata(&binary_path).await.is_err() {
                npm_install_packages(
                    [
                        ("typescript", versions.typescript_version.as_str()),
                        (
                            "typescript-language-server",
                            &versions.server_version.as_str(),
                        ),
                    ],
                    &version_dir,
                )
                .await?;

                if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
                    while let Some(entry) = entries.next().await {
                        if let Some(entry) = entry.log_err() {
                            let entry_path = entry.path();
                            if entry_path.as_path() != version_dir {
                                fs::remove_dir_all(&entry_path).await.log_err();
                            }
                        }
                    }
                }
            }

            Ok(binary_path)
        }
        .boxed()
    }

    fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> {
        async move {
            let mut last_version_dir = None;
            let mut entries = fs::read_dir(&container_dir).await?;
            while let Some(entry) = entries.next().await {
                let entry = entry?;
                if entry.file_type().await?.is_dir() {
                    last_version_dir = Some(entry.path());
                }
            }
            let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
            let bin_path = last_version_dir.join(Self::BIN_PATH);
            if bin_path.exists() {
                Ok(bin_path)
            } else {
                Err(anyhow!(
                    "missing executable in directory {:?}",
                    last_version_dir
                ))
            }
        }
        .log_err()
        .boxed()
    }

    fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}

    fn label_for_completion(
        &self,
        item: &lsp::CompletionItem,
        language: &language::Language,
    ) -> Option<language::CodeLabel> {
        use lsp::CompletionItemKind as Kind;
        let len = item.label.len();
        let grammar = language.grammar()?;
        let highlight_id = match item.kind? {
            Kind::CLASS | Kind::INTERFACE => grammar.highlight_id_for_name("type"),
            Kind::CONSTRUCTOR => grammar.highlight_id_for_name("type"),
            Kind::CONSTANT => grammar.highlight_id_for_name("constant"),
            Kind::FUNCTION | Kind::METHOD => grammar.highlight_id_for_name("function"),
            Kind::PROPERTY | Kind::FIELD => grammar.highlight_id_for_name("property"),
            _ => None,
        }?;
        Some(language::CodeLabel {
            text: item.label.clone(),
            runs: vec![(0..len, highlight_id)],
            filter_range: 0..len,
        })
    }

    fn initialization_options(&self) -> Option<serde_json::Value> {
        Some(json!({
            "provideFormatter": true
        }))
    }
}
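To see what label_for_completion yields, the snippet below runs the adapter against a function-kind completion item. It is a minimal sketch, not part of the commit, and assumes a `language: Language` value already built with the TypeScript grammar and the highlights query added in this diff.

// Sketch: exercising TypeScriptLspAdapter::label_for_completion.
// `language` is assumed to be a Language constructed with the TypeScript
// grammar and a highlights query that defines a "function" capture.
let adapter = TypeScriptLspAdapter;
let item = lsp::CompletionItem {
    label: "useState".into(),
    kind: Some(lsp::CompletionItemKind::FUNCTION),
    ..Default::default()
};
if let Some(label) = adapter.label_for_completion(&item, &language) {
    // The entire label text is covered by one highlight run and used as the filter range.
    assert_eq!(label.filter_range, 0..label.text.len());
}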
5  crates/zed/src/languages/typescript/brackets.scm  Normal file
@@ -0,0 +1,5 @@
("(" @open ")" @close)
("[" @open "]" @close)
("{" @open "}" @close)
("<" @open ">" @close)
("\"" @open "\"" @close)

12  crates/zed/src/languages/typescript/config.toml  Normal file
@@ -0,0 +1,12 @@
name = "TypeScript"
path_suffixes = ["ts"]
line_comment = "// "
autoclose_before = ";:.,=}])>"
brackets = [
    { start = "{", end = "}", close = true, newline = true },
    { start = "[", end = "]", close = true, newline = true },
    { start = "(", end = ")", close = true, newline = true },
    { start = "<", end = ">", close = false, newline = true },
    { start = "\"", end = "\"", close = true, newline = false },
    { start = "/*", end = " */", close = true, newline = false },
]

219  crates/zed/src/languages/typescript/highlights.scm  Normal file
@@ -0,0 +1,219 @@
; Variables

(identifier) @variable

; Properties

(property_identifier) @property

; Function and method calls

(call_expression
  function: (identifier) @function)

(call_expression
  function: (member_expression
    property: (property_identifier) @function.method))

; Function and method definitions

(function
  name: (identifier) @function)
(function_declaration
  name: (identifier) @function)
(method_definition
  name: (property_identifier) @function.method)

(pair
  key: (property_identifier) @function.method
  value: [(function) (arrow_function)])

(assignment_expression
  left: (member_expression
    property: (property_identifier) @function.method)
  right: [(function) (arrow_function)])

(variable_declarator
  name: (identifier) @function
  value: [(function) (arrow_function)])

(assignment_expression
  left: (identifier) @function
  right: [(function) (arrow_function)])

; Special identifiers

((identifier) @constructor
 (#match? @constructor "^[A-Z]"))

([
  (identifier)
  (shorthand_property_identifier)
  (shorthand_property_identifier_pattern)
 ] @constant
 (#match? @constant "^[A-Z_][A-Z\\d_]+$"))

; Literals

(this) @variable.builtin
(super) @variable.builtin

[
  (true)
  (false)
  (null)
  (undefined)
] @constant.builtin

(comment) @comment

[
  (string)
  (template_string)
] @string

(regex) @string.special
(number) @number

; Tokens

(template_substitution
  "${" @punctuation.special
  "}" @punctuation.special) @embedded

[
  ";"
  "?."
  "."
  ","
] @punctuation.delimiter

[
  "-"
  "--"
  "-="
  "+"
  "++"
  "+="
  "*"
  "*="
  "**"
  "**="
  "/"
  "/="
  "%"
  "%="
  "<"
  "<="
  "<<"
  "<<="
  "="
  "=="
  "==="
  "!"
  "!="
  "!=="
  "=>"
  ">"
  ">="
  ">>"
  ">>="
  ">>>"
  ">>>="
  "~"
  "^"
  "&"
  "|"
  "^="
  "&="
  "|="
  "&&"
  "||"
  "??"
  "&&="
  "||="
  "??="
] @operator

[
  "("
  ")"
  "["
  "]"
  "{"
  "}"
] @punctuation.bracket

[
  "as"
  "async"
  "await"
  "break"
  "case"
  "catch"
  "class"
  "const"
  "continue"
  "debugger"
  "default"
  "delete"
  "do"
  "else"
  "export"
  "extends"
  "finally"
  "for"
  "from"
  "function"
  "get"
  "if"
  "import"
  "in"
  "instanceof"
  "let"
  "new"
  "of"
  "return"
  "set"
  "static"
  "switch"
  "target"
  "throw"
  "try"
  "typeof"
  "var"
  "void"
  "while"
  "with"
  "yield"
] @keyword

; Types

(type_identifier) @type
(predefined_type) @type.builtin

((identifier) @type
 (#match? @type "^[A-Z]"))

(type_arguments
  "<" @punctuation.bracket
  ">" @punctuation.bracket)

; Keywords

[ "abstract"
  "declare"
  "enum"
  "export"
  "implements"
  "interface"
  "keyof"
  "namespace"
  "private"
  "protected"
  "public"
  "type"
  "readonly"
  "override"
] @keyword
15  crates/zed/src/languages/typescript/indents.scm  Normal file
@@ -0,0 +1,15 @@
[
  (call_expression)
  (assignment_expression)
  (member_expression)
  (lexical_declaration)
  (variable_declaration)
  (assignment_expression)
  (if_statement)
  (for_statement)
] @indent

(_ "[" "]" @end) @indent
(_ "<" ">" @end) @indent
(_ "{" "}" @end) @indent
(_ "(" ")" @end) @indent
55  crates/zed/src/languages/typescript/outline.scm  Normal file
@@ -0,0 +1,55 @@
(internal_module
    "namespace" @context
    name: (_) @name) @item

(enum_declaration
    "enum" @context
    name: (_) @name) @item

(function_declaration
    "async"? @context
    "function" @context
    name: (_) @name
    parameters: (formal_parameters
        "(" @context
        ")" @context)) @item

(interface_declaration
    "interface" @context
    name: (_) @name) @item

(program
    (lexical_declaration
        ["let" "const"] @context
        (variable_declarator
            name: (_) @name) @item))

(class_declaration
    "class" @context
    name: (_) @name) @item

(method_definition
    [
        "get"
        "set"
        "async"
        "*"
        "readonly"
        "static"
        (override_modifier)
        (accessibility_modifier)
    ]* @context
    name: (_) @name
    parameters: (formal_parameters
        "(" @context
        ")" @context)) @item

(public_field_definition
    [
        "declare"
        "readonly"
        "abstract"
        "static"
        (accessibility_modifier)
    ]* @context
    name: (_) @name) @item
@@ -19,7 +19,7 @@ use workspace::{
     AppState, OpenNew, OpenParams, OpenPaths, Settings,
 };
 use zed::{
-    self, assets::Assets, build_window_options, build_workspace, fs::RealFs, language, menus,
+    self, assets::Assets, build_window_options, build_workspace, fs::RealFs, languages, menus,
 };

 fn main() {

@@ -34,7 +34,7 @@ fn main() {
     let default_settings = Settings::new("Zed Mono", &app.font_cache(), theme)
         .unwrap()
         .with_overrides(
-            language::PLAIN_TEXT.name(),
+            languages::PLAIN_TEXT.name(),
             settings::LanguageOverride {
                 soft_wrap: Some(settings::SoftWrap::PreferredLineLength),
                 ..Default::default()

@@ -60,7 +60,7 @@ fn main() {
     app.run(move |cx| {
         let http = http::client();
         let client = client::Client::new(http.clone());
-        let mut languages = language::build_language_registry(login_shell_env_loaded);
+        let mut languages = languages::build_language_registry(login_shell_env_loaded);
         let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http.clone(), cx));
         let channel_list =
             cx.add_model(|cx| ChannelList::new(user_store.clone(), client.clone(), cx));
@@ -1,5 +1,5 @@
 pub mod assets;
-pub mod language;
+pub mod languages;
 pub mod menus;
 #[cfg(any(test, feature = "test-support"))]
 pub mod test;
@@ -574,7 +574,7 @@ mod tests {
         assert_eq!(editor.title(cx), "untitled");
         assert!(Arc::ptr_eq(
             editor.language(cx).unwrap(),
-            &language::PLAIN_TEXT
+            &languages::PLAIN_TEXT
         ));
         editor.handle_input(&editor::Input("hi".into()), cx);
         assert!(editor.is_dirty(cx));

@@ -664,7 +664,7 @@ mod tests {
         editor.update(cx, |editor, cx| {
             assert!(Arc::ptr_eq(
                 editor.language(cx).unwrap(),
-                &language::PLAIN_TEXT
+                &languages::PLAIN_TEXT
             ));
             editor.handle_input(&editor::Input("hi".into()), cx);
             assert!(editor.is_dirty(cx.as_ref()));