Temporarily comment out code with closure errors to address other errors in the project

Isaac Clayton 2022-07-05 14:39:30 +02:00
parent 2ff67ef9f6
commit ce90dbd06a
8 changed files with 357 additions and 392 deletions


@ -712,61 +712,64 @@ impl Project {
} }
fn on_settings_changed(&mut self, cx: &mut ModelContext<'_, Self>) { fn on_settings_changed(&mut self, cx: &mut ModelContext<'_, Self>) {
let settings = cx.global::<Settings>(); // let settings = cx.global::<Settings>();
self.lsp_settings_changed = Some(cx.spawn(|project, cx| async { // self.lsp_settings_changed = Some(cx.spawn(|project, cx| async {
let language_servers_to_start = project.update(&mut cx, |project, cx| { // let language_servers_to_start = project.update(&mut cx, |project, cx| {
let mut language_servers_to_start = Vec::new(); // let mut language_servers_to_start = Vec::new();
for buffer in self.opened_buffers.values() { // for buffer in self.opened_buffers.values() {
if let Some(buffer) = buffer.upgrade(cx) { // if let Some(buffer) = buffer.upgrade(cx) {
let buffer = buffer.read(cx); // let buffer = buffer.read(cx);
if let Some((file, language)) = // if let Some((file, language)) =
File::from_dyn(buffer.file()).zip(buffer.language()) // File::from_dyn(buffer.file()).zip(buffer.language())
{ // {
if settings.enable_language_server(Some(&language.name())) { // if settings.enable_language_server(Some(&language.name())) {
let worktree = file.worktree.read(cx); // let worktree = file.worktree.read(cx);
language_servers_to_start.push(( // language_servers_to_start.push((
worktree.id(), // worktree.id(),
worktree.as_local().unwrap().abs_path().clone(), // worktree.as_local().unwrap().abs_path().clone(),
language.clone(), // language.clone(),
)); // ));
} // }
} // }
} // }
} // }
language_servers_to_start // language_servers_to_start
}); // });
let mut language_servers_to_stop = Vec::new(); // let mut language_servers_to_stop = Vec::new();
for language in self.languages.to_vec() { // for language in self.languages.to_vec() {
if let Some(lsp_adapter) = language.lsp_adapter() { // if let Some(lsp_adapter) = language.lsp_adapter() {
if !settings.enable_language_server(Some(&language.name())) { // if !settings.enable_language_server(Some(&language.name())) {
let lsp_name = lsp_adapter.name().await; // let lsp_name = lsp_adapter.name().await;
for (worktree_id, started_lsp_name) in self.started_language_servers.keys() // for (worktree_id, started_lsp_name) in self.started_language_servers.keys()
{ // {
if lsp_name == *started_lsp_name { // if lsp_name == *started_lsp_name {
language_servers_to_stop // language_servers_to_stop
.push((*worktree_id, started_lsp_name.clone())); // .push((*worktree_id, started_lsp_name.clone()));
} // }
} // }
} // }
} // }
} // }
project.update(&mut cx, |project, cx| { // project.update(&mut cx, |project, cx| {
// Stop all newly-disabled language servers. // // Stop all newly-disabled language servers.
for (worktree_id, adapter_name) in language_servers_to_stop { // for (worktree_id, adapter_name) in language_servers_to_stop {
self.stop_language_server(worktree_id, adapter_name, cx) // self.stop_language_server(worktree_id, adapter_name, cx)
.detach(); // .detach();
} // }
// Start all the newly-enabled language servers. // // Start all the newly-enabled language servers.
for (worktree_id, worktree_path, language) in language_servers_to_start { // for (worktree_id, worktree_path, language) in language_servers_to_start {
self.start_language_server(worktree_id, worktree_path, language, cx); // self.start_language_server(worktree_id, worktree_path, language, cx);
} // }
cx.notify(); // cx.notify();
}); // });
})) // }))
// TODO(isaac): uncomment the above
todo!()
} }
pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> { pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
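
The handler body above is commented out and stubbed with todo!(), apparently because the closure handed to cx.spawn borrows self and the Settings global inside a future that has to own its data. A minimal, self-contained sketch of the usual shape of the fix, with stand-in types rather than the real GPUI Project and Settings: snapshot whatever the background task needs before spawning it.

    use std::sync::Arc;

    struct Settings {
        lsp_enabled: bool,
    }

    struct Project {
        languages: Vec<String>,
        settings: Arc<Settings>,
    }

    impl Project {
        // Everything the background task needs is cloned up front, so the
        // spawned future is 'static and owns its data instead of borrowing
        // `self` or a global.
        fn on_settings_changed(&mut self) -> smol::Task<Vec<String>> {
            let languages = self.languages.clone();
            let settings = self.settings.clone();
            smol::spawn(async move {
                languages
                    .into_iter()
                    .filter(|_| settings.lsp_enabled)
                    .collect()
            })
        }
    }

    fn main() {
        let mut project = Project {
            languages: vec!["Rust".into(), "Go".into()],
            settings: Arc::new(Settings { lsp_enabled: true }),
        };
        let to_start = smol::block_on(project.on_settings_changed());
        println!("language servers to start: {to_start:?}");
    }

In the real handler the second half of the work would go back through the project handle inside project.update; the sketch only shows the ownership shape.
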
@ -2186,6 +2189,7 @@ impl Project {
language_server.clone(), language_server.clone(),
cx, cx,
) )
} }
}) })
.detach(); .detach();
@ -2496,9 +2500,12 @@ impl Project {
return; return;
} }
let same_token =
Some(token.as_ref()) == disk_based_diagnostics_progress_token.as_ref().map(|x| &**x);
match progress { match progress {
lsp::WorkDoneProgress::Begin(report) => { lsp::WorkDoneProgress::Begin(report) => {
if Some(token) == disk_based_diagnostics_progress_token { if same_token {
language_server_status.has_pending_diagnostic_updates = true; language_server_status.has_pending_diagnostic_updates = true;
self.disk_based_diagnostics_started(server_id, cx); self.disk_based_diagnostics_started(server_id, cx);
self.broadcast_language_server_update( self.broadcast_language_server_update(
@ -2529,7 +2536,7 @@ impl Project {
} }
} }
lsp::WorkDoneProgress::Report(report) => { lsp::WorkDoneProgress::Report(report) => {
if Some(token) != disk_based_diagnostics_progress_token { if !same_token {
self.on_lsp_work_progress( self.on_lsp_work_progress(
server_id, server_id,
token.clone(), token.clone(),
@ -2555,7 +2562,7 @@ impl Project {
lsp::WorkDoneProgress::End(_) => { lsp::WorkDoneProgress::End(_) => {
language_server_status.progress_tokens.remove(&token); language_server_status.progress_tokens.remove(&token);
if Some(token) == disk_based_diagnostics_progress_token { if same_token {
language_server_status.has_pending_diagnostic_updates = false; language_server_status.has_pending_diagnostic_updates = false;
self.disk_based_diagnostics_finished(server_id, cx); self.disk_based_diagnostics_finished(server_id, cx);
self.broadcast_language_server_update( self.broadcast_language_server_update(
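
The token comparison is hoisted into a single same_token flag that all three match arms reuse, instead of repeating the Option comparison in each arm. A small sketch of the same shape with simplified token types (plain &str instead of the real ones):

    enum WorkDoneProgress {
        Begin,
        Report,
        End,
    }

    fn on_progress(token: &str, disk_based_token: Option<&str>, progress: WorkDoneProgress) {
        // Computed once, reused by every arm.
        let same_token = Some(token) == disk_based_token;
        match progress {
            WorkDoneProgress::Begin if same_token => println!("disk-based diagnostics started"),
            WorkDoneProgress::Report if !same_token => println!("forward {token} as ordinary progress"),
            WorkDoneProgress::End if same_token => println!("disk-based diagnostics finished"),
            _ => {}
        }
    }

    fn main() {
        let disk_based = Some("diagnostics");
        on_progress("diagnostics", disk_based, WorkDoneProgress::Begin);
        on_progress("indexing", disk_based, WorkDoneProgress::Report);
        on_progress("diagnostics", disk_based, WorkDoneProgress::End);
    }
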
@ -3299,16 +3306,12 @@ impl Project {
return Ok(Default::default()); return Ok(Default::default());
}; };
struct PartialSymbol<F1, F2> struct PartialSymbol {
where
F1: Future<Output = LanguageServerName>,
F2: Future<Output = Option<CodeLabel>>,
{
source_worktree_id: WorktreeId, source_worktree_id: WorktreeId,
worktree_id: WorktreeId, worktree_id: WorktreeId,
language_server_name: F1, adapter: Arc<dyn LspAdapter>,
path: PathBuf, path: PathBuf,
label: Option<F2>, language: Option<Arc<Language>>,
name: String, name: String,
kind: lsp::SymbolKind, kind: lsp::SymbolKind,
range: Range<PointUtf16>, range: Range<PointUtf16>,
@ -3334,23 +3337,17 @@ impl Project {
path = relativize_path(&worktree_abs_path, &abs_path); path = relativize_path(&worktree_abs_path, &abs_path);
} }
let label = match this.languages.select_language(&path) { let language = this.languages.select_language(&path).clone();
Some(language) => Some(
language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind),
),
None => None,
};
let signature = this.symbol_signature(worktree_id, &path); let signature = this.symbol_signature(worktree_id, &path);
let language_server_name = adapter.name();
partial_symbols.push(PartialSymbol { partial_symbols.push(PartialSymbol {
source_worktree_id, source_worktree_id,
worktree_id, worktree_id,
language_server_name, // TODO: just pass out single adapter?
adapter: adapter.clone(),
name: lsp_symbol.name, name: lsp_symbol.name,
kind: lsp_symbol.kind, kind: lsp_symbol.kind,
label, language,
path, path,
range: range_from_lsp(lsp_symbol.location.range), range: range_from_lsp(lsp_symbol.location.range),
signature, signature,
@ -3363,16 +3360,18 @@ impl Project {
let mut symbols = Vec::new(); let mut symbols = Vec::new();
for ps in partial_symbols.into_iter() { for ps in partial_symbols.into_iter() {
let label = match ps.label { let label = match ps.language {
Some(label) => label.await, Some(language) => language.label_for_symbol(&ps.name, ps.kind).await,
None => None, None => None,
} }
.unwrap_or_else(|| CodeLabel::plain(ps.name.clone(), None)); .unwrap_or_else(|| CodeLabel::plain(ps.name.clone(), None));
let language_server_name = ps.adapter.name().await;
symbols.push(Symbol { symbols.push(Symbol {
source_worktree_id: ps.source_worktree_id, source_worktree_id: ps.source_worktree_id,
worktree_id: ps.worktree_id, worktree_id: ps.worktree_id,
language_server_name: ps.language_server_name.await, language_server_name,
name: ps.name, name: ps.name,
kind: ps.kind, kind: ps.kind,
label, label,
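
PartialSymbol previously carried two unnamed futures behind the F1/F2 type parameters; it now stores plain data (the adapter and an optional Arc<Language>), and the label and server-name futures are created and awaited in a later pass. A minimal sketch of that collect-then-await shape, using toy Language and Symbol types in place of the real ones:

    use std::sync::Arc;

    // Toy stand-in for language::Language with an async label method.
    struct Language {
        name: String,
    }

    impl Language {
        async fn label_for_symbol(&self, symbol: &str) -> Option<String> {
            Some(format!("{symbol} ({})", self.name))
        }
    }

    // Plain data only: no futures are stored in the struct.
    struct PartialSymbol {
        name: String,
        language: Option<Arc<Language>>,
    }

    struct Symbol {
        name: String,
        label: String,
    }

    fn main() {
        // Pass 1: gather the data synchronously.
        let partial_symbols = vec![
            PartialSymbol {
                name: "main".into(),
                language: Some(Arc::new(Language { name: "Rust".into() })),
            },
            PartialSymbol { name: "README".into(), language: None },
        ];

        // Pass 2: create and await the futures afterwards.
        let symbols: Vec<Symbol> = smol::block_on(async {
            let mut symbols = Vec::new();
            for ps in partial_symbols {
                let label = match &ps.language {
                    Some(language) => language.label_for_symbol(&ps.name).await,
                    None => None,
                }
                .unwrap_or_else(|| ps.name.clone());
                symbols.push(Symbol { name: ps.name, label });
            }
            symbols
        });

        for symbol in symbols {
            println!("{} -> {}", symbol.name, symbol.label);
        }
    }
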
@ -3394,10 +3393,11 @@ impl Project {
let mut symbols = Vec::new(); let mut symbols = Vec::new();
if let Some(this) = this.upgrade(&cx) { if let Some(this) = this.upgrade(&cx) {
let new_symbols = this.read_with(&cx, |this, _| { let new_symbols = this.read_with(&cx, |this, _| {
response let mut new_symbols = Vec::new();
.symbols for symbol in response.symbols.into_iter() {
.into_iter() new_symbols.push(this.deserialize_symbol(symbol));
.map(|symbol| this.deserialize_symbol(symbol)) }
new_symbols
}); });
for new_symbol in new_symbols { for new_symbol in new_symbols {
if let Some(new_symbol) = new_symbol.await.ok() { if let Some(new_symbol) = new_symbol.await.ok() {
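
Similarly, the lazy .map over response.symbols becomes an explicit loop that pushes the deserialize_symbol futures into a Vec inside read_with and awaits them afterwards. The same build-then-await split in plain Rust, with a toy async fn standing in for the conversion:

    // `double` stands in for an async conversion like deserialize_symbol.
    async fn double(x: u32) -> u32 {
        x * 2
    }

    fn main() {
        let results = smol::block_on(async {
            // Step 1: build the futures; nothing runs yet, futures are lazy.
            let mut futures = Vec::new();
            for x in [1_u32, 2, 3] {
                futures.push(double(x));
            }
            // Step 2: await them afterwards, one at a time.
            let mut results = Vec::new();
            for fut in futures {
                results.push(fut.await);
            }
            results
        });
        assert_eq!(results, vec![2, 4, 6]);
        println!("{results:?}");
    }
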
@ -3533,10 +3533,10 @@ impl Project {
Default::default() Default::default()
}; };
struct PartialCompletion<F: Future<Output = Option<CodeLabel>>> { struct PartialCompletion {
pub old_range: Range<Anchor>, pub old_range: Range<Anchor>,
pub new_text: String, pub new_text: String,
pub label: Option<F>, pub language: Option<Arc<Language>>,
pub lsp_completion: lsp::CompletionItem, pub lsp_completion: lsp::CompletionItem,
} }
@ -3656,15 +3656,10 @@ impl Project {
} }
}; };
let label = match language.as_ref() {
Some(l) => Some(l.label_for_completion(&lsp_completion)),
None => None,
};
let partial_completion = PartialCompletion { let partial_completion = PartialCompletion {
old_range, old_range,
new_text, new_text,
label, language: language.clone(),
lsp_completion, lsp_completion,
}; };
@ -3676,8 +3671,8 @@ impl Project {
let mut result = Vec::new(); let mut result = Vec::new();
for pc in partial_completions.into_iter() { for pc in partial_completions.into_iter() {
let label = match pc.label { let label = match pc.language.as_ref() {
Some(label) => label.await, Some(l) => l.label_for_completion(&pc.lsp_completion).await,
None => None, None => None,
} }
.unwrap_or_else(|| { .unwrap_or_else(|| {
@ -3716,10 +3711,11 @@ impl Project {
}) })
.await; .await;
let completions = Vec::new(); let mut completions = Vec::new();
for completion in response.completions.into_iter() { for completion in response.completions.into_iter() {
completions completions.push(
.push(language::proto::deserialize_completion(completion, language).await); language::proto::deserialize_completion(completion, language.clone()).await,
);
} }
completions.into_iter().collect() completions.into_iter().collect()
}) })
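
The completion loop above awaits each deserialization in turn, one after another. Where sequential execution isn't required, futures::future::join_all would poll the conversions together while still returning results in input order; a hypothetical sketch, with deserialize standing in for language::proto::deserialize_completion:

    use futures::future::join_all;

    // Stand-in for an async deserialization that can fail.
    async fn deserialize(id: u32) -> Result<String, String> {
        Ok(format!("completion #{id}"))
    }

    fn main() {
        // join_all polls every future, returning results in input order.
        let completions = smol::block_on(join_all((0..3_u32).map(deserialize)));
        for completion in completions {
            println!("{completion:?}");
        }
    }
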


@ -1,5 +1,6 @@
use super::installation::{latest_github_release, GitHubLspBinaryVersion}; use super::installation::{latest_github_release, GitHubLspBinaryVersion};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
pub use language::*; pub use language::*;
@ -23,21 +24,18 @@ impl super::LspAdapter for CLspAdapter {
&self, &self,
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
async move { let release = latest_github_release("clangd/clangd", http).await?;
let release = latest_github_release("clangd/clangd", http).await?; let asset_name = format!("clangd-mac-{}.zip", release.name);
let asset_name = format!("clangd-mac-{}.zip", release.name); let asset = release
let asset = release .assets
.assets .iter()
.iter() .find(|asset| asset.name == asset_name)
.find(|asset| asset.name == asset_name) .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
.ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?; let version = GitHubLspBinaryVersion {
let version = GitHubLspBinaryVersion { name: release.name,
name: release.name, url: asset.browser_download_url.clone(),
url: asset.browser_download_url.clone(), };
}; Ok(Box::new(version) as Box<_>)
Ok(Box::new(version) as Box<_>)
}
.boxed()
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -47,54 +45,51 @@ impl super::LspAdapter for CLspAdapter {
container_dir: PathBuf, container_dir: PathBuf,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap(); let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
async move { let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name)); let version_dir = container_dir.join(format!("clangd_{}", version.name));
let version_dir = container_dir.join(format!("clangd_{}", version.name)); let binary_path = version_dir.join("bin/clangd");
let binary_path = version_dir.join("bin/clangd");
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
let mut response = http let mut response = http
.get(&version.url, Default::default(), true) .get(&version.url, Default::default(), true)
.await .await
.context("error downloading release")?; .context("error downloading release")?;
let mut file = File::create(&zip_path).await?; let mut file = File::create(&zip_path).await?;
if !response.status().is_success() { if !response.status().is_success() {
Err(anyhow!( Err(anyhow!(
"download failed with status {}", "download failed with status {}",
response.status().to_string() response.status().to_string()
))?; ))?;
} }
futures::io::copy(response.body_mut(), &mut file).await?; futures::io::copy(response.body_mut(), &mut file).await?;
let unzip_status = smol::process::Command::new("unzip") let unzip_status = smol::process::Command::new("unzip")
.current_dir(&container_dir) .current_dir(&container_dir)
.arg(&zip_path) .arg(&zip_path)
.output() .output()
.await? .await?
.status; .status;
if !unzip_status.success() { if !unzip_status.success() {
Err(anyhow!("failed to unzip clangd archive"))?; Err(anyhow!("failed to unzip clangd archive"))?;
} }
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() { if let Some(entry) = entry.log_err() {
let entry_path = entry.path(); let entry_path = entry.path();
if entry_path.as_path() != version_dir { if entry_path.as_path() != version_dir {
fs::remove_dir_all(&entry_path).await.log_err(); fs::remove_dir_all(&entry_path).await.log_err();
}
} }
} }
} }
} }
Ok(binary_path)
} }
.boxed()
Ok(binary_path)
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
async move { (|| async move {
let mut last_clangd_dir = None; let mut last_clangd_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
@ -113,9 +108,9 @@ impl super::LspAdapter for CLspAdapter {
clangd_dir clangd_dir
)) ))
} }
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn label_for_completion( async fn label_for_completion(
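
With the trait methods now async, the async move { ... }.boxed() wrappers disappear, and cached_server_binary, which returns Option rather than Result, wraps its body in an immediately invoked async closure so that ? still has a Result to early-return into before the error is logged and discarded. A self-contained sketch of that pattern; the LogErr trait below is a stand-in for the project's ResultExt::log_err helper:

    use anyhow::{anyhow, Result};

    // Stand-in for a log_err helper: log the error, keep the value.
    trait LogErr<T> {
        fn log_err(self) -> Option<T>;
    }

    impl<T> LogErr<T> for Result<T> {
        fn log_err(self) -> Option<T> {
            match self {
                Ok(value) => Some(value),
                Err(error) => {
                    eprintln!("error: {error:#}");
                    None
                }
            }
        }
    }

    async fn cached_server_binary(container_dir: &str) -> Option<String> {
        // The immediately invoked async closure gives `?` a Result-returning
        // scope to land in, even though the function itself returns Option.
        (|| async move {
            let mut last = None;
            for entry in std::fs::read_dir(container_dir)? {
                last = Some(entry?.path());
            }
            let path = last.ok_or_else(|| anyhow!("no cached binary"))?;
            Ok::<_, anyhow::Error>(path.display().to_string())
        })()
        .await
        .log_err()
    }

    fn main() {
        println!("{:?}", smol::block_on(cached_server_binary(".")));
        println!("{:?}", smol::block_on(cached_server_binary("/nonexistent")));
    }

Without the inner closure, a bare ? in the function body would try to convert the error into the function's Option return type and fail to compile.
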


@ -1,5 +1,6 @@
use super::installation::latest_github_release; use super::installation::latest_github_release;
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
pub use language::*; pub use language::*;
@ -36,18 +37,15 @@ impl super::LspAdapter for GoLspAdapter {
&self, &self,
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
async move { let release = latest_github_release("golang/tools", http).await?;
let release = latest_github_release("golang/tools", http).await?; let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string); if version.is_none() {
if version.is_none() { log::warn!(
log::warn!( "couldn't infer gopls version from github release name '{}'",
"couldn't infer gopls version from github release name '{}'", release.name
release.name );
);
}
Ok(Box::new(version) as Box<_>)
} }
.boxed() Ok(Box::new(version) as Box<_>)
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -59,65 +57,62 @@ impl super::LspAdapter for GoLspAdapter {
let version = version.downcast::<Option<String>>().unwrap(); let version = version.downcast::<Option<String>>().unwrap();
let this = *self; let this = *self;
async move { if let Some(version) = *version {
if let Some(version) = *version { let binary_path = container_dir.join(&format!("gopls_{version}"));
let binary_path = container_dir.join(&format!("gopls_{version}")); if let Ok(metadata) = fs::metadata(&binary_path).await {
if let Ok(metadata) = fs::metadata(&binary_path).await { if metadata.is_file() {
if metadata.is_file() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { while let Some(entry) = entries.next().await {
while let Some(entry) = entries.next().await { if let Some(entry) = entry.log_err() {
if let Some(entry) = entry.log_err() { let entry_path = entry.path();
let entry_path = entry.path(); if entry_path.as_path() != binary_path
if entry_path.as_path() != binary_path && entry.file_name() != "gobin"
&& entry.file_name() != "gobin" {
{ fs::remove_file(&entry_path).await.log_err();
fs::remove_file(&entry_path).await.log_err();
}
} }
} }
} }
return Ok(binary_path.to_path_buf());
} }
return Ok(binary_path.to_path_buf());
} }
} else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
return Ok(path.to_path_buf());
} }
} else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
let gobin_dir = container_dir.join("gobin"); return Ok(path.to_path_buf());
fs::create_dir_all(&gobin_dir).await?;
let install_output = process::Command::new("go")
.env("GO111MODULE", "on")
.env("GOBIN", &gobin_dir)
.args(["install", "golang.org/x/tools/gopls@latest"])
.output()
.await?;
if !install_output.status.success() {
Err(anyhow!("failed to install gopls. Is go installed?"))?;
}
let installed_binary_path = gobin_dir.join("gopls");
let version_output = process::Command::new(&installed_binary_path)
.arg("version")
.output()
.await
.map_err(|e| anyhow!("failed to run installed gopls binary {:?}", e))?;
let version_stdout = str::from_utf8(&version_output.stdout)
.map_err(|_| anyhow!("gopls version produced invalid utf8"))?;
let version = GOPLS_VERSION_REGEX
.find(version_stdout)
.ok_or_else(|| anyhow!("failed to parse gopls version output"))?
.as_str();
let binary_path = container_dir.join(&format!("gopls_{version}"));
fs::rename(&installed_binary_path, &binary_path).await?;
Ok(binary_path.to_path_buf())
} }
.boxed()
let gobin_dir = container_dir.join("gobin");
fs::create_dir_all(&gobin_dir).await?;
let install_output = process::Command::new("go")
.env("GO111MODULE", "on")
.env("GOBIN", &gobin_dir)
.args(["install", "golang.org/x/tools/gopls@latest"])
.output()
.await?;
if !install_output.status.success() {
Err(anyhow!("failed to install gopls. Is go installed?"))?;
}
let installed_binary_path = gobin_dir.join("gopls");
let version_output = process::Command::new(&installed_binary_path)
.arg("version")
.output()
.await
.map_err(|e| anyhow!("failed to run installed gopls binary {:?}", e))?;
let version_stdout = str::from_utf8(&version_output.stdout)
.map_err(|_| anyhow!("gopls version produced invalid utf8"))?;
let version = GOPLS_VERSION_REGEX
.find(version_stdout)
.ok_or_else(|| anyhow!("failed to parse gopls version output"))?
.as_str();
let binary_path = container_dir.join(&format!("gopls_{version}"));
fs::rename(&installed_binary_path, &binary_path).await?;
Ok(binary_path.to_path_buf())
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
async move { (|| async move {
let mut last_binary_path = None; let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
@ -137,9 +132,9 @@ impl super::LspAdapter for GoLspAdapter {
} else { } else {
Err(anyhow!("no cached binary")) Err(anyhow!("no cached binary"))
} }
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn label_for_completion( async fn label_for_completion(
@ -345,12 +340,12 @@ mod tests {
let highlight_field = grammar.highlight_id_for_name("property").unwrap(); let highlight_field = grammar.highlight_id_for_name("property").unwrap();
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::FUNCTION), kind: Some(lsp::CompletionItemKind::FUNCTION),
label: "Hello".to_string(), label: "Hello".to_string(),
detail: Some("func(a B) c.D".to_string()), detail: Some("func(a B) c.D".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "Hello(a B) c.D".to_string(), text: "Hello(a B) c.D".to_string(),
filter_range: 0..5, filter_range: 0..5,
@ -364,12 +359,12 @@ mod tests {
// Nested methods // Nested methods
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::METHOD), kind: Some(lsp::CompletionItemKind::METHOD),
label: "one.two.Three".to_string(), label: "one.two.Three".to_string(),
detail: Some("func() [3]interface{}".to_string()), detail: Some("func() [3]interface{}".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "one.two.Three() [3]interface{}".to_string(), text: "one.two.Three() [3]interface{}".to_string(),
filter_range: 0..13, filter_range: 0..13,
@ -383,12 +378,12 @@ mod tests {
// Nested fields // Nested fields
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::FIELD), kind: Some(lsp::CompletionItemKind::FIELD),
label: "two.Three".to_string(), label: "two.Three".to_string(),
detail: Some("a.Bcd".to_string()), detail: Some("a.Bcd".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "two.Three a.Bcd".to_string(), text: "two.Three a.Bcd".to_string(),
filter_range: 0..9, filter_range: 0..9,
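
Since label_for_completion and label_for_symbol are now async, the tests wrap each call in smol::block_on rather than comparing the returned value directly. A stripped-down sketch of that test shape, with a stand-in Language type, meant to run under cargo test in a library target:

    // Stand-in for a Language whose label helpers are now async.
    struct Language;

    impl Language {
        async fn label_for_completion(&self, label: &str) -> Option<String> {
            Some(format!("{label}(…)"))
        }
    }

    #[test]
    fn label_for_completion_is_awaited() {
        let language = Language;
        // #[test] functions are synchronous, so the future is driven to
        // completion inline with a lightweight executor.
        assert_eq!(
            smol::block_on(language.label_for_completion("hello")),
            Some("hello(…)".to_string())
        );
    }
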


@ -1,5 +1,6 @@
use super::installation::{npm_install_packages, npm_package_latest_version}; use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
use language::{LanguageServerName, LspAdapter}; use language::{LanguageServerName, LspAdapter};
@ -33,10 +34,7 @@ impl LspAdapter for JsonLspAdapter {
&self, &self,
_: Arc<dyn HttpClient>, _: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Any + Send>> { ) -> Result<Box<dyn 'static + Any + Send>> {
async move { Ok(Box::new(npm_package_latest_version("vscode-json-languageserver").await?) as Box<_>)
Ok(Box::new(npm_package_latest_version("vscode-json-languageserver").await?) as Box<_>)
}
.boxed()
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -46,39 +44,36 @@ impl LspAdapter for JsonLspAdapter {
container_dir: PathBuf, container_dir: PathBuf,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
let version = version.downcast::<String>().unwrap(); let version = version.downcast::<String>().unwrap();
async move { let version_dir = container_dir.join(version.as_str());
let version_dir = container_dir.join(version.as_str()); fs::create_dir_all(&version_dir)
fs::create_dir_all(&version_dir) .await
.await .context("failed to create version directory")?;
.context("failed to create version directory")?; let binary_path = version_dir.join(Self::BIN_PATH);
let binary_path = version_dir.join(Self::BIN_PATH);
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
npm_install_packages( npm_install_packages(
[("vscode-json-languageserver", version.as_str())], [("vscode-json-languageserver", version.as_str())],
&version_dir, &version_dir,
) )
.await?; .await?;
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() { if let Some(entry) = entry.log_err() {
let entry_path = entry.path(); let entry_path = entry.path();
if entry_path.as_path() != version_dir { if entry_path.as_path() != version_dir {
fs::remove_dir_all(&entry_path).await.log_err(); fs::remove_dir_all(&entry_path).await.log_err();
}
} }
} }
} }
} }
Ok(binary_path)
} }
.boxed()
Ok(binary_path)
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
async move { (|| async move {
let mut last_version_dir = None; let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
@ -97,9 +92,9 @@ impl LspAdapter for JsonLspAdapter {
last_version_dir last_version_dir
)) ))
} }
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn initialization_options(&self) -> Option<serde_json::Value> { async fn initialization_options(&self) -> Option<serde_json::Value> {


@ -1,4 +1,5 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::lock::Mutex; use futures::lock::Mutex;
use futures::Future; use futures::Future;
@ -85,35 +86,26 @@ struct Versions {
// I wish there was high-level instrumentation for this... // I wish there was high-level instrumentation for this...
// - it's totally a deadlock, the proof is in the pudding // - it's totally a deadlock, the proof is in the pudding
// macro_rules! call_block {
// ($self:ident, $name:expr, $arg:expr) => {
// $self.executor.block(async {
// dbg!("starting to block on something");
// let locked = $self.runtime.lock();
// dbg!("locked runtime");
// // TODO: No blocking calls!
// let mut awaited = locked.await;
// dbg!("awaited lock");
// let called = awaited.call($name, $arg);
// dbg!("called function");
// let result = called.await;
// dbg!("awaited result");
// result
// })
// };
// }
// TODO: convert to async trait
#[async_trait] #[async_trait]
impl LspAdapter for PluginLspAdapter { impl LspAdapter for PluginLspAdapter {
async fn name(&self) -> LanguageServerName { async fn name(&self) -> LanguageServerName {
let name: String = call_block!(self, &self.name, ()).unwrap(); let name: String = self
.runtime
.lock()
.await
.call(&self.name, ())
.await
.unwrap();
LanguageServerName(name.into()) LanguageServerName(name.into())
} }
async fn server_args<'a>(&'a self) -> Vec<String> { async fn server_args<'a>(&'a self) -> Vec<String> {
call_block!(self, &self.server_args, ()).unwrap() self.runtime
.lock()
.await
.call(&self.server_args, ())
.await
.unwrap()
} }
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
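
The call_block! macro blocked the executor thread while waiting on the plugin runtime's mutex, which is what the deadlock comment above refers to; the replacement locks asynchronously and awaits the call. A small sketch of that pattern using futures::lock::Mutex and a toy Runtime type:

    use futures::lock::Mutex;
    use std::sync::Arc;

    // Toy stand-in for the plugin runtime.
    struct Runtime {
        calls: u32,
    }

    impl Runtime {
        async fn call(&mut self, name: &str) -> String {
            self.calls += 1;
            format!("{name}: call #{}", self.calls)
        }
    }

    async fn name(runtime: &Arc<Mutex<Runtime>>) -> String {
        // `.lock().await` yields while the mutex is held elsewhere instead of
        // blocking the executor thread, which is what made the old
        // block-the-executor macro deadlock-prone.
        runtime.lock().await.call("name").await
    }

    fn main() {
        let runtime = Arc::new(Mutex::new(Runtime { calls: 0 }));
        println!("{}", smol::block_on(name(&runtime)));
        println!("{}", smol::block_on(name(&runtime)));
    }
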
@ -133,15 +125,15 @@ impl LspAdapter for PluginLspAdapter {
.ok_or_else(|| anyhow!("Could not fetch latest server version")) .ok_or_else(|| anyhow!("Could not fetch latest server version"))
.map(|v| Box::new(v) as Box<_>) .map(|v| Box::new(v) as Box<_>)
}) })
.boxed() .await
} }
fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>, _: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
) -> BoxFuture<'static, Result<PathBuf>> { ) -> Result<PathBuf> {
let version = *version.downcast::<String>().unwrap(); let version = *version.downcast::<String>().unwrap();
let runtime = self.runtime.clone(); let runtime = self.runtime.clone();
let function = self.fetch_server_binary; let function = self.fetch_server_binary;
@ -154,10 +146,10 @@ impl LspAdapter for PluginLspAdapter {
runtime.remove_resource(handle)?; runtime.remove_resource(handle)?;
result.map_err(|e| anyhow!("{}", e)) result.map_err(|e| anyhow!("{}", e))
}) })
.boxed() .await
} }
fn cached_server_binary(&self, container_dir: PathBuf) -> BoxFuture<'static, Option<PathBuf>> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
let runtime = self.runtime.clone(); let runtime = self.runtime.clone();
let function = self.cached_server_binary; let function = self.cached_server_binary;
@ -169,10 +161,10 @@ impl LspAdapter for PluginLspAdapter {
runtime.remove_resource(handle).ok()?; runtime.remove_resource(handle).ok()?;
result result
}) })
.boxed() .await
} }
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} // async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
// fn label_for_completion( // fn label_for_completion(
// &self, // &self,
@ -193,8 +185,14 @@ impl LspAdapter for PluginLspAdapter {
// }) // })
// } // }
fn initialization_options(&self) -> Option<serde_json::Value> { async fn initialization_options(&self) -> Option<serde_json::Value> {
let string: String = call_block!(self, &self.initialization_options, ()).log_err()?; let string: String = self
.runtime
.lock()
.await
.call(&self.initialization_options, ())
.await
.log_err()?;
serde_json::from_str(&string).ok() serde_json::from_str(&string).ok()
} }
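
Taken together, these adapter changes move LspAdapter from methods returning BoxFuture to async fns under #[async_trait]. A minimal sketch of the resulting trait shape; the single-method trait and FakeAdapter here are illustrative only:

    use async_trait::async_trait;
    use std::path::PathBuf;

    // The trait method is declared async; the macro rewrites it to return a
    // boxed future, so implementations stop hand-rolling
    // `async move { ... }.boxed()` bodies.
    #[async_trait]
    trait LspAdapter: Send + Sync {
        async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf>;
    }

    // Purely illustrative implementor.
    struct FakeAdapter;

    #[async_trait]
    impl LspAdapter for FakeAdapter {
        async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
            Some(container_dir.join("bin/fake-lsp"))
        }
    }

    fn main() {
        let adapter = FakeAdapter;
        let path = smol::block_on(adapter.cached_server_binary(PathBuf::from("/tmp/servers")));
        println!("{path:?}");
    }

async-trait does box each returned future, which seems acceptable here since these adapter calls sit on installation and startup paths rather than in hot loops.
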


@ -1,5 +1,6 @@
use super::installation::{npm_install_packages, npm_package_latest_version}; use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
use language::{LanguageServerName, LspAdapter}; use language::{LanguageServerName, LspAdapter};
@ -31,7 +32,7 @@ impl LspAdapter for PythonLspAdapter {
&self, &self,
_: Arc<dyn HttpClient>, _: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Any + Send>> { ) -> Result<Box<dyn 'static + Any + Send>> {
async move { Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>) }.boxed() Ok(Box::new(npm_package_latest_version("pyright").await?) as Box<_>)
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -41,35 +42,32 @@ impl LspAdapter for PythonLspAdapter {
container_dir: PathBuf, container_dir: PathBuf,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
let version = version.downcast::<String>().unwrap(); let version = version.downcast::<String>().unwrap();
async move { let version_dir = container_dir.join(version.as_str());
let version_dir = container_dir.join(version.as_str()); fs::create_dir_all(&version_dir)
fs::create_dir_all(&version_dir) .await
.await .context("failed to create version directory")?;
.context("failed to create version directory")?; let binary_path = version_dir.join(Self::BIN_PATH);
let binary_path = version_dir.join(Self::BIN_PATH);
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
npm_install_packages([("pyright", version.as_str())], &version_dir).await?; npm_install_packages([("pyright", version.as_str())], &version_dir).await?;
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() { if let Some(entry) = entry.log_err() {
let entry_path = entry.path(); let entry_path = entry.path();
if entry_path.as_path() != version_dir { if entry_path.as_path() != version_dir {
fs::remove_dir_all(&entry_path).await.log_err(); fs::remove_dir_all(&entry_path).await.log_err();
}
} }
} }
} }
} }
Ok(binary_path)
} }
.boxed()
Ok(binary_path)
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
async move { (|| async move {
let mut last_version_dir = None; let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
@ -88,9 +86,9 @@ impl LspAdapter for PythonLspAdapter {
last_version_dir last_version_dir
)) ))
} }
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn label_for_completion( async fn label_for_completion(


@ -1,6 +1,7 @@
use super::installation::{latest_github_release, GitHubLspBinaryVersion}; use super::installation::{latest_github_release, GitHubLspBinaryVersion};
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder; use async_compression::futures::bufread::GzipDecoder;
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, io::BufReader, FutureExt, StreamExt}; use futures::{future::BoxFuture, io::BufReader, FutureExt, StreamExt};
pub use language::*; pub use language::*;
@ -19,6 +20,7 @@ use util::{ResultExt, TryFutureExt};
pub struct RustLspAdapter; pub struct RustLspAdapter;
#[async_trait]
impl LspAdapter for RustLspAdapter { impl LspAdapter for RustLspAdapter {
async fn name(&self) -> LanguageServerName { async fn name(&self) -> LanguageServerName {
LanguageServerName("rust-analyzer".into()) LanguageServerName("rust-analyzer".into())
@ -28,21 +30,18 @@ impl LspAdapter for RustLspAdapter {
&self, &self,
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
async move { let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?;
let release = latest_github_release("rust-analyzer/rust-analyzer", http).await?; let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH); let asset = release
let asset = release .assets
.assets .iter()
.iter() .find(|asset| asset.name == asset_name)
.find(|asset| asset.name == asset_name) .ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?;
.ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?; let version = GitHubLspBinaryVersion {
let version = GitHubLspBinaryVersion { name: release.name,
name: release.name, url: asset.browser_download_url.clone(),
url: asset.browser_download_url.clone(), };
}; Ok(Box::new(version) as Box<_>)
Ok(Box::new(version) as Box<_>)
}
.boxed()
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -51,55 +50,49 @@ impl LspAdapter for RustLspAdapter {
http: Arc<dyn HttpClient>, http: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
async move { let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap(); let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
if fs::metadata(&destination_path).await.is_err() { if fs::metadata(&destination_path).await.is_err() {
let mut response = http let mut response = http
.get(&version.url, Default::default(), true) .get(&version.url, Default::default(), true)
.await .await
.map_err(|err| anyhow!("error downloading release: {}", err))?; .map_err(|err| anyhow!("error downloading release: {}", err))?;
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut())); let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
let mut file = File::create(&destination_path).await?; let mut file = File::create(&destination_path).await?;
futures::io::copy(decompressed_bytes, &mut file).await?; futures::io::copy(decompressed_bytes, &mut file).await?;
fs::set_permissions( fs::set_permissions(
&destination_path, &destination_path,
<fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755), <fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755),
) )
.await?; .await?;
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() { if let Some(entry) = entry.log_err() {
let entry_path = entry.path(); let entry_path = entry.path();
if entry_path.as_path() != destination_path { if entry_path.as_path() != destination_path {
fs::remove_file(&entry_path).await.log_err(); fs::remove_file(&entry_path).await.log_err();
}
} }
} }
} }
} }
Ok(destination_path)
} }
.boxed()
Ok(destination_path)
} }
async fn cached_server_binary( async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
&self, (|| async move {
container_dir: PathBuf,
) -> BoxFuture<'static, Option<PathBuf>> {
async move {
let mut last = None; let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
last = Some(entry?.path()); last = Some(entry?.path());
} }
last.ok_or_else(|| anyhow!("no cached binary")) last.ok_or_else(|| anyhow!("no cached binary"))
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn disk_based_diagnostic_sources(&self) -> Vec<String> { async fn disk_based_diagnostic_sources(&self) -> Vec<String> {
@ -337,12 +330,12 @@ mod tests {
let highlight_field = grammar.highlight_id_for_name("property").unwrap(); let highlight_field = grammar.highlight_id_for_name("property").unwrap();
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::FUNCTION), kind: Some(lsp::CompletionItemKind::FUNCTION),
label: "hello(…)".to_string(), label: "hello(…)".to_string(),
detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()), detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "hello(&mut Option<T>) -> Vec<T>".to_string(), text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
filter_range: 0..5, filter_range: 0..5,
@ -358,12 +351,12 @@ mod tests {
); );
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::FIELD), kind: Some(lsp::CompletionItemKind::FIELD),
label: "len".to_string(), label: "len".to_string(),
detail: Some("usize".to_string()), detail: Some("usize".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "len: usize".to_string(), text: "len: usize".to_string(),
filter_range: 0..3, filter_range: 0..3,
@ -372,12 +365,12 @@ mod tests {
); );
assert_eq!( assert_eq!(
language.label_for_completion(&lsp::CompletionItem { smol::block_on(language.label_for_completion(&lsp::CompletionItem {
kind: Some(lsp::CompletionItemKind::FUNCTION), kind: Some(lsp::CompletionItemKind::FUNCTION),
label: "hello(…)".to_string(), label: "hello(…)".to_string(),
detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()), detail: Some("fn(&mut Option<T>) -> Vec<T>".to_string()),
..Default::default() ..Default::default()
}), })),
Some(CodeLabel { Some(CodeLabel {
text: "hello(&mut Option<T>) -> Vec<T>".to_string(), text: "hello(&mut Option<T>) -> Vec<T>".to_string(),
filter_range: 0..5, filter_range: 0..5,
@ -415,7 +408,7 @@ mod tests {
let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap(); let highlight_keyword = grammar.highlight_id_for_name("keyword").unwrap();
assert_eq!( assert_eq!(
language.label_for_symbol("hello", lsp::SymbolKind::FUNCTION), smol::block_on(language.label_for_symbol("hello", lsp::SymbolKind::FUNCTION)),
Some(CodeLabel { Some(CodeLabel {
text: "fn hello".to_string(), text: "fn hello".to_string(),
filter_range: 3..8, filter_range: 3..8,
@ -424,7 +417,7 @@ mod tests {
); );
assert_eq!( assert_eq!(
language.label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER), smol::block_on(language.label_for_symbol("World", lsp::SymbolKind::TYPE_PARAMETER)),
Some(CodeLabel { Some(CodeLabel {
text: "type World".to_string(), text: "type World".to_string(),
filter_range: 5..10, filter_range: 5..10,


@ -1,5 +1,6 @@
use super::installation::{npm_install_packages, npm_package_latest_version}; use super::installation::{npm_install_packages, npm_package_latest_version};
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use client::http::HttpClient; use client::http::HttpClient;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
use language::{LanguageServerName, LspAdapter}; use language::{LanguageServerName, LspAdapter};
@ -40,13 +41,10 @@ impl LspAdapter for TypeScriptLspAdapter {
&self, &self,
_: Arc<dyn HttpClient>, _: Arc<dyn HttpClient>,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
async move { Ok(Box::new(Versions {
Ok(Box::new(Versions { typescript_version: npm_package_latest_version("typescript").await?,
typescript_version: npm_package_latest_version("typescript").await?, server_version: npm_package_latest_version("typescript-language-server").await?,
server_version: npm_package_latest_version("typescript-language-server").await?, }) as Box<_>)
}) as Box<_>)
}
.boxed()
} }
async fn fetch_server_binary( async fn fetch_server_binary(
@ -56,48 +54,45 @@ impl LspAdapter for TypeScriptLspAdapter {
container_dir: PathBuf, container_dir: PathBuf,
) -> Result<PathBuf> { ) -> Result<PathBuf> {
let versions = versions.downcast::<Versions>().unwrap(); let versions = versions.downcast::<Versions>().unwrap();
async move { let version_dir = container_dir.join(&format!(
let version_dir = container_dir.join(&format!( "typescript-{}:server-{}",
"typescript-{}:server-{}", versions.typescript_version, versions.server_version
versions.typescript_version, versions.server_version ));
)); fs::create_dir_all(&version_dir)
fs::create_dir_all(&version_dir) .await
.await .context("failed to create version directory")?;
.context("failed to create version directory")?; let binary_path = version_dir.join(Self::BIN_PATH);
let binary_path = version_dir.join(Self::BIN_PATH);
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
npm_install_packages( npm_install_packages(
[ [
("typescript", versions.typescript_version.as_str()), ("typescript", versions.typescript_version.as_str()),
( (
"typescript-language-server", "typescript-language-server",
&versions.server_version.as_str(), &versions.server_version.as_str(),
), ),
], ],
&version_dir, &version_dir,
) )
.await?; .await?;
if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() { if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
if let Some(entry) = entry.log_err() { if let Some(entry) = entry.log_err() {
let entry_path = entry.path(); let entry_path = entry.path();
if entry_path.as_path() != version_dir { if entry_path.as_path() != version_dir {
fs::remove_dir_all(&entry_path).await.log_err(); fs::remove_dir_all(&entry_path).await.log_err();
}
} }
} }
} }
} }
Ok(binary_path)
} }
.boxed()
Ok(binary_path)
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> { async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
async move { (|| async move {
let mut last_version_dir = None; let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
@ -116,9 +111,9 @@ impl LspAdapter for TypeScriptLspAdapter {
last_version_dir last_version_dir
)) ))
} }
} })()
.await
.log_err() .log_err()
.boxed()
} }
async fn label_for_completion( async fn label_for_completion(