Rename `extension` crate to `extension_host` (#20081)
This PR renames the `extension` crate to `extension_host`, to free up the name so that we can create a smaller-scoped `extension` crate.

Release Notes:

- N/A
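For illustration only, and not part of this commit's diff: a crate rename like this typically reaches dependent crates as nothing more than a changed dependency declaration and import path. A rough sketch, assuming a workspace crate that depended on the old `extension` crate (the `ExtensionStore` import is just one example of an item the crate exposes):

```diff
 # In a dependent crate's Cargo.toml:
-extension.workspace = true
+extension_host.workspace = true

 // ...and in its Rust sources:
-use extension::ExtensionStore;
+use extension_host::ExtensionStore;
```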
parent c8f1969916
commit ea44c510a3

30 changed files with 46 additions and 45 deletions

@@ -1,67 +0,0 @@
[package]
name = "extension"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lints]
workspace = true

[lib]
path = "src/extension_store.rs"
doctest = false

[features]
no-webrtc = ["workspace/no-webrtc"]

[dependencies]
anyhow.workspace = true
assistant_slash_command.workspace = true
async-compression.workspace = true
async-tar.workspace = true
async-trait.workspace = true
client.workspace = true
collections.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
node_runtime.workspace = true
paths.workspace = true
project.workspace = true
release_channel.workspace = true
schemars.workspace = true
semantic_version.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
snippet_provider.workspace = true
task.workspace = true
theme.workspace = true
toml.workspace = true
ui.workspace = true
url.workspace = true
util.workspace = true
wasm-encoder.workspace = true
wasmparser.workspace = true
wasmtime-wasi.workspace = true
wasmtime.workspace = true
wit-component.workspace = true
workspace.workspace = true

[dev-dependencies]
ctor.workspace = true
env_logger.workspace = true
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
parking_lot.workspace = true
project = { workspace = true, features = ["test-support"] }
reqwest_client.workspace = true
workspace = { workspace = true, features = ["test-support"] }

@@ -1 +0,0 @@
../../LICENSE-GPL

@@ -1,46 +0,0 @@
use std::env;
use std::fs;
use std::path::PathBuf;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    copy_extension_api_rust_files()
}

/// rust-analyzer doesn't support include! for files from outside the crate.
/// Copy them to the OUT_DIR, so we can include them from there, which is supported.
fn copy_extension_api_rust_files() -> Result<(), Box<dyn std::error::Error>> {
    let out_dir = env::var("OUT_DIR")?;
    let input_dir = PathBuf::from("../extension_api/wit");
    let output_dir = PathBuf::from(out_dir);

    println!("cargo:rerun-if-changed={}", input_dir.display());

    for entry in fs::read_dir(&input_dir)? {
        let entry = entry?;
        let path = entry.path();
        if path.is_dir() {
            println!("cargo:rerun-if-changed={}", path.display());

            for subentry in fs::read_dir(&path)? {
                let subentry = subentry?;
                let subpath = subentry.path();
                if subpath.extension() == Some(std::ffi::OsStr::new("rs")) {
                    let relative_path = subpath.strip_prefix(&input_dir)?;
                    let destination = output_dir.join(relative_path);

                    fs::create_dir_all(destination.parent().unwrap())?;
                    fs::copy(&subpath, &destination)?;
                }
            }
        } else if path.extension() == Some(std::ffi::OsStr::new("rs")) {
            let relative_path = path.strip_prefix(&input_dir)?;
            let destination = output_dir.join(relative_path);

            fs::create_dir_all(destination.parent().unwrap())?;
            fs::copy(&path, &destination)?;
            println!("cargo:rerun-if-changed={}", path.display());
        }
    }

    Ok(())
}
|
@@ -1,603 +0,0 @@
|
|||
use crate::wasm_host::parse_wasm_extension_version;
|
||||
use crate::ExtensionManifest;
|
||||
use crate::{extension_manifest::ExtensionLibraryKind, GrammarManifestEntry};
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use futures::io::BufReader;
|
||||
use futures::AsyncReadExt;
|
||||
use http_client::{self, AsyncBody, HttpClient};
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
env, fs, mem,
|
||||
path::{Path, PathBuf},
|
||||
process::{Command, Stdio},
|
||||
sync::Arc,
|
||||
};
|
||||
use wasm_encoder::{ComponentSectionId, Encode as _, RawSection, Section as _};
|
||||
use wasmparser::Parser;
|
||||
use wit_component::ComponentEncoder;
|
||||
|
||||
/// Currently, we compile with Rust's `wasm32-wasip1` target, which works with WASI `preview1`.
|
||||
/// But the WASM component model is based on WASI `preview2`. So we need an 'adapter' WASM
|
||||
/// module, which implements the `preview1` interface in terms of `preview2`.
|
||||
///
|
||||
/// Once Rust 1.78 is released, there will be a `wasm32-wasip2` target available, so we will
|
||||
/// not need the adapter anymore.
|
||||
const RUST_TARGET: &str = "wasm32-wasip1";
|
||||
const WASI_ADAPTER_URL: &str =
|
||||
"https://github.com/bytecodealliance/wasmtime/releases/download/v18.0.2/wasi_snapshot_preview1.reactor.wasm";
|
||||
|
||||
/// Compiling Tree-sitter parsers from C to WASM requires Clang 17, and a WASM build of libc
|
||||
/// and clang's runtime library. The `wasi-sdk` provides these binaries.
|
||||
///
|
||||
/// Once Clang 17 and its wasm target are available via system package managers, we won't need
|
||||
/// to download this.
|
||||
const WASI_SDK_URL: &str = "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-21/";
|
||||
const WASI_SDK_ASSET_NAME: Option<&str> = if cfg!(target_os = "macos") {
|
||||
Some("wasi-sdk-21.0-macos.tar.gz")
|
||||
} else if cfg!(target_os = "linux") {
|
||||
Some("wasi-sdk-21.0-linux.tar.gz")
|
||||
} else if cfg!(target_os = "windows") {
|
||||
Some("wasi-sdk-21.0.m-mingw.tar.gz")
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
pub struct ExtensionBuilder {
|
||||
cache_dir: PathBuf,
|
||||
pub http: Arc<dyn HttpClient>,
|
||||
}
|
||||
|
||||
pub struct CompileExtensionOptions {
|
||||
pub release: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CargoToml {
|
||||
package: CargoTomlPackage,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CargoTomlPackage {
|
||||
name: String,
|
||||
}
|
||||
|
||||
impl ExtensionBuilder {
|
||||
pub fn new(http_client: Arc<dyn HttpClient>, cache_dir: PathBuf) -> Self {
|
||||
Self {
|
||||
cache_dir,
|
||||
http: http_client,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn compile_extension(
|
||||
&self,
|
||||
extension_dir: &Path,
|
||||
extension_manifest: &mut ExtensionManifest,
|
||||
options: CompileExtensionOptions,
|
||||
) -> Result<()> {
|
||||
populate_defaults(extension_manifest, extension_dir)?;
|
||||
|
||||
if extension_dir.is_relative() {
|
||||
bail!(
|
||||
"extension dir {} is not an absolute path",
|
||||
extension_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
fs::create_dir_all(&self.cache_dir).context("failed to create cache dir")?;
|
||||
|
||||
if extension_manifest.lib.kind == Some(ExtensionLibraryKind::Rust) {
|
||||
log::info!("compiling Rust extension {}", extension_dir.display());
|
||||
self.compile_rust_extension(extension_dir, extension_manifest, options)
|
||||
.await
|
||||
.context("failed to compile Rust extension")?;
|
||||
log::info!("compiled Rust extension {}", extension_dir.display());
|
||||
}
|
||||
|
||||
for (grammar_name, grammar_metadata) in &extension_manifest.grammars {
|
||||
log::info!(
|
||||
"compiling grammar {grammar_name} for extension {}",
|
||||
extension_dir.display()
|
||||
);
|
||||
self.compile_grammar(extension_dir, grammar_name.as_ref(), grammar_metadata)
|
||||
.await
|
||||
.with_context(|| format!("failed to compile grammar '{grammar_name}'"))?;
|
||||
log::info!(
|
||||
"compiled grammar {grammar_name} for extension {}",
|
||||
extension_dir.display()
|
||||
);
|
||||
}
|
||||
|
||||
log::info!("finished compiling extension {}", extension_dir.display());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn compile_rust_extension(
|
||||
&self,
|
||||
extension_dir: &Path,
|
||||
manifest: &mut ExtensionManifest,
|
||||
options: CompileExtensionOptions,
|
||||
) -> Result<(), anyhow::Error> {
|
||||
self.install_rust_wasm_target_if_needed()?;
|
||||
let adapter_bytes = self.install_wasi_preview1_adapter_if_needed().await?;
|
||||
|
||||
let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
|
||||
let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;
|
||||
|
||||
log::info!(
|
||||
"compiling Rust crate for extension {}",
|
||||
extension_dir.display()
|
||||
);
|
||||
let output = Command::new("cargo")
|
||||
.args(["build", "--target", RUST_TARGET])
|
||||
.args(options.release.then_some("--release"))
|
||||
.arg("--target-dir")
|
||||
.arg(extension_dir.join("target"))
|
||||
.current_dir(extension_dir)
|
||||
.output()
|
||||
.context("failed to run `cargo`")?;
|
||||
if !output.status.success() {
|
||||
bail!(
|
||||
"failed to build extension {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"compiled Rust crate for extension {}",
|
||||
extension_dir.display()
|
||||
);
|
||||
|
||||
let mut wasm_path = PathBuf::from(extension_dir);
|
||||
wasm_path.extend([
|
||||
"target",
|
||||
RUST_TARGET,
|
||||
if options.release { "release" } else { "debug" },
|
||||
&cargo_toml
|
||||
.package
|
||||
.name
|
||||
// The wasm32-wasip1 target normalizes `-` in package names to `_` in the resulting `.wasm` file.
|
||||
.replace('-', "_"),
|
||||
]);
|
||||
wasm_path.set_extension("wasm");
|
||||
|
||||
let wasm_bytes = fs::read(&wasm_path)
|
||||
.with_context(|| format!("failed to read output module `{}`", wasm_path.display()))?;
|
||||
|
||||
let encoder = ComponentEncoder::default()
|
||||
.module(&wasm_bytes)?
|
||||
.adapter("wasi_snapshot_preview1", &adapter_bytes)
|
||||
.context("failed to load adapter module")?
|
||||
.validate(true);
|
||||
|
||||
log::info!(
|
||||
"encoding wasm component for extension {}",
|
||||
extension_dir.display()
|
||||
);
|
||||
|
||||
let component_bytes = encoder
|
||||
.encode()
|
||||
.context("failed to encode wasm component")?;
|
||||
|
||||
let component_bytes = self
|
||||
.strip_custom_sections(&component_bytes)
|
||||
.context("failed to strip debug sections from wasm component")?;
|
||||
|
||||
let wasm_extension_api_version =
|
||||
parse_wasm_extension_version(&manifest.id, &component_bytes)
|
||||
.context("compiled wasm did not contain a valid zed extension api version")?;
|
||||
manifest.lib.version = Some(wasm_extension_api_version);
|
||||
|
||||
let extension_file = extension_dir.join("extension.wasm");
|
||||
fs::write(extension_file.clone(), &component_bytes)
|
||||
.context("failed to write extension.wasm")?;
|
||||
|
||||
log::info!(
|
||||
"extension {} written to {}",
|
||||
extension_dir.display(),
|
||||
extension_file.display()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn compile_grammar(
|
||||
&self,
|
||||
extension_dir: &Path,
|
||||
grammar_name: &str,
|
||||
grammar_metadata: &GrammarManifestEntry,
|
||||
) -> Result<()> {
|
||||
let clang_path = self.install_wasi_sdk_if_needed().await?;
|
||||
|
||||
let mut grammar_repo_dir = extension_dir.to_path_buf();
|
||||
grammar_repo_dir.extend(["grammars", grammar_name]);
|
||||
|
||||
let mut grammar_wasm_path = grammar_repo_dir.clone();
|
||||
grammar_wasm_path.set_extension("wasm");
|
||||
|
||||
log::info!("checking out {grammar_name} parser");
|
||||
self.checkout_repo(
|
||||
&grammar_repo_dir,
|
||||
&grammar_metadata.repository,
|
||||
&grammar_metadata.rev,
|
||||
)?;
|
||||
|
||||
let base_grammar_path = grammar_metadata
|
||||
.path
|
||||
.as_ref()
|
||||
.map(|path| grammar_repo_dir.join(path))
|
||||
.unwrap_or(grammar_repo_dir);
|
||||
|
||||
let src_path = base_grammar_path.join("src");
|
||||
let parser_path = src_path.join("parser.c");
|
||||
let scanner_path = src_path.join("scanner.c");
|
||||
|
||||
log::info!("compiling {grammar_name} parser");
|
||||
let clang_output = Command::new(&clang_path)
|
||||
.args(["-fPIC", "-shared", "-Os"])
|
||||
.arg(format!("-Wl,--export=tree_sitter_{grammar_name}"))
|
||||
.arg("-o")
|
||||
.arg(&grammar_wasm_path)
|
||||
.arg("-I")
|
||||
.arg(&src_path)
|
||||
.arg(&parser_path)
|
||||
.args(scanner_path.exists().then_some(scanner_path))
|
||||
.output()
|
||||
.context("failed to run clang")?;
|
||||
|
||||
if !clang_output.status.success() {
|
||||
bail!(
|
||||
"failed to compile {} parser with clang: {}",
|
||||
grammar_name,
|
||||
String::from_utf8_lossy(&clang_output.stderr),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn checkout_repo(&self, directory: &Path, url: &str, rev: &str) -> Result<()> {
|
||||
let git_dir = directory.join(".git");
|
||||
|
||||
if directory.exists() {
|
||||
let remotes_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["remote", "-v"])
|
||||
.output()?;
|
||||
let has_remote = remotes_output.status.success()
|
||||
&& String::from_utf8_lossy(&remotes_output.stdout)
|
||||
.lines()
|
||||
.any(|line| {
|
||||
let mut parts = line.split(|c: char| c.is_whitespace());
|
||||
parts.next() == Some("origin") && parts.any(|part| part == url)
|
||||
});
|
||||
if !has_remote {
|
||||
bail!(
|
||||
"grammar directory '{}' already exists, but is not a git clone of '{}'",
|
||||
directory.display(),
|
||||
url
|
||||
);
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(directory).with_context(|| {
|
||||
format!("failed to create grammar directory {}", directory.display(),)
|
||||
})?;
|
||||
let init_output = Command::new("git")
|
||||
.arg("init")
|
||||
.current_dir(directory)
|
||||
.output()?;
|
||||
if !init_output.status.success() {
|
||||
bail!(
|
||||
"failed to run `git init` in directory '{}'",
|
||||
directory.display()
|
||||
);
|
||||
}
|
||||
|
||||
let remote_add_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["remote", "add", "origin", url])
|
||||
.output()
|
||||
.context("failed to execute `git remote add`")?;
|
||||
if !remote_add_output.status.success() {
|
||||
bail!(
|
||||
"failed to add remote {url} for git repository {}",
|
||||
git_dir.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let fetch_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["fetch", "--depth", "1", "origin", rev])
|
||||
.output()
|
||||
.context("failed to execute `git fetch`")?;
|
||||
|
||||
let checkout_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["checkout", rev])
|
||||
.current_dir(directory)
|
||||
.output()
|
||||
.context("failed to execute `git checkout`")?;
|
||||
if !checkout_output.status.success() {
|
||||
if !fetch_output.status.success() {
|
||||
bail!(
|
||||
"failed to fetch revision {} in directory '{}'",
|
||||
rev,
|
||||
directory.display()
|
||||
);
|
||||
}
|
||||
bail!(
|
||||
"failed to checkout revision {} in directory '{}': {}",
|
||||
rev,
|
||||
directory.display(),
|
||||
String::from_utf8_lossy(&checkout_output.stderr)
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn install_rust_wasm_target_if_needed(&self) -> Result<()> {
|
||||
let rustc_output = Command::new("rustc")
|
||||
.arg("--print")
|
||||
.arg("sysroot")
|
||||
.output()
|
||||
.context("failed to run rustc")?;
|
||||
if !rustc_output.status.success() {
|
||||
bail!(
|
||||
"failed to retrieve rust sysroot: {}",
|
||||
String::from_utf8_lossy(&rustc_output.stderr)
|
||||
);
|
||||
}
|
||||
|
||||
let sysroot = PathBuf::from(String::from_utf8(rustc_output.stdout)?.trim());
|
||||
if sysroot.join("lib/rustlib").join(RUST_TARGET).exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let output = Command::new("rustup")
|
||||
.args(["target", "add", RUST_TARGET])
|
||||
.stderr(Stdio::inherit())
|
||||
.stdout(Stdio::inherit())
|
||||
.output()
|
||||
.context("failed to run `rustup target add`")?;
|
||||
if !output.status.success() {
|
||||
bail!("failed to install the `{RUST_TARGET}` target");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn install_wasi_preview1_adapter_if_needed(&self) -> Result<Vec<u8>> {
|
||||
let cache_path = self.cache_dir.join("wasi_snapshot_preview1.reactor.wasm");
|
||||
if let Ok(content) = fs::read(&cache_path) {
|
||||
if Parser::is_core_wasm(&content) {
|
||||
return Ok(content);
|
||||
}
|
||||
}
|
||||
|
||||
fs::remove_file(&cache_path).ok();
|
||||
|
||||
log::info!(
|
||||
"downloading wasi adapter module to {}",
|
||||
cache_path.display()
|
||||
);
|
||||
let mut response = self
|
||||
.http
|
||||
.get(WASI_ADAPTER_URL, AsyncBody::default(), true)
|
||||
.await?;
|
||||
|
||||
let mut content = Vec::new();
|
||||
let mut body = BufReader::new(response.body_mut());
|
||||
body.read_to_end(&mut content).await?;
|
||||
|
||||
fs::write(&cache_path, &content)
|
||||
.with_context(|| format!("failed to save file {}", cache_path.display()))?;
|
||||
|
||||
if !Parser::is_core_wasm(&content) {
|
||||
bail!("downloaded wasi adapter is invalid");
|
||||
}
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
async fn install_wasi_sdk_if_needed(&self) -> Result<PathBuf> {
|
||||
let url = if let Some(asset_name) = WASI_SDK_ASSET_NAME {
|
||||
format!("{WASI_SDK_URL}/{asset_name}")
|
||||
} else {
|
||||
bail!("wasi-sdk is not available for platform {}", env::consts::OS);
|
||||
};
|
||||
|
||||
let wasi_sdk_dir = self.cache_dir.join("wasi-sdk");
|
||||
let mut clang_path = wasi_sdk_dir.clone();
|
||||
clang_path.extend(["bin", &format!("clang{}", env::consts::EXE_SUFFIX)]);
|
||||
|
||||
if fs::metadata(&clang_path).map_or(false, |metadata| metadata.is_file()) {
|
||||
return Ok(clang_path);
|
||||
}
|
||||
|
||||
let mut tar_out_dir = wasi_sdk_dir.clone();
|
||||
tar_out_dir.set_extension("archive");
|
||||
|
||||
fs::remove_dir_all(&wasi_sdk_dir).ok();
|
||||
fs::remove_dir_all(&tar_out_dir).ok();
|
||||
|
||||
log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display());
|
||||
let mut response = self.http.get(&url, AsyncBody::default(), true).await?;
|
||||
let body = BufReader::new(response.body_mut());
|
||||
let body = GzipDecoder::new(body);
|
||||
let tar = Archive::new(body);
|
||||
|
||||
tar.unpack(&tar_out_dir)
|
||||
.await
|
||||
.context("failed to unpack wasi-sdk archive")?;
|
||||
|
||||
let inner_dir = fs::read_dir(&tar_out_dir)?
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("no content"))?
|
||||
.context("failed to read contents of extracted wasi archive directory")?
|
||||
.path();
|
||||
fs::rename(&inner_dir, &wasi_sdk_dir).context("failed to move extracted wasi dir")?;
|
||||
fs::remove_dir_all(&tar_out_dir).ok();
|
||||
|
||||
Ok(clang_path)
|
||||
}
|
||||
|
||||
// This was adapted from:
|
||||
// https://github.com/bytecodealliance/wasm-tools/blob/1791a8f139722e9f8679a2bd3d8e423e55132b22/src/bin/wasm-tools/strip.rs
|
||||
fn strip_custom_sections(&self, input: &Vec<u8>) -> Result<Vec<u8>> {
|
||||
use wasmparser::Payload::*;
|
||||
|
||||
let strip_custom_section = |name: &str| name.starts_with(".debug");
|
||||
|
||||
let mut output = Vec::new();
|
||||
let mut stack = Vec::new();
|
||||
|
||||
for payload in Parser::new(0).parse_all(input) {
|
||||
let payload = payload?;
|
||||
let component_header = wasm_encoder::Component::HEADER;
|
||||
let module_header = wasm_encoder::Module::HEADER;
|
||||
|
||||
// Track nesting depth, so that we don't mess with inner producer sections:
|
||||
match payload {
|
||||
Version { encoding, .. } => {
|
||||
output.extend_from_slice(match encoding {
|
||||
wasmparser::Encoding::Component => &component_header,
|
||||
wasmparser::Encoding::Module => &module_header,
|
||||
});
|
||||
}
|
||||
ModuleSection { .. } | ComponentSection { .. } => {
|
||||
stack.push(mem::take(&mut output));
|
||||
continue;
|
||||
}
|
||||
End { .. } => {
|
||||
let mut parent = match stack.pop() {
|
||||
Some(c) => c,
|
||||
None => break,
|
||||
};
|
||||
if output.starts_with(&component_header) {
|
||||
parent.push(ComponentSectionId::Component as u8);
|
||||
output.encode(&mut parent);
|
||||
} else {
|
||||
parent.push(ComponentSectionId::CoreModule as u8);
|
||||
output.encode(&mut parent);
|
||||
}
|
||||
output = parent;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let CustomSection(c) = &payload {
|
||||
if strip_custom_section(c.name()) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((id, range)) = payload.as_section() {
|
||||
RawSection {
|
||||
id,
|
||||
data: &input[range],
|
||||
}
|
||||
.append_to(&mut output);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> {
|
||||
// For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing
|
||||
// contents of the computed fields, since we don't care what the existing values are.
|
||||
if manifest.schema_version.is_v0() {
|
||||
manifest.languages.clear();
|
||||
manifest.grammars.clear();
|
||||
manifest.themes.clear();
|
||||
}
|
||||
|
||||
let cargo_toml_path = extension_path.join("Cargo.toml");
|
||||
if cargo_toml_path.exists() {
|
||||
manifest.lib.kind = Some(ExtensionLibraryKind::Rust);
|
||||
}
|
||||
|
||||
let languages_dir = extension_path.join("languages");
|
||||
if languages_dir.exists() {
|
||||
for entry in fs::read_dir(&languages_dir).context("failed to list languages dir")? {
|
||||
let entry = entry?;
|
||||
let language_dir = entry.path();
|
||||
let config_path = language_dir.join("config.toml");
|
||||
if config_path.exists() {
|
||||
let relative_language_dir =
|
||||
language_dir.strip_prefix(extension_path)?.to_path_buf();
|
||||
if !manifest.languages.contains(&relative_language_dir) {
|
||||
manifest.languages.push(relative_language_dir);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let themes_dir = extension_path.join("themes");
|
||||
if themes_dir.exists() {
|
||||
for entry in fs::read_dir(&themes_dir).context("failed to list themes dir")? {
|
||||
let entry = entry?;
|
||||
let theme_path = entry.path();
|
||||
if theme_path.extension() == Some("json".as_ref()) {
|
||||
let relative_theme_path = theme_path.strip_prefix(extension_path)?.to_path_buf();
|
||||
if !manifest.themes.contains(&relative_theme_path) {
|
||||
manifest.themes.push(relative_theme_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let snippets_json_path = extension_path.join("snippets.json");
|
||||
if snippets_json_path.exists() {
|
||||
manifest.snippets = Some(snippets_json_path);
|
||||
}
|
||||
|
||||
// For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in
|
||||
// the manifest using the contents of the `grammars` directory.
|
||||
if manifest.schema_version.is_v0() {
|
||||
let grammars_dir = extension_path.join("grammars");
|
||||
if grammars_dir.exists() {
|
||||
for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? {
|
||||
let entry = entry?;
|
||||
let grammar_path = entry.path();
|
||||
if grammar_path.extension() == Some("toml".as_ref()) {
|
||||
#[derive(Deserialize)]
|
||||
struct GrammarConfigToml {
|
||||
pub repository: String,
|
||||
pub commit: String,
|
||||
#[serde(default)]
|
||||
pub path: Option<String>,
|
||||
}
|
||||
|
||||
let grammar_config = fs::read_to_string(&grammar_path)?;
|
||||
let grammar_config: GrammarConfigToml = toml::from_str(&grammar_config)?;
|
||||
|
||||
let grammar_name = grammar_path
|
||||
.file_stem()
|
||||
.and_then(|stem| stem.to_str())
|
||||
.ok_or_else(|| anyhow!("no grammar name"))?;
|
||||
if !manifest.grammars.contains_key(grammar_name) {
|
||||
manifest.grammars.insert(
|
||||
grammar_name.into(),
|
||||
GrammarManifestEntry {
|
||||
repository: grammar_config.repository,
|
||||
rev: grammar_config.commit,
|
||||
path: grammar_config.path,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|

@@ -1,79 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::FutureExt;
use indexed_docs::{IndexedDocsDatabase, IndexedDocsProvider, PackageName, ProviderId};
use wasmtime_wasi::WasiView;

use crate::wasm_host::{WasmExtension, WasmHost};

pub struct ExtensionIndexedDocsProvider {
    pub(crate) extension: WasmExtension,
    pub(crate) host: Arc<WasmHost>,
    pub(crate) id: ProviderId,
}

#[async_trait]
impl IndexedDocsProvider for ExtensionIndexedDocsProvider {
    fn id(&self) -> ProviderId {
        self.id.clone()
    }

    fn database_path(&self) -> PathBuf {
        let mut database_path = self.host.work_dir.clone();
        database_path.push(self.extension.manifest.id.as_ref());
        database_path.push("docs");
        database_path.push(format!("{}.0.mdb", self.id));

        database_path
    }

    async fn suggest_packages(&self) -> Result<Vec<PackageName>> {
        self.extension
            .call({
                let id = self.id.clone();
                |extension, store| {
                    async move {
                        let packages = extension
                            .call_suggest_docs_packages(store, id.as_ref())
                            .await?
                            .map_err(|err| anyhow!("{err:?}"))?;

                        Ok(packages
                            .into_iter()
                            .map(|package| PackageName::from(package.as_str()))
                            .collect())
                    }
                    .boxed()
                }
            })
            .await
    }

    async fn index(&self, package: PackageName, database: Arc<IndexedDocsDatabase>) -> Result<()> {
        self.extension
            .call({
                let id = self.id.clone();
                |extension, store| {
                    async move {
                        let database_resource = store.data_mut().table().push(database)?;
                        extension
                            .call_index_docs(
                                store,
                                id.as_ref(),
                                package.as_ref(),
                                database_resource,
                            )
                            .await?
                            .map_err(|err| anyhow!("{err:?}"))?;

                        anyhow::Ok(())
                    }
                    .boxed()
                }
            })
            .await
    }
}
|
@@ -1,585 +0,0 @@
|
|||
use crate::wasm_host::{
|
||||
wit::{self, LanguageServerConfig},
|
||||
WasmExtension, WasmHost,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LanguageToolchainStore, LspAdapter,
|
||||
LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::ops::Range;
|
||||
use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc};
|
||||
use util::{maybe, ResultExt};
|
||||
use wasmtime_wasi::WasiView as _;
|
||||
|
||||
pub struct ExtensionLspAdapter {
|
||||
pub(crate) extension: WasmExtension,
|
||||
pub(crate) language_server_id: LanguageServerName,
|
||||
pub(crate) config: LanguageServerConfig,
|
||||
pub(crate) host: Arc<WasmHost>,
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspAdapter for ExtensionLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName(self.config.name.clone().into())
|
||||
}
|
||||
|
||||
fn get_language_server_command<'a>(
|
||||
self: Arc<Self>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_: LanguageServerBinaryOptions,
|
||||
_: futures::lock::MutexGuard<'a, Option<LanguageServerBinary>>,
|
||||
_: &'a mut AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<LanguageServerBinary>>>> {
|
||||
async move {
|
||||
let command = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let command = extension
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(command)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
let path = self
|
||||
.host
|
||||
.path_from_extension(&self.extension.manifest.id, command.command.as_ref());
|
||||
|
||||
// TODO: This should now be done via the `zed::make_file_executable` function in
|
||||
// Zed extension API, but we're leaving these existing usages in place temporarily
|
||||
// to avoid any compatibility issues between Zed and the extension versions.
|
||||
//
|
||||
// We can remove once the following extension versions no longer see any use:
|
||||
// - toml@0.0.2
|
||||
// - zig@0.0.1
|
||||
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref())
|
||||
&& path.starts_with(&self.host.work_dir)
|
||||
{
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
use std::fs::{self, Permissions};
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
fs::set_permissions(&path, Permissions::from_mode(0o755))
|
||||
.context("failed to set file permissions")?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(LanguageServerBinary {
|
||||
path,
|
||||
arguments: command.args.into_iter().map(|arg| arg.into()).collect(),
|
||||
env: Some(command.env.into_iter().collect()),
|
||||
})
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||
unreachable!("get_language_server_command is overridden")
|
||||
}
|
||||
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
_: Box<dyn 'static + Send + Any>,
|
||||
_: PathBuf,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Result<LanguageServerBinary> {
|
||||
unreachable!("get_language_server_command is overridden")
|
||||
}
|
||||
|
||||
async fn cached_server_binary(
|
||||
&self,
|
||||
_: PathBuf,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
unreachable!("get_language_server_command is overridden")
|
||||
}
|
||||
|
||||
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||
let code_action_kinds = self
|
||||
.extension
|
||||
.manifest
|
||||
.language_servers
|
||||
.get(&self.language_server_id)
|
||||
.and_then(|server| server.code_action_kinds.clone());
|
||||
|
||||
code_action_kinds.or(Some(vec![
|
||||
CodeActionKind::EMPTY,
|
||||
CodeActionKind::QUICKFIX,
|
||||
CodeActionKind::REFACTOR,
|
||||
CodeActionKind::REFACTOR_EXTRACT,
|
||||
CodeActionKind::SOURCE,
|
||||
]))
|
||||
}
|
||||
|
||||
fn language_ids(&self) -> HashMap<String, String> {
|
||||
// TODO: The language IDs can be provided via the language server options
|
||||
// in `extension.toml now but we're leaving these existing usages in place temporarily
|
||||
// to avoid any compatibility issues between Zed and the extension versions.
|
||||
//
|
||||
// We can remove once the following extension versions no longer see any use:
|
||||
// - php@0.0.1
|
||||
if self.extension.manifest.id.as_ref() == "php" {
|
||||
return HashMap::from_iter([("PHP".into(), "php".into())]);
|
||||
}
|
||||
|
||||
self.extension
|
||||
.manifest
|
||||
.language_servers
|
||||
.get(&LanguageServerName(self.config.name.clone().into()))
|
||||
.map(|server| server.language_ids.clone())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
async fn initialization_options(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Result<Option<serde_json::Value>> {
|
||||
let delegate = delegate.clone();
|
||||
let json_options = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let options = extension
|
||||
.call_language_server_initialization_options(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
Ok(if let Some(json_options) = json_options {
|
||||
serde_json::from_str(&json_options).with_context(|| {
|
||||
format!("failed to parse initialization_options from extension: {json_options}")
|
||||
})?
|
||||
} else {
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let delegate = delegate.clone();
|
||||
let json_options: Option<String> = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let options = extension
|
||||
.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
Ok(if let Some(json_options) = json_options {
|
||||
serde_json::from_str(&json_options).with_context(|| {
|
||||
format!("failed to parse initialization_options from extension: {json_options}")
|
||||
})?
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
})
|
||||
}
|
||||
|
||||
async fn labels_for_completions(
|
||||
self: Arc<Self>,
|
||||
completions: &[lsp::CompletionItem],
|
||||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let completions = completions
|
||||
.iter()
|
||||
.map(|completion| wit::Completion::from(completion.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_completions(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
completions,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
}
|
||||
|
||||
async fn labels_for_symbols(
|
||||
self: Arc<Self>,
|
||||
symbols: &[(String, lsp::SymbolKind)],
|
||||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let symbols = symbols
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|(name, kind)| wit::Symbol {
|
||||
name,
|
||||
kind: kind.into(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_symbols(store, &this.language_server_id, symbols)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
}
|
||||
}
|
||||
|
||||
fn labels_from_wit(
|
||||
labels: Vec<Option<wit::CodeLabel>>,
|
||||
language: &Arc<Language>,
|
||||
) -> Vec<Option<CodeLabel>> {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| {
|
||||
let label = label?;
|
||||
let runs = if label.code.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
|
||||
};
|
||||
build_code_label(&label, &runs, language)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn build_code_label(
|
||||
label: &wit::CodeLabel,
|
||||
parsed_runs: &[(Range<usize>, HighlightId)],
|
||||
language: &Arc<Language>,
|
||||
) -> Option<CodeLabel> {
|
||||
let mut text = String::new();
|
||||
let mut runs = vec![];
|
||||
|
||||
for span in &label.spans {
|
||||
match span {
|
||||
wit::CodeLabelSpan::CodeRange(range) => {
|
||||
let range = Range::from(*range);
|
||||
let code_span = &label.code.get(range.clone())?;
|
||||
let mut input_ix = range.start;
|
||||
let mut output_ix = text.len();
|
||||
for (run_range, id) in parsed_runs {
|
||||
if run_range.start >= range.end {
|
||||
break;
|
||||
}
|
||||
if run_range.end <= input_ix {
|
||||
continue;
|
||||
}
|
||||
|
||||
if run_range.start > input_ix {
|
||||
let len = run_range.start - input_ix;
|
||||
output_ix += len;
|
||||
input_ix += len;
|
||||
}
|
||||
|
||||
let len = range.end.min(run_range.end) - input_ix;
|
||||
runs.push((output_ix..output_ix + len, *id));
|
||||
output_ix += len;
|
||||
input_ix += len;
|
||||
}
|
||||
|
||||
text.push_str(code_span);
|
||||
}
|
||||
wit::CodeLabelSpan::Literal(span) => {
|
||||
let highlight_id = language
|
||||
.grammar()
|
||||
.zip(span.highlight_name.as_ref())
|
||||
.and_then(|(grammar, highlight_name)| {
|
||||
grammar.highlight_id_for_name(highlight_name)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let ix = text.len();
|
||||
runs.push((ix..ix + span.text.len(), highlight_id));
|
||||
text.push_str(&span.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let filter_range = Range::from(label.filter_range);
|
||||
text.get(filter_range.clone())?;
|
||||
Some(CodeLabel {
|
||||
text,
|
||||
runs,
|
||||
filter_range,
|
||||
})
|
||||
}
|
||||
|
||||
impl From<wit::Range> for Range<usize> {
|
||||
fn from(range: wit::Range) -> Self {
|
||||
let start = range.start as usize;
|
||||
let end = range.end as usize;
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItem> for wit::Completion {
|
||||
fn from(value: lsp::CompletionItem) -> Self {
|
||||
Self {
|
||||
label: value.label,
|
||||
detail: value.detail,
|
||||
kind: value.kind.map(Into::into),
|
||||
insert_text_format: value.insert_text_format.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItemKind> for wit::CompletionKind {
|
||||
fn from(value: lsp::CompletionItemKind) -> Self {
|
||||
match value {
|
||||
lsp::CompletionItemKind::TEXT => Self::Text,
|
||||
lsp::CompletionItemKind::METHOD => Self::Method,
|
||||
lsp::CompletionItemKind::FUNCTION => Self::Function,
|
||||
lsp::CompletionItemKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::CompletionItemKind::FIELD => Self::Field,
|
||||
lsp::CompletionItemKind::VARIABLE => Self::Variable,
|
||||
lsp::CompletionItemKind::CLASS => Self::Class,
|
||||
lsp::CompletionItemKind::INTERFACE => Self::Interface,
|
||||
lsp::CompletionItemKind::MODULE => Self::Module,
|
||||
lsp::CompletionItemKind::PROPERTY => Self::Property,
|
||||
lsp::CompletionItemKind::UNIT => Self::Unit,
|
||||
lsp::CompletionItemKind::VALUE => Self::Value,
|
||||
lsp::CompletionItemKind::ENUM => Self::Enum,
|
||||
lsp::CompletionItemKind::KEYWORD => Self::Keyword,
|
||||
lsp::CompletionItemKind::SNIPPET => Self::Snippet,
|
||||
lsp::CompletionItemKind::COLOR => Self::Color,
|
||||
lsp::CompletionItemKind::FILE => Self::File,
|
||||
lsp::CompletionItemKind::REFERENCE => Self::Reference,
|
||||
lsp::CompletionItemKind::FOLDER => Self::Folder,
|
||||
lsp::CompletionItemKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::CompletionItemKind::CONSTANT => Self::Constant,
|
||||
lsp::CompletionItemKind::STRUCT => Self::Struct,
|
||||
lsp::CompletionItemKind::EVENT => Self::Event,
|
||||
lsp::CompletionItemKind::OPERATOR => Self::Operator,
|
||||
lsp::CompletionItemKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::InsertTextFormat> for wit::InsertTextFormat {
|
||||
fn from(value: lsp::InsertTextFormat) -> Self {
|
||||
match value {
|
||||
lsp::InsertTextFormat::PLAIN_TEXT => Self::PlainText,
|
||||
lsp::InsertTextFormat::SNIPPET => Self::Snippet,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::SymbolKind> for wit::SymbolKind {
|
||||
fn from(value: lsp::SymbolKind) -> Self {
|
||||
match value {
|
||||
lsp::SymbolKind::FILE => Self::File,
|
||||
lsp::SymbolKind::MODULE => Self::Module,
|
||||
lsp::SymbolKind::NAMESPACE => Self::Namespace,
|
||||
lsp::SymbolKind::PACKAGE => Self::Package,
|
||||
lsp::SymbolKind::CLASS => Self::Class,
|
||||
lsp::SymbolKind::METHOD => Self::Method,
|
||||
lsp::SymbolKind::PROPERTY => Self::Property,
|
||||
lsp::SymbolKind::FIELD => Self::Field,
|
||||
lsp::SymbolKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::SymbolKind::ENUM => Self::Enum,
|
||||
lsp::SymbolKind::INTERFACE => Self::Interface,
|
||||
lsp::SymbolKind::FUNCTION => Self::Function,
|
||||
lsp::SymbolKind::VARIABLE => Self::Variable,
|
||||
lsp::SymbolKind::CONSTANT => Self::Constant,
|
||||
lsp::SymbolKind::STRING => Self::String,
|
||||
lsp::SymbolKind::NUMBER => Self::Number,
|
||||
lsp::SymbolKind::BOOLEAN => Self::Boolean,
|
||||
lsp::SymbolKind::ARRAY => Self::Array,
|
||||
lsp::SymbolKind::OBJECT => Self::Object,
|
||||
lsp::SymbolKind::KEY => Self::Key,
|
||||
lsp::SymbolKind::NULL => Self::Null,
|
||||
lsp::SymbolKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::SymbolKind::STRUCT => Self::Struct,
|
||||
lsp::SymbolKind::EVENT => Self::Event,
|
||||
lsp::SymbolKind::OPERATOR => Self::Operator,
|
||||
lsp::SymbolKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_int<T: Serialize>(value: T) -> i32 {
|
||||
maybe!({
|
||||
let kind = serde_json::to_value(&value)?;
|
||||
serde_json::from_value(kind)
|
||||
})
|
||||
.log_err()
|
||||
.unwrap_or(-1)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_code_label() {
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
let (code, code_ranges) = marked_text_ranges(
|
||||
"«const» «a»: «fn»(«Bcd»(«Efgh»)) -> «Ijklm» = pqrs.tuv",
|
||||
false,
|
||||
);
|
||||
let code_runs = code_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find("pqrs").unwrap() as u32,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find(": fn").unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "pqrs.tuv".len() as u32,
|
||||
},
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let (label_text, label_ranges) =
|
||||
marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false);
|
||||
let label_runs = label_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(
|
||||
label,
|
||||
CodeLabel {
|
||||
text: label_text,
|
||||
runs: label_runs,
|
||||
filter_range: label.filter_range.clone()
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_code_label_with_invalid_ranges() {
|
||||
use util::test::marked_text_ranges;
|
||||
|
||||
let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false);
|
||||
let code_runs = code_ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, HighlightId(0)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// A span uses a code range that is invalid because it starts inside of
|
||||
// a multi-byte character.
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('B').unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('🏀').unwrap() as u32 + 1,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "B".len() as u32,
|
||||
},
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
);
|
||||
assert!(label.is_none());
|
||||
|
||||
// Filter range extends beyond actual text
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![wit::CodeLabelSpan::Literal(wit::CodeLabelSpanLiteral {
|
||||
text: "abc".into(),
|
||||
highlight_name: Some("type".into()),
|
||||
})],
|
||||
filter_range: wit::Range { start: 0, end: 5 },
|
||||
code: String::new(),
|
||||
},
|
||||
&code_runs,
|
||||
&language::PLAIN_TEXT,
|
||||
);
|
||||
assert!(label.is_none());
|
||||
}
|

@@ -1,212 +0,0 @@
use anyhow::{anyhow, Context, Result};
use collections::{BTreeMap, HashMap};
use fs::Fs;
use language::{LanguageName, LanguageServerName};
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use std::{
    ffi::OsStr,
    fmt,
    path::{Path, PathBuf},
    sync::Arc,
};

/// This is the old version of the extension manifest, from when it was `extension.json`.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct OldExtensionManifest {
    pub name: String,
    pub version: Arc<str>,

    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub repository: Option<String>,
    #[serde(default)]
    pub authors: Vec<String>,

    #[serde(default)]
    pub themes: BTreeMap<Arc<str>, PathBuf>,
    #[serde(default)]
    pub languages: BTreeMap<Arc<str>, PathBuf>,
    #[serde(default)]
    pub grammars: BTreeMap<Arc<str>, PathBuf>,
}

/// The schema version of the [`ExtensionManifest`].
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
pub struct SchemaVersion(pub i32);

impl fmt::Display for SchemaVersion {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl SchemaVersion {
    pub const ZERO: Self = Self(0);

    pub fn is_v0(&self) -> bool {
        self == &Self::ZERO
    }
}

#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct ExtensionManifest {
    pub id: Arc<str>,
    pub name: String,
    pub version: Arc<str>,
    pub schema_version: SchemaVersion,

    #[serde(default)]
    pub description: Option<String>,
    #[serde(default)]
    pub repository: Option<String>,
    #[serde(default)]
    pub authors: Vec<String>,
    #[serde(default)]
    pub lib: LibManifestEntry,

    #[serde(default)]
    pub themes: Vec<PathBuf>,
    #[serde(default)]
    pub languages: Vec<PathBuf>,
    #[serde(default)]
    pub grammars: BTreeMap<Arc<str>, GrammarManifestEntry>,
    #[serde(default)]
    pub language_servers: BTreeMap<LanguageServerName, LanguageServerManifestEntry>,
    #[serde(default)]
    pub slash_commands: BTreeMap<Arc<str>, SlashCommandManifestEntry>,
    #[serde(default)]
    pub indexed_docs_providers: BTreeMap<Arc<str>, IndexedDocsProviderEntry>,
    #[serde(default)]
    pub snippets: Option<PathBuf>,
}

#[derive(Clone, Default, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct LibManifestEntry {
    pub kind: Option<ExtensionLibraryKind>,
    pub version: Option<SemanticVersion>,
}

#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub enum ExtensionLibraryKind {
    Rust,
}

#[derive(Clone, Default, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct GrammarManifestEntry {
    pub repository: String,
    #[serde(alias = "commit")]
    pub rev: String,
    #[serde(default)]
    pub path: Option<String>,
}

#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct LanguageServerManifestEntry {
    /// Deprecated in favor of `languages`.
    #[serde(default)]
    language: Option<LanguageName>,
    /// The list of languages this language server should work with.
    #[serde(default)]
    languages: Vec<LanguageName>,
    #[serde(default)]
    pub language_ids: HashMap<String, String>,
    #[serde(default)]
    pub code_action_kinds: Option<Vec<lsp::CodeActionKind>>,
}

impl LanguageServerManifestEntry {
    /// Returns the list of languages for the language server.
    ///
    /// Prefer this over accessing the `language` or `languages` fields directly,
    /// as we currently support both.
    ///
    /// We can replace this with just field access for the `languages` field once
    /// we have removed `language`.
    pub fn languages(&self) -> impl IntoIterator<Item = LanguageName> + '_ {
        let language = if self.languages.is_empty() {
            self.language.clone()
        } else {
            None
        };
        self.languages.iter().cloned().chain(language)
    }
}

#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct SlashCommandManifestEntry {
    pub description: String,
    pub requires_argument: bool,
}

#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct IndexedDocsProviderEntry {}

impl ExtensionManifest {
    pub async fn load(fs: Arc<dyn Fs>, extension_dir: &Path) -> Result<Self> {
        let extension_name = extension_dir
            .file_name()
            .and_then(OsStr::to_str)
            .ok_or_else(|| anyhow!("invalid extension name"))?;

        let mut extension_manifest_path = extension_dir.join("extension.json");
        if fs.is_file(&extension_manifest_path).await {
            let manifest_content = fs
                .load(&extension_manifest_path)
                .await
                .with_context(|| format!("failed to load {extension_name} extension.json"))?;
            let manifest_json = serde_json::from_str::<OldExtensionManifest>(&manifest_content)
                .with_context(|| {
                    format!("invalid extension.json for extension {extension_name}")
                })?;

            Ok(manifest_from_old_manifest(manifest_json, extension_name))
        } else {
            extension_manifest_path.set_extension("toml");
            let manifest_content = fs
                .load(&extension_manifest_path)
                .await
                .with_context(|| format!("failed to load {extension_name} extension.toml"))?;
            toml::from_str(&manifest_content)
                .with_context(|| format!("invalid extension.json for extension {extension_name}"))
        }
    }
}

fn manifest_from_old_manifest(
    manifest_json: OldExtensionManifest,
    extension_id: &str,
) -> ExtensionManifest {
    ExtensionManifest {
        id: extension_id.into(),
        name: manifest_json.name,
        version: manifest_json.version,
        description: manifest_json.description,
        repository: manifest_json.repository,
        authors: manifest_json.authors,
        schema_version: SchemaVersion::ZERO,
        lib: Default::default(),
        themes: {
            let mut themes = manifest_json.themes.into_values().collect::<Vec<_>>();
            themes.sort();
            themes.dedup();
            themes
        },
        languages: {
            let mut languages = manifest_json.languages.into_values().collect::<Vec<_>>();
            languages.sort();
            languages.dedup();
            languages
        },
        grammars: manifest_json
            .grammars
            .into_keys()
            .map(|grammar_name| (grammar_name, Default::default()))
            .collect(),
        language_servers: Default::default(),
        slash_commands: BTreeMap::default(),
        indexed_docs_providers: BTreeMap::default(),
        snippets: None,
    }
}

@@ -1,51 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use std::sync::Arc;

#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct ExtensionSettings {
    /// The extensions that should be automatically installed by Zed.
    ///
    /// This is used to make functionality provided by extensions (e.g., language support)
    /// available out-of-the-box.
    #[serde(default)]
    pub auto_install_extensions: HashMap<Arc<str>, bool>,
    #[serde(default)]
    pub auto_update_extensions: HashMap<Arc<str>, bool>,
}

impl ExtensionSettings {
    /// Returns whether the given extension should be auto-installed.
    pub fn should_auto_install(&self, extension_id: &str) -> bool {
        self.auto_install_extensions
            .get(extension_id)
            .copied()
            .unwrap_or(true)
    }

    pub fn should_auto_update(&self, extension_id: &str) -> bool {
        self.auto_update_extensions
            .get(extension_id)
            .copied()
            .unwrap_or(true)
    }
}

impl Settings for ExtensionSettings {
    const KEY: Option<&'static str> = None;

    type FileContent = Self;

    fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
        SettingsSources::<Self::FileContent>::json_merge_with(
            [sources.default]
                .into_iter()
                .chain(sources.user)
                .chain(sources.server),
        )
    }
}

@@ -1,135 +0,0 @@
use std::sync::{atomic::AtomicBool, Arc};

use anyhow::{anyhow, Result};
use assistant_slash_command::{
    ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
    SlashCommandResult,
};
use futures::FutureExt;
use gpui::{Task, WeakView, WindowContext};
use language::{BufferSnapshot, LspAdapterDelegate};
use ui::prelude::*;
use wasmtime_wasi::WasiView;
use workspace::Workspace;

use crate::wasm_host::{WasmExtension, WasmHost};

pub struct ExtensionSlashCommand {
    pub(crate) extension: WasmExtension,
    #[allow(unused)]
    pub(crate) host: Arc<WasmHost>,
    pub(crate) command: crate::wit::SlashCommand,
}

impl SlashCommand for ExtensionSlashCommand {
    fn name(&self) -> String {
        self.command.name.clone()
    }

    fn description(&self) -> String {
        self.command.description.clone()
    }

    fn menu_text(&self) -> String {
        self.command.tooltip_text.clone()
    }

    fn requires_argument(&self) -> bool {
        self.command.requires_argument
    }

    fn complete_argument(
        self: Arc<Self>,
        arguments: &[String],
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        let arguments = arguments.to_owned();
        cx.background_executor().spawn(async move {
            self.extension
                .call({
                    let this = self.clone();
                    move |extension, store| {
                        async move {
                            let completions = extension
                                .call_complete_slash_command_argument(
                                    store,
                                    &this.command,
                                    &arguments,
                                )
                                .await?
                                .map_err(|e| anyhow!("{}", e))?;

                            anyhow::Ok(
                                completions
                                    .into_iter()
                                    .map(|completion| ArgumentCompletion {
                                        label: completion.label.into(),
                                        new_text: completion.new_text,
                                        replace_previous_arguments: false,
                                        after_completion: completion.run_command.into(),
                                    })
                                    .collect(),
                            )
                        }
                        .boxed()
                    }
                })
                .await
        })
    }

    fn run(
        self: Arc<Self>,
        arguments: &[String],
        _context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
        _context_buffer: BufferSnapshot,
        _workspace: WeakView<Workspace>,
        delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<SlashCommandResult> {
        let arguments = arguments.to_owned();
        let output = cx.background_executor().spawn(async move {
            self.extension
                .call({
                    let this = self.clone();
                    move |extension, store| {
                        async move {
                            let resource = if let Some(delegate) = delegate {
                                Some(store.data_mut().table().push(delegate)?)
                            } else {
                                None
                            };
                            let output = extension
                                .call_run_slash_command(store, &this.command, &arguments, resource)
                                .await?
                                .map_err(|e| anyhow!("{}", e))?;

                            anyhow::Ok(output)
                        }
                        .boxed()
                    }
                })
                .await
        });
        cx.foreground_executor().spawn(async move {
            let output = output.await?;
            Ok(SlashCommandOutput {
                text: output.text,
                sections: output
                    .sections
                    .into_iter()
                    .map(|section| SlashCommandOutputSection {
                        range: section.range.into(),
                        icon: IconName::Code,
                        label: section.label.into(),
                        metadata: None,
                    })
                    .collect(),
                run_commands_in_text: false,
            }
            .to_event_stream())
        })
    }
}
File diff suppressed because it is too large
@ -1,783 +0,0 @@
|
|||
use crate::extension_manifest::SchemaVersion;
|
||||
use crate::extension_settings::ExtensionSettings;
|
||||
use crate::{
|
||||
Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry,
|
||||
ExtensionIndexThemeEntry, ExtensionManifest, ExtensionStore, GrammarManifestEntry,
|
||||
RELOAD_DEBOUNCE_DURATION,
|
||||
};
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use async_compression::futures::bufread::GzipEncoder;
|
||||
use collections::BTreeMap;
|
||||
use fs::{FakeFs, Fs, RealFs};
|
||||
use futures::{io::BufReader, AsyncReadExt, StreamExt};
|
||||
use gpui::{Context, SemanticVersion, TestAppContext};
|
||||
use http_client::{FakeHttpClient, Response};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName};
|
||||
use node_runtime::NodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, DEFAULT_COMPLETION_CONTEXT};
|
||||
use release_channel::AppVersion;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use serde_json::json;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use snippet_provider::SnippetRegistry;
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use theme::{RealThemeRegistry, ThemeRegistry};
|
||||
use util::test::temp_tree;
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
if std::env::var("RUST_LOG").is_ok() {
|
||||
env_logger::init();
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_extension_store(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let http_client = FakeHttpClient::with_200_response();
|
||||
|
||||
fs.insert_tree(
|
||||
"/the-extension-dir",
|
||||
json!({
|
||||
"installed": {
|
||||
"zed-monokai": {
|
||||
"extension.json": r#"{
|
||||
"id": "zed-monokai",
|
||||
"name": "Zed Monokai",
|
||||
"version": "2.0.0",
|
||||
"themes": {
|
||||
"Monokai Dark": "themes/monokai.json",
|
||||
"Monokai Light": "themes/monokai.json",
|
||||
"Monokai Pro Dark": "themes/monokai-pro.json",
|
||||
"Monokai Pro Light": "themes/monokai-pro.json"
|
||||
}
|
||||
}"#,
|
||||
"themes": {
|
||||
"monokai.json": r#"{
|
||||
"name": "Monokai",
|
||||
"author": "Someone",
|
||||
"themes": [
|
||||
{
|
||||
"name": "Monokai Dark",
|
||||
"appearance": "dark",
|
||||
"style": {}
|
||||
},
|
||||
{
|
||||
"name": "Monokai Light",
|
||||
"appearance": "light",
|
||||
"style": {}
|
||||
}
|
||||
]
|
||||
}"#,
|
||||
"monokai-pro.json": r#"{
|
||||
"name": "Monokai Pro",
|
||||
"author": "Someone",
|
||||
"themes": [
|
||||
{
|
||||
"name": "Monokai Pro Dark",
|
||||
"appearance": "dark",
|
||||
"style": {}
|
||||
},
|
||||
{
|
||||
"name": "Monokai Pro Light",
|
||||
"appearance": "light",
|
||||
"style": {}
|
||||
}
|
||||
]
|
||||
}"#,
|
||||
}
|
||||
},
|
||||
"zed-ruby": {
|
||||
"extension.json": r#"{
|
||||
"id": "zed-ruby",
|
||||
"name": "Zed Ruby",
|
||||
"version": "1.0.0",
|
||||
"grammars": {
|
||||
"ruby": "grammars/ruby.wasm",
|
||||
"embedded_template": "grammars/embedded_template.wasm"
|
||||
},
|
||||
"languages": {
|
||||
"ruby": "languages/ruby",
|
||||
"erb": "languages/erb"
|
||||
}
|
||||
}"#,
|
||||
"grammars": {
|
||||
"ruby.wasm": "",
|
||||
"embedded_template.wasm": "",
|
||||
},
|
||||
"languages": {
|
||||
"ruby": {
|
||||
"config.toml": r#"
|
||||
name = "Ruby"
|
||||
grammar = "ruby"
|
||||
path_suffixes = ["rb"]
|
||||
"#,
|
||||
"highlights.scm": "",
|
||||
},
|
||||
"erb": {
|
||||
"config.toml": r#"
|
||||
name = "ERB"
|
||||
grammar = "embedded_template"
|
||||
path_suffixes = ["erb"]
|
||||
"#,
|
||||
"highlights.scm": "",
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let mut expected_index = ExtensionIndex {
|
||||
extensions: [
|
||||
(
|
||||
"zed-ruby".into(),
|
||||
ExtensionIndexEntry {
|
||||
manifest: Arc::new(ExtensionManifest {
|
||||
id: "zed-ruby".into(),
|
||||
name: "Zed Ruby".into(),
|
||||
version: "1.0.0".into(),
|
||||
schema_version: SchemaVersion::ZERO,
|
||||
description: None,
|
||||
authors: Vec::new(),
|
||||
repository: None,
|
||||
themes: Default::default(),
|
||||
lib: Default::default(),
|
||||
languages: vec!["languages/erb".into(), "languages/ruby".into()],
|
||||
grammars: [
|
||||
("embedded_template".into(), GrammarManifestEntry::default()),
|
||||
("ruby".into(), GrammarManifestEntry::default()),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
language_servers: BTreeMap::default(),
|
||||
slash_commands: BTreeMap::default(),
|
||||
indexed_docs_providers: BTreeMap::default(),
|
||||
snippets: None,
|
||||
}),
|
||||
dev: false,
|
||||
},
|
||||
),
|
||||
(
|
||||
"zed-monokai".into(),
|
||||
ExtensionIndexEntry {
|
||||
manifest: Arc::new(ExtensionManifest {
|
||||
id: "zed-monokai".into(),
|
||||
name: "Zed Monokai".into(),
|
||||
version: "2.0.0".into(),
|
||||
schema_version: SchemaVersion::ZERO,
|
||||
description: None,
|
||||
authors: vec![],
|
||||
repository: None,
|
||||
themes: vec![
|
||||
"themes/monokai-pro.json".into(),
|
||||
"themes/monokai.json".into(),
|
||||
],
|
||||
lib: Default::default(),
|
||||
languages: Default::default(),
|
||||
grammars: BTreeMap::default(),
|
||||
language_servers: BTreeMap::default(),
|
||||
slash_commands: BTreeMap::default(),
|
||||
indexed_docs_providers: BTreeMap::default(),
|
||||
snippets: None,
|
||||
}),
|
||||
dev: false,
|
||||
},
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
languages: [
|
||||
(
|
||||
"ERB".into(),
|
||||
ExtensionIndexLanguageEntry {
|
||||
extension: "zed-ruby".into(),
|
||||
path: "languages/erb".into(),
|
||||
grammar: Some("embedded_template".into()),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["erb".into()],
|
||||
first_line_pattern: None,
|
||||
},
|
||||
},
|
||||
),
|
||||
(
|
||||
"Ruby".into(),
|
||||
ExtensionIndexLanguageEntry {
|
||||
extension: "zed-ruby".into(),
|
||||
path: "languages/ruby".into(),
|
||||
grammar: Some("ruby".into()),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rb".into()],
|
||||
first_line_pattern: None,
|
||||
},
|
||||
},
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
themes: [
|
||||
(
|
||||
"Monokai Dark".into(),
|
||||
ExtensionIndexThemeEntry {
|
||||
extension: "zed-monokai".into(),
|
||||
path: "themes/monokai.json".into(),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Monokai Light".into(),
|
||||
ExtensionIndexThemeEntry {
|
||||
extension: "zed-monokai".into(),
|
||||
path: "themes/monokai.json".into(),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Monokai Pro Dark".into(),
|
||||
ExtensionIndexThemeEntry {
|
||||
extension: "zed-monokai".into(),
|
||||
path: "themes/monokai-pro.json".into(),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Monokai Pro Light".into(),
|
||||
ExtensionIndexThemeEntry {
|
||||
extension: "zed-monokai".into(),
|
||||
path: "themes/monokai-pro.json".into(),
|
||||
},
|
||||
),
|
||||
]
|
||||
.into_iter()
|
||||
.collect(),
|
||||
};
|
||||
|
||||
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let theme_registry = Arc::new(RealThemeRegistry::new(Box::new(())));
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let store = cx.new_model(|cx| {
|
||||
ExtensionStore::new(
|
||||
PathBuf::from("/the-extension-dir"),
|
||||
None,
|
||||
fs.clone(),
|
||||
http_client.clone(),
|
||||
http_client.clone(),
|
||||
None,
|
||||
node_runtime.clone(),
|
||||
language_registry.clone(),
|
||||
theme_registry.clone(),
|
||||
slash_command_registry.clone(),
|
||||
indexed_docs_registry.clone(),
|
||||
snippet_registry.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().advance_clock(super::RELOAD_DEBOUNCE_DURATION);
|
||||
store.read_with(cx, |store, _| {
|
||||
let index = &store.extension_index;
|
||||
assert_eq!(index.extensions, expected_index.extensions);
|
||||
assert_eq!(index.languages, expected_index.languages);
|
||||
assert_eq!(index.themes, expected_index.themes);
|
||||
|
||||
assert_eq!(
|
||||
language_registry.language_names(),
|
||||
["ERB", "Plain Text", "Ruby"]
|
||||
);
|
||||
assert_eq!(
|
||||
theme_registry.list_names(),
|
||||
[
|
||||
"Monokai Dark",
|
||||
"Monokai Light",
|
||||
"Monokai Pro Dark",
|
||||
"Monokai Pro Light",
|
||||
"One Dark",
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
fs.insert_tree(
|
||||
"/the-extension-dir/installed/zed-gruvbox",
|
||||
json!({
|
||||
"extension.json": r#"{
|
||||
"id": "zed-gruvbox",
|
||||
"name": "Zed Gruvbox",
|
||||
"version": "1.0.0",
|
||||
"themes": {
|
||||
"Gruvbox": "themes/gruvbox.json"
|
||||
}
|
||||
}"#,
|
||||
"themes": {
|
||||
"gruvbox.json": r#"{
|
||||
"name": "Gruvbox",
|
||||
"author": "Someone Else",
|
||||
"themes": [
|
||||
{
|
||||
"name": "Gruvbox",
|
||||
"appearance": "dark",
|
||||
"style": {}
|
||||
}
|
||||
]
|
||||
}"#,
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
expected_index.extensions.insert(
|
||||
"zed-gruvbox".into(),
|
||||
ExtensionIndexEntry {
|
||||
manifest: Arc::new(ExtensionManifest {
|
||||
id: "zed-gruvbox".into(),
|
||||
name: "Zed Gruvbox".into(),
|
||||
version: "1.0.0".into(),
|
||||
schema_version: SchemaVersion::ZERO,
|
||||
description: None,
|
||||
authors: vec![],
|
||||
repository: None,
|
||||
themes: vec!["themes/gruvbox.json".into()],
|
||||
lib: Default::default(),
|
||||
languages: Default::default(),
|
||||
grammars: BTreeMap::default(),
|
||||
language_servers: BTreeMap::default(),
|
||||
slash_commands: BTreeMap::default(),
|
||||
indexed_docs_providers: BTreeMap::default(),
|
||||
snippets: None,
|
||||
}),
|
||||
dev: false,
|
||||
},
|
||||
);
|
||||
expected_index.themes.insert(
|
||||
"Gruvbox".into(),
|
||||
ExtensionIndexThemeEntry {
|
||||
extension: "zed-gruvbox".into(),
|
||||
path: "themes/gruvbox.json".into(),
|
||||
},
|
||||
);
|
||||
|
||||
#[allow(clippy::let_underscore_future)]
|
||||
let _ = store.update(cx, |store, cx| store.reload(None, cx));
|
||||
|
||||
cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION);
|
||||
store.read_with(cx, |store, _| {
|
||||
let index = &store.extension_index;
|
||||
assert_eq!(index.extensions, expected_index.extensions);
|
||||
assert_eq!(index.languages, expected_index.languages);
|
||||
assert_eq!(index.themes, expected_index.themes);
|
||||
|
||||
assert_eq!(
|
||||
theme_registry.list_names(),
|
||||
[
|
||||
"Gruvbox",
|
||||
"Monokai Dark",
|
||||
"Monokai Light",
|
||||
"Monokai Pro Dark",
|
||||
"Monokai Pro Light",
|
||||
"One Dark",
|
||||
]
|
||||
);
|
||||
});
|
||||
|
||||
let prev_fs_metadata_call_count = fs.metadata_call_count();
|
||||
let prev_fs_read_dir_call_count = fs.read_dir_call_count();
|
||||
|
||||
// Create new extension store, as if Zed were restarting.
|
||||
drop(store);
|
||||
let store = cx.new_model(|cx| {
|
||||
ExtensionStore::new(
|
||||
PathBuf::from("/the-extension-dir"),
|
||||
None,
|
||||
fs.clone(),
|
||||
http_client.clone(),
|
||||
http_client.clone(),
|
||||
None,
|
||||
node_runtime.clone(),
|
||||
language_registry.clone(),
|
||||
theme_registry.clone(),
|
||||
slash_command_registry,
|
||||
indexed_docs_registry,
|
||||
snippet_registry,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
store.read_with(cx, |store, _| {
|
||||
assert_eq!(store.extension_index, expected_index);
|
||||
assert_eq!(
|
||||
language_registry.language_names(),
|
||||
["ERB", "Plain Text", "Ruby"]
|
||||
);
|
||||
assert_eq!(
|
||||
language_registry.grammar_names(),
|
||||
["embedded_template".into(), "ruby".into()]
|
||||
);
|
||||
assert_eq!(
|
||||
theme_registry.list_names(),
|
||||
[
|
||||
"Gruvbox",
|
||||
"Monokai Dark",
|
||||
"Monokai Light",
|
||||
"Monokai Pro Dark",
|
||||
"Monokai Pro Light",
|
||||
"One Dark",
|
||||
]
|
||||
);
|
||||
|
||||
// The on-disk manifest limits the number of FS calls that need to be made
|
||||
// on startup.
|
||||
assert_eq!(fs.read_dir_call_count(), prev_fs_read_dir_call_count);
|
||||
assert_eq!(fs.metadata_call_count(), prev_fs_metadata_call_count + 2);
|
||||
});
|
||||
|
||||
store.update(cx, |store, cx| {
|
||||
store.uninstall_extension("zed-ruby".into(), cx)
|
||||
});
|
||||
|
||||
cx.executor().advance_clock(RELOAD_DEBOUNCE_DURATION);
|
||||
expected_index.extensions.remove("zed-ruby");
|
||||
expected_index.languages.remove("Ruby");
|
||||
expected_index.languages.remove("ERB");
|
||||
|
||||
store.read_with(cx, |store, _| {
|
||||
assert_eq!(store.extension_index, expected_index);
|
||||
assert_eq!(language_registry.language_names(), ["Plain Text"]);
|
||||
assert_eq!(language_registry.grammar_names(), []);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
cx.executor().allow_parking();
|
||||
|
||||
let root_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||
.parent()
|
||||
.unwrap()
|
||||
.parent()
|
||||
.unwrap();
|
||||
let cache_dir = root_dir.join("target");
|
||||
let test_extension_id = "test-extension";
|
||||
let test_extension_dir = root_dir.join("extensions").join(test_extension_id);
|
||||
|
||||
let fs = Arc::new(RealFs::default());
|
||||
let extensions_dir = temp_tree(json!({
|
||||
"installed": {},
|
||||
"work": {}
|
||||
}));
|
||||
let project_dir = temp_tree(json!({
|
||||
"test.gleam": ""
|
||||
}));
|
||||
|
||||
let extensions_dir = extensions_dir.path().canonicalize().unwrap();
|
||||
let project_dir = project_dir.path().canonicalize().unwrap();
|
||||
|
||||
let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await;
|
||||
|
||||
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
|
||||
let theme_registry = Arc::new(RealThemeRegistry::new(Box::new(())));
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let mut status_updates = language_registry.language_server_binary_statuses();
|
||||
|
||||
struct FakeLanguageServerVersion {
|
||||
version: String,
|
||||
binary_contents: String,
|
||||
http_request_count: usize,
|
||||
}
|
||||
|
||||
let language_server_version = Arc::new(Mutex::new(FakeLanguageServerVersion {
|
||||
version: "v1.2.3".into(),
|
||||
binary_contents: "the-binary-contents".into(),
|
||||
http_request_count: 0,
|
||||
}));
|
||||
|
||||
let extension_client = FakeHttpClient::create({
|
||||
let language_server_version = language_server_version.clone();
|
||||
move |request| {
|
||||
let language_server_version = language_server_version.clone();
|
||||
async move {
|
||||
let version = language_server_version.lock().version.clone();
|
||||
let binary_contents = language_server_version.lock().binary_contents.clone();
|
||||
|
||||
let github_releases_uri = "https://api.github.com/repos/gleam-lang/gleam/releases";
|
||||
let asset_download_uri =
|
||||
format!("https://fake-download.example.com/gleam-{version}");
|
||||
|
||||
let uri = request.uri().to_string();
|
||||
if uri == github_releases_uri {
|
||||
language_server_version.lock().http_request_count += 1;
|
||||
Ok(Response::new(
|
||||
json!([
|
||||
{
|
||||
"tag_name": version,
|
||||
"prerelease": false,
|
||||
"tarball_url": "",
|
||||
"zipball_url": "",
|
||||
"assets": [
|
||||
{
|
||||
"name": format!("gleam-{version}-aarch64-apple-darwin.tar.gz"),
|
||||
"browser_download_url": asset_download_uri
|
||||
},
|
||||
{
|
||||
"name": format!("gleam-{version}-x86_64-unknown-linux-musl.tar.gz"),
|
||||
"browser_download_url": asset_download_uri
|
||||
},
|
||||
{
|
||||
"name": format!("gleam-{version}-aarch64-unknown-linux-musl.tar.gz"),
|
||||
"browser_download_url": asset_download_uri
|
||||
}
|
||||
]
|
||||
}
|
||||
])
|
||||
.to_string()
|
||||
.into(),
|
||||
))
|
||||
} else if uri == asset_download_uri {
|
||||
language_server_version.lock().http_request_count += 1;
|
||||
let mut bytes = Vec::<u8>::new();
|
||||
let mut archive = async_tar::Builder::new(&mut bytes);
|
||||
let mut header = async_tar::Header::new_gnu();
|
||||
header.set_size(binary_contents.len() as u64);
|
||||
archive
|
||||
.append_data(&mut header, "gleam", binary_contents.as_bytes())
|
||||
.await
|
||||
.unwrap();
|
||||
archive.into_inner().await.unwrap();
|
||||
let mut gzipped_bytes = Vec::new();
|
||||
let mut encoder = GzipEncoder::new(BufReader::new(bytes.as_slice()));
|
||||
encoder.read_to_end(&mut gzipped_bytes).await.unwrap();
|
||||
Ok(Response::new(gzipped_bytes.into()))
|
||||
} else {
|
||||
Ok(Response::builder().status(404).body("not found".into())?)
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
let user_agent = cx.update(|cx| {
|
||||
format!(
|
||||
"Zed/{} ({}; {})",
|
||||
AppVersion::global(cx),
|
||||
std::env::consts::OS,
|
||||
std::env::consts::ARCH
|
||||
)
|
||||
});
|
||||
let builder_client =
|
||||
Arc::new(ReqwestClient::user_agent(&user_agent).expect("Could not create HTTP client"));
|
||||
|
||||
let extension_store = cx.new_model(|cx| {
|
||||
ExtensionStore::new(
|
||||
extensions_dir.clone(),
|
||||
Some(cache_dir),
|
||||
fs.clone(),
|
||||
extension_client.clone(),
|
||||
builder_client,
|
||||
None,
|
||||
node_runtime,
|
||||
language_registry.clone(),
|
||||
theme_registry.clone(),
|
||||
slash_command_registry,
|
||||
indexed_docs_registry,
|
||||
snippet_registry,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Ensure that debounces fire.
|
||||
let mut events = cx.events(&extension_store);
|
||||
let executor = cx.executor();
|
||||
let _task = cx.executor().spawn(async move {
|
||||
while let Some(event) = events.next().await {
|
||||
if let crate::Event::StartedReloading = event {
|
||||
executor.advance_clock(RELOAD_DEBOUNCE_DURATION);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
extension_store.update(cx, |_, cx| {
|
||||
cx.subscribe(&extension_store, |_, _, event, _| {
|
||||
if matches!(event, Event::ExtensionFailedToLoad(_)) {
|
||||
panic!("extension failed to load");
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
extension_store
|
||||
.update(cx, |store, cx| {
|
||||
store.install_dev_extension(test_extension_dir.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut fake_servers = language_registry.register_fake_language_server(
|
||||
LanguageServerName("gleam".into()),
|
||||
lsp::ServerCapabilities {
|
||||
completion_provider: Some(Default::default()),
|
||||
..Default::default()
|
||||
},
|
||||
None,
|
||||
);
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(project_dir.join("test.gleam"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
let expected_server_path =
|
||||
extensions_dir.join(format!("work/{test_extension_id}/gleam-v1.2.3/gleam"));
|
||||
let expected_binary_contents = language_server_version.lock().binary_contents.clone();
|
||||
|
||||
assert_eq!(fake_server.binary.path, expected_server_path);
|
||||
assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]);
|
||||
assert_eq!(
|
||||
fs.load(&expected_server_path).await.unwrap(),
|
||||
expected_binary_contents
|
||||
);
|
||||
assert_eq!(language_server_version.lock().http_request_count, 2);
|
||||
assert_eq!(
|
||||
[
|
||||
status_updates.next().await.unwrap(),
|
||||
status_updates.next().await.unwrap(),
|
||||
status_updates.next().await.unwrap(),
|
||||
],
|
||||
[
|
||||
(
|
||||
LanguageServerName("gleam".into()),
|
||||
LanguageServerBinaryStatus::CheckingForUpdate
|
||||
),
|
||||
(
|
||||
LanguageServerName("gleam".into()),
|
||||
LanguageServerBinaryStatus::Downloading
|
||||
),
|
||||
(
|
||||
LanguageServerName("gleam".into()),
|
||||
LanguageServerBinaryStatus::None
|
||||
)
|
||||
]
|
||||
);
|
||||
|
||||
// The extension creates custom labels for completion items.
|
||||
fake_server.handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
|
||||
Ok(Some(lsp::CompletionResponse::Array(vec![
|
||||
lsp::CompletionItem {
|
||||
label: "foo".into(),
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
detail: Some("fn() -> Result(Nil, Error)".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "bar.baz".into(),
|
||||
kind: Some(lsp::CompletionItemKind::FUNCTION),
|
||||
detail: Some("fn(List(a)) -> a".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "Quux".into(),
|
||||
kind: Some(lsp::CompletionItemKind::CONSTRUCTOR),
|
||||
detail: Some("fn(String) -> T".into()),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::CompletionItem {
|
||||
label: "my_string".into(),
|
||||
kind: Some(lsp::CompletionItemKind::CONSTANT),
|
||||
detail: Some("String".into()),
|
||||
..Default::default()
|
||||
},
|
||||
])))
|
||||
});
|
||||
|
||||
let completion_labels = project
|
||||
.update(cx, |project, cx| {
|
||||
project.completions(&buffer, 0, DEFAULT_COMPLETION_CONTEXT, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|c| c.label.text)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(
|
||||
completion_labels,
|
||||
[
|
||||
"foo: fn() -> Result(Nil, Error)".to_string(),
|
||||
"bar.baz: fn(List(a)) -> a".to_string(),
|
||||
"Quux: fn(String) -> T".to_string(),
|
||||
"my_string: String".to_string(),
|
||||
]
|
||||
);
|
||||
|
||||
// Simulate a new version of the language server being released
|
||||
language_server_version.lock().version = "v2.0.0".into();
|
||||
language_server_version.lock().binary_contents = "the-new-binary-contents".into();
|
||||
language_server_version.lock().http_request_count = 0;
|
||||
|
||||
// Start a new instance of the language server.
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx)
|
||||
});
|
||||
|
||||
// The extension has cached the binary path, and does not attempt
|
||||
// to reinstall it.
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
assert_eq!(fake_server.binary.path, expected_server_path);
|
||||
assert_eq!(
|
||||
fs.load(&expected_server_path).await.unwrap(),
|
||||
expected_binary_contents
|
||||
);
|
||||
assert_eq!(language_server_version.lock().http_request_count, 0);
|
||||
|
||||
// Reload the extension, clearing its cache.
|
||||
// Start a new instance of the language server.
|
||||
extension_store
|
||||
.update(cx, |store, cx| store.reload(Some("gleam".into()), cx))
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers([buffer.clone()], cx)
|
||||
});
|
||||
|
||||
// The extension re-fetches the latest version of the language server.
|
||||
let fake_server = fake_servers.next().await.unwrap();
|
||||
let new_expected_server_path =
|
||||
extensions_dir.join(format!("work/{test_extension_id}/gleam-v2.0.0/gleam"));
|
||||
let expected_binary_contents = language_server_version.lock().binary_contents.clone();
|
||||
assert_eq!(fake_server.binary.path, new_expected_server_path);
|
||||
assert_eq!(fake_server.binary.arguments, [OsString::from("lsp")]);
|
||||
assert_eq!(
|
||||
fs.load(&new_expected_server_path).await.unwrap(),
|
||||
expected_binary_contents
|
||||
);
|
||||
|
||||
// The old language server directory has been cleaned up.
|
||||
assert!(fs.metadata(&expected_server_path).await.unwrap().is_none());
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
    cx.update(|cx| {
        let store = SettingsStore::test(cx);
        cx.set_global(store);
        release_channel::init(SemanticVersion::default(), cx);
        theme::init(theme::LoadThemes::JustBase, cx);
        Project::init_settings(cx);
        ExtensionSettings::register(cx);
        language::init(cx);
    });
}
@ -1,300 +0,0 @@
|
|||
pub(crate) mod wit;
|
||||
|
||||
use crate::ExtensionManifest;
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use fs::{normalize_path, Fs};
|
||||
use futures::future::LocalBoxFuture;
|
||||
use futures::{
|
||||
channel::{
|
||||
mpsc::{self, UnboundedSender},
|
||||
oneshot,
|
||||
},
|
||||
future::BoxFuture,
|
||||
Future, FutureExt, StreamExt as _,
|
||||
};
|
||||
use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
|
||||
use http_client::HttpClient;
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::NodeRuntime;
|
||||
use release_channel::ReleaseChannel;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
use wasmtime::{
|
||||
component::{Component, ResourceTable},
|
||||
Engine, Store,
|
||||
};
|
||||
use wasmtime_wasi as wasi;
|
||||
use wit::Extension;
|
||||
|
||||
pub(crate) struct WasmHost {
    engine: Engine,
    release_channel: ReleaseChannel,
    http_client: Arc<dyn HttpClient>,
    node_runtime: NodeRuntime,
    pub(crate) language_registry: Arc<LanguageRegistry>,
    fs: Arc<dyn Fs>,
    pub(crate) work_dir: PathBuf,
    _main_thread_message_task: Task<()>,
    main_thread_message_tx: mpsc::UnboundedSender<MainThreadCall>,
}

#[derive(Clone)]
pub struct WasmExtension {
    tx: UnboundedSender<ExtensionCall>,
    pub(crate) manifest: Arc<ExtensionManifest>,
    #[allow(unused)]
    pub zed_api_version: SemanticVersion,
}

pub(crate) struct WasmState {
    manifest: Arc<ExtensionManifest>,
    pub(crate) table: ResourceTable,
    ctx: wasi::WasiCtx,
    pub(crate) host: Arc<WasmHost>,
}

type MainThreadCall =
    Box<dyn Send + for<'a> FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, ()>>;

type ExtensionCall = Box<
    dyn Send + for<'a> FnOnce(&'a mut Extension, &'a mut Store<WasmState>) -> BoxFuture<'a, ()>,
>;

fn wasm_engine() -> wasmtime::Engine {
    static WASM_ENGINE: OnceLock<wasmtime::Engine> = OnceLock::new();

    WASM_ENGINE
        .get_or_init(|| {
            let mut config = wasmtime::Config::new();
            config.wasm_component_model(true);
            config.async_support(true);
            wasmtime::Engine::new(&config).unwrap()
        })
        .clone()
}

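// Editor's sketch, not part of the original file: the shared engine returned by
// `wasm_engine()` is the one later handed to `Component::from_binary` in
// `load_extension` below. A hypothetical standalone compile using the same
// configuration would look like this.
fn compile_component_sketch(wasm_bytes: &[u8]) -> anyhow::Result<wasmtime::component::Component> {
    let engine = wasm_engine();
    wasmtime::component::Component::from_binary(&engine, wasm_bytes)
}
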
impl WasmHost {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
node_runtime: NodeRuntime,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
work_dir: PathBuf,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<Self> {
|
||||
let (tx, mut rx) = mpsc::unbounded::<MainThreadCall>();
|
||||
let task = cx.spawn(|mut cx| async move {
|
||||
while let Some(message) = rx.next().await {
|
||||
message(&mut cx).await;
|
||||
}
|
||||
});
|
||||
Arc::new(Self {
|
||||
engine: wasm_engine(),
|
||||
fs,
|
||||
work_dir,
|
||||
http_client,
|
||||
node_runtime,
|
||||
language_registry,
|
||||
release_channel: ReleaseChannel::global(cx),
|
||||
_main_thread_message_task: task,
|
||||
main_thread_message_tx: tx,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn load_extension(
|
||||
self: &Arc<Self>,
|
||||
wasm_bytes: Vec<u8>,
|
||||
manifest: Arc<ExtensionManifest>,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Task<Result<WasmExtension>> {
|
||||
let this = self.clone();
|
||||
executor.clone().spawn(async move {
|
||||
let zed_api_version = parse_wasm_extension_version(&manifest.id, &wasm_bytes)?;
|
||||
|
||||
let component = Component::from_binary(&this.engine, &wasm_bytes)
|
||||
.context("failed to compile wasm component")?;
|
||||
|
||||
let mut store = wasmtime::Store::new(
|
||||
&this.engine,
|
||||
WasmState {
|
||||
ctx: this.build_wasi_ctx(&manifest).await?,
|
||||
manifest: manifest.clone(),
|
||||
table: ResourceTable::new(),
|
||||
host: this.clone(),
|
||||
},
|
||||
);
|
||||
|
||||
let mut extension = Extension::instantiate_async(
|
||||
&mut store,
|
||||
this.release_channel,
|
||||
zed_api_version,
|
||||
&component,
|
||||
)
|
||||
.await?;
|
||||
|
||||
extension
|
||||
.call_init_extension(&mut store)
|
||||
.await
|
||||
.context("failed to initialize wasm extension")?;
|
||||
|
||||
let (tx, mut rx) = mpsc::unbounded::<ExtensionCall>();
|
||||
executor
|
||||
.spawn(async move {
|
||||
while let Some(call) = rx.next().await {
|
||||
(call)(&mut extension, &mut store).await;
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Ok(WasmExtension {
|
||||
manifest,
|
||||
tx,
|
||||
zed_api_version,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async fn build_wasi_ctx(&self, manifest: &Arc<ExtensionManifest>) -> Result<wasi::WasiCtx> {
|
||||
let extension_work_dir = self.work_dir.join(manifest.id.as_ref());
|
||||
self.fs
|
||||
.create_dir(&extension_work_dir)
|
||||
.await
|
||||
.context("failed to create extension work dir")?;
|
||||
|
||||
let file_perms = wasi::FilePerms::all();
|
||||
let dir_perms = wasi::DirPerms::all();
|
||||
|
||||
Ok(wasi::WasiCtxBuilder::new()
|
||||
.inherit_stdio()
|
||||
.preopened_dir(&extension_work_dir, ".", dir_perms, file_perms)?
|
||||
.preopened_dir(
|
||||
&extension_work_dir,
|
||||
extension_work_dir.to_string_lossy(),
|
||||
dir_perms,
|
||||
file_perms,
|
||||
)?
|
||||
.env("PWD", extension_work_dir.to_string_lossy())
|
||||
.env("RUST_BACKTRACE", "full")
|
||||
.build())
|
||||
}
|
||||
|
||||
pub fn path_from_extension(&self, id: &Arc<str>, path: &Path) -> PathBuf {
|
||||
let extension_work_dir = self.work_dir.join(id.as_ref());
|
||||
normalize_path(&extension_work_dir.join(path))
|
||||
}
|
||||
|
||||
pub fn writeable_path_from_extension(&self, id: &Arc<str>, path: &Path) -> Result<PathBuf> {
|
||||
let extension_work_dir = self.work_dir.join(id.as_ref());
|
||||
let path = normalize_path(&extension_work_dir.join(path));
|
||||
if path.starts_with(&extension_work_dir) {
|
||||
Ok(path)
|
||||
} else {
|
||||
Err(anyhow!("cannot write to path {}", path.display()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_wasm_extension_version(
    extension_id: &str,
    wasm_bytes: &[u8],
) -> Result<SemanticVersion> {
    let mut version = None;

    for part in wasmparser::Parser::new(0).parse_all(wasm_bytes) {
        if let wasmparser::Payload::CustomSection(s) =
            part.context("error parsing wasm extension")?
        {
            if s.name() == "zed:api-version" {
                version = parse_wasm_extension_version_custom_section(s.data());
                if version.is_none() {
                    bail!(
                        "extension {} has invalid zed:api-version section: {:?}",
                        extension_id,
                        s.data()
                    );
                }
            }
        }
    }

    // The reason we wait until we're done parsing all of the Wasm bytes to return the version
    // is to work around a panic that can happen inside of Wasmtime when the bytes are invalid.
    //
    // By parsing the entirety of the Wasm bytes before we return, we're able to detect this problem
    // earlier as an `Err` rather than as a panic.
    version.ok_or_else(|| anyhow!("extension {} has no zed:api-version section", extension_id))
}

fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option<SemanticVersion> {
    if data.len() == 6 {
        Some(SemanticVersion::new(
            u16::from_be_bytes([data[0], data[1]]) as _,
            u16::from_be_bytes([data[2], data[3]]) as _,
            u16::from_be_bytes([data[4], data[5]]) as _,
        ))
    } else {
        None
    }
}

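// Editor's sketch, not part of the original file: the "zed:api-version" custom
// section decoded above is just three big-endian u16s (major, minor, patch).
// A hypothetical encoder for that payload, the counterpart of
// `parse_wasm_extension_version_custom_section`, would be:
fn encode_api_version_section_sketch(major: u16, minor: u16, patch: u16) -> [u8; 6] {
    let mut data = [0u8; 6];
    data[0..2].copy_from_slice(&major.to_be_bytes());
    data[2..4].copy_from_slice(&minor.to_be_bytes());
    data[4..6].copy_from_slice(&patch.to_be_bytes());
    data
}
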
impl WasmExtension {
|
||||
pub async fn call<T, Fn>(&self, f: Fn) -> T
|
||||
where
|
||||
T: 'static + Send,
|
||||
Fn: 'static
|
||||
+ Send
|
||||
+ for<'a> FnOnce(&'a mut Extension, &'a mut Store<WasmState>) -> BoxFuture<'a, T>,
|
||||
{
|
||||
let (return_tx, return_rx) = oneshot::channel();
|
||||
self.tx
|
||||
.clone()
|
||||
.unbounded_send(Box::new(move |extension, store| {
|
||||
async {
|
||||
let result = f(extension, store).await;
|
||||
return_tx.send(result).ok();
|
||||
}
|
||||
.boxed()
|
||||
}))
|
||||
.expect("wasm extension channel should not be closed yet");
|
||||
return_rx.await.expect("wasm extension channel")
|
||||
}
|
||||
}
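// Editor's sketch, not part of the original file: `WasmExtension::call` above
// works by shipping a boxed closure to the single task that owns the wasm
// `Store`, then waiting on a oneshot channel for the result. The module below
// is a minimal standalone illustration of that pattern, with a synchronous
// closure and an owned `String` standing in for the store; all names here are
// illustrative.
#[cfg(test)]
mod call_pattern_sketch {
    use futures::channel::{mpsc, oneshot};
    use futures::StreamExt as _;

    type Job = Box<dyn FnOnce(&mut String) + Send>;

    // The owner task is the only place that touches the state directly.
    pub async fn owner_task(mut rx: mpsc::UnboundedReceiver<Job>) {
        let mut state = String::from("state owned by a single task");
        while let Some(job) = rx.next().await {
            job(&mut state);
        }
    }

    // Callers send a job plus a reply channel, mirroring `WasmExtension::call`.
    pub async fn call_len(tx: &mpsc::UnboundedSender<Job>) -> usize {
        let (reply_tx, reply_rx) = oneshot::channel();
        tx.unbounded_send(Box::new(move |state: &mut String| {
            let _ = reply_tx.send(state.len());
        }))
        .expect("owner task dropped its receiver");
        reply_rx.await.expect("owner task dropped the reply sender")
    }
}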
|
||||
|
||||
impl WasmState {
|
||||
fn on_main_thread<T, Fn>(&self, f: Fn) -> impl 'static + Future<Output = T>
|
||||
where
|
||||
T: 'static + Send,
|
||||
Fn: 'static + Send + for<'a> FnOnce(&'a mut AsyncAppContext) -> LocalBoxFuture<'a, T>,
|
||||
{
|
||||
let (return_tx, return_rx) = oneshot::channel();
|
||||
self.host
|
||||
.main_thread_message_tx
|
||||
.clone()
|
||||
.unbounded_send(Box::new(move |cx| {
|
||||
async {
|
||||
let result = f(cx).await;
|
||||
return_tx.send(result).ok();
|
||||
}
|
||||
.boxed_local()
|
||||
}))
|
||||
.expect("main thread message channel should not be closed yet");
|
||||
async move { return_rx.await.expect("main thread message channel") }
|
||||
}
|
||||
|
||||
fn work_dir(&self) -> PathBuf {
|
||||
self.host.work_dir.join(self.manifest.id.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl wasi::WasiView for WasmState {
|
||||
fn table(&mut self) -> &mut ResourceTable {
|
||||
&mut self.table
|
||||
}
|
||||
|
||||
fn ctx(&mut self) -> &mut wasi::WasiCtx {
|
||||
&mut self.ctx
|
||||
}
|
||||
}
|
|
@ -1,405 +0,0 @@
|
|||
mod since_v0_0_1;
|
||||
mod since_v0_0_4;
|
||||
mod since_v0_0_6;
|
||||
mod since_v0_1_0;
|
||||
mod since_v0_2_0;
|
||||
use indexed_docs::IndexedDocsDatabase;
|
||||
use release_channel::ReleaseChannel;
|
||||
use since_v0_2_0 as latest;
|
||||
|
||||
use super::{wasm_engine, WasmState};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use language::{LanguageServerName, LspAdapterDelegate};
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::{ops::RangeInclusive, sync::Arc};
|
||||
use wasmtime::{
|
||||
component::{Component, Linker, Resource},
|
||||
Store,
|
||||
};
|
||||
|
||||
#[cfg(test)]
|
||||
pub use latest::CodeLabelSpanLiteral;
|
||||
pub use latest::{
|
||||
zed::extension::lsp::{Completion, CompletionKind, InsertTextFormat, Symbol, SymbolKind},
|
||||
zed::extension::slash_command::{SlashCommandArgumentCompletion, SlashCommandOutput},
|
||||
CodeLabel, CodeLabelSpan, Command, Range, SlashCommand,
|
||||
};
|
||||
pub use since_v0_0_4::LanguageServerConfig;
|
||||
|
||||
pub fn new_linker(
    f: impl Fn(&mut Linker<WasmState>, fn(&mut WasmState) -> &mut WasmState) -> Result<()>,
) -> Linker<WasmState> {
    let mut linker = Linker::new(&wasm_engine());
    wasmtime_wasi::add_to_linker_async(&mut linker).unwrap();
    f(&mut linker, wasi_view).unwrap();
    linker
}

fn wasi_view(state: &mut WasmState) -> &mut WasmState {
    state
}

/// Returns whether the given Wasm API version is supported by the Wasm host.
pub fn is_supported_wasm_api_version(
    release_channel: ReleaseChannel,
    version: SemanticVersion,
) -> bool {
    wasm_api_version_range(release_channel).contains(&version)
}

/// Returns the Wasm API version range that is supported by the Wasm host.
#[inline(always)]
pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive<SemanticVersion> {
    // Note: The release channel can be used to stage a new version of the extension API.
    let _ = release_channel;

    let max_version = match release_channel {
        ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION,
        ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_1_0::MAX_VERSION,
    };

    since_v0_0_1::MIN_VERSION..=max_version
}

pub enum Extension {
    V020(since_v0_2_0::Extension),
    V010(since_v0_1_0::Extension),
    V006(since_v0_0_6::Extension),
    V004(since_v0_0_4::Extension),
    V001(since_v0_0_1::Extension),
}

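// Editor's sketch, not part of the original file: a hypothetical caller that
// gates extension loading on the range returned by `wasm_api_version_range`,
// using the `is_supported_wasm_api_version` helper defined above.
fn ensure_supported_api_version_sketch(
    release_channel: ReleaseChannel,
    version: SemanticVersion,
) -> anyhow::Result<()> {
    if is_supported_wasm_api_version(release_channel, version) {
        Ok(())
    } else {
        Err(anyhow!("unsupported wasm API version for this release channel"))
    }
}
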
impl Extension {
|
||||
pub async fn instantiate_async(
|
||||
store: &mut Store<WasmState>,
|
||||
release_channel: ReleaseChannel,
|
||||
version: SemanticVersion,
|
||||
component: &Component,
|
||||
) -> Result<Self> {
|
||||
// Note: The release channel can be used to stage a new version of the extension API.
|
||||
let allow_latest_version = match release_channel {
|
||||
ReleaseChannel::Dev | ReleaseChannel::Nightly => true,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => false,
|
||||
};
|
||||
|
||||
if allow_latest_version && version >= latest::MIN_VERSION {
|
||||
let extension =
|
||||
latest::Extension::instantiate_async(store, component, latest::linker())
|
||||
.await
|
||||
.context("failed to instantiate wasm extension")?;
|
||||
Ok(Self::V020(extension))
|
||||
} else if version >= since_v0_1_0::MIN_VERSION {
|
||||
let extension = since_v0_1_0::Extension::instantiate_async(
|
||||
store,
|
||||
component,
|
||||
since_v0_1_0::linker(),
|
||||
)
|
||||
.await
|
||||
.context("failed to instantiate wasm extension")?;
|
||||
Ok(Self::V010(extension))
|
||||
} else if version >= since_v0_0_6::MIN_VERSION {
|
||||
let extension = since_v0_0_6::Extension::instantiate_async(
|
||||
store,
|
||||
component,
|
||||
since_v0_0_6::linker(),
|
||||
)
|
||||
.await
|
||||
.context("failed to instantiate wasm extension")?;
|
||||
Ok(Self::V006(extension))
|
||||
} else if version >= since_v0_0_4::MIN_VERSION {
|
||||
let extension = since_v0_0_4::Extension::instantiate_async(
|
||||
store,
|
||||
component,
|
||||
since_v0_0_4::linker(),
|
||||
)
|
||||
.await
|
||||
.context("failed to instantiate wasm extension")?;
|
||||
Ok(Self::V004(extension))
|
||||
} else {
|
||||
let extension = since_v0_0_1::Extension::instantiate_async(
|
||||
store,
|
||||
component,
|
||||
since_v0_0_1::linker(),
|
||||
)
|
||||
.await
|
||||
.context("failed to instantiate wasm extension")?;
|
||||
Ok(Self::V001(extension))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_init_extension(&self, store: &mut Store<WasmState>) -> Result<()> {
|
||||
match self {
|
||||
Extension::V020(ext) => ext.call_init_extension(store).await,
|
||||
Extension::V010(ext) => ext.call_init_extension(store).await,
|
||||
Extension::V006(ext) => ext.call_init_extension(store).await,
|
||||
Extension::V004(ext) => ext.call_init_extension(store).await,
|
||||
Extension::V001(ext) => ext.call_init_extension(store).await,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_language_server_command(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
config: &LanguageServerConfig,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> Result<Result<Command, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_language_server_command(store, &language_server_id.0, resource)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => Ok(ext
|
||||
.call_language_server_command(store, &language_server_id.0, resource)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
Extension::V006(ext) => Ok(ext
|
||||
.call_language_server_command(store, &language_server_id.0, resource)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
Extension::V004(ext) => Ok(ext
|
||||
.call_language_server_command(store, config, resource)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
Extension::V001(ext) => Ok(ext
|
||||
.call_language_server_command(store, &config.clone().into(), resource)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_language_server_initialization_options(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
config: &LanguageServerConfig,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> Result<Result<Option<String>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V006(ext) => {
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V004(ext) => {
|
||||
ext.call_language_server_initialization_options(store, config, resource)
|
||||
.await
|
||||
}
|
||||
Extension::V001(ext) => {
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&config.clone().into(),
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_language_server_workspace_configuration(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> Result<Result<Option<String>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V006(ext) => {
|
||||
ext.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&language_server_id.0,
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V004(_) | Extension::V001(_) => Ok(Ok(None)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_labels_for_completions(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
completions: Vec<latest::Completion>,
|
||||
) -> Result<Result<Vec<Option<CodeLabel>>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_labels_for_completions(store, &language_server_id.0, &completions)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => Ok(ext
|
||||
.call_labels_for_completions(store, &language_server_id.0, &completions)
|
||||
.await?
|
||||
.map(|labels| {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect()
|
||||
})),
|
||||
Extension::V006(ext) => Ok(ext
|
||||
.call_labels_for_completions(store, &language_server_id.0, &completions)
|
||||
.await?
|
||||
.map(|labels| {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect()
|
||||
})),
|
||||
Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_labels_for_symbols(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
symbols: Vec<latest::Symbol>,
|
||||
) -> Result<Result<Vec<Option<CodeLabel>>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_labels_for_symbols(store, &language_server_id.0, &symbols)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => Ok(ext
|
||||
.call_labels_for_symbols(store, &language_server_id.0, &symbols)
|
||||
.await?
|
||||
.map(|labels| {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect()
|
||||
})),
|
||||
Extension::V006(ext) => Ok(ext
|
||||
.call_labels_for_symbols(store, &language_server_id.0, &symbols)
|
||||
.await?
|
||||
.map(|labels| {
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect()
|
||||
})),
|
||||
Extension::V001(_) | Extension::V004(_) => Ok(Ok(Vec::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_complete_slash_command_argument(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
command: &SlashCommand,
|
||||
arguments: &[String],
|
||||
) -> Result<Result<Vec<SlashCommandArgumentCompletion>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_complete_slash_command_argument(store, command, arguments)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_complete_slash_command_argument(store, command, arguments)
|
||||
.await
|
||||
}
|
||||
Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => Ok(Ok(Vec::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_run_slash_command(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
command: &SlashCommand,
|
||||
arguments: &[String],
|
||||
resource: Option<Resource<Arc<dyn LspAdapterDelegate>>>,
|
||||
) -> Result<Result<SlashCommandOutput, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_run_slash_command(store, command, arguments, resource)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_run_slash_command(store, command, arguments, resource)
|
||||
.await
|
||||
}
|
||||
Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => {
|
||||
Err(anyhow!("`run_slash_command` not available prior to v0.1.0"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_suggest_docs_packages(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
provider: &str,
|
||||
) -> Result<Result<Vec<String>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => ext.call_suggest_docs_packages(store, provider).await,
|
||||
Extension::V010(ext) => ext.call_suggest_docs_packages(store, provider).await,
|
||||
Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => Err(anyhow!(
|
||||
"`suggest_docs_packages` not available prior to v0.1.0"
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn call_index_docs(
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
provider: &str,
|
||||
package_name: &str,
|
||||
database: Resource<Arc<IndexedDocsDatabase>>,
|
||||
) -> Result<Result<(), String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_index_docs(store, provider, package_name, database)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_index_docs(store, provider, package_name, database)
|
||||
.await
|
||||
}
|
||||
Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => {
|
||||
Err(anyhow!("`index_docs` not available prior to v0.1.0"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait ToWasmtimeResult<T> {
|
||||
fn to_wasmtime_result(self) -> wasmtime::Result<Result<T, String>>;
|
||||
}
|
||||
|
||||
impl<T> ToWasmtimeResult<T> for Result<T> {
|
||||
fn to_wasmtime_result(self) -> wasmtime::Result<Result<T, String>> {
|
||||
Ok(self.map_err(|error| error.to_string()))
|
||||
}
|
||||
}
|
|
@ -1,164 +0,0 @@
|
|||
use super::latest;
|
||||
use crate::wasm_host::wit::since_v0_0_4;
|
||||
use crate::wasm_host::WasmState;
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use language::{LanguageServerBinaryStatus, LspAdapterDelegate};
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use wasmtime::component::{Linker, Resource};
|
||||
|
||||
pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 1);
|
||||
|
||||
wasmtime::component::bindgen!({
|
||||
async: true,
|
||||
trappable_imports: true,
|
||||
path: "../extension_api/wit/since_v0.0.1",
|
||||
with: {
|
||||
"worktree": ExtensionWorktree,
|
||||
"zed:extension/github": latest::zed::extension::github,
|
||||
"zed:extension/platform": latest::zed::extension::platform,
|
||||
},
|
||||
});
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
|
||||
pub fn linker() -> &'static Linker<WasmState> {
|
||||
static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
|
||||
LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
|
||||
}
|
||||
|
||||
impl From<DownloadedFileType> for latest::DownloadedFileType {
|
||||
fn from(value: DownloadedFileType) -> Self {
|
||||
match value {
|
||||
DownloadedFileType::Gzip => latest::DownloadedFileType::Gzip,
|
||||
DownloadedFileType::GzipTar => latest::DownloadedFileType::GzipTar,
|
||||
DownloadedFileType::Zip => latest::DownloadedFileType::Zip,
|
||||
DownloadedFileType::Uncompressed => latest::DownloadedFileType::Uncompressed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<since_v0_0_4::LanguageServerConfig> for LanguageServerConfig {
|
||||
fn from(value: since_v0_0_4::LanguageServerConfig) -> Self {
|
||||
Self {
|
||||
name: value.name,
|
||||
language_name: value.language_name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Command> for latest::Command {
|
||||
fn from(value: Command) -> Self {
|
||||
Self {
|
||||
command: value.command,
|
||||
args: value.args,
|
||||
env: value.env,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl HostWorktree for WasmState {
|
||||
async fn read_text_file(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
path: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::HostWorktree::read_text_file(self, delegate, path).await
|
||||
}
|
||||
|
||||
async fn shell_env(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> wasmtime::Result<EnvVars> {
|
||||
latest::HostWorktree::shell_env(self, delegate).await
|
||||
}
|
||||
|
||||
async fn which(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
binary_name: String,
|
||||
) -> wasmtime::Result<Option<String>> {
|
||||
latest::HostWorktree::which(self, delegate, binary_name).await
|
||||
}
|
||||
|
||||
fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ExtensionImports for WasmState {
|
||||
async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::nodejs::Host::node_binary_path(self).await
|
||||
}
|
||||
|
||||
async fn npm_package_latest_version(
|
||||
&mut self,
|
||||
package_name: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::nodejs::Host::npm_package_latest_version(self, package_name).await
|
||||
}
|
||||
|
||||
async fn npm_package_installed_version(
|
||||
&mut self,
|
||||
package_name: String,
|
||||
) -> wasmtime::Result<Result<Option<String>, String>> {
|
||||
latest::nodejs::Host::npm_package_installed_version(self, package_name).await
|
||||
}
|
||||
|
||||
async fn npm_install_package(
|
||||
&mut self,
|
||||
package_name: String,
|
||||
version: String,
|
||||
) -> wasmtime::Result<Result<(), String>> {
|
||||
latest::nodejs::Host::npm_install_package(self, package_name, version).await
|
||||
}
|
||||
|
||||
async fn latest_github_release(
|
||||
&mut self,
|
||||
repo: String,
|
||||
options: GithubReleaseOptions,
|
||||
) -> wasmtime::Result<Result<GithubRelease, String>> {
|
||||
latest::zed::extension::github::Host::latest_github_release(self, repo, options).await
|
||||
}
|
||||
|
||||
async fn current_platform(&mut self) -> Result<(Os, Architecture)> {
|
||||
latest::zed::extension::platform::Host::current_platform(self).await
|
||||
}
|
||||
|
||||
async fn set_language_server_installation_status(
|
||||
&mut self,
|
||||
server_name: String,
|
||||
status: LanguageServerInstallationStatus,
|
||||
) -> wasmtime::Result<()> {
|
||||
let status = match status {
|
||||
LanguageServerInstallationStatus::CheckingForUpdate => {
|
||||
LanguageServerBinaryStatus::CheckingForUpdate
|
||||
}
|
||||
LanguageServerInstallationStatus::Downloading => {
|
||||
LanguageServerBinaryStatus::Downloading
|
||||
}
|
||||
LanguageServerInstallationStatus::Cached
|
||||
| LanguageServerInstallationStatus::Downloaded => LanguageServerBinaryStatus::None,
|
||||
LanguageServerInstallationStatus::Failed(error) => {
|
||||
LanguageServerBinaryStatus::Failed { error }
|
||||
}
|
||||
};
|
||||
|
||||
self.host
|
||||
.language_registry
|
||||
.update_lsp_status(language::LanguageServerName(server_name.into()), status);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn download_file(
|
||||
&mut self,
|
||||
url: String,
|
||||
path: String,
|
||||
file_type: DownloadedFileType,
|
||||
) -> wasmtime::Result<Result<(), String>> {
|
||||
latest::ExtensionImports::download_file(self, url, path, file_type.into()).await
|
||||
}
|
||||
}
|
|
@ -1,166 +0,0 @@
use super::latest;
use crate::wasm_host::WasmState;
use anyhow::Result;
use async_trait::async_trait;
use language::LspAdapterDelegate;
use semantic_version::SemanticVersion;
use std::sync::{Arc, OnceLock};
use wasmtime::component::{Linker, Resource};

pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 4);

wasmtime::component::bindgen!({
    async: true,
    trappable_imports: true,
    path: "../extension_api/wit/since_v0.0.4",
    with: {
        "worktree": ExtensionWorktree,
        "zed:extension/github": latest::zed::extension::github,
        "zed:extension/platform": latest::zed::extension::platform,
    },
});

pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;

pub fn linker() -> &'static Linker<WasmState> {
    static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

impl From<DownloadedFileType> for latest::DownloadedFileType {
    fn from(value: DownloadedFileType) -> Self {
        match value {
            DownloadedFileType::Gzip => latest::DownloadedFileType::Gzip,
            DownloadedFileType::GzipTar => latest::DownloadedFileType::GzipTar,
            DownloadedFileType::Zip => latest::DownloadedFileType::Zip,
            DownloadedFileType::Uncompressed => latest::DownloadedFileType::Uncompressed,
        }
    }
}

impl From<LanguageServerInstallationStatus> for latest::LanguageServerInstallationStatus {
    fn from(value: LanguageServerInstallationStatus) -> Self {
        match value {
            LanguageServerInstallationStatus::None => {
                latest::LanguageServerInstallationStatus::None
            }
            LanguageServerInstallationStatus::Downloading => {
                latest::LanguageServerInstallationStatus::Downloading
            }
            LanguageServerInstallationStatus::CheckingForUpdate => {
                latest::LanguageServerInstallationStatus::CheckingForUpdate
            }
            LanguageServerInstallationStatus::Failed(error) => {
                latest::LanguageServerInstallationStatus::Failed(error)
            }
        }
    }
}

impl From<Command> for latest::Command {
    fn from(value: Command) -> Self {
        Self {
            command: value.command,
            args: value.args,
            env: value.env,
        }
    }
}

#[async_trait]
impl HostWorktree for WasmState {
    async fn read_text_file(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        latest::HostWorktree::read_text_file(self, delegate, path).await
    }

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        latest::HostWorktree::shell_env(self, delegate).await
    }

    async fn which(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        latest::HostWorktree::which(self, delegate, binary_name).await
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
        // We only ever hand out borrows of worktrees.
        Ok(())
    }
}

#[async_trait]
impl ExtensionImports for WasmState {
    async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
        latest::nodejs::Host::node_binary_path(self).await
    }

    async fn npm_package_latest_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<String, String>> {
        latest::nodejs::Host::npm_package_latest_version(self, package_name).await
    }

    async fn npm_package_installed_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<Option<String>, String>> {
        latest::nodejs::Host::npm_package_installed_version(self, package_name).await
    }

    async fn npm_install_package(
        &mut self,
        package_name: String,
        version: String,
    ) -> wasmtime::Result<Result<(), String>> {
        latest::nodejs::Host::npm_install_package(self, package_name, version).await
    }

    async fn latest_github_release(
        &mut self,
        repo: String,
        options: GithubReleaseOptions,
    ) -> wasmtime::Result<Result<GithubRelease, String>> {
        latest::zed::extension::github::Host::latest_github_release(self, repo, options).await
    }

    async fn current_platform(&mut self) -> Result<(Os, Architecture)> {
        latest::zed::extension::platform::Host::current_platform(self).await
    }

    async fn set_language_server_installation_status(
        &mut self,
        server_name: String,
        status: LanguageServerInstallationStatus,
    ) -> wasmtime::Result<()> {
        latest::ExtensionImports::set_language_server_installation_status(
            self,
            server_name,
            status.into(),
        )
        .await
    }

    async fn download_file(
        &mut self,
        url: String,
        path: String,
        file_type: DownloadedFileType,
    ) -> wasmtime::Result<Result<(), String>> {
        latest::ExtensionImports::download_file(self, url, path, file_type.into()).await
    }

    async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
        latest::ExtensionImports::make_file_executable(self, path).await
    }
}
@ -1,201 +0,0 @@
use super::latest;
use crate::wasm_host::WasmState;
use anyhow::Result;
use async_trait::async_trait;
use language::LspAdapterDelegate;
use semantic_version::SemanticVersion;
use std::sync::{Arc, OnceLock};
use wasmtime::component::{Linker, Resource};

pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 6);

wasmtime::component::bindgen!({
    async: true,
    trappable_imports: true,
    path: "../extension_api/wit/since_v0.0.6",
    with: {
        "worktree": ExtensionWorktree,
        "zed:extension/github": latest::zed::extension::github,
        "zed:extension/lsp": latest::zed::extension::lsp,
        "zed:extension/nodejs": latest::zed::extension::nodejs,
        "zed:extension/platform": latest::zed::extension::platform,
    },
});

mod settings {
    include!(concat!(env!("OUT_DIR"), "/since_v0.0.6/settings.rs"));
}

pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;

pub fn linker() -> &'static Linker<WasmState> {
    static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

impl From<Command> for latest::Command {
    fn from(value: Command) -> Self {
        Self {
            command: value.command,
            args: value.args,
            env: value.env,
        }
    }
}

impl From<SettingsLocation> for latest::SettingsLocation {
    fn from(value: SettingsLocation) -> Self {
        Self {
            worktree_id: value.worktree_id,
            path: value.path,
        }
    }
}

impl From<LanguageServerInstallationStatus> for latest::LanguageServerInstallationStatus {
    fn from(value: LanguageServerInstallationStatus) -> Self {
        match value {
            LanguageServerInstallationStatus::None => Self::None,
            LanguageServerInstallationStatus::Downloading => Self::Downloading,
            LanguageServerInstallationStatus::CheckingForUpdate => Self::CheckingForUpdate,
            LanguageServerInstallationStatus::Failed(message) => Self::Failed(message),
        }
    }
}

impl From<DownloadedFileType> for latest::DownloadedFileType {
    fn from(value: DownloadedFileType) -> Self {
        match value {
            DownloadedFileType::Gzip => Self::Gzip,
            DownloadedFileType::GzipTar => Self::GzipTar,
            DownloadedFileType::Zip => Self::Zip,
            DownloadedFileType::Uncompressed => Self::Uncompressed,
        }
    }
}

impl From<Range> for latest::Range {
    fn from(value: Range) -> Self {
        Self {
            start: value.start,
            end: value.end,
        }
    }
}

impl From<CodeLabelSpan> for latest::CodeLabelSpan {
    fn from(value: CodeLabelSpan) -> Self {
        match value {
            CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()),
            CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()),
        }
    }
}

impl From<CodeLabelSpanLiteral> for latest::CodeLabelSpanLiteral {
    fn from(value: CodeLabelSpanLiteral) -> Self {
        Self {
            text: value.text,
            highlight_name: value.highlight_name,
        }
    }
}

impl From<CodeLabel> for latest::CodeLabel {
    fn from(value: CodeLabel) -> Self {
        Self {
            code: value.code,
            spans: value.spans.into_iter().map(Into::into).collect(),
            filter_range: value.filter_range.into(),
        }
    }
}

#[async_trait]
impl HostWorktree for WasmState {
    async fn id(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<u64> {
        latest::HostWorktree::id(self, delegate).await
    }

    async fn root_path(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<String> {
        latest::HostWorktree::root_path(self, delegate).await
    }

    async fn read_text_file(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        latest::HostWorktree::read_text_file(self, delegate, path).await
    }

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        latest::HostWorktree::shell_env(self, delegate).await
    }

    async fn which(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        latest::HostWorktree::which(self, delegate, binary_name).await
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
        // We only ever hand out borrows of worktrees.
        Ok(())
    }
}

#[async_trait]
impl ExtensionImports for WasmState {
    async fn get_settings(
        &mut self,
        location: Option<self::SettingsLocation>,
        category: String,
        key: Option<String>,
    ) -> wasmtime::Result<Result<String, String>> {
        latest::ExtensionImports::get_settings(
            self,
            location.map(|location| location.into()),
            category,
            key,
        )
        .await
    }

    async fn set_language_server_installation_status(
        &mut self,
        server_name: String,
        status: LanguageServerInstallationStatus,
    ) -> wasmtime::Result<()> {
        latest::ExtensionImports::set_language_server_installation_status(
            self,
            server_name,
            status.into(),
        )
        .await
    }

    async fn download_file(
        &mut self,
        url: String,
        path: String,
        file_type: DownloadedFileType,
    ) -> wasmtime::Result<Result<(), String>> {
        latest::ExtensionImports::download_file(self, url, path, file_type.into()).await
    }

    async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
        latest::ExtensionImports::make_file_executable(self, path).await
    }
}
@ -1,509 +0,0 @@
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, WorktreeId};
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use language::{
    language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
use language::{LanguageName, LanguageServerName};
use project::project_settings::ProjectSettings;
use semantic_version::SemanticVersion;
use std::{
    path::{Path, PathBuf},
    sync::{Arc, OnceLock},
};
use util::maybe;
use wasmtime::component::{Linker, Resource};

use super::latest;

pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0);
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0);

wasmtime::component::bindgen!({
    async: true,
    trappable_imports: true,
    path: "../extension_api/wit/since_v0.1.0",
    with: {
        "worktree": ExtensionWorktree,
        "key-value-store": ExtensionKeyValueStore,
        "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream,
        "zed:extension/github": latest::zed::extension::github,
        "zed:extension/lsp": latest::zed::extension::lsp,
        "zed:extension/nodejs": latest::zed::extension::nodejs,
        "zed:extension/platform": latest::zed::extension::platform,
        "zed:extension/slash-command": latest::zed::extension::slash_command,
    },
});

pub use self::zed::extension::*;

mod settings {
    include!(concat!(env!("OUT_DIR"), "/since_v0.1.0/settings.rs"));
}

pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
pub type ExtensionKeyValueStore = Arc<IndexedDocsDatabase>;
pub type ExtensionHttpResponseStream = Arc<Mutex<::http_client::Response<AsyncBody>>>;

pub fn linker() -> &'static Linker<WasmState> {
    static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

impl From<Command> for latest::Command {
    fn from(value: Command) -> Self {
        Self {
            command: value.command,
            args: value.args,
            env: value.env,
        }
    }
}

impl From<SettingsLocation> for latest::SettingsLocation {
    fn from(value: SettingsLocation) -> Self {
        Self {
            worktree_id: value.worktree_id,
            path: value.path,
        }
    }
}

impl From<LanguageServerInstallationStatus> for latest::LanguageServerInstallationStatus {
    fn from(value: LanguageServerInstallationStatus) -> Self {
        match value {
            LanguageServerInstallationStatus::None => Self::None,
            LanguageServerInstallationStatus::Downloading => Self::Downloading,
            LanguageServerInstallationStatus::CheckingForUpdate => Self::CheckingForUpdate,
            LanguageServerInstallationStatus::Failed(message) => Self::Failed(message),
        }
    }
}

impl From<DownloadedFileType> for latest::DownloadedFileType {
    fn from(value: DownloadedFileType) -> Self {
        match value {
            DownloadedFileType::Gzip => Self::Gzip,
            DownloadedFileType::GzipTar => Self::GzipTar,
            DownloadedFileType::Zip => Self::Zip,
            DownloadedFileType::Uncompressed => Self::Uncompressed,
        }
    }
}

impl From<Range> for latest::Range {
    fn from(value: Range) -> Self {
        Self {
            start: value.start,
            end: value.end,
        }
    }
}

impl From<CodeLabelSpan> for latest::CodeLabelSpan {
    fn from(value: CodeLabelSpan) -> Self {
        match value {
            CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()),
            CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()),
        }
    }
}

impl From<CodeLabelSpanLiteral> for latest::CodeLabelSpanLiteral {
    fn from(value: CodeLabelSpanLiteral) -> Self {
        Self {
            text: value.text,
            highlight_name: value.highlight_name,
        }
    }
}

impl From<CodeLabel> for latest::CodeLabel {
    fn from(value: CodeLabel) -> Self {
        Self {
            code: value.code,
            spans: value.spans.into_iter().map(Into::into).collect(),
            filter_range: value.filter_range.into(),
        }
    }
}

#[async_trait]
impl HostKeyValueStore for WasmState {
    async fn insert(
        &mut self,
        kv_store: Resource<ExtensionKeyValueStore>,
        key: String,
        value: String,
    ) -> wasmtime::Result<Result<(), String>> {
        let kv_store = self.table.get(&kv_store)?;
        kv_store.insert(key, value).await.to_wasmtime_result()
    }

    fn drop(&mut self, _worktree: Resource<ExtensionKeyValueStore>) -> Result<()> {
        // We only ever hand out borrows of key-value stores.
        Ok(())
    }
}

#[async_trait]
impl HostWorktree for WasmState {
    async fn id(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<u64> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_id().to_proto())
    }

    async fn root_path(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<String> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_root_path().to_string_lossy().to_string())
    }

    async fn read_text_file(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .read_text_file(path.into())
            .await
            .map_err(|error| error.to_string()))
    }

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.shell_env().await.into_iter().collect())
    }

    async fn which(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .which(binary_name.as_ref())
            .await
            .map(|path| path.to_string_lossy().to_string()))
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
        // We only ever hand out borrows of worktrees.
        Ok(())
    }
}

#[async_trait]
impl common::Host for WasmState {}

#[async_trait]
impl http_client::Host for WasmState {
    async fn fetch(
        &mut self,
        request: http_client::HttpRequest,
    ) -> wasmtime::Result<Result<http_client::HttpResponse, String>> {
        maybe!(async {
            let url = &request.url;
            let request = convert_request(&request)?;
            let mut response = self.host.http_client.send(request).await?;

            if response.status().is_client_error() || response.status().is_server_error() {
                bail!("failed to fetch '{url}': status code {}", response.status())
            }
            convert_response(&mut response).await
        })
        .await
        .to_wasmtime_result()
    }

    async fn fetch_stream(
        &mut self,
        request: http_client::HttpRequest,
    ) -> wasmtime::Result<Result<Resource<ExtensionHttpResponseStream>, String>> {
        let request = convert_request(&request)?;
        let response = self.host.http_client.send(request);
        maybe!(async {
            let response = response.await?;
            let stream = Arc::new(Mutex::new(response));
            let resource = self.table.push(stream)?;
            Ok(resource)
        })
        .await
        .to_wasmtime_result()
    }
}

#[async_trait]
impl http_client::HostHttpResponseStream for WasmState {
    async fn next_chunk(
        &mut self,
        resource: Resource<ExtensionHttpResponseStream>,
    ) -> wasmtime::Result<Result<Option<Vec<u8>>, String>> {
        let stream = self.table.get(&resource)?.clone();
        maybe!(async move {
            let mut response = stream.lock().await;
            let mut buffer = vec![0; 8192]; // 8KB buffer
            let bytes_read = response.body_mut().read(&mut buffer).await?;
            if bytes_read == 0 {
                Ok(None)
            } else {
                buffer.truncate(bytes_read);
                Ok(Some(buffer))
            }
        })
        .await
        .to_wasmtime_result()
    }

    fn drop(&mut self, _resource: Resource<ExtensionHttpResponseStream>) -> Result<()> {
        Ok(())
    }
}

impl From<http_client::HttpMethod> for ::http_client::Method {
    fn from(value: http_client::HttpMethod) -> Self {
        match value {
            http_client::HttpMethod::Get => Self::GET,
            http_client::HttpMethod::Post => Self::POST,
            http_client::HttpMethod::Put => Self::PUT,
            http_client::HttpMethod::Delete => Self::DELETE,
            http_client::HttpMethod::Head => Self::HEAD,
            http_client::HttpMethod::Options => Self::OPTIONS,
            http_client::HttpMethod::Patch => Self::PATCH,
        }
    }
}

fn convert_request(
    extension_request: &http_client::HttpRequest,
) -> Result<::http_client::Request<AsyncBody>, anyhow::Error> {
    let mut request = ::http_client::Request::builder()
        .method(::http_client::Method::from(extension_request.method))
        .uri(&extension_request.url)
        .follow_redirects(match extension_request.redirect_policy {
            http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow,
            http_client::RedirectPolicy::FollowLimit(limit) => {
                ::http_client::RedirectPolicy::FollowLimit(limit)
            }
            http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll,
        });
    for (key, value) in &extension_request.headers {
        request = request.header(key, value);
    }
    let body = extension_request
        .body
        .clone()
        .map(AsyncBody::from)
        .unwrap_or_default();
    request.body(body).map_err(anyhow::Error::from)
}

async fn convert_response(
    response: &mut ::http_client::Response<AsyncBody>,
) -> Result<http_client::HttpResponse, anyhow::Error> {
    let mut extension_response = http_client::HttpResponse {
        body: Vec::new(),
        headers: Vec::new(),
    };

    for (key, value) in response.headers() {
        extension_response
            .headers
            .push((key.to_string(), value.to_str().unwrap_or("").to_string()));
    }

    response
        .body_mut()
        .read_to_end(&mut extension_response.body)
        .await?;

    Ok(extension_response)
}

#[async_trait]
impl ExtensionImports for WasmState {
    async fn get_settings(
        &mut self,
        location: Option<self::SettingsLocation>,
        category: String,
        key: Option<String>,
    ) -> wasmtime::Result<Result<String, String>> {
        self.on_main_thread(|cx| {
            async move {
                let location = location
                    .as_ref()
                    .map(|location| ::settings::SettingsLocation {
                        worktree_id: WorktreeId::from_proto(location.worktree_id),
                        path: Path::new(&location.path),
                    });

                cx.update(|cx| match category.as_str() {
                    "language" => {
                        let key = key.map(|k| LanguageName::new(&k));
                        let settings = AllLanguageSettings::get(location, cx).language(
                            location,
                            key.as_ref(),
                            cx,
                        );
                        Ok(serde_json::to_string(&settings::LanguageSettings {
                            tab_size: settings.tab_size,
                        })?)
                    }
                    "lsp" => {
                        let settings = key
                            .and_then(|key| {
                                ProjectSettings::get(location, cx)
                                    .lsp
                                    .get(&LanguageServerName(key.into()))
                            })
                            .cloned()
                            .unwrap_or_default();
                        Ok(serde_json::to_string(&settings::LspSettings {
                            binary: settings.binary.map(|binary| settings::BinarySettings {
                                path: binary.path,
                                arguments: binary.arguments,
                            }),
                            settings: settings.settings,
                            initialization_options: settings.initialization_options,
                        })?)
                    }
                    _ => {
                        bail!("Unknown settings category: {}", category);
                    }
                })
            }
            .boxed_local()
        })
        .await?
        .to_wasmtime_result()
    }

    async fn set_language_server_installation_status(
        &mut self,
        server_name: String,
        status: LanguageServerInstallationStatus,
    ) -> wasmtime::Result<()> {
        let status = match status {
            LanguageServerInstallationStatus::CheckingForUpdate => {
                LanguageServerBinaryStatus::CheckingForUpdate
            }
            LanguageServerInstallationStatus::Downloading => {
                LanguageServerBinaryStatus::Downloading
            }
            LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None,
            LanguageServerInstallationStatus::Failed(error) => {
                LanguageServerBinaryStatus::Failed { error }
            }
        };

        self.host
            .language_registry
            .update_lsp_status(language::LanguageServerName(server_name.into()), status);
        Ok(())
    }

    async fn download_file(
        &mut self,
        url: String,
        path: String,
        file_type: DownloadedFileType,
    ) -> wasmtime::Result<Result<(), String>> {
        maybe!(async {
            let path = PathBuf::from(path);
            let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref());

            self.host.fs.create_dir(&extension_work_dir).await?;

            let destination_path = self
                .host
                .writeable_path_from_extension(&self.manifest.id, &path)?;

            let mut response = self
                .host
                .http_client
                .get(&url, Default::default(), true)
                .await
                .map_err(|err| anyhow!("error downloading release: {}", err))?;

            if !response.status().is_success() {
                Err(anyhow!(
                    "download failed with status {}",
                    response.status().to_string()
                ))?;
            }
            let body = BufReader::new(response.body_mut());

            match file_type {
                DownloadedFileType::Uncompressed => {
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::Gzip => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::GzipTar => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .extract_tar_file(&destination_path, Archive::new(body))
                        .await?;
                }
                DownloadedFileType::Zip => {
                    futures::pin_mut!(body);
                    node_runtime::extract_zip(&destination_path, body)
                        .await
                        .with_context(|| format!("failed to unzip {} archive", path.display()))?;
                }
            }

            Ok(())
        })
        .await
        .to_wasmtime_result()
    }

    async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
        #[allow(unused)]
        let path = self
            .host
            .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?;

        #[cfg(unix)]
        {
            use std::fs::{self, Permissions};
            use std::os::unix::fs::PermissionsExt;

            return fs::set_permissions(&path, Permissions::from_mode(0o755))
                .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
                .to_wasmtime_result();
        }

        #[cfg(not(unix))]
        Ok(Ok(()))
    }
}
@ -1,555 +0,0 @@
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, WorktreeId};
use anyhow::{anyhow, bail, Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use futures::{io::BufReader, FutureExt as _};
use futures::{lock::Mutex, AsyncReadExt};
use indexed_docs::IndexedDocsDatabase;
use language::{
    language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
};
use language::{LanguageName, LanguageServerName};
use project::project_settings::ProjectSettings;
use semantic_version::SemanticVersion;
use std::{
    env,
    path::{Path, PathBuf},
    sync::{Arc, OnceLock},
};
use util::maybe;
use wasmtime::component::{Linker, Resource};

pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0);
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 2, 0);

wasmtime::component::bindgen!({
    async: true,
    trappable_imports: true,
    path: "../extension_api/wit/since_v0.2.0",
    with: {
        "worktree": ExtensionWorktree,
        "key-value-store": ExtensionKeyValueStore,
        "zed:extension/http-client/http-response-stream": ExtensionHttpResponseStream
    },
});

pub use self::zed::extension::*;

mod settings {
    include!(concat!(env!("OUT_DIR"), "/since_v0.2.0/settings.rs"));
}

pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
pub type ExtensionKeyValueStore = Arc<IndexedDocsDatabase>;
pub type ExtensionHttpResponseStream = Arc<Mutex<::http_client::Response<AsyncBody>>>;

pub fn linker() -> &'static Linker<WasmState> {
    static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
    LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
}

#[async_trait]
impl HostKeyValueStore for WasmState {
    async fn insert(
        &mut self,
        kv_store: Resource<ExtensionKeyValueStore>,
        key: String,
        value: String,
    ) -> wasmtime::Result<Result<(), String>> {
        let kv_store = self.table.get(&kv_store)?;
        kv_store.insert(key, value).await.to_wasmtime_result()
    }

    fn drop(&mut self, _worktree: Resource<ExtensionKeyValueStore>) -> Result<()> {
        // We only ever hand out borrows of key-value stores.
        Ok(())
    }
}

#[async_trait]
impl HostWorktree for WasmState {
    async fn id(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<u64> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_id().to_proto())
    }

    async fn root_path(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<String> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_root_path().to_string_lossy().to_string())
    }

    async fn read_text_file(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .read_text_file(path.into())
            .await
            .map_err(|error| error.to_string()))
    }

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.shell_env().await.into_iter().collect())
    }

    async fn which(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .which(binary_name.as_ref())
            .await
            .map(|path| path.to_string_lossy().to_string()))
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
        // We only ever hand out borrows of worktrees.
        Ok(())
    }
}

#[async_trait]
impl common::Host for WasmState {}

#[async_trait]
impl http_client::Host for WasmState {
    async fn fetch(
        &mut self,
        request: http_client::HttpRequest,
    ) -> wasmtime::Result<Result<http_client::HttpResponse, String>> {
        maybe!(async {
            let url = &request.url;
            let request = convert_request(&request)?;
            let mut response = self.host.http_client.send(request).await?;

            if response.status().is_client_error() || response.status().is_server_error() {
                bail!("failed to fetch '{url}': status code {}", response.status())
            }
            convert_response(&mut response).await
        })
        .await
        .to_wasmtime_result()
    }

    async fn fetch_stream(
        &mut self,
        request: http_client::HttpRequest,
    ) -> wasmtime::Result<Result<Resource<ExtensionHttpResponseStream>, String>> {
        let request = convert_request(&request)?;
        let response = self.host.http_client.send(request);
        maybe!(async {
            let response = response.await?;
            let stream = Arc::new(Mutex::new(response));
            let resource = self.table.push(stream)?;
            Ok(resource)
        })
        .await
        .to_wasmtime_result()
    }
}

#[async_trait]
impl http_client::HostHttpResponseStream for WasmState {
    async fn next_chunk(
        &mut self,
        resource: Resource<ExtensionHttpResponseStream>,
    ) -> wasmtime::Result<Result<Option<Vec<u8>>, String>> {
        let stream = self.table.get(&resource)?.clone();
        maybe!(async move {
            let mut response = stream.lock().await;
            let mut buffer = vec![0; 8192]; // 8KB buffer
            let bytes_read = response.body_mut().read(&mut buffer).await?;
            if bytes_read == 0 {
                Ok(None)
            } else {
                buffer.truncate(bytes_read);
                Ok(Some(buffer))
            }
        })
        .await
        .to_wasmtime_result()
    }

    fn drop(&mut self, _resource: Resource<ExtensionHttpResponseStream>) -> Result<()> {
        Ok(())
    }
}

impl From<http_client::HttpMethod> for ::http_client::Method {
    fn from(value: http_client::HttpMethod) -> Self {
        match value {
            http_client::HttpMethod::Get => Self::GET,
            http_client::HttpMethod::Post => Self::POST,
            http_client::HttpMethod::Put => Self::PUT,
            http_client::HttpMethod::Delete => Self::DELETE,
            http_client::HttpMethod::Head => Self::HEAD,
            http_client::HttpMethod::Options => Self::OPTIONS,
            http_client::HttpMethod::Patch => Self::PATCH,
        }
    }
}

fn convert_request(
    extension_request: &http_client::HttpRequest,
) -> Result<::http_client::Request<AsyncBody>, anyhow::Error> {
    let mut request = ::http_client::Request::builder()
        .method(::http_client::Method::from(extension_request.method))
        .uri(&extension_request.url)
        .follow_redirects(match extension_request.redirect_policy {
            http_client::RedirectPolicy::NoFollow => ::http_client::RedirectPolicy::NoFollow,
            http_client::RedirectPolicy::FollowLimit(limit) => {
                ::http_client::RedirectPolicy::FollowLimit(limit)
            }
            http_client::RedirectPolicy::FollowAll => ::http_client::RedirectPolicy::FollowAll,
        });
    for (key, value) in &extension_request.headers {
        request = request.header(key, value);
    }
    let body = extension_request
        .body
        .clone()
        .map(AsyncBody::from)
        .unwrap_or_default();
    request.body(body).map_err(anyhow::Error::from)
}

async fn convert_response(
    response: &mut ::http_client::Response<AsyncBody>,
) -> Result<http_client::HttpResponse, anyhow::Error> {
    let mut extension_response = http_client::HttpResponse {
        body: Vec::new(),
        headers: Vec::new(),
    };

    for (key, value) in response.headers() {
        extension_response
            .headers
            .push((key.to_string(), value.to_str().unwrap_or("").to_string()));
    }

    response
        .body_mut()
        .read_to_end(&mut extension_response.body)
        .await?;

    Ok(extension_response)
}

#[async_trait]
impl nodejs::Host for WasmState {
    async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
        self.host
            .node_runtime
            .binary_path()
            .await
            .map(|path| path.to_string_lossy().to_string())
            .to_wasmtime_result()
    }

    async fn npm_package_latest_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<String, String>> {
        self.host
            .node_runtime
            .npm_package_latest_version(&package_name)
            .await
            .to_wasmtime_result()
    }

    async fn npm_package_installed_version(
        &mut self,
        package_name: String,
    ) -> wasmtime::Result<Result<Option<String>, String>> {
        self.host
            .node_runtime
            .npm_package_installed_version(&self.work_dir(), &package_name)
            .await
            .to_wasmtime_result()
    }

    async fn npm_install_package(
        &mut self,
        package_name: String,
        version: String,
    ) -> wasmtime::Result<Result<(), String>> {
        self.host
            .node_runtime
            .npm_install_packages(&self.work_dir(), &[(&package_name, &version)])
            .await
            .to_wasmtime_result()
    }
}

#[async_trait]
impl lsp::Host for WasmState {}

impl From<::http_client::github::GithubRelease> for github::GithubRelease {
    fn from(value: ::http_client::github::GithubRelease) -> Self {
        Self {
            version: value.tag_name,
            assets: value.assets.into_iter().map(Into::into).collect(),
        }
    }
}

impl From<::http_client::github::GithubReleaseAsset> for github::GithubReleaseAsset {
    fn from(value: ::http_client::github::GithubReleaseAsset) -> Self {
        Self {
            name: value.name,
            download_url: value.browser_download_url,
        }
    }
}

#[async_trait]
impl github::Host for WasmState {
    async fn latest_github_release(
        &mut self,
        repo: String,
        options: github::GithubReleaseOptions,
    ) -> wasmtime::Result<Result<github::GithubRelease, String>> {
        maybe!(async {
            let release = ::http_client::github::latest_github_release(
                &repo,
                options.require_assets,
                options.pre_release,
                self.host.http_client.clone(),
            )
            .await?;
            Ok(release.into())
        })
        .await
        .to_wasmtime_result()
    }

    async fn github_release_by_tag_name(
        &mut self,
        repo: String,
        tag: String,
    ) -> wasmtime::Result<Result<github::GithubRelease, String>> {
        maybe!(async {
            let release = ::http_client::github::get_release_by_tag_name(
                &repo,
                &tag,
                self.host.http_client.clone(),
            )
            .await?;
            Ok(release.into())
        })
        .await
        .to_wasmtime_result()
    }
}

#[async_trait]
impl platform::Host for WasmState {
    async fn current_platform(&mut self) -> Result<(platform::Os, platform::Architecture)> {
        Ok((
            match env::consts::OS {
                "macos" => platform::Os::Mac,
                "linux" => platform::Os::Linux,
                "windows" => platform::Os::Windows,
                _ => panic!("unsupported os"),
            },
            match env::consts::ARCH {
                "aarch64" => platform::Architecture::Aarch64,
                "x86" => platform::Architecture::X86,
                "x86_64" => platform::Architecture::X8664,
                _ => panic!("unsupported architecture"),
            },
        ))
    }
}

#[async_trait]
impl slash_command::Host for WasmState {}

#[async_trait]
impl ExtensionImports for WasmState {
    async fn get_settings(
        &mut self,
        location: Option<self::SettingsLocation>,
        category: String,
        key: Option<String>,
    ) -> wasmtime::Result<Result<String, String>> {
        self.on_main_thread(|cx| {
            async move {
                let location = location
                    .as_ref()
                    .map(|location| ::settings::SettingsLocation {
                        worktree_id: WorktreeId::from_proto(location.worktree_id),
                        path: Path::new(&location.path),
                    });

                cx.update(|cx| match category.as_str() {
                    "language" => {
                        let key = key.map(|k| LanguageName::new(&k));
                        let settings = AllLanguageSettings::get(location, cx).language(
                            location,
                            key.as_ref(),
                            cx,
                        );
                        Ok(serde_json::to_string(&settings::LanguageSettings {
                            tab_size: settings.tab_size,
                        })?)
                    }
                    "lsp" => {
                        let settings = key
                            .and_then(|key| {
                                ProjectSettings::get(location, cx)
                                    .lsp
                                    .get(&LanguageServerName::from_proto(key))
                            })
                            .cloned()
                            .unwrap_or_default();
                        Ok(serde_json::to_string(&settings::LspSettings {
                            binary: settings.binary.map(|binary| settings::BinarySettings {
                                path: binary.path,
                                arguments: binary.arguments,
                            }),
                            settings: settings.settings,
                            initialization_options: settings.initialization_options,
                        })?)
                    }
                    _ => {
                        bail!("Unknown settings category: {}", category);
                    }
                })
            }
            .boxed_local()
        })
        .await?
        .to_wasmtime_result()
    }

    async fn set_language_server_installation_status(
        &mut self,
        server_name: String,
        status: LanguageServerInstallationStatus,
    ) -> wasmtime::Result<()> {
        let status = match status {
            LanguageServerInstallationStatus::CheckingForUpdate => {
                LanguageServerBinaryStatus::CheckingForUpdate
            }
            LanguageServerInstallationStatus::Downloading => {
                LanguageServerBinaryStatus::Downloading
            }
            LanguageServerInstallationStatus::None => LanguageServerBinaryStatus::None,
            LanguageServerInstallationStatus::Failed(error) => {
                LanguageServerBinaryStatus::Failed { error }
            }
        };

        self.host
            .language_registry
            .update_lsp_status(language::LanguageServerName(server_name.into()), status);
        Ok(())
    }

    async fn download_file(
        &mut self,
        url: String,
        path: String,
        file_type: DownloadedFileType,
    ) -> wasmtime::Result<Result<(), String>> {
        maybe!(async {
            let path = PathBuf::from(path);
            let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref());

            self.host.fs.create_dir(&extension_work_dir).await?;

            let destination_path = self
                .host
                .writeable_path_from_extension(&self.manifest.id, &path)?;

            let mut response = self
                .host
                .http_client
                .get(&url, Default::default(), true)
                .await
                .map_err(|err| anyhow!("error downloading release: {}", err))?;

            if !response.status().is_success() {
                Err(anyhow!(
                    "download failed with status {}",
                    response.status().to_string()
                ))?;
            }
            let body = BufReader::new(response.body_mut());

            match file_type {
                DownloadedFileType::Uncompressed => {
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::Gzip => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .create_file_with(&destination_path, body)
                        .await?;
                }
                DownloadedFileType::GzipTar => {
                    let body = GzipDecoder::new(body);
                    futures::pin_mut!(body);
                    self.host
                        .fs
                        .extract_tar_file(&destination_path, Archive::new(body))
                        .await?;
                }
                DownloadedFileType::Zip => {
                    futures::pin_mut!(body);
                    node_runtime::extract_zip(&destination_path, body)
                        .await
                        .with_context(|| format!("failed to unzip {} archive", path.display()))?;
                }
            }

            Ok(())
        })
        .await
        .to_wasmtime_result()
    }

    async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
        #[allow(unused)]
        let path = self
            .host
            .writeable_path_from_extension(&self.manifest.id, Path::new(&path))?;

        #[cfg(unix)]
        {
            use std::fs::{self, Permissions};
            use std::os::unix::fs::PermissionsExt;

            return fs::set_permissions(&path, Permissions::from_mode(0o755))
                .map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
                .to_wasmtime_result();
        }

        #[cfg(not(unix))]
        Ok(Ok(()))
    }
}