Merge branch 'main' into guests

Conrad Irwin 2023-10-17 09:51:35 -06:00
commit 9cc55f895c
159 changed files with 3535 additions and 11178 deletions

View file

@@ -15,6 +15,9 @@ doctest = false
name = "Zed"
path = "src/main.rs"
[[example]]
name = "semantic_index_eval"
[dependencies]
audio = { path = "../audio" }
activity_indicator = { path = "../activity_indicator" }
@@ -135,12 +138,14 @@ tree-sitter-yaml.workspace = true
tree-sitter-lua.workspace = true
tree-sitter-nix.workspace = true
tree-sitter-nu.workspace = true
tree-sitter-vue.workspace = true
url = "2.2"
urlencoding = "2.1.2"
uuid.workspace = true
[dev-dependencies]
ai = { path = "../ai" }
call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
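Presumably the new eval target above is run from the repository root with `cargo run -p zed --example semantic_index_eval`; the exact invocation is not shown in this diff, so treat that command as an assumption.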

Binary file not shown.

Binary file not shown.

View file

@@ -0,0 +1,532 @@
use ai::embedding::OpenAIEmbeddings;
use anyhow::{anyhow, Result};
use client::{self, UserStore};
use gpui::{AsyncAppContext, ModelHandle, Task};
use language::LanguageRegistry;
use node_runtime::RealNodeRuntime;
use project::{Project, RealFs};
use semantic_index::semantic_index_settings::SemanticIndexSettings;
use semantic_index::{SearchResult, SemanticIndex};
use serde::{Deserialize, Serialize};
use settings::{default_settings, SettingsStore};
use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Arc;
use std::time::{Duration, Instant};
use std::{cmp, env, fs};
use util::channel::{RELEASE_CHANNEL, RELEASE_CHANNEL_NAME};
use util::http::{self};
use util::paths::EMBEDDINGS_DIR;
use zed::languages;
#[derive(Deserialize, Clone, Serialize)]
struct EvaluationQuery {
query: String,
matches: Vec<String>,
}
impl EvaluationQuery {
fn match_pairs(&self) -> Vec<(PathBuf, u32)> {
let mut pairs = Vec::new();
for match_identifier in self.matches.iter() {
let mut match_parts = match_identifier.split(":");
if let Some(file_path) = match_parts.next() {
if let Some(row_number) = match_parts.next() {
pairs.push((PathBuf::from(file_path), row_number.parse::<u32>().unwrap()));
}
}
}
pairs
}
}
#[derive(Deserialize, Clone)]
struct RepoEval {
repo: String,
commit: String,
assertions: Vec<EvaluationQuery>,
}
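For illustration only (not part of this commit), an eval file of the shape these structs imply would deserialize as below; the repo URL, commit, query, and match path are all hypothetical:
fn example_repo_eval() -> anyhow::Result<RepoEval> {
    // One query with a single expected match, written as "path:row".
    let json = r#"{
        "repo": "https://github.com/tree-sitter/tree-sitter.git",
        "commit": "abc1234",
        "assertions": [
            { "query": "where is the parser constructed", "matches": ["src/parser.rs:42"] }
        ]
    }"#;
    Ok(serde_json::from_str(json)?)
}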
const TMP_REPO_PATH: &str = "eval_repos";
fn parse_eval() -> anyhow::Result<Vec<RepoEval>> {
let eval_folder = env::current_dir()?
.as_path()
.parent()
.unwrap()
.join("crates/semantic_index/eval");
let mut repo_evals: Vec<RepoEval> = Vec::new();
for entry in fs::read_dir(eval_folder)? {
let file_path = entry.unwrap().path();
if let Some(extension) = file_path.extension() {
if extension == "json" {
if let Ok(file) = fs::read_to_string(file_path) {
let repo_eval = serde_json::from_str(file.as_str());
match repo_eval {
Ok(repo_eval) => {
repo_evals.push(repo_eval);
}
Err(err) => {
println!("Err: {:?}", err);
}
}
}
}
}
}
Ok(repo_evals)
}
fn clone_repo(repo_eval: RepoEval) -> anyhow::Result<(String, PathBuf)> {
let repo_name = Path::new(repo_eval.repo.as_str())
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_owned()
.replace(".git", "");
let clone_path = fs::canonicalize(env::current_dir()?)?
.parent()
.ok_or(anyhow!("path canonicalization failed"))?
.parent()
.unwrap()
.join(TMP_REPO_PATH);
// Delete the clone path if it already exists
let _ = fs::remove_dir_all(&clone_path);
let _ = fs::create_dir(&clone_path);
let _ = Command::new("git")
.args(["clone", repo_eval.repo.as_str()])
.current_dir(clone_path.clone())
.output()?;
// Update the clone path to the new directory housing the repo.
let clone_path = clone_path.join(repo_name.clone());
let _ = Command::new("git")
.args(["checkout", repo_eval.commit.as_str()])
.current_dir(clone_path.clone())
.output()?;
Ok((repo_name, clone_path))
}
fn dcg(hits: Vec<usize>) -> f32 {
let mut result = 0.0;
for (idx, hit) in hits.iter().enumerate() {
result += *hit as f32 / (2.0 + idx as f32).log2();
}
result
}
fn get_hits(
eval_query: EvaluationQuery,
search_results: Vec<SearchResult>,
k: usize,
cx: &AsyncAppContext,
) -> (Vec<usize>, Vec<usize>) {
let ideal = vec![1; cmp::min(eval_query.matches.len(), k)];
let mut hits = Vec::new();
for result in search_results {
let (path, start_row, end_row) = result.buffer.read_with(cx, |buffer, _cx| {
let path = buffer.file().unwrap().path().to_path_buf();
let start_row = buffer.offset_to_point(result.range.start.offset).row;
let end_row = buffer.offset_to_point(result.range.end.offset).row;
(path, start_row, end_row)
});
let match_pairs = eval_query.match_pairs();
let mut found = 0;
for (match_path, match_row) in match_pairs {
if match_path == path {
if match_row >= start_row && match_row <= end_row {
found = 1;
break;
}
}
}
hits.push(found);
}
// For now, we calculate ideal_hits a bit differently, as technically,
// with overlapping ranges, one match can result in more than one result.
let mut ideal_hits = hits.clone();
ideal_hits.retain(|x| x == &1);
let ideal = if ideal.len() > ideal_hits.len() {
ideal
} else {
ideal_hits
};
// Pad ideal out to length 10
let mut filled_ideal = [0; 10];
for (idx, i) in ideal.to_vec().into_iter().enumerate() {
filled_ideal[idx] = i;
}
(filled_ideal.to_vec(), hits)
}
fn evaluate_ndcg(hits: Vec<usize>, ideal: Vec<usize>) -> Vec<f32> {
// NDCG, or Normalized Discounted Cumulative Gain, is determined by comparing the relevance of
// items returned by the search engine relative to the hypothetical ideal.
// Relevance is represented as a series of booleans, in which each search result returned
// is identified as being inside the test set of matches (1) or not (0).
// For example, if results 1, 3 and 5 match the 3 relevant results provided,
// actual dcg is calculated against a vector of [1, 0, 1, 0, 1],
// whereas ideal dcg is calculated against a vector of [1, 1, 1, 0, 0],
// as this ideal vector assumes the 3 relevant results provided were returned first.
// Normalized dcg is then calculated as actual dcg / ideal dcg.
// NDCG ranges from 0 to 1, with higher values indicating better performance.
// Commonly NDCG is expressed as NDCG@k, in which the metric is calculated over
// only the top k results returned.
// The @k metrics help identify at what point the relevant results start to fall off.
// I.e. an NDCG@1 of 0.9 and an NDCG@3 of 0.5 may indicate that the first result returned is usually
// of very high quality, whereas result quality drops off quickly after the first result.
let mut ndcg = Vec::new();
for idx in 1..(hits.len() + 1) {
let hits_at_k = hits[0..idx].to_vec();
let ideal_at_k = ideal[0..idx].to_vec();
let at_k = dcg(hits_at_k.clone()) / dcg(ideal_at_k.clone());
ndcg.push(at_k);
}
ndcg
}
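As a quick, purely illustrative check of the math (using the example from the comment above, where results 1, 3 and 5 are the relevant ones):
fn ndcg_example() {
    let hits = vec![1, 0, 1, 0, 1];
    let ideal = vec![1, 1, 1, 0, 0];
    let ndcg = evaluate_ndcg(hits, ideal);
    // NDCG@1 is 1.0 because the first result is relevant; NDCG@5 is
    // (1/log2(2) + 1/log2(4) + 1/log2(6)) / (1/log2(2) + 1/log2(3) + 1/log2(4)) ≈ 0.885.
    assert!((ndcg[0] - 1.0).abs() < 1e-6);
    assert!((ndcg[4] - 0.885).abs() < 1e-2);
}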
fn evaluate_map(hits: Vec<usize>) -> Vec<f32> {
let mut map_at_k = Vec::new();
let non_zero = hits.iter().sum::<usize>() as f32;
if non_zero == 0.0 {
return vec![0.0; hits.len()];
}
let mut rolling_non_zero = 0.0;
let mut rolling_map = 0.0;
for (idx, h) in hits.into_iter().enumerate() {
rolling_non_zero += h as f32;
if h == 1 {
rolling_map += rolling_non_zero / (idx + 1) as f32;
}
map_at_k.push(rolling_map / non_zero);
}
map_at_k
}
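The same toy ranking run through evaluate_map (again illustrative, not part of the diff):
fn map_example() {
    let map = evaluate_map(vec![1, 0, 1, 0, 1]);
    // The final entry is average precision over the whole list:
    // (1/1 + 2/3 + 3/5) / 3 ≈ 0.756. Earlier entries divide the running sum by the
    // total number of relevant hits, so map[0] is 1.0 / 3 ≈ 0.333.
    assert!((map[0] - 1.0 / 3.0).abs() < 1e-6);
    assert!((map[4] - 0.756).abs() < 1e-2);
}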
fn evaluate_mrr(hits: Vec<usize>) -> f32 {
for (idx, h) in hits.into_iter().enumerate() {
if h == 1 {
return 1.0 / (idx + 1) as f32;
}
}
return 0.0;
}
fn init_logger() {
env_logger::init();
}
#[derive(Serialize)]
struct QueryMetrics {
query: EvaluationQuery,
millis_to_search: Duration,
ndcg: Vec<f32>,
map: Vec<f32>,
mrr: f32,
hits: Vec<usize>,
precision: Vec<f32>,
recall: Vec<f32>,
}
#[derive(Serialize)]
struct SummaryMetrics {
millis_to_search: f32,
ndcg: Vec<f32>,
map: Vec<f32>,
mrr: f32,
precision: Vec<f32>,
recall: Vec<f32>,
}
#[derive(Serialize)]
struct RepoEvaluationMetrics {
millis_to_index: Duration,
query_metrics: Vec<QueryMetrics>,
repo_metrics: Option<SummaryMetrics>,
}
impl RepoEvaluationMetrics {
fn new(millis_to_index: Duration) -> Self {
RepoEvaluationMetrics {
millis_to_index,
query_metrics: Vec::new(),
repo_metrics: None,
}
}
fn save(&self, repo_name: String) -> Result<()> {
let results_string = serde_json::to_string(&self)?;
fs::write(format!("./{}_evaluation.json", repo_name), results_string)
.expect("Unable to write file");
Ok(())
}
fn summarize(&mut self) {
let l = self.query_metrics.len() as f32;
let millis_to_search: f32 = self
.query_metrics
.iter()
.map(|metrics| metrics.millis_to_search.as_millis())
.sum::<u128>() as f32
/ l;
let mut ndcg_sum = vec![0.0; 10];
let mut map_sum = vec![0.0; 10];
let mut precision_sum = vec![0.0; 10];
let mut recall_sum = vec![0.0; 10];
let mut mrr_sum = 0.0;
for query_metric in self.query_metrics.iter() {
for (ndcg, query_ndcg) in ndcg_sum.iter_mut().zip(query_metric.ndcg.clone()) {
*ndcg += query_ndcg;
}
for (mapp, query_map) in map_sum.iter_mut().zip(query_metric.map.clone()) {
*mapp += query_map;
}
for (pre, query_pre) in precision_sum.iter_mut().zip(query_metric.precision.clone()) {
*pre += query_pre;
}
for (rec, query_rec) in recall_sum.iter_mut().zip(query_metric.recall.clone()) {
*rec += query_rec;
}
mrr_sum += query_metric.mrr;
}
let ndcg = ndcg_sum.iter().map(|val| val / l).collect::<Vec<f32>>();
let map = map_sum.iter().map(|val| val / l).collect::<Vec<f32>>();
let precision = precision_sum
.iter()
.map(|val| val / l)
.collect::<Vec<f32>>();
let recall = recall_sum.iter().map(|val| val / l).collect::<Vec<f32>>();
let mrr = mrr_sum / l;
self.repo_metrics = Some(SummaryMetrics {
millis_to_search,
ndcg,
map,
mrr,
precision,
recall,
})
}
}
fn evaluate_precision(hits: Vec<usize>) -> Vec<f32> {
let mut rolling_hit: f32 = 0.0;
let mut precision = Vec::new();
for (idx, hit) in hits.into_iter().enumerate() {
rolling_hit += hit as f32;
precision.push(rolling_hit / ((idx as f32) + 1.0));
}
precision
}
fn evaluate_recall(hits: Vec<usize>, ideal: Vec<usize>) -> Vec<f32> {
let total_relevant = ideal.iter().sum::<usize>() as f32;
let mut recall = Vec::new();
let mut rolling_hit: f32 = 0.0;
for hit in hits {
rolling_hit += hit as f32;
recall.push(rolling_hit / total_relevant);
}
recall
}
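And the precision/recall helpers on the same example (illustrative only):
fn precision_recall_example() {
    let hits = vec![1, 0, 1, 0, 1];
    let ideal = vec![1, 1, 1, 0, 0];
    // Precision@k runs 1.0, 0.5, 0.67, 0.5, 0.6; recall@k runs 0.33, 0.33, 0.67, 0.67, 1.0.
    let precision = evaluate_precision(hits.clone());
    let recall = evaluate_recall(hits, ideal);
    assert!((precision[4] - 0.6).abs() < 1e-6);
    assert!((recall[4] - 1.0).abs() < 1e-6);
}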
async fn evaluate_repo(
repo_name: String,
index: ModelHandle<SemanticIndex>,
project: ModelHandle<Project>,
query_matches: Vec<EvaluationQuery>,
cx: &mut AsyncAppContext,
) -> Result<RepoEvaluationMetrics> {
// Index Project
let index_t0 = Instant::now();
index
.update(cx, |index, cx| index.index_project(project.clone(), cx))
.await?;
let mut repo_metrics = RepoEvaluationMetrics::new(index_t0.elapsed());
for query in query_matches {
// Query each match in order
let search_t0 = Instant::now();
let search_results = index
.update(cx, |index, cx| {
index.search_project(project.clone(), query.clone().query, 10, vec![], vec![], cx)
})
.await?;
let millis_to_search = search_t0.elapsed();
// Get Hits/Ideal
let k = 10;
let (ideal, hits) = self::get_hits(query.clone(), search_results, k, cx);
// Evaluate ndcg@k, for k = 1..=10
let ndcg = evaluate_ndcg(hits.clone(), ideal.clone());
// Evaluate map@k, for k = 1..=10
let map = evaluate_map(hits.clone());
// Evaluate mrr
let mrr = evaluate_mrr(hits.clone());
// Evaluate precision
let precision = evaluate_precision(hits.clone());
// Evaluate Recall
let recall = evaluate_recall(hits.clone(), ideal);
let query_metrics = QueryMetrics {
query,
millis_to_search,
ndcg,
map,
mrr,
hits,
precision,
recall,
};
repo_metrics.query_metrics.push(query_metrics);
}
repo_metrics.summarize();
let _ = repo_metrics.save(repo_name);
anyhow::Ok(repo_metrics)
}
fn main() {
// Launch new repo as a new Zed workspace/project
let app = gpui::App::new(()).unwrap();
let fs = Arc::new(RealFs);
let http = http::client();
let http_client = http::client();
init_logger();
app.run(move |cx| {
cx.set_global(*RELEASE_CHANNEL);
let client = client::Client::new(http.clone(), cx);
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client.clone(), cx));
// Initialize Settings
let mut store = SettingsStore::default();
store
.set_default_settings(default_settings().as_ref(), cx)
.unwrap();
cx.set_global(store);
// Initialize Languages
let login_shell_env_loaded = Task::ready(());
let mut languages = LanguageRegistry::new(login_shell_env_loaded);
languages.set_executor(cx.background().clone());
let languages = Arc::new(languages);
let node_runtime = RealNodeRuntime::new(http.clone());
languages::init(languages.clone(), node_runtime.clone(), cx);
language::init(cx);
project::Project::init(&client, cx);
semantic_index::init(fs.clone(), http.clone(), languages.clone(), cx);
settings::register::<SemanticIndexSettings>(cx);
let db_file_path = EMBEDDINGS_DIR
.join(Path::new(RELEASE_CHANNEL_NAME.as_str()))
.join("embeddings_db");
let languages = languages.clone();
let fs = fs.clone();
cx.spawn(|mut cx| async move {
let semantic_index = SemanticIndex::new(
fs.clone(),
db_file_path,
Arc::new(OpenAIEmbeddings::new(http_client, cx.background())),
languages.clone(),
cx.clone(),
)
.await?;
if let Ok(repo_evals) = parse_eval() {
for repo in repo_evals {
let cloned = clone_repo(repo.clone());
match cloned {
Ok((repo_name, clone_path)) => {
println!(
"Cloned {:?} @ {:?} into {:?}",
repo.repo, repo.commit, &clone_path
);
// Create Project
let project = cx.update(|cx| {
Project::local(
client.clone(),
node_runtime::FakeNodeRuntime::new(),
user_store.clone(),
languages.clone(),
fs.clone(),
cx,
)
});
// Register Worktree
let _ = project
.update(&mut cx, |project, cx| {
project.find_or_create_local_worktree(clone_path, true, cx)
})
.await;
let _ = evaluate_repo(
repo_name,
semantic_index.clone(),
project,
repo.assertions,
&mut cx,
)
.await?;
}
Err(err) => {
println!("Error cloning: {:?}", err);
}
}
}
}
anyhow::Ok(())
})
.detach();
});
}

View file

@@ -2,6 +2,8 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.developer.associated-domains</key>
<array><string>applinks:zed.dev</string></array>
<key>com.apple.security.automation.apple-events</key>
<true/>
<key>com.apple.security.cs.allow-jit</key>
@@ -10,14 +12,8 @@
<true/>
<key>com.apple.security.device.camera</key>
<true/>
<key>com.apple.security.personal-information.addressbook</key>
<true/>
<key>com.apple.security.personal-information.calendars</key>
<true/>
<key>com.apple.security.personal-information.location</key>
<true/>
<key>com.apple.security.personal-information.photos-library</key>
<true/>
<key>com.apple.security.keychain-access-groups</key>
<array><string>MQ55VZLNZQ.dev.zed.Shared</string></array>
<!-- <key>com.apple.security.cs.disable-library-validation</key>
<true/> -->
</dict>

View file

@@ -24,6 +24,7 @@ mod rust;
mod svelte;
mod tailwind;
mod typescript;
mod vue;
mod yaml;
// 1. Add tree-sitter-{language} parser to zed crate
@@ -190,13 +191,20 @@ pub fn init(
language(
"php",
tree_sitter_php::language(),
vec![Arc::new(php::IntelephenseLspAdapter::new(node_runtime))],
vec![Arc::new(php::IntelephenseLspAdapter::new(
node_runtime.clone(),
))],
);
language("elm", tree_sitter_elm::language(), vec![]);
language("glsl", tree_sitter_glsl::language(), vec![]);
language("nix", tree_sitter_nix::language(), vec![]);
language("nu", tree_sitter_nu::language(), vec![]);
language(
"vue",
tree_sitter_vue::language(),
vec![Arc::new(vue::VueLspAdapter::new(node_runtime))],
);
}
#[cfg(any(test, feature = "test-support"))]

View file

@@ -1,7 +1,7 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{BundledFormatter, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
@@ -96,6 +96,10 @@ impl LspAdapter for CssLspAdapter {
"provideFormatter": true
}))
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("css")]
}
}
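Several adapters in this commit grow the same enabled_formatters() override. The language crate's BundledFormatter type isn't shown in this diff; inferred from the call sites here (the prettier("css") shorthand above and the explicit Prettier { .. } construction in the Svelte and Tailwind adapters below), it presumably looks roughly like this sketch, with field types assumed rather than copied from the source:
pub enum BundledFormatter {
    Prettier {
        // Parser to pass to prettier, e.g. "css" or "typescript"; None lets prettier infer it.
        parser_name: Option<&'static str>,
        // Extra prettier plugins to load, e.g. "prettier-plugin-svelte".
        plugin_names: Vec<&'static str>,
    },
}

impl BundledFormatter {
    pub fn prettier(parser_name: &'static str) -> Self {
        Self::Prettier {
            parser_name: Some(parser_name),
            plugin_names: Vec::new(),
        }
    }
}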
async fn get_cached_server_binary(

View file

@@ -1,7 +1,7 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{BundledFormatter, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
@@ -96,6 +96,10 @@ impl LspAdapter for HtmlLspAdapter {
"provideFormatter": true
}))
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("html")]
}
}
async fn get_cached_server_binary(

View file

@@ -4,7 +4,9 @@ use collections::HashMap;
use feature_flags::FeatureFlagAppExt;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext;
use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{
BundledFormatter, LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate,
};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
@@ -144,6 +146,10 @@ impl LspAdapter for JsonLspAdapter {
async fn language_ids(&self) -> HashMap<String, String> {
[("JSON".into(), "jsonc".into())].into_iter().collect()
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("json")]
}
}
async fn get_cached_server_binary(

View file

@@ -1,7 +1,7 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{BundledFormatter, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
@@ -95,6 +95,13 @@ impl LspAdapter for SvelteLspAdapter {
"provideFormatter": true
}))
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::Prettier {
parser_name: Some("svelte"),
plugin_names: vec!["prettier-plugin-svelte"],
}]
}
}
async fn get_cached_server_binary(

View file

@@ -6,7 +6,7 @@ use futures::{
FutureExt, StreamExt,
};
use gpui::AppContext;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{BundledFormatter, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::{json, Value};
@@ -127,6 +127,13 @@ impl LspAdapter for TailwindLspAdapter {
.into_iter(),
)
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::Prettier {
parser_name: None,
plugin_names: vec!["prettier-plugin-tailwindcss"],
}]
}
}
async fn get_cached_server_binary(

View file

@@ -4,7 +4,7 @@ use async_tar::Archive;
use async_trait::async_trait;
use futures::{future::BoxFuture, FutureExt};
use gpui::AppContext;
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use language::{BundledFormatter, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use serde_json::{json, Value};
@@ -161,6 +161,10 @@ impl LspAdapter for TypeScriptLspAdapter {
"provideFormatter": true
}))
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("typescript")]
}
}
async fn get_cached_ts_server_binary(
@@ -309,6 +313,10 @@ impl LspAdapter for EsLintLspAdapter {
async fn initialization_options(&self) -> Option<serde_json::Value> {
None
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("babel")]
}
}
async fn get_cached_eslint_server_binary(

View file

@@ -0,0 +1,214 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
pub use language::*;
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use serde_json::Value;
use smol::fs::{self};
use std::{
any::Any,
ffi::OsString,
path::{Path, PathBuf},
sync::Arc,
};
use util::ResultExt;
pub struct VueLspVersion {
vue_version: String,
ts_version: String,
}
pub struct VueLspAdapter {
node: Arc<dyn NodeRuntime>,
typescript_install_path: Mutex<Option<PathBuf>>,
}
impl VueLspAdapter {
const SERVER_PATH: &'static str =
"node_modules/@vue/language-server/bin/vue-language-server.js";
// TODO: this shouldn't be hardcoded, but we have yet to figure out how to pass it in via initialization_options.
const TYPESCRIPT_PATH: &'static str = "node_modules/typescript/lib";
pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
let typescript_install_path = Mutex::new(None);
Self {
node,
typescript_install_path,
}
}
}
#[async_trait]
impl super::LspAdapter for VueLspAdapter {
async fn name(&self) -> LanguageServerName {
LanguageServerName("vue-language-server".into())
}
fn short_name(&self) -> &'static str {
"vue-language-server"
}
async fn fetch_latest_server_version(
&self,
_: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(VueLspVersion {
vue_version: self
.node
.npm_package_latest_version("@vue/language-server")
.await?,
ts_version: self.node.npm_package_latest_version("typescript").await?,
}) as Box<_>)
}
async fn initialization_options(&self) -> Option<Value> {
let typescript_sdk_path = self.typescript_install_path.lock();
let typescript_sdk_path = typescript_sdk_path
.as_ref()
.expect("initialization_options called without a container_dir for typescript");
Some(serde_json::json!({
"typescript": {
"tsdk": typescript_sdk_path
}
}))
}
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
// REFACTOR is explicitly disabled, as vue-lsp does not adhere to LSP protocol for code actions with these - it
// sends back a CodeAction with neither `command` nor `edits` fields set, which is against the spec.
Some(vec![
CodeActionKind::EMPTY,
CodeActionKind::QUICKFIX,
CodeActionKind::REFACTOR_REWRITE,
])
}
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<VueLspVersion>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH);
let ts_path = container_dir.join(Self::TYPESCRIPT_PATH);
if fs::metadata(&server_path).await.is_err() {
self.node
.npm_install_packages(
&container_dir,
&[("@vue/language-server", version.vue_version.as_str())],
)
.await?;
}
assert!(fs::metadata(&server_path).await.is_ok());
if fs::metadata(&ts_path).await.is_err() {
self.node
.npm_install_packages(
&container_dir,
&[("typescript", version.ts_version.as_str())],
)
.await?;
}
assert!(fs::metadata(&ts_path).await.is_ok());
*self.typescript_install_path.lock() = Some(ts_path);
Ok(LanguageServerBinary {
path: self.node.binary_path().await?,
arguments: vue_server_binary_arguments(&server_path),
})
}
async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
let (server, ts_path) = get_cached_server_binary(container_dir, self.node.clone()).await?;
*self.typescript_install_path.lock() = Some(ts_path);
Some(server)
}
async fn installation_test_binary(
&self,
container_dir: PathBuf,
) -> Option<LanguageServerBinary> {
let (server, ts_path) = get_cached_server_binary(container_dir, self.node.clone())
.await
.map(|(mut binary, ts_path)| {
binary.arguments = vec!["--help".into()];
(binary, ts_path)
})?;
*self.typescript_install_path.lock() = Some(ts_path);
Some(server)
}
async fn label_for_completion(
&self,
item: &lsp::CompletionItem,
language: &Arc<language::Language>,
) -> Option<language::CodeLabel> {
use lsp::CompletionItemKind as Kind;
let len = item.label.len();
let grammar = language.grammar()?;
let highlight_id = match item.kind? {
Kind::CLASS | Kind::INTERFACE => grammar.highlight_id_for_name("type"),
Kind::CONSTRUCTOR => grammar.highlight_id_for_name("type"),
Kind::CONSTANT => grammar.highlight_id_for_name("constant"),
Kind::FUNCTION | Kind::METHOD => grammar.highlight_id_for_name("function"),
Kind::PROPERTY | Kind::FIELD => grammar.highlight_id_for_name("tag"),
Kind::VARIABLE => grammar.highlight_id_for_name("type"),
Kind::KEYWORD => grammar.highlight_id_for_name("keyword"),
Kind::VALUE => grammar.highlight_id_for_name("tag"),
_ => None,
}?;
let text = match &item.detail {
Some(detail) => format!("{} {}", item.label, detail),
None => item.label.clone(),
};
Some(language::CodeLabel {
text,
runs: vec![(0..len, highlight_id)],
filter_range: 0..len,
})
}
}
fn vue_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}
type TypescriptPath = PathBuf;
async fn get_cached_server_binary(
container_dir: PathBuf,
node: Arc<dyn NodeRuntime>,
) -> Option<(LanguageServerBinary, TypescriptPath)> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_version_dir = Some(entry.path());
}
}
let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let server_path = last_version_dir.join(VueLspAdapter::SERVER_PATH);
let typescript_path = last_version_dir.join(VueLspAdapter::TYPESCRIPT_PATH);
if server_path.exists() && typescript_path.exists() {
Ok((
LanguageServerBinary {
path: node.binary_path().await?,
arguments: vue_server_binary_arguments(&server_path),
},
typescript_path,
))
} else {
Err(anyhow!(
"missing executable in directory {:?}",
last_version_dir
))
}
})()
.await
.log_err()
}
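For reference (illustrative, with a made-up path): once fetch_server_binary or cached_server_binary has populated typescript_install_path, initialization_options resolves to JSON of this shape:
fn example_vue_initialization_options() -> serde_json::Value {
    // Hypothetical tsdk value; the real one is the node_modules/typescript/lib
    // directory inside the adapter's container_dir.
    serde_json::json!({
        "typescript": {
            "tsdk": "/path/to/container_dir/node_modules/typescript/lib"
        }
    })
}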

View file

@@ -0,0 +1,2 @@
("<" @open ">" @close)
("\"" @open "\"" @close)

View file

@@ -0,0 +1,14 @@
name = "Vue.js"
path_suffixes = ["vue"]
block_comment = ["<!-- ", " -->"]
autoclose_before = ";:.,=}])>"
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },
{ start = "(", end = ")", close = true, newline = true },
{ start = "<", end = ">", close = true, newline = true, not_in = ["string", "comment"] },
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
{ start = "`", end = "`", close = true, newline = false, not_in = ["string"] },
]
word_characters = ["-"]

View file

@@ -0,0 +1,15 @@
(attribute) @property
(directive_attribute) @property
(quoted_attribute_value) @string
(interpolation) @punctuation.special
(raw_text) @embedded
((tag_name) @type
(#match? @type "^[A-Z]"))
((directive_name) @keyword
(#match? @keyword "^v-"))
(start_tag) @tag
(end_tag) @tag
(self_closing_tag) @tag

View file

@@ -0,0 +1,7 @@
(script_element
(raw_text) @content
(#set! "language" "javascript"))
(style_element
(raw_text) @content
(#set! "language" "css"))

View file

@@ -3,7 +3,8 @@ use async_trait::async_trait;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext;
use language::{
language_settings::all_language_settings, LanguageServerName, LspAdapter, LspAdapterDelegate,
language_settings::all_language_settings, BundledFormatter, LanguageServerName, LspAdapter,
LspAdapterDelegate,
};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
@@ -108,6 +109,10 @@ impl LspAdapter for YamlLspAdapter {
}))
.boxed()
}
fn enabled_formatters(&self) -> Vec<BundledFormatter> {
vec![BundledFormatter::prettier("yaml")]
}
}
async fn get_cached_server_binary(

View file

@@ -143,7 +143,12 @@ fn main() {
semantic_index::init(fs.clone(), http.clone(), languages.clone(), cx);
vim::init(cx);
terminal_view::init(cx);
copilot::init(copilot_language_server_id, http.clone(), node_runtime, cx);
copilot::init(
copilot_language_server_id,
http.clone(),
node_runtime.clone(),
cx,
);
assistant::init(cx);
component_test::init(cx);
@@ -170,6 +175,7 @@ fn main() {
initialize_workspace,
background_actions,
workspace_store,
node_runtime,
});
cx.set_global(Arc::downgrade(&app_state));