Merge remote-tracking branch 'origin/main' into editor2

Antonio Scandurra 2023-11-03 14:47:49 +01:00
commit 097171b9b8
37 changed files with 2910 additions and 1842 deletions


@ -12,7 +12,7 @@ doctest = false
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
workspace2 = { path = "../workspace2" }
settings2 = { path = "../settings2" }
anyhow.workspace = true


@ -9,7 +9,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
use workspace::AppState;
use workspace2::AppState;
// use zed::AppState;
// todo!();
@ -59,7 +59,7 @@ pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx));
}
pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir,
@ -77,7 +77,7 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
let now = now.time();
let _entry_heading = heading_entry(now, &settings.hour_format);
let _create_entry = cx.background_executor().spawn(async move {
let create_entry = cx.background_executor().spawn(async move {
std::fs::create_dir_all(month_dir)?;
OpenOptions::new()
.create(true)
@ -86,37 +86,38 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
Ok::<_, std::io::Error>((journal_dir, entry_path))
});
// todo!("workspace")
// cx.spawn(|cx| async move {
// let (journal_dir, entry_path) = create_entry.await?;
// let (workspace, _) =
// cx.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?;
cx.spawn(|mut cx| async move {
let (journal_dir, entry_path) = create_entry.await?;
let (workspace, _) = cx
.update(|cx| workspace2::open_paths(&[journal_dir], &app_state, None, cx))?
.await?;
// let opened = workspace
// .update(&mut cx, |workspace, cx| {
// workspace.open_paths(vec![entry_path], true, cx)
// })?
// .await;
let _opened = workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], true, cx)
})?
.await;
// if let Some(Some(Ok(item))) = opened.first() {
// if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
// editor.update(&mut cx, |editor, cx| {
// let len = editor.buffer().read(cx).len(cx);
// editor.change_selections(Some(Autoscroll::center()), cx, |s| {
// s.select_ranges([len..len])
// });
// if len > 0 {
// editor.insert("\n\n", cx);
// }
// editor.insert(&entry_heading, cx);
// editor.insert("\n\n", cx);
// })?;
// }
// }
// todo!("editor")
// if let Some(Some(Ok(item))) = opened.first() {
// if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
// editor.update(&mut cx, |editor, cx| {
// let len = editor.buffer().read(cx).len(cx);
// editor.change_selections(Some(Autoscroll::center()), cx, |s| {
// s.select_ranges([len..len])
// });
// if len > 0 {
// editor.insert("\n\n", cx);
// }
// editor.insert(&entry_heading, cx);
// editor.insert("\n\n", cx);
// })?;
// }
// }
// anyhow::Ok(())
// })
// .detach_and_log_err(cx);
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
fn journal_dir(path: &str) -> Option<PathBuf> {


@ -234,7 +234,6 @@ impl SyntaxMap {
self.snapshot.interpolate(text);
}
#[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
self.snapshot
@ -786,7 +785,6 @@ impl SyntaxSnapshot {
)
}
#[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
self.layers_for_range(0..buffer.len(), buffer).collect()


@ -1,9 +1,8 @@
use std::collections::VecDeque;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::Context;
use collections::HashMap;
use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncAppContext, ModelHandle};
use language::language_settings::language_settings;
@ -11,7 +10,7 @@ use language::{Buffer, Diff};
use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize};
use util::paths::DEFAULT_PRETTIER_DIR;
use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier {
Real(RealPrettier),
@ -20,7 +19,6 @@ pub enum Prettier {
}
pub struct RealPrettier {
worktree_id: Option<usize>,
default: bool,
prettier_dir: PathBuf,
server: Arc<LanguageServer>,
@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier {
worktree_id: Option<usize>,
prettier_dir: PathBuf,
default: bool,
}
#[derive(Debug)]
pub struct LocateStart {
pub worktree_root_path: Arc<Path>,
pub starting_path: Arc<Path>,
}
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier";
@ -63,79 +54,106 @@ impl Prettier {
".editorconfig",
];
pub async fn locate(
starting_path: Option<LocateStart>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<PathBuf> {
fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
path_component.as_os_str().to_string_lossy() == "node_modules"
pub async fn locate_prettier_installation(
fs: &dyn Fs,
installed_prettiers: &HashSet<PathBuf>,
locate_from: &Path,
) -> anyhow::Result<Option<PathBuf>> {
let mut path_to_check = locate_from
.components()
.take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
.collect::<PathBuf>();
let path_to_check_metadata = fs
.metadata(&path_to_check)
.await
.with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
.with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
if !path_to_check_metadata.is_dir {
path_to_check.pop();
}
let paths_to_check = match starting_path.as_ref() {
Some(starting_path) => {
let worktree_root = starting_path
.worktree_root_path
.components()
.into_iter()
.take_while(|path_component| !is_node_modules(path_component))
.collect::<PathBuf>();
if worktree_root != starting_path.worktree_root_path.as_ref() {
vec![worktree_root]
let mut project_path_with_prettier_dependency = None;
loop {
if installed_prettiers.contains(&path_to_check) {
log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
return Ok(Some(path_to_check));
} else if let Some(package_json_contents) =
read_package_json(fs, &path_to_check).await?
{
if has_prettier_in_package_json(&package_json_contents) {
if has_prettier_in_node_modules(fs, &path_to_check).await? {
log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
return Ok(Some(path_to_check));
} else if project_path_with_prettier_dependency.is_none() {
project_path_with_prettier_dependency = Some(path_to_check.clone());
}
} else {
if starting_path.starting_path.as_ref() == Path::new("") {
worktree_root
.parent()
.map(|path| vec![path.to_path_buf()])
.unwrap_or_default()
} else {
let file_to_format = starting_path.starting_path.as_ref();
let mut paths_to_check = VecDeque::new();
let mut current_path = worktree_root;
for path_component in file_to_format.components().into_iter() {
let new_path = current_path.join(path_component);
let old_path = std::mem::replace(&mut current_path, new_path);
paths_to_check.push_front(old_path);
if is_node_modules(&path_component) {
break;
match package_json_contents.get("workspaces") {
Some(serde_json::Value::Array(workspaces)) => {
match &project_path_with_prettier_dependency {
Some(project_path_with_prettier_dependency) => {
let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
if workspaces.iter().filter_map(|value| {
if let serde_json::Value::String(s) = value {
Some(s.clone())
} else {
log::warn!("Skipping non-string 'workspaces' value: {value:?}");
None
}
}).any(|workspace_definition| {
if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
path_matcher.is_match(subproject_path)
} else {
workspace_definition == subproject_path.to_string_lossy()
}
}) {
anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
return Ok(Some(path_to_check));
} else {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
}
}
None => {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
}
}
}
Vec::from(paths_to_check)
},
Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
}
}
}
None => Vec::new(),
};
match find_closest_prettier_dir(paths_to_check, fs.as_ref())
.await
.with_context(|| format!("finding prettier starting with {starting_path:?}"))?
{
Some(prettier_dir) => Ok(prettier_dir),
None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
if !path_to_check.pop() {
match project_path_with_prettier_dependency {
Some(closest_prettier_discovered) => {
anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
}
None => {
log::debug!("Found no prettier in ancestors of {locate_from:?}");
return Ok(None);
}
}
}
}
}
#[cfg(any(test, feature = "test-support"))]
pub async fn start(
worktree_id: Option<usize>,
_: LanguageServerId,
prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>,
_: AsyncAppContext,
) -> anyhow::Result<Self> {
Ok(
#[cfg(any(test, feature = "test-support"))]
Self::Test(TestPrettier {
worktree_id,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}),
)
Ok(Self::Test(TestPrettier {
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}))
}
#[cfg(not(any(test, feature = "test-support")))]
pub async fn start(
worktree_id: Option<usize>,
server_id: LanguageServerId,
prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>,
@ -143,7 +161,7 @@ impl Prettier {
) -> anyhow::Result<Self> {
use lsp::LanguageServerBinary;
let backgroud = cx.background();
let background = cx.background();
anyhow::ensure!(
prettier_dir.is_dir(),
"Prettier dir {prettier_dir:?} is not a directory"
@ -154,7 +172,7 @@ impl Prettier {
"no prettier server package found at {prettier_server:?}"
);
let node_path = backgroud
let node_path = background
.spawn(async move { node.binary_path().await })
.await?;
let server = LanguageServer::new(
@ -169,12 +187,11 @@ impl Prettier {
cx,
)
.context("prettier server creation")?;
let server = backgroud
let server = background
.spawn(server.initialize(None))
.await
.context("prettier server initialization")?;
Ok(Self::Real(RealPrettier {
worktree_id,
server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
@ -340,64 +357,61 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir,
}
}
pub fn worktree_id(&self) -> Option<usize> {
match self {
Self::Real(local) => local.worktree_id,
#[cfg(any(test, feature = "test-support"))]
Self::Test(test_prettier) => test_prettier.worktree_id,
}
}
}
async fn find_closest_prettier_dir(
paths_to_check: Vec<PathBuf>,
fs: &dyn Fs,
) -> anyhow::Result<Option<PathBuf>> {
for path in paths_to_check {
let possible_package_json = path.join("package.json");
if let Some(package_json_metadata) = fs
.metadata(&possible_package_json)
.await
.with_context(|| format!("Fetching metadata for {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
) {
if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
{
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
}
}
}
async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
if let Some(node_modules_location_metadata) = fs
.metadata(&possible_node_modules_location)
.await
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
{
return Ok(node_modules_location_metadata.is_dir);
}
Ok(false)
}
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
if let Some(node_modules_location_metadata) = fs
.metadata(&possible_node_modules_location)
.await
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
{
if node_modules_location_metadata.is_dir {
return Ok(Some(path));
}
async fn read_package_json(
fs: &dyn Fs,
path: &Path,
) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
let possible_package_json = path.join("package.json");
if let Some(package_json_metadata) = fs
.metadata(&possible_package_json)
.await
.with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
return serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
)
.map(Some)
.with_context(|| format!("parsing {possible_package_json:?} file contents"));
}
}
Ok(None)
}
fn has_prettier_in_package_json(
package_json_contents: &HashMap<String, serde_json::Value>,
) -> bool {
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
false
}
enum Format {}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
@ -436,3 +450,316 @@ impl lsp::request::Request for ClearCache {
type Result = ();
const METHOD: &'static str = "prettier/clear_cache";
}
#[cfg(test)]
mod tests {
use fs::FakeFs;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
".config": {
"zed": {
"settings.json": r#"{ "formatter": "auto" }"#,
},
},
"work": {
"project": {
"src": {
"index.js": "// index.js file contents",
},
"node_modules": {
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
"package.json": r#"{}"#
},
}
}),
)
.await;
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/.config/zed/settings.json"),
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy without it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/src/index.js")
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/node_modules/expect/build/print.js")
)
.await
.unwrap()
.is_none(),
"Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"web_blog": {
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
},
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root"
);
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/node_modules/expect/build/print.js")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root even for node_modules files"
);
}
#[gpui::test]
async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"web_blog": {
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}
}),
)
.await;
let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new(path)
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains(path), "Error message should mention which start file was used for location");
assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::from_iter(
[PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
),
Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/work")),
"Should return first cached value found without path checks"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
},
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
).await.unwrap(),
Some(PathBuf::from("/root/work/full-stack-foundations")),
"Should ascend to the multi-workspace root and find the prettier there",
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
cx: &mut gpui::TestAppContext,
) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
},
}
}),
)
.await;
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
}
}
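The changes to this file replace the old Prettier::locate (which took an optional LocateStart and always fell back to DEFAULT_PRETTIER_DIR) with Prettier::locate_prettier_installation, which walks up from the file being formatted, skips node_modules components, and checks each ancestor's package.json, including npm "workspaces" globs matched through PathMatcher; the tests above exercise that lookup. A rough usage sketch, mirroring those tests; the example path and the idea of caching already-started prettier directories in the installed set are taken from the tests and from the project.rs changes further down, not from any additional API contract:

use std::path::{Path, PathBuf};

use collections::HashSet;
use fs::Fs;
use prettier::Prettier;

// Sketch only: `fs` would be a FakeFs in tests or the real filesystem in the
// app; `installed` caches directories for which a prettier instance already
// exists, so the lookup can short-circuit without touching the filesystem.
async fn pick_prettier_dir(
    fs: &dyn Fs,
    installed: &HashSet<PathBuf>,
) -> anyhow::Result<Option<PathBuf>> {
    // Walks up from the file being formatted, skipping `node_modules`
    // components, until it finds a package.json that both declares prettier
    // and has it installed, or a workspace root whose `workspaces` globs
    // cover the project.
    Prettier::locate_prettier_installation(
        fs,
        installed,
        Path::new("/root/web_blog/pages/[slug].tsx"),
    )
    .await
}

Ok(None) means no project-local prettier was found, and the caller (project.rs below) is left to fall back to its DefaultPrettier state; an Err signals a package.json that declares a prettier dependency without a matching install in any ancestor's node_modules, which is what the *_for_not_installed tests assert.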


@ -1,11 +1,13 @@
const { Buffer } = require('buffer');
const { Buffer } = require("buffer");
const fs = require("fs");
const path = require("path");
const { once } = require('events');
const { once } = require("events");
const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) {
process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`);
process.stderr.write(
`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
);
process.exit(1);
}
fs.stat(prettierContainerPath, (err, stats) => {
@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1);
}
});
const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier');
const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier {
constructor(path, prettier, config) {
@ -34,7 +36,7 @@ class Prettier {
let config;
try {
prettier = await loadPrettier(prettierPath);
config = await prettier.resolveConfig(prettierPath) || {};
config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1);
@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config));
})()
})();
async function handleBuffer(prettier) {
for await (const messageText of readStdin()) {
@ -54,25 +56,29 @@ async function handleBuffer(prettier) {
continue;
}
// allow concurrent request handling by not `await`ing the message handling promise (async function)
handleMessage(message, prettier).catch(e => {
handleMessage(message, prettier).catch((e) => {
const errorMessage = message;
if ((errorMessage.params || {}).text !== undefined) {
errorMessage.params.text = "..snip..";
}
sendResponse({ id: message.id, ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`) }); });
sendResponse({
id: message.id,
...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
});
});
}
}
const headerSeparator = "\r\n";
const contentLengthHeaderName = 'Content-Length';
const contentLengthHeaderName = "Content-Length";
async function* readStdin() {
let buffer = Buffer.alloc(0);
let streamEnded = false;
process.stdin.on('end', () => {
process.stdin.on("end", () => {
streamEnded = true;
});
process.stdin.on('data', (data) => {
process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]);
});
@ -80,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0);
messageLength = null;
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
streamEnded = false;
}
@ -91,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) {
await handleStreamEnded('Unexpected end of stream: headers not found');
await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) {
await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`);
await handleStreamEnded(
`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
);
continue main_loop;
}
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
}
const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii');
const contentLengthHeader = headers.split(headerSeparator)
.map(header => header.split(':'))
.filter(header => header[2] === undefined)
.filter(header => (header[1] || '').length > 0)
.find(header => (header[0] || '').trim() === contentLengthHeaderName);
const headers = buffer
.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
.toString("ascii");
const contentLengthHeader = headers
.split(headerSeparator)
.map((header) => header.split(":"))
.filter((header) => header[2] === undefined)
.filter((header) => (header[1] || "").length > 0)
.find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@ -114,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10);
}
while (buffer.length < (headersLength + messageLength)) {
while (buffer.length < headersLength + messageLength) {
if (streamEnded) {
await handleStreamEnded(
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`);
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
);
continue main_loop;
}
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
}
const messageEnd = headersLength + messageLength;
@ -128,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd);
headersLength = null;
messageLength = null;
yield message.toString('utf8');
yield message.toString("utf8");
}
} catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`));
} finally {
process.stdin.off('data', () => { });
process.stdin.off("data", () => {});
}
}
@ -146,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
}
if (method === 'prettier/format') {
if (method === "prettier/format") {
if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
}
@ -156,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {};
if (params.options.filepath !== undefined) {
resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {};
resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
}
const options = {
@ -164,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig,
parser: params.options.parser,
plugins: params.options.plugins,
path: params.options.filepath
path: params.options.filepath,
};
process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`);
process.stderr.write(
`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
params.options.filepath || ""
}' with options: ${JSON.stringify(options)}\n`,
);
const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } });
} else if (method === 'prettier/clear_cache') {
} else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache();
prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {};
prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null });
} else if (method === 'initialize') {
} else if (method === "initialize") {
sendResponse({
id: id || 0,
id,
result: {
"capabilities": {}
}
capabilities: {},
},
});
} else {
throw new Error(`Unknown method: ${method}`);
@ -188,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) {
return {
error: {
"code": -32600, // invalid request code
code: -32600, // invalid request code
message,
}
},
};
}
function sendResponse(response) {
const responsePayloadString = JSON.stringify({
jsonrpc: "2.0",
...response
...response,
});
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`;
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
responsePayloadString,
)}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString);
}
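prettier_server.js speaks a small LSP-like protocol over stdio: every message is a JSON body prefixed by a Content-Length header and a blank line, and the recognized methods are initialize, prettier/format, and prettier/clear_cache. Zed drives the server through the lsp crate's LanguageServer rather than writing these bytes by hand, so the sketch below only illustrates the framing that readStdin and sendResponse above expect; the request id, file path, parser, and source text are invented for the example:

use serde_json::json;

// Illustration only: builds one prettier/format request by hand, framed the
// way readStdin above parses it (a Content-Length header, a blank line, then
// the JSON body).
fn frame_format_request() -> String {
    let body = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "prettier/format",
        "params": {
            "text": "const x = 1",
            "options": {
                "filepath": "/root/web_blog/pages/index.tsx",
                "parser": "typescript",
                "plugins": []
            }
        }
    })
    .to_string();
    // String::len is the byte length, matching Buffer.byteLength on the Node
    // side of the protocol.
    format!("Content-Length: {}\r\n\r\n{}", body.len(), body)
}

A successful reply is framed the same way and carries {"jsonrpc":"2.0","id":1,"result":{"text":"..."}}; on failure the server substitutes makeError's error object with code -32600 in place of a result.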


@ -1,5 +1,5 @@
use anyhow::Context;
use collections::HashMap;
use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncAppContext, Model};
use language::{language_settings::language_settings, Buffer, Diff};
@ -7,11 +7,10 @@ use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize};
use std::{
collections::VecDeque,
path::{Path, PathBuf},
sync::Arc,
};
use util::paths::DEFAULT_PRETTIER_DIR;
use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier {
Real(RealPrettier),
@ -20,7 +19,6 @@ pub enum Prettier {
}
pub struct RealPrettier {
worktree_id: Option<usize>,
default: bool,
prettier_dir: PathBuf,
server: Arc<LanguageServer>,
@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier {
worktree_id: Option<usize>,
prettier_dir: PathBuf,
default: bool,
}
#[derive(Debug)]
pub struct LocateStart {
pub worktree_root_path: Arc<Path>,
pub starting_path: Arc<Path>,
}
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier";
@ -63,79 +54,106 @@ impl Prettier {
".editorconfig",
];
pub async fn locate(
starting_path: Option<LocateStart>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<PathBuf> {
fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
path_component.as_os_str().to_string_lossy() == "node_modules"
pub async fn locate_prettier_installation(
fs: &dyn Fs,
installed_prettiers: &HashSet<PathBuf>,
locate_from: &Path,
) -> anyhow::Result<Option<PathBuf>> {
let mut path_to_check = locate_from
.components()
.take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
.collect::<PathBuf>();
let path_to_check_metadata = fs
.metadata(&path_to_check)
.await
.with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
.with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
if !path_to_check_metadata.is_dir {
path_to_check.pop();
}
let paths_to_check = match starting_path.as_ref() {
Some(starting_path) => {
let worktree_root = starting_path
.worktree_root_path
.components()
.into_iter()
.take_while(|path_component| !is_node_modules(path_component))
.collect::<PathBuf>();
if worktree_root != starting_path.worktree_root_path.as_ref() {
vec![worktree_root]
let mut project_path_with_prettier_dependency = None;
loop {
if installed_prettiers.contains(&path_to_check) {
log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
return Ok(Some(path_to_check));
} else if let Some(package_json_contents) =
read_package_json(fs, &path_to_check).await?
{
if has_prettier_in_package_json(&package_json_contents) {
if has_prettier_in_node_modules(fs, &path_to_check).await? {
log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
return Ok(Some(path_to_check));
} else if project_path_with_prettier_dependency.is_none() {
project_path_with_prettier_dependency = Some(path_to_check.clone());
}
} else {
if starting_path.starting_path.as_ref() == Path::new("") {
worktree_root
.parent()
.map(|path| vec![path.to_path_buf()])
.unwrap_or_default()
} else {
let file_to_format = starting_path.starting_path.as_ref();
let mut paths_to_check = VecDeque::new();
let mut current_path = worktree_root;
for path_component in file_to_format.components().into_iter() {
let new_path = current_path.join(path_component);
let old_path = std::mem::replace(&mut current_path, new_path);
paths_to_check.push_front(old_path);
if is_node_modules(&path_component) {
break;
}
match package_json_contents.get("workspaces") {
Some(serde_json::Value::Array(workspaces)) => {
match &project_path_with_prettier_dependency {
Some(project_path_with_prettier_dependency) => {
let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
if workspaces.iter().filter_map(|value| {
if let serde_json::Value::String(s) = value {
Some(s.clone())
} else {
log::warn!("Skipping non-string 'workspaces' value: {value:?}");
None
}
}).any(|workspace_definition| {
if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
path_matcher.is_match(subproject_path)
} else {
workspace_definition == subproject_path.to_string_lossy()
}
}) {
anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
return Ok(Some(path_to_check));
} else {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
}
}
None => {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
}
}
},
Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
}
Vec::from(paths_to_check)
}
}
if !path_to_check.pop() {
match project_path_with_prettier_dependency {
Some(closest_prettier_discovered) => {
anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
}
None => {
log::debug!("Found no prettier in ancestors of {locate_from:?}");
return Ok(None);
}
}
}
None => Vec::new(),
};
match find_closest_prettier_dir(paths_to_check, fs.as_ref())
.await
.with_context(|| format!("finding prettier starting with {starting_path:?}"))?
{
Some(prettier_dir) => Ok(prettier_dir),
None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
}
}
#[cfg(any(test, feature = "test-support"))]
pub async fn start(
worktree_id: Option<usize>,
_: LanguageServerId,
prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>,
_: AsyncAppContext,
) -> anyhow::Result<Self> {
Ok(
#[cfg(any(test, feature = "test-support"))]
Self::Test(TestPrettier {
worktree_id,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}),
)
Ok(Self::Test(TestPrettier {
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}))
}
#[cfg(not(any(test, feature = "test-support")))]
pub async fn start(
worktree_id: Option<usize>,
server_id: LanguageServerId,
prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>,
@ -174,7 +192,6 @@ impl Prettier {
.await
.context("prettier server initialization")?;
Ok(Self::Real(RealPrettier {
worktree_id,
server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
@ -370,64 +387,61 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir,
}
}
pub fn worktree_id(&self) -> Option<usize> {
match self {
Self::Real(local) => local.worktree_id,
#[cfg(any(test, feature = "test-support"))]
Self::Test(test_prettier) => test_prettier.worktree_id,
}
}
}
async fn find_closest_prettier_dir(
paths_to_check: Vec<PathBuf>,
fs: &dyn Fs,
) -> anyhow::Result<Option<PathBuf>> {
for path in paths_to_check {
let possible_package_json = path.join("package.json");
if let Some(package_json_metadata) = fs
.metadata(&possible_package_json)
.await
.with_context(|| format!("Fetching metadata for {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
) {
if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
{
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
}
}
}
async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
if let Some(node_modules_location_metadata) = fs
.metadata(&possible_node_modules_location)
.await
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
{
return Ok(node_modules_location_metadata.is_dir);
}
Ok(false)
}
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
if let Some(node_modules_location_metadata) = fs
.metadata(&possible_node_modules_location)
.await
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
{
if node_modules_location_metadata.is_dir {
return Ok(Some(path));
}
async fn read_package_json(
fs: &dyn Fs,
path: &Path,
) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
let possible_package_json = path.join("package.json");
if let Some(package_json_metadata) = fs
.metadata(&possible_package_json)
.await
.with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
return serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
)
.map(Some)
.with_context(|| format!("parsing {possible_package_json:?} file contents"));
}
}
Ok(None)
}
fn has_prettier_in_package_json(
package_json_contents: &HashMap<String, serde_json::Value>,
) -> bool {
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
false
}
enum Format {}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
@ -466,3 +480,316 @@ impl lsp::request::Request for ClearCache {
type Result = ();
const METHOD: &'static str = "prettier/clear_cache";
}
#[cfg(test)]
mod tests {
use fs::FakeFs;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
".config": {
"zed": {
"settings.json": r#"{ "formatter": "auto" }"#,
},
},
"work": {
"project": {
"src": {
"index.js": "// index.js file contents",
},
"node_modules": {
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
"package.json": r#"{}"#
},
}
}),
)
.await;
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/.config/zed/settings.json"),
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy without it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/src/index.js")
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/node_modules/expect/build/print.js")
)
.await
.unwrap()
.is_none(),
"Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"web_blog": {
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
},
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root"
);
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/node_modules/expect/build/print.js")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root even for node_modules files"
);
}
#[gpui::test]
async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"web_blog": {
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}
}),
)
.await;
let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new(path)
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains(path), "Error message should mention which start file was used for location");
assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::from_iter(
[PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
),
Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/work")),
"Should return first cached value found without path checks"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
},
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
).await.unwrap(),
Some(PathBuf::from("/root/work/full-stack-foundations")),
"Should ascend to the multi-workspace root and find the prettier there",
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
cx: &mut gpui::TestAppContext,
) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
},
}
}),
)
.await;
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
}
}


@ -1,11 +1,13 @@
const { Buffer } = require('buffer');
const { Buffer } = require("buffer");
const fs = require("fs");
const path = require("path");
const { once } = require('events');
const { once } = require("events");
const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) {
process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`);
process.stderr.write(
`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
);
process.exit(1);
}
fs.stat(prettierContainerPath, (err, stats) => {
@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1);
}
});
const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier');
const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier {
constructor(path, prettier, config) {
@ -34,7 +36,7 @@ class Prettier {
let config;
try {
prettier = await loadPrettier(prettierPath);
config = await prettier.resolveConfig(prettierPath) || {};
config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1);
@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config));
})()
})();
async function handleBuffer(prettier) {
for await (const messageText of readStdin()) {
@ -54,22 +56,29 @@ async function handleBuffer(prettier) {
continue;
}
// allow concurrent request handling by not `await`ing the message handling promise (async function)
handleMessage(message, prettier).catch(e => {
sendResponse({ id: message.id, ...makeError(`error during message handling: ${e}`) });
handleMessage(message, prettier).catch((e) => {
const errorMessage = message;
if ((errorMessage.params || {}).text !== undefined) {
errorMessage.params.text = "..snip..";
}
sendResponse({
id: message.id,
...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
});
});
}
}
const headerSeparator = "\r\n";
const contentLengthHeaderName = 'Content-Length';
const contentLengthHeaderName = "Content-Length";
async function* readStdin() {
let buffer = Buffer.alloc(0);
let streamEnded = false;
process.stdin.on('end', () => {
process.stdin.on("end", () => {
streamEnded = true;
});
process.stdin.on('data', (data) => {
process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]);
});
@ -77,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0);
messageLength = null;
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
streamEnded = false;
}
@ -88,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) {
await handleStreamEnded('Unexpected end of stream: headers not found');
await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) {
await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`);
await handleStreamEnded(
`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
);
continue main_loop;
}
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
}
const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii');
const contentLengthHeader = headers.split(headerSeparator)
.map(header => header.split(':'))
.filter(header => header[2] === undefined)
.filter(header => (header[1] || '').length > 0)
.find(header => (header[0] || '').trim() === contentLengthHeaderName);
const headers = buffer
.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
.toString("ascii");
const contentLengthHeader = headers
.split(headerSeparator)
.map((header) => header.split(":"))
.filter((header) => header[2] === undefined)
.filter((header) => (header[1] || "").length > 0)
.find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@ -111,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10);
}
while (buffer.length < (headersLength + messageLength)) {
while (buffer.length < headersLength + messageLength) {
if (streamEnded) {
await handleStreamEnded(
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`);
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
);
continue main_loop;
}
await once(process.stdin, 'readable');
await once(process.stdin, "readable");
}
const messageEnd = headersLength + messageLength;
@ -125,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd);
headersLength = null;
messageLength = null;
yield message.toString('utf8');
yield message.toString("utf8");
}
} catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`));
} finally {
process.stdin.off('data', () => { });
process.stdin.off("data", () => {});
}
}
@ -143,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
}
if (method === 'prettier/format') {
if (method === "prettier/format") {
if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
}
@ -153,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {};
if (params.options.filepath !== undefined) {
resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {};
resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
}
const options = {
@ -161,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig,
parser: params.options.parser,
plugins: params.options.plugins,
path: params.options.filepath
path: params.options.filepath,
};
process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`);
process.stderr.write(
`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
params.options.filepath || ""
}' with options: ${JSON.stringify(options)}\n`,
);
const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } });
} else if (method === 'prettier/clear_cache') {
} else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache();
prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {};
prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null });
} else if (method === 'initialize') {
} else if (method === "initialize") {
sendResponse({
id,
result: {
"capabilities": {}
}
capabilities: {},
},
});
} else {
throw new Error(`Unknown method: ${method}`);
@ -185,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) {
return {
error: {
"code": -32600, // invalid request code
code: -32600, // invalid request code
message,
}
},
};
}
function sendResponse(response) {
const responsePayloadString = JSON.stringify({
jsonrpc: "2.0",
...response
...response,
});
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`;
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
responsePayloadString,
)}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString);
}


@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch;
use prettier::{LocateStart, Prettier};
use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
@ -82,8 +82,11 @@ use std::{
use terminals::Terminals;
use text::Anchor;
use util::{
debug_panic, defer, http::HttpClient, merge_json_value_into,
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
debug_panic, defer,
http::HttpClient,
merge_json_value_into,
paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
post_inc, ResultExt, TryFutureExt as _,
};
pub use fs::*;
@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>,
prettier_instances: HashMap<
(Option<WorktreeId>, PathBuf),
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
>,
prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
}
#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier {
installation_process: Option<Shared<Task<()>>>,
instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>,
}
@ -685,8 +686,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: Some(node),
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
}
})
@ -786,8 +787,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: None,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
};
for worktree in worktrees {
@ -924,8 +925,7 @@ impl Project {
}
for (worktree, language, settings) in language_formatters_to_check {
self.install_default_formatters(worktree, &language, &settings, cx)
.detach_and_log_err(cx);
self.install_default_formatters(worktree, &language, &settings, cx);
}
// Start all the newly-enabled language servers.
@ -2681,20 +2681,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
let task_buffer = buffer.clone();
let prettier_installation_task =
self.install_default_formatters(worktree, &new_language, &settings, cx);
cx.spawn(|project, mut cx| async move {
prettier_installation_task.await?;
let _ = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(&task_buffer, cx)
})
.await;
anyhow::Ok(())
})
.detach_and_log_err(cx);
self.install_default_formatters(worktree, &new_language, &settings, cx);
if let Some(file) = buffer_file {
let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() {
@ -4029,7 +4016,7 @@ impl Project {
}
pub fn format(
&self,
&mut self,
buffers: HashSet<ModelHandle<Buffer>>,
push_to_history: bool,
trigger: FormatTrigger,
@ -4049,10 +4036,10 @@ impl Project {
})
.collect::<Vec<_>>();
cx.spawn(|this, mut cx| async move {
cx.spawn(|project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the
// same buffer.
this.update(&mut cx, |this, cx| {
project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted
.insert(buffer.read(cx).remote_id())
@ -4060,7 +4047,7 @@ impl Project {
});
let _cleanup = defer({
let this = this.clone();
let this = project.clone();
let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers;
move || {
@ -4128,7 +4115,7 @@ impl Project {
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
&this,
&project,
&buffer,
buffer_abs_path,
&language_server,
@ -4163,14 +4150,14 @@ impl Project {
}
}
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this
if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
}).await {
match prettier_task.await
{
Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
format_operation = Some(FormatOperation::Prettier(
@ -4180,16 +4167,35 @@ impl Project {
.context("formatting via prettier")?,
));
}
Err(e) => anyhow::bail!(
"Failed to create prettier instance for buffer during autoformatting: {e:#}"
),
Err(e) => {
project.update(&mut cx, |project, _| {
match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
});
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
}
} else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref())
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
&this,
&project,
&buffer,
buffer_abs_path,
&language_server,
@ -4202,14 +4208,14 @@ impl Project {
}
}
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this
if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
}).await {
match prettier_task.await
{
Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| {
let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
format_operation = Some(FormatOperation::Prettier(
@ -4219,9 +4225,28 @@ impl Project {
.context("formatting via prettier")?,
));
}
Err(e) => anyhow::bail!(
"Failed to create prettier instance for buffer during formatting: {e:#}"
),
Err(e) => {
project.update(&mut cx, |project, _| {
match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
});
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
}
}
}
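
A minimal sketch (not part of the change) of the evict-on-error pattern both prettier branches above use: a cached prettier task that resolved to an error is dropped, either the path-keyed entry or default_prettier.instance, so the next format request starts a fresh prettier instead of replaying the cached failure. String stands in for the shared task here:

use std::collections::HashMap;

// Return a cached value if it succeeded; evict it if it failed so a retry can rebuild it.
fn get_or_evict(
    cache: &mut HashMap<String, Result<String, String>>,
    key: &str,
) -> Option<String> {
    match cache.get(key) {
        Some(Ok(value)) => Some(value.clone()),
        Some(Err(_)) => {
            cache.remove(key); // drop the failed entry
            None
        }
        None => None,
    }
}
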
@ -6431,15 +6456,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
);
let prettiers_to_reload = self
.prettier_instances
.prettiers_per_worktree
.get(&current_worktree_id)
.iter()
.filter_map(|((worktree_id, prettier_path), prettier_task)| {
if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
} else {
None
}
.flat_map(|prettier_paths| prettier_paths.iter())
.flatten()
.filter_map(|prettier_path| {
Some((
current_worktree_id,
Some(prettier_path.clone()),
self.prettier_instances.get(prettier_path)?.clone(),
))
})
.chain(self.default_prettier.iter().filter_map(|default_prettier| {
Some((
current_worktree_id,
None,
default_prettier.instance.clone()?,
))
}))
.collect::<Vec<_>>();
cx.background()
@ -6450,9 +6485,15 @@ impl Project {
.clear_cache()
.await
.with_context(|| {
format!(
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
)
match prettier_path {
Some(prettier_path) => format!(
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
),
None => format!(
"clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
),
}
})
.map_err(Arc::new)
}
@ -8364,7 +8405,12 @@ impl Project {
&mut self,
buffer: &ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
) -> Task<
Option<(
Option<PathBuf>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
)>,
> {
let buffer = buffer.read(cx);
let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else {
@ -8374,136 +8420,119 @@ impl Project {
return Task::ready(None);
}
let buffer_file = File::from_dyn(buffer_file);
let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
let worktree_path = buffer_file
.as_ref()
.and_then(|file| Some(file.worktree.read(cx).abs_path()));
let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
if self.is_local() {
let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None);
};
cx.spawn(|this, mut cx| async move {
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs));
let prettier_dir = match cx
.background()
.spawn(Prettier::locate(
worktree_path.zip(buffer_path).map(
|(worktree_root_path, starting_path)| LocateStart {
worktree_root_path,
starting_path,
},
),
fs,
))
.await
{
Ok(path) => path,
Err(e) => {
return Some(
Task::ready(Err(Arc::new(e.context(
"determining prettier path for worktree {worktree_path:?}",
))))
.shared(),
);
}
};
if let Some(existing_prettier) = this.update(&mut cx, |project, _| {
project
.prettier_instances
.get(&(worktree_id, prettier_dir.clone()))
.cloned()
}) {
return Some(existing_prettier);
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let task_prettier_dir = prettier_dir.clone();
let weak_project = this.downgrade();
let new_server_id =
this.update(&mut cx, |this, _| this.languages.next_language_server_id());
let new_prettier_task = cx
.spawn(|mut cx| async move {
let prettier = Prettier::start(
worktree_id.map(|id| id.to_usize()),
new_server_id,
task_prettier_dir,
node,
cx.clone(),
)
.await
.context("prettier start")
.map_err(Arc::new)?;
log::info!("Started prettier in {:?}", prettier.prettier_dir());
if let Some((project, prettier_server)) =
weak_project.upgrade(&mut cx).zip(prettier.server())
match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
{
Some((worktree_id, buffer_path)) => {
let fs = Arc::clone(&self.fs);
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
return cx.spawn(|project, mut cx| async move {
match cx
.background()
.spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
&buffer_path,
)
.await
})
.await
{
project.update(&mut cx, |project, cx| {
let name = if prettier.is_default() {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let prettier_dir = prettier.prettier_dir();
let worktree_path = prettier
.worktree_id()
.map(WorktreeId::from_usize)
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.read(cx).abs_path());
match worktree_path {
Some(worktree_path) => {
if worktree_path.as_ref() == prettier_dir {
LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default()
)))
} else {
let dir_to_display = match prettier_dir
.strip_prefix(&worktree_path)
.ok()
{
Some(relative_path) => relative_path,
None => prettier_dir,
};
LanguageServerName(Arc::from(format!(
"prettier ({})",
dir_to_display.display(),
)))
}
}
None => LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir.display(),
))),
Ok(None) => {
let started_default_prettier =
project.update(&mut cx, |project, _| {
project
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(None);
project.default_prettier.as_ref().and_then(
|default_prettier| default_prettier.instance.clone(),
)
});
match started_default_prettier {
Some(old_task) => return Some((None, old_task)),
None => {
let new_default_prettier = project
.update(&mut cx, |_, cx| {
start_default_prettier(node, Some(worktree_id), cx)
})
.await;
return Some((None, new_default_prettier));
}
};
}
}
Ok(Some(prettier_dir)) => {
project.update(&mut cx, |project, _| {
project
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(Some(prettier_dir.clone()))
});
if let Some(existing_prettier) =
project.update(&mut cx, |project, _| {
project.prettier_instances.get(&prettier_dir).cloned()
})
{
log::debug!(
"Found already started prettier in {prettier_dir:?}"
);
return Some((Some(prettier_dir), existing_prettier));
}
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
});
log::info!("Found prettier in {prettier_dir:?}, starting.");
let new_prettier_task = project.update(&mut cx, |project, cx| {
let new_prettier_task = start_prettier(
node,
prettier_dir.clone(),
Some(worktree_id),
cx,
);
project
.prettier_instances
.insert(prettier_dir.clone(), new_prettier_task.clone());
new_prettier_task
});
Some((Some(prettier_dir), new_prettier_task))
}
Err(e) => {
return Some((
None,
Task::ready(Err(Arc::new(
e.context("determining prettier path"),
)))
.shared(),
));
}
}
Ok(Arc::new(prettier)).map_err(Arc::new)
})
.shared();
this.update(&mut cx, |project, _| {
project
.prettier_instances
.insert((worktree_id, prettier_dir), new_prettier_task.clone());
});
Some(new_prettier_task)
})
});
}
None => {
let started_default_prettier = self
.default_prettier
.as_ref()
.and_then(|default_prettier| default_prettier.instance.clone());
match started_default_prettier {
Some(old_task) => return Task::ready(Some((None, old_task))),
None => {
let new_task = start_default_prettier(node, None, cx);
return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
}
}
}
}
} else if self.remote_id().is_some() {
return Task::ready(None);
} else {
Task::ready(Some(
Task::ready(Some((
None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
))
)))
}
}
@ -8514,8 +8543,7 @@ impl Project {
_new_language: &Language,
_language_settings: &LanguageSettings,
_cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> {
return Task::ready(Ok(()));
) {
}
#[cfg(not(any(test, feature = "test-support")))]
@ -8525,19 +8553,19 @@ impl Project {
new_language: &Language,
language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> {
) {
match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {}
Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
Formatter::LanguageServer | Formatter::External { .. } => return,
};
let Some(node) = self.node.as_ref().cloned() else {
return Task::ready(Ok(()));
return;
};
let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() {
prettier_plugins
.get_or_insert_with(|| HashSet::default())
.get_or_insert_with(|| HashSet::<&'static str>::default())
.extend(
new_language
.lsp_adapters()
@ -8546,114 +8574,270 @@ impl Project {
)
}
let Some(prettier_plugins) = prettier_plugins else {
return Task::ready(Ok(()));
return;
};
let fs = Arc::clone(&self.fs);
let locate_prettier_installation = match worktree.and_then(|worktree_id| {
self.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
}) {
Some(locate_from) => {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.background().spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
locate_from.as_ref(),
)
.await
})
}
None => Task::ready(Ok(None)),
};
let mut plugins_to_install = prettier_plugins;
let (mut install_success_tx, mut install_success_rx) =
futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
let new_installation_process = cx
.spawn(|this, mut cx| async move {
if let Some(installed_plugins) = install_success_rx.next().await {
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
installation_process: None,
installed_plugins: HashSet::default(),
});
if !installed_plugins.is_empty() {
log::info!("Installed new prettier plugins: {installed_plugins:?}");
default_prettier.installed_plugins.extend(installed_plugins);
}
})
}
})
.shared();
let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() {
return Task::ready(Ok(()));
return;
}
std::mem::replace(
&mut default_prettier.installation_process,
Some(new_installation_process.clone()),
)
default_prettier.installation_process.clone()
} else {
None
};
let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
let already_running_prettier = self
.prettier_instances
.get(&(worktree, default_prettier_dir.to_path_buf()))
.cloned();
let fs = Arc::clone(&self.fs);
cx.spawn(|this, mut cx| async move {
if let Some(previous_installation_process) = previous_installation_process {
previous_installation_process.await;
}
let mut everything_was_installed = false;
this.update(&mut cx, |this, _| {
match &mut this.default_prettier {
Some(default_prettier) => {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
everything_was_installed = plugins_to_install.is_empty();
},
None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }),
}
let default_prettier = self
.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: None,
installed_plugins: HashSet::default(),
});
if everything_was_installed {
return Ok(());
}
cx.background()
.spawn(async move {
let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await
.with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?;
let packages_to_versions = future::try_join_all(
plugins_to_install
.iter()
.chain(Some(&"prettier"))
.map(|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
}),
)
default_prettier.installation_process = Some(
cx.spawn(|this, mut cx| async move {
match locate_prettier_installation
.await
.context("fetching latest npm versions")?;
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
(package.as_str(), version.as_str())
}).collect::<Vec<_>>();
node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;
let installed_packages = !plugins_to_install.is_empty();
install_success_tx.try_send(plugins_to_install).ok();
if !installed_packages {
if let Some(prettier) = already_running_prettier {
prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
.context("locate prettier installation")
.map_err(Arc::new)?
{
Some(_non_default_prettier) => return Ok(()),
None => {
let mut needs_install = match previous_installation_process {
Some(previous_installation_process) => {
previous_installation_process.await.is_err()
}
None => true,
};
this.update(&mut cx, |this, _| {
if let Some(default_prettier) = &mut this.default_prettier {
plugins_to_install.retain(|plugin| {
!default_prettier.installed_plugins.contains(plugin)
});
needs_install |= !plugins_to_install.is_empty();
}
});
if needs_install {
let installed_plugins = plugins_to_install.clone();
cx.background()
.spawn(async move {
install_default_prettier(plugins_to_install, node, fs).await
})
.await
.context("prettier & plugins install")
.map_err(Arc::new)?;
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: Some(
Task::ready(Ok(())).shared(),
),
installed_plugins: HashSet::default(),
});
default_prettier.instance = None;
default_prettier.installed_plugins.extend(installed_plugins);
});
}
}
anyhow::Ok(())
}).await
})
}
Ok(())
})
.shared(),
);
}
}
fn start_default_prettier(
node: Arc<dyn NodeRuntime>,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
cx.spawn(|project, mut cx| async move {
loop {
let default_prettier_installing = project.update(&mut cx, |project, _| {
project
.default_prettier
.as_ref()
.and_then(|default_prettier| default_prettier.installation_process.clone())
});
match default_prettier_installing {
Some(installation_task) => {
if installation_task.await.is_ok() {
break;
}
}
None => break,
}
}
project.update(&mut cx, |project, cx| {
match project
.default_prettier
.as_mut()
.and_then(|default_prettier| default_prettier.instance.as_mut())
{
Some(default_prettier) => default_prettier.clone(),
None => {
let new_default_prettier =
start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
project
.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: None,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet::default(),
})
.instance = Some(new_default_prettier.clone());
new_default_prettier
}
}
})
})
}
fn start_prettier(
node: Arc<dyn NodeRuntime>,
prettier_dir: PathBuf,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
cx.spawn(|project, mut cx| async move {
let new_server_id = project.update(&mut cx, |project, _| {
project.languages.next_language_server_id()
});
let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
.await
.context("default prettier spawn")
.map(Arc::new)
.map_err(Arc::new)?;
register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
Ok(new_prettier)
})
.shared()
}
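
One detail worth calling out: start_prettier returns Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>, and Shared hands every awaiter a clone of the output, so both arms are Arc-wrapped to satisfy the Clone bound (anyhow::Error itself is not Clone). A small illustration using the futures crate, with Arc<String> standing in for Arc<anyhow::Error>:

use futures::{executor::block_on, future, FutureExt};
use std::sync::Arc;

fn main() {
    // Shared futures can be awaited from many places; each awaiter gets a clone of the output.
    let task = future::ready(Ok::<Arc<&str>, Arc<String>>(Arc::new("prettier"))).shared();
    let first = task.clone();
    assert_eq!(*block_on(first).unwrap(), "prettier");
    assert_eq!(*block_on(task).unwrap(), "prettier");
}
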
fn register_new_prettier(
project: &ModelHandle<Project>,
prettier: &Prettier,
worktree_id: Option<WorktreeId>,
new_server_id: LanguageServerId,
cx: &mut AsyncAppContext,
) {
let prettier_dir = prettier.prettier_dir();
let is_default = prettier.is_default();
if is_default {
log::info!("Started default prettier in {prettier_dir:?}");
} else {
log::info!("Started prettier in {prettier_dir:?}");
}
if let Some(prettier_server) = prettier.server() {
project.update(cx, |project, cx| {
let name = if is_default {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let worktree_path = worktree_id
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
let name = match worktree_path {
Some(worktree_path) => {
if prettier_dir == worktree_path.as_ref() {
let name = prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default();
format!("prettier ({name})")
} else {
let dir_to_display = prettier_dir
.strip_prefix(worktree_path.as_ref())
.ok()
.unwrap_or(prettier_dir);
format!("prettier ({})", dir_to_display.display())
}
}
None => format!("prettier ({})", prettier_dir.display()),
};
LanguageServerName(Arc::from(name))
};
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
});
}
}
#[cfg(not(any(test, feature = "test-support")))]
async fn install_default_prettier(
plugins_to_install: HashSet<&'static str>,
node: Arc<dyn NodeRuntime>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<()> {
let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
// fs.save creates the parent directory if it doesn't already exist
fs.save(
&prettier_wrapper_path,
&text::Rope::from(prettier::PRETTIER_SERVER_JS),
text::LineEnding::Unix,
)
.await
.with_context(|| {
format!(
"writing {} file at {prettier_wrapper_path:?}",
prettier::PRETTIER_SERVER_FILE
)
})?;
let packages_to_versions =
future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node
.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
},
))
.await
.context("fetching latest npm versions")?;
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions
.iter()
.map(|(package, version)| (package.as_str(), version.as_str()))
.collect::<Vec<_>>();
node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
.await
.context("fetching formatter packages")?;
anyhow::Ok(())
}
fn subscribe_for_copilot_events(
copilot: &ModelHandle<Copilot>,
cx: &mut ModelContext<'_, Project>,

View file

@ -1,4 +1,4 @@
use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
use crate::{worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[cfg(test)]
#[ctor::ctor]

View file

@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder};
@ -10,9 +9,10 @@ use std::{
borrow::Cow,
io::{BufRead, BufReader, Read},
ops::Range,
path::{Path, PathBuf},
path::Path,
sync::Arc,
};
use util::paths::PathMatcher;
#[derive(Clone, Debug)]
pub struct SearchInputs {
@ -52,31 +52,6 @@ pub enum SearchQuery {
},
}
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery {
pub fn text(
query: impl ToString,

View file

@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch;
use prettier::{LocateStart, Prettier};
use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
@ -82,8 +82,11 @@ use std::{
use terminals::Terminals;
use text::Anchor;
use util::{
debug_panic, defer, http::HttpClient, merge_json_value_into,
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
debug_panic, defer,
http::HttpClient,
merge_json_value_into,
paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
post_inc, ResultExt, TryFutureExt as _,
};
pub use fs::*;
@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>,
prettier_instances: HashMap<
(Option<WorktreeId>, PathBuf),
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
>,
prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
}
#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier {
installation_process: Option<Shared<Task<()>>>,
instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>,
}
@ -686,8 +687,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: Some(node),
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
}
})
@ -789,8 +790,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: None,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
};
for worktree in worktrees {
@ -963,8 +964,7 @@ impl Project {
}
for (worktree, language, settings) in language_formatters_to_check {
self.install_default_formatters(worktree, &language, &settings, cx)
.detach_and_log_err(cx);
self.install_default_formatters(worktree, &language, &settings, cx);
}
// Start all the newly-enabled language servers.
@ -2720,20 +2720,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
let task_buffer = buffer.clone();
let prettier_installation_task =
self.install_default_formatters(worktree, &new_language, &settings, cx);
cx.spawn(move |project, mut cx| async move {
prettier_installation_task.await?;
let _ = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(&task_buffer, cx)
})?
.await;
anyhow::Ok(())
})
.detach_and_log_err(cx);
self.install_default_formatters(worktree, &new_language, &settings, cx);
if let Some(file) = buffer_file {
let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() {
@ -4096,7 +4083,7 @@ impl Project {
}
pub fn format(
&self,
&mut self,
buffers: HashSet<Model<Buffer>>,
push_to_history: bool,
trigger: FormatTrigger,
@ -4116,10 +4103,10 @@ impl Project {
})
.collect::<Vec<_>>();
cx.spawn(move |this, mut cx| async move {
cx.spawn(move |project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the
// same buffer.
this.update(&mut cx, |this, cx| {
project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted
.insert(buffer.read(cx).remote_id())
@ -4127,7 +4114,7 @@ impl Project {
})?;
let _cleanup = defer({
let this = this.clone();
let this = project.clone();
let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers;
move || {
@ -4195,7 +4182,7 @@ impl Project {
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
&this,
&project,
&buffer,
buffer_abs_path,
&language_server,
@ -4230,7 +4217,7 @@ impl Project {
}
}
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this
if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
})?.await {
@ -4247,16 +4234,35 @@ impl Project {
.context("formatting via prettier")?,
));
}
Err(e) => anyhow::bail!(
"Failed to create prettier instance for buffer during autoformatting: {e:#}"
),
Err(e) => {
project.update(&mut cx, |project, _| {
match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
})?;
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
}
} else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref())
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
&this,
&project,
&buffer,
buffer_abs_path,
&language_server,
@ -4269,7 +4275,7 @@ impl Project {
}
}
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this
if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
})?.await {
@ -4286,9 +4292,28 @@ impl Project {
.context("formatting via prettier")?,
));
}
Err(e) => anyhow::bail!(
"Failed to create prettier instance for buffer during formatting: {e:#}"
),
Err(e) => {
project.update(&mut cx, |project, _| {
match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
})?;
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
}
}
}
@ -6506,15 +6531,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
);
let prettiers_to_reload = self
.prettier_instances
.prettiers_per_worktree
.get(&current_worktree_id)
.iter()
.filter_map(|((worktree_id, prettier_path), prettier_task)| {
if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
} else {
None
}
.flat_map(|prettier_paths| prettier_paths.iter())
.flatten()
.filter_map(|prettier_path| {
Some((
current_worktree_id,
Some(prettier_path.clone()),
self.prettier_instances.get(prettier_path)?.clone(),
))
})
.chain(self.default_prettier.iter().filter_map(|default_prettier| {
Some((
current_worktree_id,
None,
default_prettier.instance.clone()?,
))
}))
.collect::<Vec<_>>();
cx.background_executor()
@ -6525,9 +6560,14 @@ impl Project {
.clear_cache()
.await
.with_context(|| {
format!(
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
)
match prettier_path {
Some(prettier_path) => format!(
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
),
None => format!(
"clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
),
}
})
.map_err(Arc::new)
}
@ -8411,7 +8451,12 @@ impl Project {
&mut self,
buffer: &Model<Buffer>,
cx: &mut ModelContext<Self>,
) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
) -> Task<
Option<(
Option<PathBuf>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
)>,
> {
let buffer = buffer.read(cx);
let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else {
@ -8421,142 +8466,142 @@ impl Project {
return Task::ready(None);
}
let buffer_file = File::from_dyn(buffer_file);
let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
let worktree_path = buffer_file
.as_ref()
.and_then(|file| Some(file.worktree.read(cx).abs_path()));
let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
if self.is_local() {
let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None);
};
let fs = self.fs.clone();
cx.spawn(move |this, mut cx| async move {
let prettier_dir = match cx
.background_executor()
.spawn(Prettier::locate(
worktree_path.zip(buffer_path).map(
|(worktree_root_path, starting_path)| LocateStart {
worktree_root_path,
starting_path,
},
),
fs,
))
.await
{
Ok(path) => path,
Err(e) => {
return Some(
Task::ready(Err(Arc::new(e.context(
"determining prettier path for worktree {worktree_path:?}",
))))
.shared(),
);
}
};
if let Some(existing_prettier) = this
.update(&mut cx, |project, _| {
project
.prettier_instances
.get(&(worktree_id, prettier_dir.clone()))
.cloned()
})
.ok()
.flatten()
{
return Some(existing_prettier);
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let task_prettier_dir = prettier_dir.clone();
let new_prettier_task = cx
.spawn({
let this = this.clone();
move |mut cx| async move {
let new_server_id = this.update(&mut cx, |this, _| {
this.languages.next_language_server_id()
})?;
let prettier = Prettier::start(
worktree_id.map(|id| id.to_usize()),
new_server_id,
task_prettier_dir,
node,
cx.clone(),
)
match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
{
Some((worktree_id, buffer_path)) => {
let fs = Arc::clone(&self.fs);
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
return cx.spawn(|project, mut cx| async move {
match cx
.background_executor()
.spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
&buffer_path,
)
.await
})
.await
.context("prettier start")
.map_err(Arc::new)?;
log::info!("Started prettier in {:?}", prettier.prettier_dir());
if let Some(prettier_server) = prettier.server() {
this.update(&mut cx, |project, cx| {
let name = if prettier.is_default() {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let prettier_dir = prettier.prettier_dir();
let worktree_path = prettier
.worktree_id()
.map(WorktreeId::from_usize)
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.read(cx).abs_path());
match worktree_path {
Some(worktree_path) => {
if worktree_path.as_ref() == prettier_dir {
LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default()
)))
} else {
let dir_to_display = match prettier_dir
.strip_prefix(&worktree_path)
.ok()
{
Some(relative_path) => relative_path,
None => prettier_dir,
};
LanguageServerName(Arc::from(format!(
"prettier ({})",
dir_to_display.display(),
)))
}
}
None => LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir.display(),
))),
}
};
{
Ok(None) => {
match project.update(&mut cx, |project, _| {
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
})?;
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(None);
project.default_prettier.as_ref().and_then(
|default_prettier| default_prettier.instance.clone(),
)
}) {
Ok(Some(old_task)) => Some((None, old_task)),
Ok(None) => {
match project.update(&mut cx, |_, cx| {
start_default_prettier(node, Some(worktree_id), cx)
}) {
Ok(new_default_prettier) => {
return Some((None, new_default_prettier.await))
}
Err(e) => {
Some((
None,
Task::ready(Err(Arc::new(e.context("project is gone during default prettier startup"))))
.shared(),
))
}
}
}
Err(e) => Some((None, Task::ready(Err(Arc::new(e.context("project is gone during default prettier checks"))))
.shared())),
}
}
Ok(Some(prettier_dir)) => {
match project.update(&mut cx, |project, _| {
project
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(Some(prettier_dir.clone()));
project.prettier_instances.get(&prettier_dir).cloned()
}) {
Ok(Some(existing_prettier)) => {
log::debug!(
"Found already started prettier in {prettier_dir:?}"
);
return Some((Some(prettier_dir), existing_prettier));
}
Err(e) => {
return Some((
Some(prettier_dir),
Task::ready(Err(Arc::new(e.context("project is gone during custom prettier checks"))))
.shared(),
))
}
_ => {},
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let new_prettier_task =
match project.update(&mut cx, |project, cx| {
let new_prettier_task = start_prettier(
node,
prettier_dir.clone(),
Some(worktree_id),
cx,
);
project.prettier_instances.insert(
prettier_dir.clone(),
new_prettier_task.clone(),
);
new_prettier_task
}) {
Ok(task) => task,
Err(e) => return Some((
Some(prettier_dir),
Task::ready(Err(Arc::new(e.context("project is gone during custom prettier startup"))))
.shared()
)),
};
Some((Some(prettier_dir), new_prettier_task))
}
Err(e) => {
return Some((
None,
Task::ready(Err(Arc::new(
e.context("determining prettier path"),
)))
.shared(),
));
}
Ok(Arc::new(prettier)).map_err(Arc::new)
}
})
.shared();
this.update(&mut cx, |project, _| {
project
.prettier_instances
.insert((worktree_id, prettier_dir), new_prettier_task.clone());
})
.ok();
Some(new_prettier_task)
})
});
}
None => {
let started_default_prettier = self
.default_prettier
.as_ref()
.and_then(|default_prettier| default_prettier.instance.clone());
match started_default_prettier {
Some(old_task) => return Task::ready(Some((None, old_task))),
None => {
let new_task = start_default_prettier(node, None, cx);
return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
}
}
}
}
} else if self.remote_id().is_some() {
return Task::ready(None);
} else {
Task::ready(Some(
Task::ready(Some((
None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
))
)))
}
}
@ -8567,8 +8612,7 @@ impl Project {
_: &Language,
_: &LanguageSettings,
_: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> {
Task::ready(Ok(()))
) {
}
#[cfg(not(any(test, feature = "test-support")))]
@ -8578,19 +8622,19 @@ impl Project {
new_language: &Language,
language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> {
) {
match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {}
Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
Formatter::LanguageServer | Formatter::External { .. } => return,
};
let Some(node) = self.node.as_ref().cloned() else {
return Task::ready(Ok(()));
return;
};
let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() {
prettier_plugins
.get_or_insert_with(|| HashSet::default())
.get_or_insert_with(|| HashSet::<&'static str>::default())
.extend(
new_language
.lsp_adapters()
@ -8599,114 +8643,287 @@ impl Project {
)
}
let Some(prettier_plugins) = prettier_plugins else {
return Task::ready(Ok(()));
return;
};
let fs = Arc::clone(&self.fs);
let locate_prettier_installation = match worktree.and_then(|worktree_id| {
self.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
}) {
Some(locate_from) => {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.background_executor().spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
locate_from.as_ref(),
)
.await
})
}
None => Task::ready(Ok(None)),
};
let mut plugins_to_install = prettier_plugins;
let (mut install_success_tx, mut install_success_rx) =
futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
let new_installation_process = cx
.spawn(|this, mut cx| async move {
if let Some(installed_plugins) = install_success_rx.next().await {
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
installation_process: None,
installed_plugins: HashSet::default(),
});
if !installed_plugins.is_empty() {
log::info!("Installed new prettier plugins: {installed_plugins:?}");
default_prettier.installed_plugins.extend(installed_plugins);
}
})
.ok();
}
})
.shared();
let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() {
return Task::ready(Ok(()));
return;
}
std::mem::replace(
&mut default_prettier.installation_process,
Some(new_installation_process.clone()),
)
default_prettier.installation_process.clone()
} else {
None
};
let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
let already_running_prettier = self
.prettier_instances
.get(&(worktree, default_prettier_dir.to_path_buf()))
.cloned();
let fs = Arc::clone(&self.fs);
cx.spawn(move |this, mut cx| async move {
if let Some(previous_installation_process) = previous_installation_process {
previous_installation_process.await;
}
let mut everything_was_installed = false;
this.update(&mut cx, |this, _| {
match &mut this.default_prettier {
Some(default_prettier) => {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
everything_was_installed = plugins_to_install.is_empty();
},
None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }),
}
})?;
if everything_was_installed {
return Ok(());
}
cx.spawn(move |_| async move {
let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await
.with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?;
let packages_to_versions = future::try_join_all(
plugins_to_install
.iter()
.chain(Some(&"prettier"))
.map(|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node.npm_package_latest_version(package_name)
let default_prettier = self
.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: None,
installed_plugins: HashSet::default(),
});
default_prettier.installation_process = Some(
cx.spawn(|this, mut cx| async move {
match locate_prettier_installation
.await
.context("locate prettier installation")
.map_err(Arc::new)?
{
Some(_non_default_prettier) => return Ok(()),
None => {
let mut needs_install = match previous_installation_process {
Some(previous_installation_process) => {
previous_installation_process.await.is_err()
}
None => true,
};
this.update(&mut cx, |this, _| {
if let Some(default_prettier) = &mut this.default_prettier {
plugins_to_install.retain(|plugin| {
!default_prettier.installed_plugins.contains(plugin)
});
needs_install |= !plugins_to_install.is_empty();
}
})?;
if needs_install {
let installed_plugins = plugins_to_install.clone();
cx.background_executor()
.spawn(async move {
install_default_prettier(plugins_to_install, node, fs).await
})
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
}),
)
.await
.context("fetching latest npm versions")?;
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
(package.as_str(), version.as_str())
}).collect::<Vec<_>>();
node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;
let installed_packages = !plugins_to_install.is_empty();
install_success_tx.try_send(plugins_to_install).ok();
if !installed_packages {
if let Some(prettier) = already_running_prettier {
prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
.context("prettier & plugins install")
.map_err(Arc::new)?;
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: Some(
Task::ready(Ok(())).shared(),
),
installed_plugins: HashSet::default(),
});
default_prettier.instance = None;
default_prettier.installed_plugins.extend(installed_plugins);
})?;
}
}
}
anyhow::Ok(())
}).await
})
Ok(())
})
.shared(),
);
}
}
fn start_default_prettier(
node: Arc<dyn NodeRuntime>,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
cx.spawn(|project, mut cx| async move {
loop {
let default_prettier_installing = match project.update(&mut cx, |project, _| {
project
.default_prettier
.as_ref()
.and_then(|default_prettier| default_prettier.installation_process.clone())
}) {
Ok(installation) => installation,
Err(e) => {
return Task::ready(Err(Arc::new(
e.context("project is gone during default prettier installation"),
)))
.shared()
}
};
match default_prettier_installing {
Some(installation_task) => {
if installation_task.await.is_ok() {
break;
}
}
None => break,
}
}
match project.update(&mut cx, |project, cx| {
match project
.default_prettier
.as_mut()
.and_then(|default_prettier| default_prettier.instance.as_mut())
{
Some(default_prettier) => default_prettier.clone(),
None => {
let new_default_prettier =
start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
project
.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: None,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet::default(),
})
.instance = Some(new_default_prettier.clone());
new_default_prettier
}
}
}) {
Ok(task) => task,
Err(e) => Task::ready(Err(Arc::new(
e.context("project is gone during default prettier startup"),
)))
.shared(),
}
})
}
fn start_prettier(
node: Arc<dyn NodeRuntime>,
prettier_dir: PathBuf,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
cx.spawn(|project, mut cx| async move {
let new_server_id = project.update(&mut cx, |project, _| {
project.languages.next_language_server_id()
})?;
let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
.await
.context("default prettier spawn")
.map(Arc::new)
.map_err(Arc::new)?;
register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
Ok(new_prettier)
})
.shared()
}
fn register_new_prettier(
project: &WeakModel<Project>,
prettier: &Prettier,
worktree_id: Option<WorktreeId>,
new_server_id: LanguageServerId,
cx: &mut AsyncAppContext,
) {
let prettier_dir = prettier.prettier_dir();
let is_default = prettier.is_default();
if is_default {
log::info!("Started default prettier in {prettier_dir:?}");
} else {
log::info!("Started prettier in {prettier_dir:?}");
}
if let Some(prettier_server) = prettier.server() {
project
.update(cx, |project, cx| {
let name = if is_default {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let worktree_path = worktree_id
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
let name = match worktree_path {
Some(worktree_path) => {
if prettier_dir == worktree_path.as_ref() {
let name = prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default();
format!("prettier ({name})")
} else {
let dir_to_display = prettier_dir
.strip_prefix(worktree_path.as_ref())
.ok()
.unwrap_or(prettier_dir);
format!("prettier ({})", dir_to_display.display())
}
}
None => format!("prettier ({})", prettier_dir.display()),
};
LanguageServerName(Arc::from(name))
};
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
})
.ok();
}
}
#[cfg(not(any(test, feature = "test-support")))]
async fn install_default_prettier(
plugins_to_install: HashSet<&'static str>,
node: Arc<dyn NodeRuntime>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<()> {
let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
// fs.save creates the parent directory if it doesn't already exist
fs.save(
&prettier_wrapper_path,
&text::Rope::from(prettier::PRETTIER_SERVER_JS),
text::LineEnding::Unix,
)
.await
.with_context(|| {
format!(
"writing {} file at {prettier_wrapper_path:?}",
prettier::PRETTIER_SERVER_FILE
)
})?;
let packages_to_versions =
future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node
.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
},
))
.await
.context("fetching latest npm versions")?;
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions
.iter()
.map(|(package, version)| (package.as_str(), version.as_str()))
.collect::<Vec<_>>();
node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
.await
.context("fetching formatter packages")?;
anyhow::Ok(())
}
fn subscribe_for_copilot_events(
copilot: &Model<Copilot>,
cx: &mut ModelContext<'_, Project>,

View file

@ -1,4 +1,4 @@
use crate::{search::PathMatcher, Event, *};
use crate::{Event, *};
use fs::FakeFs;
use futures::{future, StreamExt};
use gpui::AppContext;
@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json;
use std::{os, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};
use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {

View file

@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools;
use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder};
@ -10,9 +9,10 @@ use std::{
borrow::Cow,
io::{BufRead, BufReader, Read},
ops::Range,
path::{Path, PathBuf},
path::Path,
sync::Arc,
};
use util::paths::PathMatcher;
#[derive(Clone, Debug)]
pub struct SearchInputs {
@ -52,31 +52,6 @@ pub enum SearchQuery {
},
}
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery {
pub fn text(
query: impl ToString,

View file

@ -29,7 +29,6 @@ serde.workspace = true
serde_derive.workspace = true
smallvec.workspace = true
smol.workspace = true
globset.workspace = true
serde_json.workspace = true
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }

View file

@ -22,7 +22,7 @@ use gpui::{
};
use menu::Confirm;
use project::{
search::{PathMatcher, SearchInputs, SearchQuery},
search::{SearchInputs, SearchQuery},
Entry, Project,
};
use semantic_index::{SemanticIndex, SemanticIndexStatus};
@ -37,7 +37,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
use util::ResultExt as _;
use util::{paths::PathMatcher, ResultExt as _};
use workspace::{
item::{BreadcrumbText, Item, ItemEvent, ItemHandle},
searchable::{Direction, SearchableItem, SearchableItemHandle},

View file

@ -9,7 +9,7 @@ use futures::channel::oneshot;
use gpui::executor;
use ndarray::{Array1, Array2};
use ordered_float::OrderedFloat;
use project::{search::PathMatcher, Fs};
use project::Fs;
use rpc::proto::Timestamp;
use rusqlite::params;
use rusqlite::types::Value;
@ -21,7 +21,7 @@ use std::{
sync::Arc,
time::SystemTime,
};
use util::TryFutureExt;
use util::{paths::PathMatcher, TryFutureExt};
pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
let mut indices = (0..data.len()).collect::<Vec<_>>();

View file

@ -21,7 +21,7 @@ use ordered_float::OrderedFloat;
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
use project::{search::PathMatcher, Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
use project::{Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
use smol::channel;
use std::{
cmp::Reverse,
@ -33,6 +33,7 @@ use std::{
sync::{Arc, Weak},
time::{Duration, Instant, SystemTime},
};
use util::paths::PathMatcher;
use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt};
use workspace::WorkspaceCreated;

View file

@ -10,13 +10,13 @@ use gpui::{executor::Deterministic, Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc, time::SystemTime};
use unindent::Unindent;
use util::RandomCharIter;
use util::{paths::PathMatcher, RandomCharIter};
#[ctor::ctor]
fn init_logger() {
@ -289,12 +289,12 @@ async fn test_code_context_retrieval_rust() {
impl E {
// This is also a preceding comment
pub fn function_1() -> Option<()> {
todo!();
unimplemented!();
}
// This is a preceding comment
fn function_2() -> Result<()> {
todo!();
unimplemented!();
}
}
@ -344,7 +344,7 @@ async fn test_code_context_retrieval_rust() {
"
// This is also a preceding comment
pub fn function_1() -> Option<()> {
todo!();
unimplemented!();
}"
.unindent(),
text.find("pub fn function_1").unwrap(),
@ -353,7 +353,7 @@ async fn test_code_context_retrieval_rust() {
"
// This is a preceding comment
fn function_2() -> Result<()> {
todo!();
unimplemented!();
}"
.unindent(),
text.find("fn function_2").unwrap(),

View file

@ -14,6 +14,7 @@ test-support = ["tempdir", "git2"]
[dependencies]
anyhow.workspace = true
backtrace = "0.3"
globset.workspace = true
log.workspace = true
lazy_static.workspace = true
futures.workspace = true

View file

@ -1,5 +1,6 @@
use std::path::{Path, PathBuf};
use globset::{Glob, GlobMatcher};
use serde::{Deserialize, Serialize};
lazy_static::lazy_static! {
@ -189,6 +190,31 @@ impl<P> PathLikeWithPosition<P> {
}
}
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
#[cfg(test)]
mod tests {
use super::*;
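    // Illustrative sketch only, not part of this diff: expected behavior of the relocated
    // PathMatcher. Per the impl above, is_match accepts either a literal path prefix or a
    // glob match; the paths below are hypothetical.
    #[test]
    fn path_matcher_sketch() {
        let glob_matcher = PathMatcher::new("src/**/*.rs").unwrap();
        assert!(glob_matcher.is_match("src/editor/element.rs")); // glob match
        let literal_matcher = PathMatcher::new("docs").unwrap();
        assert!(literal_matcher.is_match("docs/readme.md")); // starts_with prefix match
    }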

View file

@@ -14,7 +14,7 @@ test-support = [
"client2/test-support",
"project2/test-support",
"settings2/test-support",
"gpui2/test-support",
"gpui/test-support",
"fs2/test-support"
]
@@ -25,7 +25,7 @@ client2 = { path = "../client2" }
collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
fs2 = { path = "../fs2" }
gpui2 = { path = "../gpui2" }
gpui = { package = "gpui2", path = "../gpui2" }
install_cli2 = { path = "../install_cli2" }
language2 = { path = "../language2" }
#menu = { path = "../menu" }
@@ -56,7 +56,7 @@ uuid.workspace = true
[dev-dependencies]
call2 = { path = "../call2", features = ["test-support"] }
client2 = { path = "../client2", features = ["test-support"] }
gpui2 = { path = "../gpui2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
project2 = { path = "../project2", features = ["test-support"] }
settings2 = { path = "../settings2", features = ["test-support"] }
fs2 = { path = "../fs2", features = ["test-support"] }

View file

@@ -1,5 +1,5 @@
use crate::{status_bar::StatusItemView, Axis, Workspace};
use gpui2::{
use gpui::{
div, Action, AnyView, AppContext, Div, Entity, EntityId, EventEmitter, ParentElement, Render,
Subscription, View, ViewContext, WeakView, WindowContext,
};
@@ -226,9 +226,9 @@ impl Dock {
// })
}
// pub fn active_panel_index(&self) -> usize {
// self.active_panel_index
// }
pub fn active_panel_index(&self) -> usize {
self.active_panel_index
}
pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext<Self>) {
if open != self.is_open {
@@ -241,84 +241,87 @@
}
}
// pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
// for entry in &mut self.panel_entries {
// if entry.panel.as_any() == panel {
// if zoomed != entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(zoomed, cx);
// }
// } else if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// }
// }
// cx.notify();
// }
// pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
// for entry in &mut self.panel_entries {
// if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// todo!()
// pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
// for entry in &mut self.panel_entries {
// if entry.panel.as_any() == panel {
// if zoomed != entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(zoomed, cx);
// }
// } else if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// }
// }
// pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
// let subscriptions = [
// cx.observe(&panel, |_, _, cx| cx.notify()),
// cx.subscribe(&panel, |this, panel, event, cx| {
// if T::should_activate_on_event(event) {
// if let Some(ix) = this
// .panel_entries
// .iter()
// .position(|entry| entry.panel.id() == panel.id())
// {
// this.set_open(true, cx);
// this.activate_panel(ix, cx);
// cx.focus(&panel);
// }
// } else if T::should_close_on_event(event)
// && this.visible_panel().map_or(false, |p| p.id() == panel.id())
// {
// this.set_open(false, cx);
// }
// }),
// ];
// cx.notify();
// }
// let dock_view_id = cx.view_id();
// self.panel_entries.push(PanelEntry {
// panel: Arc::new(panel),
// // todo!()
// // context_menu: cx.add_view(|cx| {
// // let mut menu = ContextMenu::new(dock_view_id, cx);
// // menu.set_position_mode(OverlayPositionMode::Local);
// // menu
// // }),
// _subscriptions: subscriptions,
// });
// cx.notify()
// }
pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
for entry in &mut self.panel_entries {
if entry.panel.is_zoomed(cx) {
entry.panel.set_zoomed(false, cx);
}
}
}
// pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) {
// if let Some(panel_ix) = self
// .panel_entries
// .iter()
// .position(|entry| entry.panel.id() == panel.id())
// {
// if panel_ix == self.active_panel_index {
// self.active_panel_index = 0;
// self.set_open(false, cx);
// } else if panel_ix < self.active_panel_index {
// self.active_panel_index -= 1;
// }
// self.panel_entries.remove(panel_ix);
// cx.notify();
// }
// }
pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
let subscriptions = [
cx.observe(&panel, |_, _, cx| cx.notify()),
cx.subscribe(&panel, |this, panel, event, cx| {
if T::should_activate_on_event(event) {
if let Some(ix) = this
.panel_entries
.iter()
.position(|entry| entry.panel.id() == panel.id())
{
this.set_open(true, cx);
this.activate_panel(ix, cx);
// todo!()
// cx.focus(&panel);
}
} else if T::should_close_on_event(event)
&& this.visible_panel().map_or(false, |p| p.id() == panel.id())
{
this.set_open(false, cx);
}
}),
];
// pub fn panels_len(&self) -> usize {
// self.panel_entries.len()
// }
// todo!()
// let dock_view_id = cx.view_id();
self.panel_entries.push(PanelEntry {
panel: Arc::new(panel),
// todo!()
// context_menu: cx.add_view(|cx| {
// let mut menu = ContextMenu::new(dock_view_id, cx);
// menu.set_position_mode(OverlayPositionMode::Local);
// menu
// }),
_subscriptions: subscriptions,
});
cx.notify()
}
pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) {
if let Some(panel_ix) = self
.panel_entries
.iter()
.position(|entry| entry.panel.id() == panel.id())
{
if panel_ix == self.active_panel_index {
self.active_panel_index = 0;
self.set_open(false, cx);
} else if panel_ix < self.active_panel_index {
self.active_panel_index -= 1;
}
self.panel_entries.remove(panel_ix);
cx.notify();
}
}
pub fn panels_len(&self) -> usize {
self.panel_entries.len()
}
pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext<Self>) {
if panel_ix != self.active_panel_index {
@@ -352,38 +355,38 @@ impl Dock {
}
}
// pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> {
// let entry = self.visible_entry()?;
// if entry.panel.is_zoomed(cx) {
// Some(entry.panel.clone())
// } else {
// None
// }
// }
pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> {
let entry = self.visible_entry()?;
if entry.panel.is_zoomed(cx) {
Some(entry.panel.clone())
} else {
None
}
}
// pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
// self.panel_entries
// .iter()
// .find(|entry| entry.panel.id() == panel.id())
// .map(|entry| entry.panel.size(cx))
// }
pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
self.panel_entries
.iter()
.find(|entry| entry.panel.id() == panel.id())
.map(|entry| entry.panel.size(cx))
}
// pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> {
// if self.is_open {
// self.panel_entries
// .get(self.active_panel_index)
// .map(|entry| entry.panel.size(cx))
// } else {
// None
// }
// }
pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> {
if self.is_open {
self.panel_entries
.get(self.active_panel_index)
.map(|entry| entry.panel.size(cx))
} else {
None
}
}
// pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) {
// if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) {
// entry.panel.set_size(size, cx);
// cx.notify();
// }
// }
pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) {
if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) {
entry.panel.set_size(size, cx);
cx.notify();
}
}
// pub fn render_placeholder(&self, cx: &WindowContext) -> AnyElement<Workspace> {
// todo!()
@@ -629,7 +632,7 @@ impl StatusItemView for PanelButtons {
#[cfg(any(test, feature = "test-support"))]
pub mod test {
use super::*;
use gpui2::{div, Div, ViewContext, WindowContext};
use gpui::{div, Div, ViewContext, WindowContext};
#[derive(Debug)]
pub enum TestPanelEvent {
@@ -678,7 +681,7 @@ pub mod test {
"TestPanel"
}
fn position(&self, _: &gpui2::WindowContext) -> super::DockPosition {
fn position(&self, _: &gpui::WindowContext) -> super::DockPosition {
self.position
}

View file

@@ -11,7 +11,7 @@ use client2::{
proto::{self, PeerId},
Client,
};
use gpui2::{
use gpui::{
AnyElement, AnyView, AppContext, Entity, EntityId, EventEmitter, FocusHandle, HighlightStyle,
Model, Pixels, Point, Render, SharedString, Task, View, ViewContext, WeakView, WindowContext,
};
@@ -219,7 +219,7 @@ pub trait ItemHandle: 'static + Send {
&self,
cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
) -> gpui2::Subscription;
) -> gpui::Subscription;
fn tab_tooltip_text(&self, cx: &AppContext) -> Option<SharedString>;
fn tab_description(&self, detail: usize, cx: &AppContext) -> Option<SharedString>;
fn tab_content(&self, detail: Option<usize>, cx: &AppContext) -> AnyElement<Pane>;
@@ -267,7 +267,7 @@ pub trait ItemHandle: 'static + Send {
&self,
cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>,
) -> gpui2::Subscription;
) -> gpui::Subscription;
fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
fn breadcrumbs(&self, theme: &ThemeVariant, cx: &AppContext) -> Option<Vec<BreadcrumbText>>;
@@ -301,7 +301,7 @@ impl<T: Item> ItemHandle for View<T> {
&self,
cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
) -> gpui2::Subscription {
) -> gpui::Subscription {
cx.subscribe(self, move |_, event, cx| {
for item_event in T::to_item_events(event) {
handler(item_event, cx)
@@ -591,7 +591,7 @@ impl<T: Item> ItemHandle for View<T> {
&self,
cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>,
) -> gpui2::Subscription {
) -> gpui::Subscription {
cx.observe_release(self, move |_, cx| callback(cx))
}
@@ -765,7 +765,7 @@ impl<T: FollowableItem> FollowableItemHandle for View<T> {
// pub mod test {
// use super::{Item, ItemEvent};
// use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
// use gpui2::{
// use gpui::{
// elements::Empty, AnyElement, AppContext, Element, Entity, Model, Task, View,
// ViewContext, View, WeakViewHandle,
// };

View file

@@ -1,6 +1,6 @@
use crate::{Toast, Workspace};
use collections::HashMap;
use gpui2::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext};
use gpui::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext};
use std::{any::TypeId, ops::DerefMut};
pub fn init(cx: &mut AppContext) {
@@ -160,7 +160,7 @@ impl Workspace {
pub mod simple_message_notification {
use super::Notification;
use gpui2::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
use gpui::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
use serde::Deserialize;
use std::{borrow::Cow, sync::Arc};
@@ -220,36 +220,36 @@ pub mod simple_message_notification {
}
}
pub fn new_element(
message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>,
) -> MessageNotification {
Self {
message: NotificationMessage::Element(message),
on_click: None,
click_message: None,
}
}
pub fn with_click_message<S>(mut self, message: S) -> Self
where
S: Into<Cow<'static, str>>,
{
self.click_message = Some(message.into());
self
}
pub fn on_click<F>(mut self, on_click: F) -> Self
where
F: 'static + Send + Sync + Fn(&mut ViewContext<Self>),
{
self.on_click = Some(Arc::new(on_click));
self
}
// todo!()
// pub fn new_element(
// message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>,
// ) -> MessageNotification {
// Self {
// message: NotificationMessage::Element(message),
// on_click: None,
// click_message: None,
// }
// }
// pub fn with_click_message<S>(mut self, message: S) -> Self
// where
// S: Into<Cow<'static, str>>,
// {
// self.click_message = Some(message.into());
// self
// }
// pub fn on_click<F>(mut self, on_click: F) -> Self
// where
// F: 'static + Fn(&mut ViewContext<Self>),
// {
// self.on_click = Some(Arc::new(on_click));
// self
// }
// pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
// cx.emit(MessageNotificationEvent::Dismiss);
// }
// pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
// cx.emit(MessageNotificationEvent::Dismiss);
// }
}
impl Render for MessageNotification {
@@ -265,7 +265,7 @@ pub mod simple_message_notification {
// "MessageNotification"
// }
// fn render(&mut self, cx: &mut gpui2::ViewContext<Self>) -> gpui::AnyElement<Self> {
// fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
// let theme = theme2::current(cx).clone();
// let theme = &theme.simple_message_notification;

View file

@@ -8,7 +8,7 @@ use crate::{
};
use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use gpui2::{
use gpui::{
AppContext, AsyncWindowContext, Component, Div, EntityId, EventEmitter, FocusHandle, Model,
PromptLevel, Render, Task, View, ViewContext, VisualContext, WeakView, WindowContext,
};
@@ -416,17 +416,17 @@ impl Pane {
}
}
// pub(crate) fn workspace(&self) -> &WeakView<Workspace> {
// &self.workspace
// }
pub(crate) fn workspace(&self) -> &WeakView<Workspace> {
&self.workspace
}
pub fn has_focus(&self, cx: &WindowContext) -> bool {
self.focus_handle.contains_focused(cx)
}
// pub fn active_item_index(&self) -> usize {
// self.active_item_index
// }
pub fn active_item_index(&self) -> usize {
self.active_item_index
}
// pub fn on_can_drop<F>(&mut self, can_drop: F)
// where
@@ -2911,6 +2911,6 @@ impl Render for DraggedTab {
type Element = Div<Self>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
div().w_8().h_4().bg(gpui2::red())
div().w_8().h_4().bg(gpui::red())
}
}

View file

@@ -1,6 +1,6 @@
use super::DraggedItem;
use crate::{Pane, SplitDirection, Workspace};
use gpui2::{
use gpui::{
color::Color,
elements::{Canvas, MouseEventHandler, ParentElement, Stack},
geometry::{rect::RectF, vector::Vector2F},

View file

@@ -6,9 +6,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
use gpui2::{
point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext,
};
use gpui::{point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext};
use parking_lot::Mutex;
use project2::Project;
use serde::Deserialize;

View file

@@ -6,7 +6,7 @@ use std::path::Path;
use anyhow::{anyhow, bail, Context, Result};
use db2::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
use gpui2::WindowBounds;
use gpui::WindowBounds;
use util::{unzip_option, ResultExt};
use uuid::Uuid;
@@ -549,425 +549,425 @@ impl WorkspaceDb {
}
}
// todo!()
// #[cfg(test)]
// mod tests {
// use super::*;
// use db::open_test_db;
#[cfg(test)]
mod tests {
use super::*;
use db2::open_test_db;
use gpui;
// #[gpui::test]
// async fn test_next_id_stability() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_next_id_stability() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
// db.write(|conn| {
// conn.migrate(
// "test_table",
// &[sql!(
// CREATE TABLE test_table(
// text TEXT,
// workspace_id INTEGER,
// FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
// ON DELETE CASCADE
// ) STRICT;
// )],
// )
// .unwrap();
// })
// .await;
db.write(|conn| {
conn.migrate(
"test_table",
&[sql!(
CREATE TABLE test_table(
text TEXT,
workspace_id INTEGER,
FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;
)],
)
.unwrap();
})
.await;
// let id = db.next_id().await.unwrap();
// // Assert the empty row got inserted
// assert_eq!(
// Some(id),
// db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
// SELECT workspace_id FROM workspaces WHERE workspace_id = ?
// ))
// .unwrap()(id)
// .unwrap()
// );
let id = db.next_id().await.unwrap();
// Assert the empty row got inserted
assert_eq!(
Some(id),
db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
SELECT workspace_id FROM workspaces WHERE workspace_id = ?
))
.unwrap()(id)
.unwrap()
);
// db.write(move |conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-1", id))
// .unwrap()
// })
// .await;
db.write(move |conn| {
conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
.unwrap()(("test-text-1", id))
.unwrap()
})
.await;
// let test_text_1 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(1)
// .unwrap()
// .unwrap();
// assert_eq!(test_text_1, "test-text-1");
// }
let test_text_1 = db
.select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
.unwrap()(1)
.unwrap()
.unwrap();
assert_eq!(test_text_1, "test-text-1");
}
// #[gpui::test]
// async fn test_workspace_id_stability() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_workspace_id_stability() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
// db.write(|conn| {
// conn.migrate(
// "test_table",
// &[sql!(
// CREATE TABLE test_table(
// text TEXT,
// workspace_id INTEGER,
// FOREIGN KEY(workspace_id)
// REFERENCES workspaces(workspace_id)
// ON DELETE CASCADE
// ) STRICT;)],
// )
// })
// .await
// .unwrap();
db.write(|conn| {
conn.migrate(
"test_table",
&[sql!(
CREATE TABLE test_table(
text TEXT,
workspace_id INTEGER,
FOREIGN KEY(workspace_id)
REFERENCES workspaces(workspace_id)
ON DELETE CASCADE
) STRICT;)],
)
})
.await
.unwrap();
// let mut workspace_1 = SerializedWorkspace {
// id: 1,
// location: (["/tmp", "/tmp2"]).into(),
// center_group: Default::default(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
let mut workspace_1 = SerializedWorkspace {
id: 1,
location: (["/tmp", "/tmp2"]).into(),
center_group: Default::default(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// let workspace_2 = SerializedWorkspace {
// id: 2,
// location: (["/tmp"]).into(),
// center_group: Default::default(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
let workspace_2 = SerializedWorkspace {
id: 2,
location: (["/tmp"]).into(),
center_group: Default::default(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// db.save_workspace(workspace_1.clone()).await;
db.save_workspace(workspace_1.clone()).await;
// db.write(|conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-1", 1))
// .unwrap();
// })
// .await;
db.write(|conn| {
conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
.unwrap()(("test-text-1", 1))
.unwrap();
})
.await;
// db.save_workspace(workspace_2.clone()).await;
db.save_workspace(workspace_2.clone()).await;
// db.write(|conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-2", 2))
// .unwrap();
// })
// .await;
db.write(|conn| {
conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
.unwrap()(("test-text-2", 2))
.unwrap();
})
.await;
// workspace_1.location = (["/tmp", "/tmp3"]).into();
// db.save_workspace(workspace_1.clone()).await;
// db.save_workspace(workspace_1).await;
// db.save_workspace(workspace_2).await;
workspace_1.location = (["/tmp", "/tmp3"]).into();
db.save_workspace(workspace_1.clone()).await;
db.save_workspace(workspace_1).await;
db.save_workspace(workspace_2).await;
// let test_text_2 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(2)
// .unwrap()
// .unwrap();
// assert_eq!(test_text_2, "test-text-2");
let test_text_2 = db
.select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
.unwrap()(2)
.unwrap()
.unwrap();
assert_eq!(test_text_2, "test-text-2");
// let test_text_1 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(1)
// .unwrap()
// .unwrap();
// assert_eq!(test_text_1, "test-text-1");
// }
let test_text_1 = db
.select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
.unwrap()(1)
.unwrap()
.unwrap();
assert_eq!(test_text_1, "test-text-1");
}
// fn group(axis: gpui::Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
// SerializedPaneGroup::Group {
// axis,
// flexes: None,
// children,
// }
// }
fn group(axis: Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
SerializedPaneGroup::Group {
axis,
flexes: None,
children,
}
}
// #[gpui::test]
// async fn test_full_workspace_serialization() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_full_workspace_serialization() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
// // -----------------
// // | 1,2 | 5,6 |
// // | - - - | |
// // | 3,4 | |
// // -----------------
// let center_group = group(
// gpui::Axis::Horizontal,
// vec![
// group(
// gpui::Axis::Vertical,
// vec![
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 5, false),
// SerializedItem::new("Terminal", 6, true),
// ],
// false,
// )),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 7, true),
// SerializedItem::new("Terminal", 8, false),
// ],
// false,
// )),
// ],
// ),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 9, false),
// SerializedItem::new("Terminal", 10, true),
// ],
// false,
// )),
// ],
// );
// -----------------
// | 1,2 | 5,6 |
// | - - - | |
// | 3,4 | |
// -----------------
let center_group = group(
Axis::Horizontal,
vec![
group(
Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, false),
SerializedItem::new("Terminal", 6, true),
],
false,
)),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 7, true),
SerializedItem::new("Terminal", 8, false),
],
false,
)),
],
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 9, false),
SerializedItem::new("Terminal", 10, true),
],
false,
)),
],
);
// let workspace = SerializedWorkspace {
// id: 5,
// location: (["/tmp", "/tmp2"]).into(),
// center_group,
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
let workspace = SerializedWorkspace {
id: 5,
location: (["/tmp", "/tmp2"]).into(),
center_group,
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// db.save_workspace(workspace.clone()).await;
// let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
db.save_workspace(workspace.clone()).await;
let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
// assert_eq!(workspace, round_trip_workspace.unwrap());
assert_eq!(workspace, round_trip_workspace.unwrap());
// // Test guaranteed duplicate IDs
// db.save_workspace(workspace.clone()).await;
// db.save_workspace(workspace.clone()).await;
// Test guaranteed duplicate IDs
db.save_workspace(workspace.clone()).await;
db.save_workspace(workspace.clone()).await;
// let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
// assert_eq!(workspace, round_trip_workspace.unwrap());
// }
let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
assert_eq!(workspace, round_trip_workspace.unwrap());
}
// #[gpui::test]
// async fn test_workspace_assignment() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_workspace_assignment() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
// let workspace_1 = SerializedWorkspace {
// id: 1,
// location: (["/tmp", "/tmp2"]).into(),
// center_group: Default::default(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
let workspace_1 = SerializedWorkspace {
id: 1,
location: (["/tmp", "/tmp2"]).into(),
center_group: Default::default(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// let mut workspace_2 = SerializedWorkspace {
// id: 2,
// location: (["/tmp"]).into(),
// center_group: Default::default(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
let mut workspace_2 = SerializedWorkspace {
id: 2,
location: (["/tmp"]).into(),
center_group: Default::default(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// db.save_workspace(workspace_1.clone()).await;
// db.save_workspace(workspace_2.clone()).await;
db.save_workspace(workspace_1.clone()).await;
db.save_workspace(workspace_2.clone()).await;
// // Test that paths are treated as a set
// assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_1
// );
// assert_eq!(
// db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
// workspace_1
// );
// Test that paths are treated as a set
assert_eq!(
db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
workspace_1
);
assert_eq!(
db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
workspace_1
);
// // Make sure that other keys work
// assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
// assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
// Make sure that other keys work
assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
// // Test 'mutate' case of updating a pre-existing id
// workspace_2.location = (["/tmp", "/tmp2"]).into();
// Test 'mutate' case of updating a pre-existing id
workspace_2.location = (["/tmp", "/tmp2"]).into();
// db.save_workspace(workspace_2.clone()).await;
// assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_2
// );
db.save_workspace(workspace_2.clone()).await;
assert_eq!(
db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
workspace_2
);
// // Test other mechanism for mutating
// let mut workspace_3 = SerializedWorkspace {
// id: 3,
// location: (&["/tmp", "/tmp2"]).into(),
// center_group: Default::default(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// };
// Test other mechanism for mutating
let mut workspace_3 = SerializedWorkspace {
id: 3,
location: (&["/tmp", "/tmp2"]).into(),
center_group: Default::default(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
};
// db.save_workspace(workspace_3.clone()).await;
// assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_3
// );
db.save_workspace(workspace_3.clone()).await;
assert_eq!(
db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
workspace_3
);
// // Make sure that updating paths differently also works
// workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
// db.save_workspace(workspace_3.clone()).await;
// assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
// assert_eq!(
// db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
// .unwrap(),
// workspace_3
// );
// }
// Make sure that updating paths differently also works
workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
db.save_workspace(workspace_3.clone()).await;
assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
assert_eq!(
db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
.unwrap(),
workspace_3
);
}
// use crate::persistence::model::SerializedWorkspace;
// use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
use crate::persistence::model::SerializedWorkspace;
use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
// fn default_workspace<P: AsRef<Path>>(
// workspace_id: &[P],
// center_group: &SerializedPaneGroup,
// ) -> SerializedWorkspace {
// SerializedWorkspace {
// id: 4,
// location: workspace_id.into(),
// center_group: center_group.clone(),
// bounds: Default::default(),
// display: Default::default(),
// docks: Default::default(),
// }
// }
fn default_workspace<P: AsRef<Path>>(
workspace_id: &[P],
center_group: &SerializedPaneGroup,
) -> SerializedWorkspace {
SerializedWorkspace {
id: 4,
location: workspace_id.into(),
center_group: center_group.clone(),
bounds: Default::default(),
display: Default::default(),
docks: Default::default(),
}
}
// #[gpui::test]
// async fn test_simple_split() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_simple_split() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("simple_split").await);
let db = WorkspaceDb(open_test_db("simple_split").await);
// // -----------------
// // | 1,2 | 5,6 |
// // | - - - | |
// // | 3,4 | |
// // -----------------
// let center_pane = group(
// gpui::Axis::Horizontal,
// vec![
// group(
// gpui::Axis::Vertical,
// vec![
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true),
// ],
// false,
// )),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 4, false),
// SerializedItem::new("Terminal", 3, true),
// ],
// true,
// )),
// ],
// ),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 5, true),
// SerializedItem::new("Terminal", 6, false),
// ],
// false,
// )),
// ],
// );
// -----------------
// | 1,2 | 5,6 |
// | - - - | |
// | 3,4 | |
// -----------------
let center_pane = group(
Axis::Horizontal,
vec![
group(
Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),
SerializedItem::new("Terminal", 2, true),
],
false,
)),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 4, false),
SerializedItem::new("Terminal", 3, true),
],
true,
)),
],
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, true),
SerializedItem::new("Terminal", 6, false),
],
false,
)),
],
);
// let workspace = default_workspace(&["/tmp"], &center_pane);
let workspace = default_workspace(&["/tmp"], &center_pane);
// db.save_workspace(workspace.clone()).await;
db.save_workspace(workspace.clone()).await;
// let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
// assert_eq!(workspace.center_group, new_workspace.center_group);
// }
assert_eq!(workspace.center_group, new_workspace.center_group);
}
// #[gpui::test]
// async fn test_cleanup_panes() {
// env_logger::try_init().ok();
#[gpui::test]
async fn test_cleanup_panes() {
env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
// let center_pane = group(
// gpui::Axis::Horizontal,
// vec![
// group(
// gpui::Axis::Vertical,
// vec![
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true),
// ],
// false,
// )),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 4, false),
// SerializedItem::new("Terminal", 3, true),
// ],
// true,
// )),
// ],
// ),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 5, false),
// SerializedItem::new("Terminal", 6, true),
// ],
// false,
// )),
// ],
// );
let center_pane = group(
Axis::Horizontal,
vec![
group(
Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),
SerializedItem::new("Terminal", 2, true),
],
false,
)),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 4, false),
SerializedItem::new("Terminal", 3, true),
],
true,
)),
],
),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 5, false),
SerializedItem::new("Terminal", 6, true),
],
false,
)),
],
);
// let id = &["/tmp"];
let id = &["/tmp"];
// let mut workspace = default_workspace(id, &center_pane);
let mut workspace = default_workspace(id, &center_pane);
// db.save_workspace(workspace.clone()).await;
db.save_workspace(workspace.clone()).await;
// workspace.center_group = group(
// gpui::Axis::Vertical,
// vec![
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true),
// ],
// false,
// )),
// SerializedPaneGroup::Pane(SerializedPane::new(
// vec![
// SerializedItem::new("Terminal", 4, true),
// SerializedItem::new("Terminal", 3, false),
// ],
// true,
// )),
// ],
// );
workspace.center_group = group(
Axis::Vertical,
vec![
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 1, false),
SerializedItem::new("Terminal", 2, true),
],
false,
)),
SerializedPaneGroup::Pane(SerializedPane::new(
vec![
SerializedItem::new("Terminal", 4, true),
SerializedItem::new("Terminal", 3, false),
],
true,
)),
],
);
// db.save_workspace(workspace.clone()).await;
db.save_workspace(workspace.clone()).await;
// let new_workspace = db.workspace_for_roots(id).unwrap();
let new_workspace = db.workspace_for_roots(id).unwrap();
// assert_eq!(workspace.center_group, new_workspace.center_group);
// }
// }
assert_eq!(workspace.center_group, new_workspace.center_group);
}
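// Illustrative sketch only, not part of this diff: the save/load round-trip that the
// re-enabled tests above exercise, written against this module's helpers (open_test_db,
// default_workspace) with hypothetical values.
#[gpui::test]
async fn round_trip_sketch() {
    let db = WorkspaceDb(open_test_db("round_trip_sketch").await);
    let workspace = default_workspace(&["/tmp"], &Default::default());
    db.save_workspace(workspace.clone()).await;
    assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace);
}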
}

View file

@@ -7,7 +7,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
use gpui2::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds};
use gpui::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds};
use project2::Project;
use std::{
path::{Path, PathBuf},
@@ -55,7 +55,7 @@ impl Column for WorkspaceLocation {
}
}
#[derive(PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone)]
pub struct SerializedWorkspace {
pub id: WorkspaceId,
pub location: WorkspaceLocation,
@@ -127,7 +127,7 @@ impl Bind for DockData {
}
}
#[derive(PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone)]
pub enum SerializedPaneGroup {
Group {
axis: Axis,
@@ -286,15 +286,15 @@ pub struct SerializedItem {
pub active: bool,
}
// impl SerializedItem {
// pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
// Self {
// kind: Arc::from(kind.as_ref()),
// item_id,
// active,
// }
// }
// }
impl SerializedItem {
pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
Self {
kind: Arc::from(kind.as_ref()),
item_id,
active,
}
}
}
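// Illustrative sketch only, not part of this diff: the restored constructor in use,
// with hypothetical values.
fn serialized_item_sketch() {
    let item = SerializedItem::new("Terminal", 42, true);
    assert_eq!(&*item.kind, "Terminal");
    assert!(item.active);
}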
#[cfg(test)]
impl Default for SerializedItem {

View file

@@ -1,6 +1,6 @@
use std::{any::Any, sync::Arc};
use gpui2::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext};
use gpui::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext};
use project2::search::SearchQuery;
use crate::{

View file

@@ -1,7 +1,7 @@
use std::any::TypeId;
use crate::{ItemHandle, Pane};
use gpui2::{
use gpui::{
div, AnyView, Component, Div, ParentElement, Render, Styled, Subscription, View, ViewContext,
WindowContext,
};

View file

@@ -1,5 +1,5 @@
use crate::ItemHandle;
use gpui2::{
use gpui::{
AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext, WindowContext,
};

View file

@@ -8,6 +8,7 @@ pub mod pane;
pub mod pane_group;
mod persistence;
pub mod searchable;
// todo!()
// pub mod shared_screen;
mod status_bar;
mod toolbar;
@@ -33,7 +34,7 @@ use futures::{
future::try_join_all,
Future, FutureExt, StreamExt,
};
use gpui2::{
use gpui::{
div, point, size, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext,
AsyncWindowContext, Bounds, Component, Div, Entity, EntityId, EventEmitter, FocusHandle,
GlobalPixels, Model, ModelContext, ParentElement, Point, Render, Size, StatefulInteractive,
@@ -41,6 +42,7 @@ use gpui2::{
WindowContext, WindowHandle, WindowOptions,
};
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem};
use itertools::Itertools;
use language2::LanguageRegistry;
use lazy_static::lazy_static;
use node_runtime::NodeRuntime;
@@ -175,42 +177,42 @@ pub struct Toast {
on_click: Option<(Cow<'static, str>, Arc<dyn Fn(&mut WindowContext)>)>,
}
// impl Toast {
// pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self {
// Toast {
// id,
// msg: msg.into(),
// on_click: None,
// }
// }
impl Toast {
pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self {
Toast {
id,
msg: msg.into(),
on_click: None,
}
}
// pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self
// where
// M: Into<Cow<'static, str>>,
// F: Fn(&mut WindowContext) + 'static,
// {
// self.on_click = Some((message.into(), Arc::new(on_click)));
// self
// }
// }
pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self
where
M: Into<Cow<'static, str>>,
F: Fn(&mut WindowContext) + 'static,
{
self.on_click = Some((message.into(), Arc::new(on_click)));
self
}
}
// impl PartialEq for Toast {
// fn eq(&self, other: &Self) -> bool {
// self.id == other.id
// && self.msg == other.msg
// && self.on_click.is_some() == other.on_click.is_some()
// }
// }
impl PartialEq for Toast {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
&& self.msg == other.msg
&& self.on_click.is_some() == other.on_click.is_some()
}
}
// impl Clone for Toast {
// fn clone(&self) -> Self {
// Toast {
// id: self.id,
// msg: self.msg.to_owned(),
// on_click: self.on_click.clone(),
// }
// }
// }
impl Clone for Toast {
fn clone(&self) -> Self {
Toast {
id: self.id,
msg: self.msg.to_owned(),
on_click: self.on_click.clone(),
}
}
}
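// Illustrative sketch only, not part of this diff: the re-enabled Toast builder with a
// hypothetical message and click handler; equality ignores the callback itself and only
// checks whether one is present.
fn toast_sketch() -> Toast {
    let toast = Toast::new(7, "Workspace restored").on_click(
        "Open settings",
        |_cx: &mut WindowContext| {
            // React to the click here.
        },
    );
    assert!(toast == toast.clone());
    toast
}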
// #[derive(Clone, Deserialize, PartialEq)]
// pub struct OpenTerminal {
@@ -460,7 +462,7 @@ struct Follower {
impl AppState {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut AppContext) -> Arc<Self> {
use gpui2::Context;
use gpui::Context;
use node_runtime::FakeNodeRuntime;
use settings2::SettingsStore;
@@ -476,8 +478,7 @@ impl AppState {
let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
let workspace_store = cx.build_model(|cx| WorkspaceStore::new(client.clone(), cx));
// todo!()
// theme::init((), cx);
theme2::init(cx);
client2::init(&client, cx);
crate::init_settings(cx);
@@ -1061,183 +1062,185 @@ impl Workspace {
&self.project
}
// pub fn recent_navigation_history(
// &self,
// limit: Option<usize>,
// cx: &AppContext,
// ) -> Vec<(ProjectPath, Option<PathBuf>)> {
// let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
// let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
// for pane in &self.panes {
// let pane = pane.read(cx);
// pane.nav_history()
// .for_each_entry(cx, |entry, (project_path, fs_path)| {
// if let Some(fs_path) = &fs_path {
// abs_paths_opened
// .entry(fs_path.clone())
// .or_default()
// .insert(project_path.clone());
// }
// let timestamp = entry.timestamp;
// match history.entry(project_path) {
// hash_map::Entry::Occupied(mut entry) => {
// let (_, old_timestamp) = entry.get();
// if &timestamp > old_timestamp {
// entry.insert((fs_path, timestamp));
// }
// }
// hash_map::Entry::Vacant(entry) => {
// entry.insert((fs_path, timestamp));
// }
// }
// });
// }
pub fn recent_navigation_history(
&self,
limit: Option<usize>,
cx: &AppContext,
) -> Vec<(ProjectPath, Option<PathBuf>)> {
let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
for pane in &self.panes {
let pane = pane.read(cx);
pane.nav_history()
.for_each_entry(cx, |entry, (project_path, fs_path)| {
if let Some(fs_path) = &fs_path {
abs_paths_opened
.entry(fs_path.clone())
.or_default()
.insert(project_path.clone());
}
let timestamp = entry.timestamp;
match history.entry(project_path) {
hash_map::Entry::Occupied(mut entry) => {
let (_, old_timestamp) = entry.get();
if &timestamp > old_timestamp {
entry.insert((fs_path, timestamp));
}
}
hash_map::Entry::Vacant(entry) => {
entry.insert((fs_path, timestamp));
}
}
});
}
// history
// .into_iter()
// .sorted_by_key(|(_, (_, timestamp))| *timestamp)
// .map(|(project_path, (fs_path, _))| (project_path, fs_path))
// .rev()
// .filter(|(history_path, abs_path)| {
// let latest_project_path_opened = abs_path
// .as_ref()
// .and_then(|abs_path| abs_paths_opened.get(abs_path))
// .and_then(|project_paths| {
// project_paths
// .iter()
// .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
// });
history
.into_iter()
.sorted_by_key(|(_, (_, timestamp))| *timestamp)
.map(|(project_path, (fs_path, _))| (project_path, fs_path))
.rev()
.filter(|(history_path, abs_path)| {
let latest_project_path_opened = abs_path
.as_ref()
.and_then(|abs_path| abs_paths_opened.get(abs_path))
.and_then(|project_paths| {
project_paths
.iter()
.max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
});
// match latest_project_path_opened {
// Some(latest_project_path_opened) => latest_project_path_opened == history_path,
// None => true,
// }
// })
// .take(limit.unwrap_or(usize::MAX))
// .collect()
// }
match latest_project_path_opened {
Some(latest_project_path_opened) => latest_project_path_opened == history_path,
None => true,
}
})
.take(limit.unwrap_or(usize::MAX))
.collect()
}
// fn navigate_history(
// &mut self,
// pane: WeakView<Pane>,
// mode: NavigationMode,
// cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> {
// let to_load = if let Some(pane) = pane.upgrade(cx) {
// cx.focus(&pane);
fn navigate_history(
&mut self,
pane: WeakView<Pane>,
mode: NavigationMode,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
let to_load = if let Some(pane) = pane.upgrade() {
// todo!("focus")
// cx.focus(&pane);
// pane.update(cx, |pane, cx| {
// loop {
// // Retrieve the weak item handle from the history.
// let entry = pane.nav_history_mut().pop(mode, cx)?;
pane.update(cx, |pane, cx| {
loop {
// Retrieve the weak item handle from the history.
let entry = pane.nav_history_mut().pop(mode, cx)?;
// // If the item is still present in this pane, then activate it.
// if let Some(index) = entry
// .item
// .upgrade(cx)
// .and_then(|v| pane.index_for_item(v.as_ref()))
// {
// let prev_active_item_index = pane.active_item_index();
// pane.nav_history_mut().set_mode(mode);
// pane.activate_item(index, true, true, cx);
// pane.nav_history_mut().set_mode(NavigationMode::Normal);
// If the item is still present in this pane, then activate it.
if let Some(index) = entry
.item
.upgrade()
.and_then(|v| pane.index_for_item(v.as_ref()))
{
let prev_active_item_index = pane.active_item_index();
pane.nav_history_mut().set_mode(mode);
pane.activate_item(index, true, true, cx);
pane.nav_history_mut().set_mode(NavigationMode::Normal);
// let mut navigated = prev_active_item_index != pane.active_item_index();
// if let Some(data) = entry.data {
// navigated |= pane.active_item()?.navigate(data, cx);
// }
let mut navigated = prev_active_item_index != pane.active_item_index();
if let Some(data) = entry.data {
navigated |= pane.active_item()?.navigate(data, cx);
}
// if navigated {
// break None;
// }
// }
// // If the item is no longer present in this pane, then retrieve its
// // project path in order to reopen it.
// else {
// break pane
// .nav_history()
// .path_for_item(entry.item.id())
// .map(|(project_path, _)| (project_path, entry));
// }
// }
// })
// } else {
// None
// };
if navigated {
break None;
}
}
// If the item is no longer present in this pane, then retrieve its
// project path in order to reopen it.
else {
break pane
.nav_history()
.path_for_item(entry.item.id())
.map(|(project_path, _)| (project_path, entry));
}
}
})
} else {
None
};
// if let Some((project_path, entry)) = to_load {
// // If the item was no longer present, then load it again from its previous path.
// let task = self.load_path(project_path, cx);
// cx.spawn(|workspace, mut cx| async move {
// let task = task.await;
// let mut navigated = false;
// if let Some((project_entry_id, build_item)) = task.log_err() {
// let prev_active_item_id = pane.update(&mut cx, |pane, _| {
// pane.nav_history_mut().set_mode(mode);
// pane.active_item().map(|p| p.id())
// })?;
if let Some((project_path, entry)) = to_load {
// If the item was no longer present, then load it again from its previous path.
let task = self.load_path(project_path, cx);
cx.spawn(|workspace, mut cx| async move {
let task = task.await;
let mut navigated = false;
if let Some((project_entry_id, build_item)) = task.log_err() {
let prev_active_item_id = pane.update(&mut cx, |pane, _| {
pane.nav_history_mut().set_mode(mode);
pane.active_item().map(|p| p.id())
})?;
// pane.update(&mut cx, |pane, cx| {
// let item = pane.open_item(project_entry_id, true, cx, build_item);
// navigated |= Some(item.id()) != prev_active_item_id;
// pane.nav_history_mut().set_mode(NavigationMode::Normal);
// if let Some(data) = entry.data {
// navigated |= item.navigate(data, cx);
// }
// })?;
// }
pane.update(&mut cx, |pane, cx| {
let item = pane.open_item(project_entry_id, true, cx, build_item);
navigated |= Some(item.id()) != prev_active_item_id;
pane.nav_history_mut().set_mode(NavigationMode::Normal);
if let Some(data) = entry.data {
navigated |= item.navigate(data, cx);
}
})?;
}
// if !navigated {
// workspace
// .update(&mut cx, |workspace, cx| {
// Self::navigate_history(workspace, pane, mode, cx)
// })?
// .await?;
// }
if !navigated {
workspace
.update(&mut cx, |workspace, cx| {
Self::navigate_history(workspace, pane, mode, cx)
})?
.await?;
}
// Ok(())
// })
// } else {
// Task::ready(Ok(()))
// }
// }
Ok(())
})
} else {
Task::ready(Ok(()))
}
}
// pub fn go_back(
// &mut self,
// pane: WeakView<Pane>,
// cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> {
// self.navigate_history(pane, NavigationMode::GoingBack, cx)
// }
pub fn go_back(
&mut self,
pane: WeakView<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
self.navigate_history(pane, NavigationMode::GoingBack, cx)
}
// pub fn go_forward(
// &mut self,
// pane: WeakView<Pane>,
// cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> {
// self.navigate_history(pane, NavigationMode::GoingForward, cx)
// }
pub fn go_forward(
&mut self,
pane: WeakView<Pane>,
cx: &mut ViewContext<Workspace>,
) -> Task<Result<()>> {
self.navigate_history(pane, NavigationMode::GoingForward, cx)
}
// pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
// self.navigate_history(
// self.active_pane().downgrade(),
// NavigationMode::ReopeningClosedItem,
// cx,
// )
// }
pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
self.navigate_history(
self.active_pane().downgrade(),
NavigationMode::ReopeningClosedItem,
cx,
)
}
// pub fn client(&self) -> &Client {
// &self.app_state.client
// }
pub fn client(&self) -> &Client {
&self.app_state.client
}
// pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) {
// self.titlebar_item = Some(item);
// cx.notify();
// }
// todo!()
// pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) {
// self.titlebar_item = Some(item);
// cx.notify();
// }
// pub fn titlebar_item(&self) -> Option<AnyViewHandle> {
// self.titlebar_item.clone()
// }
// pub fn titlebar_item(&self) -> Option<AnyViewHandle> {
// self.titlebar_item.clone()
// }
// /// Call the given callback with a workspace whose project is local.
// ///
@@ -1263,32 +1266,29 @@ impl Workspace {
// }
// }
// pub fn worktrees<'a>(
// &self,
// cx: &'a AppContext,
// ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
// self.project.read(cx).worktrees(cx)
// }
pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Model<Worktree>> {
self.project.read(cx).worktrees()
}
// pub fn visible_worktrees<'a>(
// &self,
// cx: &'a AppContext,
// ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
// self.project.read(cx).visible_worktrees(cx)
// }
pub fn visible_worktrees<'a>(
&self,
cx: &'a AppContext,
) -> impl 'a + Iterator<Item = Model<Worktree>> {
self.project.read(cx).visible_worktrees(cx)
}
// pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static {
// let futures = self
// .worktrees(cx)
// .filter_map(|worktree| worktree.read(cx).as_local())
// .map(|worktree| worktree.scan_complete())
// .collect::<Vec<_>>();
// async move {
// for future in futures {
// future.await;
// }
// }
// }
pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static {
let futures = self
.worktrees(cx)
.filter_map(|worktree| worktree.read(cx).as_local())
.map(|worktree| worktree.scan_complete())
.collect::<Vec<_>>();
async move {
for future in futures {
future.await;
}
}
}
// pub fn close_global(_: &CloseWindow, cx: &mut AppContext) {
// cx.spawn(|mut cx| async move {
@@ -1705,27 +1705,27 @@ impl Workspace {
self.active_item(cx).and_then(|item| item.project_path(cx))
}
// pub fn save_active_item(
// &mut self,
// save_intent: SaveIntent,
// cx: &mut ViewContext<Self>,
// ) -> Task<Result<()>> {
// let project = self.project.clone();
// let pane = self.active_pane();
// let item_ix = pane.read(cx).active_item_index();
// let item = pane.read(cx).active_item();
// let pane = pane.downgrade();
pub fn save_active_item(
&mut self,
save_intent: SaveIntent,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
let project = self.project.clone();
let pane = self.active_pane();
let item_ix = pane.read(cx).active_item_index();
let item = pane.read(cx).active_item();
let pane = pane.downgrade();
// cx.spawn(|_, mut cx| async move {
// if let Some(item) = item {
// Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx)
// .await
// .map(|_| ())
// } else {
// Ok(())
// }
// })
// }
cx.spawn(|_, mut cx| async move {
if let Some(item) = item {
Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx)
.await
.map(|_| ())
} else {
Ok(())
}
})
}
// pub fn close_inactive_items_and_panes(
// &mut self,
@@ -1827,19 +1827,20 @@ impl Workspace {
// self.serialize_workspace(cx);
// }
// pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) {
// let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock];
pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) {
let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock];
// for dock in docks {
// dock.update(cx, |dock, cx| {
// dock.set_open(false, cx);
// });
// }
for dock in docks {
dock.update(cx, |dock, cx| {
dock.set_open(false, cx);
});
}
// cx.focus_self();
// cx.notify();
// self.serialize_workspace(cx);
// }
// todo!("focus")
// cx.focus_self();
cx.notify();
self.serialize_workspace(cx);
}
// /// Transfer focus to the panel of the given type.
// pub fn focus_panel<T: Panel>(&mut self, cx: &mut ViewContext<Self>) -> Option<View<T>> {
@@ -1906,19 +1907,19 @@ impl Workspace {
// None
// }
// fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
// for pane in &self.panes {
// pane.update(cx, |pane, cx| pane.set_zoomed(false, cx));
// }
fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
for pane in &self.panes {
pane.update(cx, |pane, cx| pane.set_zoomed(false, cx));
}
// self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.zoomed = None;
// self.zoomed_position = None;
self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx));
self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx));
self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx));
self.zoomed = None;
self.zoomed_position = None;
// cx.notify();
// }
cx.notify();
}
// #[cfg(any(test, feature = "test-support"))]
// pub fn zoomed_view(&self, cx: &AppContext) -> Option<AnyViewHandle> {
@@ -1964,22 +1965,21 @@ impl Workspace {
cx.notify();
}
fn add_pane(&mut self, _cx: &mut ViewContext<Self>) -> View<Pane> {
todo!()
// let pane = cx.build_view(|cx| {
// Pane::new(
// self.weak_handle(),
// self.project.clone(),
// self.pane_history_timestamp.clone(),
// cx,
// )
// });
// cx.subscribe(&pane, Self::handle_pane_event).detach();
// self.panes.push(pane.clone());
fn add_pane(&mut self, cx: &mut ViewContext<Self>) -> View<Pane> {
let pane = cx.build_view(|cx| {
Pane::new(
self.weak_handle(),
self.project.clone(),
self.pane_history_timestamp.clone(),
cx,
)
});
cx.subscribe(&pane, Self::handle_pane_event).detach();
self.panes.push(pane.clone());
// todo!()
// cx.focus(&pane);
// cx.emit(Event::PaneAdded(pane.clone()));
// pane
cx.emit(Event::PaneAdded(pane.clone()));
pane
}
// pub fn add_item_to_center(
@@ -3125,6 +3125,7 @@ impl Workspace {
None
}
// todo!()
// fn shared_screen_for_peer(
// &self,
// peer_id: PeerId,
@@ -3501,6 +3502,7 @@ impl Workspace {
})
}
// todo!()
// #[cfg(any(test, feature = "test-support"))]
// pub fn test_new(project: ModelHandle<Project>, cx: &mut ViewContext<Self>) -> Self {
// use node_runtime::FakeNodeRuntime;
@@ -3661,6 +3663,7 @@ fn open_items(
})
}
// todo!()
// fn notify_of_new_dock(workspace: &WeakView<Workspace>, cx: &mut AsyncAppContext) {
// const NEW_PANEL_BLOG_POST: &str = "https://zed.dev/blog/new-panel-system";
// const NEW_DOCK_HINT_KEY: &str = "show_new_dock_key";
@@ -3741,23 +3744,22 @@ fn open_items(
// })
// .ok();
fn notify_if_database_failed(_workspace: WindowHandle<Workspace>, _cx: &mut AsyncAppContext) {
fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncAppContext) {
const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
// todo!()
// workspace
// .update(cx, |workspace, cx| {
// if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
// workspace.show_notification_once(0, cx, |cx| {
// cx.build_view(|_| {
// MessageNotification::new("Failed to load the database file.")
// .with_click_message("Click to let us know about this error")
// .on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL))
// })
// });
// }
// })
// .log_err();
workspace
.update(cx, |workspace, cx| {
if (*db2::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
workspace.show_notification_once(0, cx, |cx| {
cx.build_view(|_| {
MessageNotification::new("Failed to load the database file.")
.with_click_message("Click to let us know about this error")
.on_click(|cx| cx.open_url(REPORT_ISSUE_URL))
})
});
}
})
.log_err();
}
impl EventEmitter for Workspace {
@@ -4179,36 +4181,32 @@ impl WorkspaceStore {
}
async fn handle_update_followers(
_this: Model<Self>,
_envelope: TypedEnvelope<proto::UpdateFollowers>,
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateFollowers>,
_: Arc<Client>,
mut _cx: AsyncWindowContext,
mut cx: AsyncWindowContext,
) -> Result<()> {
// let leader_id = envelope.original_sender_id()?;
// let update = envelope.payload;
let leader_id = envelope.original_sender_id()?;
let update = envelope.payload;
// this.update(&mut cx, |this, cx| {
// for workspace in &this.workspaces {
// let Some(workspace) = workspace.upgrade() else {
// continue;
// };
// workspace.update(cx, |workspace, cx| {
// let project_id = workspace.project.read(cx).remote_id();
// if update.project_id != project_id && update.project_id.is_some() {
// return;
// }
// workspace.handle_update_followers(leader_id, update.clone(), cx);
// });
// }
// Ok(())
// })?
todo!()
this.update(&mut cx, |this, cx| {
for workspace in &this.workspaces {
workspace.update(cx, |workspace, cx| {
let project_id = workspace.project.read(cx).remote_id();
if update.project_id != project_id && update.project_id.is_some() {
return;
}
workspace.handle_update_followers(leader_id, update.clone(), cx);
})?;
}
Ok(())
})?
}
}
// impl Entity for WorkspaceStore {
// type Event = ();
// }
impl EventEmitter for WorkspaceStore {
type Event = ();
}
impl ViewId {
pub(crate) fn from_proto(message: proto::ViewId) -> Result<Self> {

View file

@@ -49,7 +49,7 @@ impl Settings for WorkspaceSettings {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui2::AppContext,
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}

View file

@@ -37,10 +37,9 @@ pub enum IsOnlyInstance {
}
pub fn ensure_only_instance() -> IsOnlyInstance {
// todo!("zed_stateless")
// if *db::ZED_STATELESS {
// return IsOnlyInstance::Yes;
// }
if *db::ZED_STATELESS {
return IsOnlyInstance::Yes;
}
if check_got_handshake() {
return IsOnlyInstance::No;

View file

@@ -60,33 +60,33 @@ pub fn initialize_workspace(
move |workspace, _, event, cx| {
if let workspace::Event::PaneAdded(pane) = event {
pane.update(cx, |pane, cx| {
// todo!()
// pane.toolbar().update(cx, |toolbar, cx| {
// let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace));
// toolbar.add_item(breadcrumbs, cx);
// let buffer_search_bar = cx.add_view(BufferSearchBar::new);
// toolbar.add_item(buffer_search_bar.clone(), cx);
// let quick_action_bar = cx.add_view(|_| {
// QuickActionBar::new(buffer_search_bar, workspace)
// });
// toolbar.add_item(quick_action_bar, cx);
// let diagnostic_editor_controls =
// cx.add_view(|_| diagnostics2::ToolbarControls::new());
// toolbar.add_item(diagnostic_editor_controls, cx);
// let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
// toolbar.add_item(project_search_bar, cx);
// let submit_feedback_button =
// cx.add_view(|_| SubmitFeedbackButton::new());
// toolbar.add_item(submit_feedback_button, cx);
// let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
// toolbar.add_item(feedback_info_text, cx);
// let lsp_log_item =
// cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
// toolbar.add_item(lsp_log_item, cx);
// let syntax_tree_item = cx
// .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
// toolbar.add_item(syntax_tree_item, cx);
// })
pane.toolbar().update(cx, |toolbar, cx| {
// todo!()
// let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace));
// toolbar.add_item(breadcrumbs, cx);
// let buffer_search_bar = cx.add_view(BufferSearchBar::new);
// toolbar.add_item(buffer_search_bar.clone(), cx);
// let quick_action_bar = cx.add_view(|_| {
// QuickActionBar::new(buffer_search_bar, workspace)
// });
// toolbar.add_item(quick_action_bar, cx);
// let diagnostic_editor_controls =
// cx.add_view(|_| diagnostics2::ToolbarControls::new());
// toolbar.add_item(diagnostic_editor_controls, cx);
// let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
// toolbar.add_item(project_search_bar, cx);
// let submit_feedback_button =
// cx.add_view(|_| SubmitFeedbackButton::new());
// toolbar.add_item(submit_feedback_button, cx);
// let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
// toolbar.add_item(feedback_info_text, cx);
// let lsp_log_item =
// cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
// toolbar.add_item(lsp_log_item, cx);
// let syntax_tree_item = cx
// .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
// toolbar.add_item(syntax_tree_item, cx);
})
});
}
}