Merge branch 'main' into import-theme

This commit is contained in:
Marshall Bowers 2023-11-03 10:11:35 -04:00
commit a7edd380e3
49 changed files with 3890 additions and 1929 deletions

62
Cargo.lock generated
View file

@ -1546,7 +1546,7 @@ dependencies = [
"sum_tree", "sum_tree",
"sysinfo", "sysinfo",
"tempfile", "tempfile",
"text", "text2",
"thiserror", "thiserror",
"time", "time",
"tiny_http", "tiny_http",
@ -3079,7 +3079,7 @@ dependencies = [
"smol", "smol",
"sum_tree", "sum_tree",
"tempfile", "tempfile",
"text", "text2",
"time", "time",
"util", "util",
] ]
@ -3371,6 +3371,26 @@ dependencies = [
"url", "url",
] ]
[[package]]
name = "git3"
version = "0.1.0"
dependencies = [
"anyhow",
"async-trait",
"clock",
"collections",
"futures 0.3.28",
"git2",
"lazy_static",
"log",
"parking_lot 0.11.2",
"smol",
"sum_tree",
"text2",
"unindent",
"util",
]
[[package]] [[package]]
name = "glob" name = "glob"
version = "0.3.1" version = "0.3.1"
@ -4212,7 +4232,7 @@ dependencies = [
"settings2", "settings2",
"shellexpand", "shellexpand",
"util", "util",
"workspace", "workspace2",
] ]
[[package]] [[package]]
@ -4345,7 +4365,7 @@ dependencies = [
"env_logger 0.9.3", "env_logger 0.9.3",
"futures 0.3.28", "futures 0.3.28",
"fuzzy2", "fuzzy2",
"git", "git3",
"globset", "globset",
"gpui2", "gpui2",
"indoc", "indoc",
@ -4366,7 +4386,7 @@ dependencies = [
"smallvec", "smallvec",
"smol", "smol",
"sum_tree", "sum_tree",
"text", "text2",
"theme2", "theme2",
"tree-sitter", "tree-sitter",
"tree-sitter-elixir", "tree-sitter-elixir",
@ -5081,7 +5101,7 @@ dependencies = [
"ctor", "ctor",
"env_logger 0.9.3", "env_logger 0.9.3",
"futures 0.3.28", "futures 0.3.28",
"git", "git3",
"gpui2", "gpui2",
"indoc", "indoc",
"itertools 0.10.5", "itertools 0.10.5",
@ -5095,7 +5115,7 @@ dependencies = [
"project2", "project2",
"pulldown-cmark", "pulldown-cmark",
"rand 0.8.5", "rand 0.8.5",
"rich_text", "rich_text2",
"schemars", "schemars",
"serde", "serde",
"serde_derive", "serde_derive",
@ -5104,7 +5124,7 @@ dependencies = [
"smol", "smol",
"snippet", "snippet",
"sum_tree", "sum_tree",
"text", "text2",
"theme2", "theme2",
"tree-sitter", "tree-sitter",
"tree-sitter-html", "tree-sitter-html",
@ -6284,8 +6304,8 @@ dependencies = [
"fsevent", "fsevent",
"futures 0.3.28", "futures 0.3.28",
"fuzzy2", "fuzzy2",
"git",
"git2", "git2",
"git3",
"globset", "globset",
"gpui2", "gpui2",
"ignore", "ignore",
@ -6313,7 +6333,7 @@ dependencies = [
"sum_tree", "sum_tree",
"tempdir", "tempdir",
"terminal2", "terminal2",
"text", "text2",
"thiserror", "thiserror",
"toml 0.5.11", "toml 0.5.11",
"unindent", "unindent",
@ -6927,6 +6947,24 @@ dependencies = [
"util", "util",
] ]
[[package]]
name = "rich_text2"
version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"futures 0.3.28",
"gpui2",
"language2",
"lazy_static",
"pulldown-cmark",
"smallvec",
"smol",
"sum_tree",
"theme2",
"util",
]
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.16.20" version = "0.16.20"
@ -7542,7 +7580,6 @@ dependencies = [
"collections", "collections",
"editor", "editor",
"futures 0.3.28", "futures 0.3.28",
"globset",
"gpui", "gpui",
"language", "language",
"log", "log",
@ -8856,7 +8893,7 @@ name = "theme2"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"fs", "fs2",
"gpui2", "gpui2",
"indexmap 1.9.3", "indexmap 1.9.3",
"parking_lot 0.11.2", "parking_lot 0.11.2",
@ -9864,6 +9901,7 @@ dependencies = [
"dirs 3.0.2", "dirs 3.0.2",
"futures 0.3.28", "futures 0.3.28",
"git2", "git2",
"globset",
"isahc", "isahc",
"lazy_static", "lazy_static",
"log", "log",

View file

@ -17,7 +17,7 @@ db = { package = "db2", path = "../db2" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" } util = { path = "../util" }
rpc = { package = "rpc2", path = "../rpc2" } rpc = { package = "rpc2", path = "../rpc2" }
text = { path = "../text" } text = { package = "text2", path = "../text2" }
settings = { package = "settings2", path = "../settings2" } settings = { package = "settings2", path = "../settings2" }
feature_flags = { package = "feature_flags2", path = "../feature_flags2" } feature_flags = { package = "feature_flags2", path = "../feature_flags2" }
sum_tree = { path = "../sum_tree" } sum_tree = { path = "../sum_tree" }

View file

@ -10,7 +10,7 @@ path = "src/fs2.rs"
[dependencies] [dependencies]
collections = { path = "../collections" } collections = { path = "../collections" }
rope = { path = "../rope" } rope = { path = "../rope" }
text = { path = "../text" } text = { package = "text2", path = "../text2" }
util = { path = "../util" } util = { path = "../util" }
sum_tree = { path = "../sum_tree" } sum_tree = { path = "../sum_tree" }

30
crates/git3/Cargo.toml Normal file
View file

@ -0,0 +1,30 @@
[package]
# git2 was already taken.
name = "git3"
version = "0.1.0"
edition = "2021"
publish = false

[lib]
# Library entry point is src/git.rs rather than the default src/lib.rs.
path = "src/git.rs"

[dependencies]
anyhow.workspace = true
clock = { path = "../clock" }
lazy_static.workspace = true
sum_tree = { path = "../sum_tree" }
# Workspace crate is named `text2`, but this crate imports it as `text`.
text = { package = "text2", path = "../text2" }
collections = { path = "../collections" }
util = { path = "../util" }
log.workspace = true
smol.workspace = true
parking_lot.workspace = true
async-trait.workspace = true
futures.workspace = true
# libgit2 bindings; used for computing buffer diffs.
git2.workspace = true

[dev-dependencies]
unindent.workspace = true

[features]
# Exposes test helpers (e.g. `assert_hunks`) to dependent crates' tests.
test-support = []

412
crates/git3/src/diff.rs Normal file
View file

@ -0,0 +1,412 @@
use std::{iter, ops::Range};
use sum_tree::SumTree;
use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
pub use git2 as libgit;
use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
/// Classification of a hunk relative to the diff base.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DiffHunkStatus {
    Added,
    Modified,
    Removed,
}

/// A single change between the diff base and the buffer: a range of buffer
/// coordinates paired with the byte range of diff-base text it replaces.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct DiffHunk<T> {
    pub buffer_range: Range<T>,
    pub diff_base_byte_range: Range<usize>,
}

impl DiffHunk<u32> {
    /// Derives the hunk's status from which of its two ranges is empty:
    /// nothing removed from the base is an addition, nothing present in the
    /// buffer is a removal, and anything else is a modification.
    pub fn status(&self) -> DiffHunkStatus {
        let removed_nothing = self.diff_base_byte_range.is_empty();
        let added_nothing = self.buffer_range.is_empty();
        match (removed_nothing, added_nothing) {
            (true, _) => DiffHunkStatus::Added,
            (false, true) => DiffHunkStatus::Removed,
            (false, false) => DiffHunkStatus::Modified,
        }
    }
}
impl sum_tree::Item for DiffHunk<Anchor> {
    type Summary = DiffHunkSummary;

    /// A hunk's summary is simply its own anchor range.
    fn summary(&self) -> Self::Summary {
        DiffHunkSummary {
            buffer_range: self.buffer_range.clone(),
        }
    }
}
/// Aggregate summary over `DiffHunk` items in the sum tree: the anchor range
/// spanning every hunk in a subtree, used to prune tree traversals.
#[derive(Debug, Default, Clone)]
pub struct DiffHunkSummary {
    buffer_range: Range<Anchor>,
}

impl sum_tree::Summary for DiffHunkSummary {
    // Anchors can only be compared relative to a buffer snapshot, so the
    // snapshot is threaded through as the summary context.
    type Context = text::BufferSnapshot;

    /// Widens this summary's range so it also covers `other`'s range,
    /// comparing anchors in the context of `buffer`.
    fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
        self.buffer_range.start = self
            .buffer_range
            .start
            .min(&other.buffer_range.start, buffer);
        self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer);
    }
}
/// The set of diff hunks for one buffer against its diff base, stored in a
/// sum tree ordered by position in the buffer.
#[derive(Clone)]
pub struct BufferDiff {
    // Buffer version recorded by the last `update`/`clear`; `None` until
    // either has run.
    last_buffer_version: Option<clock::Global>,
    // Hunks anchored into the buffer, ordered by buffer position.
    tree: SumTree<DiffHunk<Anchor>>,
}
impl BufferDiff {
    /// Creates an empty diff: no hunks, no buffer version recorded yet.
    pub fn new() -> BufferDiff {
        BufferDiff {
            last_buffer_version: None,
            tree: SumTree::new(),
        }
    }

    /// Returns true when the diff contains no hunks.
    pub fn is_empty(&self) -> bool {
        self.tree.is_empty()
    }

    /// Returns the hunks whose buffer rows intersect `range`, in ascending
    /// order.
    pub fn hunks_in_row_range<'a>(
        &'a self,
        range: Range<u32>,
        buffer: &'a BufferSnapshot,
    ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
        // Convert the row range into anchors and defer to the anchor-based
        // lookup.
        let start = buffer.anchor_before(Point::new(range.start, 0));
        let end = buffer.anchor_after(Point::new(range.end, 0));

        self.hunks_intersecting_range(start..end, buffer)
    }

    /// Returns the hunks intersecting the given anchor range, converted into
    /// row-based hunks, in ascending order.
    pub fn hunks_intersecting_range<'a>(
        &'a self,
        range: Range<Anchor>,
        buffer: &'a BufferSnapshot,
    ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
        // Prune the sum tree to nodes overlapping `range`: keep a node unless
        // it ends before the range starts or starts after the range ends.
        let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
            let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
            let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
            !before_start && !after_end
        });

        // Flatten each hunk into its (start anchor, end anchor) pair, carrying
        // the matching diff-base byte offsets along as payload.
        let anchor_iter = std::iter::from_fn(move || {
            cursor.next(buffer);
            cursor.item()
        })
        .flat_map(move |hunk| {
            [
                (&hunk.buffer_range.start, hunk.diff_base_byte_range.start),
                (&hunk.buffer_range.end, hunk.diff_base_byte_range.end),
            ]
            .into_iter()
        });

        // Resolve all anchors to points in one batched pass, then stitch
        // consecutive (start, end) pairs back together into row-based hunks.
        let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
        iter::from_fn(move || {
            let (start_point, start_base) = summaries.next()?;
            let (end_point, end_base) = summaries.next()?;

            // A hunk that ends partway through a line still covers that row.
            let end_row = if end_point.column > 0 {
                end_point.row + 1
            } else {
                end_point.row
            };

            Some(DiffHunk {
                buffer_range: start_point.row..end_row,
                diff_base_byte_range: start_base..end_base,
            })
        })
    }

    /// Like `hunks_intersecting_range`, but yields hunks in descending order.
    pub fn hunks_intersecting_range_rev<'a>(
        &'a self,
        range: Range<Anchor>,
        buffer: &'a BufferSnapshot,
    ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
        // Same overlap filter as the forward variant.
        let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
            let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
            let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
            !before_start && !after_end
        });

        std::iter::from_fn(move || {
            // Walk the filtered cursor backwards, resolving each hunk's
            // anchors to points one hunk at a time.
            cursor.prev(buffer);

            let hunk = cursor.item()?;
            let range = hunk.buffer_range.to_point(buffer);
            // A hunk that ends partway through a line still covers that row.
            let end_row = if range.end.column > 0 {
                range.end.row + 1
            } else {
                range.end.row
            };

            Some(DiffHunk {
                buffer_range: range.start.row..end_row,
                diff_base_byte_range: hunk.diff_base_byte_range.clone(),
            })
        })
    }

    /// Discards all hunks and records `buffer`'s current version.
    pub fn clear(&mut self, buffer: &text::BufferSnapshot) {
        self.last_buffer_version = Some(buffer.version().clone());
        self.tree = SumTree::new();
    }

    /// Recomputes the diff of `buffer` against `diff_base` from scratch,
    /// replacing all existing hunks and recording the buffer's version.
    ///
    /// NOTE(review): declared `async` although the body never awaits —
    /// presumably for call-site symmetry; confirm before relying on it.
    pub async fn update(&mut self, diff_base: &str, buffer: &text::BufferSnapshot) {
        let mut tree = SumTree::new();

        let buffer_text = buffer.as_rope().to_string();
        let patch = Self::diff(&diff_base, &buffer_text);

        if let Some(patch) = patch {
            // Running row offset between diff-base line numbers and buffer
            // rows, accumulated across hunks as lines are added/removed.
            let mut divergence = 0;
            for hunk_index in 0..patch.num_hunks() {
                let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
                tree.push(hunk, buffer);
            }
        }

        self.tree = tree;
        self.last_buffer_version = Some(buffer.version().clone());
    }

    /// Test helper: every hunk in the buffer, in ascending order.
    #[cfg(test)]
    fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
        let start = text.anchor_before(Point::new(0, 0));
        let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
        self.hunks_intersecting_range(start..end, text)
    }

    /// Produces a libgit2 patch between `head` (the diff base) and `current`
    /// (the buffer text). Returns `None` (after logging) if libgit2 fails.
    fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
        let mut options = GitOptions::default();
        // No context lines: only the changed rows are of interest here.
        options.context_lines(0);

        let patch = GitPatch::from_buffers(
            head.as_bytes(),
            None,
            current.as_bytes(),
            None,
            Some(&mut options),
        );

        match patch {
            Ok(patch) => Some(patch),
            Err(err) => {
                log::error!("`GitPatch::from_buffers` failed: {}", err);
                None
            }
        }
    }

    /// Converts one libgit2 hunk into a `DiffHunk` anchored in `buffer`.
    ///
    /// `buffer_row_divergence` is the running difference between buffer rows
    /// and diff-base rows produced by earlier hunks; it is updated here as
    /// additions/deletions are counted so later hunks map to correct rows.
    fn process_patch_hunk<'a>(
        patch: &GitPatch<'a>,
        hunk_index: usize,
        buffer: &text::BufferSnapshot,
        buffer_row_divergence: &mut i64,
    ) -> DiffHunk<Anchor> {
        let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap();
        assert!(line_item_count > 0);

        let mut first_deletion_buffer_row: Option<u32> = None;
        let mut buffer_row_range: Option<Range<u32>> = None;
        let mut diff_base_byte_range: Option<Range<usize>> = None;

        for line_index in 0..line_item_count {
            let line = patch.line_in_hunk(hunk_index, line_index).unwrap();
            let kind = line.origin_value();
            let content_offset = line.content_offset() as isize;
            let content_len = line.content().len() as isize;

            if kind == GitDiffLineType::Addition {
                *buffer_row_divergence += 1;
                // libgit2 line numbers are 1-based; buffer rows are 0-based.
                let row = line.new_lineno().unwrap().saturating_sub(1);

                // Grow (or start) the range of added buffer rows.
                match &mut buffer_row_range {
                    Some(buffer_row_range) => buffer_row_range.end = row + 1,
                    None => buffer_row_range = Some(row..row + 1),
                }
            }

            if kind == GitDiffLineType::Deletion {
                let end = content_offset + content_len;

                // Grow (or start) the byte range of removed diff-base text.
                match &mut diff_base_byte_range {
                    Some(head_byte_range) => head_byte_range.end = end as usize,
                    None => diff_base_byte_range = Some(content_offset as usize..end as usize),
                }

                // Remember where the first deletion lands in buffer rows, in
                // case this hunk contains no additions at all.
                if first_deletion_buffer_row.is_none() {
                    let old_row = line.old_lineno().unwrap().saturating_sub(1);
                    let row = old_row as i64 + *buffer_row_divergence;
                    first_deletion_buffer_row = Some(row as u32);
                }

                *buffer_row_divergence -= 1;
            }
        }

        // Pure deletion: no rows were added, so the hunk is an empty row range
        // positioned at the first deleted line.
        let buffer_row_range = buffer_row_range.unwrap_or_else(|| {
            // We cannot have an addition-less hunk without deletion(s),
            // or else there would be no hunk at all.
            let row = first_deletion_buffer_row.unwrap();
            row..row
        });
        // Pure addition: nothing was removed from the diff base.
        let diff_base_byte_range = diff_base_byte_range.unwrap_or(0..0);

        let start = Point::new(buffer_row_range.start, 0);
        let end = Point::new(buffer_row_range.end, 0);
        let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end);
        DiffHunk {
            buffer_range,
            diff_base_byte_range,
        }
    }
}
/// Asserts that `diff_hunks` matches `expected_hunks`.
///
/// Each expected hunk is `(row_range, old_text, new_text)`: `old_text` is
/// sliced out of `diff_base` by the hunk's byte range, and `new_text` is read
/// from `buffer` over the hunk's row range (ranges may cross newlines).
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn assert_hunks<Iter>(
    diff_hunks: Iter,
    buffer: &BufferSnapshot,
    diff_base: &str,
    expected_hunks: &[(Range<u32>, &str, &str)],
) where
    Iter: Iterator<Item = DiffHunk<u32>>,
{
    // Materialize each hunk as (row range, removed base text, current text).
    let mut actual_hunks = Vec::new();
    for hunk in diff_hunks {
        let row_range = hunk.buffer_range.clone();
        let old_text = &diff_base[hunk.diff_base_byte_range];
        let new_text = buffer
            .text_for_range(Point::new(row_range.start, 0)..Point::new(row_range.end, 0))
            .collect::<String>();
        actual_hunks.push((row_range, old_text, new_text));
    }

    // Normalize the expectations into the same owned representation.
    let expected_hunks: Vec<_> = expected_hunks
        .iter()
        .map(|(range, old, new)| (range.clone(), *old, new.to_string()))
        .collect();

    assert_eq!(actual_hunks, expected_hunks);
}
#[cfg(test)]
mod tests {
    use std::assert_eq;

    use super::*;
    use text::Buffer;
    use unindent::Unindent as _;

    // Single-hunk diff, then an edit that shifts rows, then a clear.
    #[test]
    fn test_buffer_diff_simple() {
        let diff_base = "
            one
            two
            three
        "
        .unindent();

        let buffer_text = "
            one
            HELLO
            three
        "
        .unindent();

        let mut buffer = Buffer::new(0, 0, buffer_text);
        let mut diff = BufferDiff::new();
        smol::block_on(diff.update(&diff_base, &buffer));
        // "two" -> "HELLO" is one modified hunk on row 1.
        assert_hunks(
            diff.hunks(&buffer),
            &buffer,
            &diff_base,
            &[(1..2, "two\n", "HELLO\n")],
        );

        // Inserting a line at the top adds a hunk and shifts the existing
        // hunk down one row.
        buffer.edit([(0..0, "point five\n")]);
        smol::block_on(diff.update(&diff_base, &buffer));
        assert_hunks(
            diff.hunks(&buffer),
            &buffer,
            &diff_base,
            &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")],
        );

        // Clearing removes all hunks.
        diff.clear(&buffer);
        assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]);
    }

    // Row-range queries should return only the hunks intersecting the range.
    #[test]
    fn test_buffer_diff_range() {
        let diff_base = "
            one
            two
            three
            four
            five
            six
            seven
            eight
            nine
            ten
        "
        .unindent();

        let buffer_text = "
            A
            one
            B
            two
            C
            three
            HELLO
            four
            five
            SIXTEEN
            seven
            eight
            WORLD
            nine
            ten
        "
        .unindent();

        let buffer = Buffer::new(0, 0, buffer_text);
        let mut diff = BufferDiff::new();
        smol::block_on(diff.update(&diff_base, &buffer));
        assert_eq!(diff.hunks(&buffer).count(), 8);

        // Rows 7..12 intersect three of the eight hunks.
        assert_hunks(
            diff.hunks_in_row_range(7..12, &buffer),
            &buffer,
            &diff_base,
            &[
                (6..7, "", "HELLO\n"),
                (9..10, "six\n", "SIXTEEN\n"),
                (12..13, "", "WORLD\n"),
            ],
        );
    }
}

11
crates/git3/src/git.rs Normal file
View file

@ -0,0 +1,11 @@
use std::ffi::OsStr;
pub use git2 as libgit;
pub use lazy_static::lazy_static;
pub mod diff;
lazy_static! {
    /// File name of a repository's metadata directory (".git").
    pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git");
    /// File name of an ignore file (".gitignore").
    pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
}

View file

@ -12,7 +12,7 @@ doctest = false
editor = { path = "../editor" } editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" } util = { path = "../util" }
workspace = { path = "../workspace" } workspace2 = { path = "../workspace2" }
settings2 = { path = "../settings2" } settings2 = { path = "../settings2" }
anyhow.workspace = true anyhow.workspace = true

View file

@ -9,7 +9,7 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use workspace::AppState; use workspace2::AppState;
// use zed::AppState; // use zed::AppState;
// todo!(); // todo!();
@ -59,7 +59,7 @@ pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx)); // cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx));
} }
pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) { pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
let settings = JournalSettings::get_global(cx); let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) { let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir, Some(journal_dir) => journal_dir,
@ -77,7 +77,7 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
let now = now.time(); let now = now.time();
let _entry_heading = heading_entry(now, &settings.hour_format); let _entry_heading = heading_entry(now, &settings.hour_format);
let _create_entry = cx.background_executor().spawn(async move { let create_entry = cx.background_executor().spawn(async move {
std::fs::create_dir_all(month_dir)?; std::fs::create_dir_all(month_dir)?;
OpenOptions::new() OpenOptions::new()
.create(true) .create(true)
@ -86,37 +86,38 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
Ok::<_, std::io::Error>((journal_dir, entry_path)) Ok::<_, std::io::Error>((journal_dir, entry_path))
}); });
// todo!("workspace") cx.spawn(|mut cx| async move {
// cx.spawn(|cx| async move { let (journal_dir, entry_path) = create_entry.await?;
// let (journal_dir, entry_path) = create_entry.await?; let (workspace, _) = cx
// let (workspace, _) = .update(|cx| workspace2::open_paths(&[journal_dir], &app_state, None, cx))?
// cx.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?; .await?;
// let opened = workspace let _opened = workspace
// .update(&mut cx, |workspace, cx| { .update(&mut cx, |workspace, cx| {
// workspace.open_paths(vec![entry_path], true, cx) workspace.open_paths(vec![entry_path], true, cx)
// })? })?
// .await; .await;
// if let Some(Some(Ok(item))) = opened.first() { // todo!("editor")
// if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) { // if let Some(Some(Ok(item))) = opened.first() {
// editor.update(&mut cx, |editor, cx| { // if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
// let len = editor.buffer().read(cx).len(cx); // editor.update(&mut cx, |editor, cx| {
// editor.change_selections(Some(Autoscroll::center()), cx, |s| { // let len = editor.buffer().read(cx).len(cx);
// s.select_ranges([len..len]) // editor.change_selections(Some(Autoscroll::center()), cx, |s| {
// }); // s.select_ranges([len..len])
// if len > 0 { // });
// editor.insert("\n\n", cx); // if len > 0 {
// } // editor.insert("\n\n", cx);
// editor.insert(&entry_heading, cx); // }
// editor.insert("\n\n", cx); // editor.insert(&entry_heading, cx);
// })?; // editor.insert("\n\n", cx);
// } // })?;
// } // }
// }
// anyhow::Ok(()) anyhow::Ok(())
// }) })
// .detach_and_log_err(cx); .detach_and_log_err(cx);
} }
fn journal_dir(path: &str) -> Option<PathBuf> { fn journal_dir(path: &str) -> Option<PathBuf> {

View file

@ -25,13 +25,13 @@ test-support = [
clock = { path = "../clock" } clock = { path = "../clock" }
collections = { path = "../collections" } collections = { path = "../collections" }
fuzzy = { package = "fuzzy2", path = "../fuzzy2" } fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
git = { path = "../git" } git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
lsp = { package = "lsp2", path = "../lsp2" } lsp = { package = "lsp2", path = "../lsp2" }
rpc = { package = "rpc2", path = "../rpc2" } rpc = { package = "rpc2", path = "../rpc2" }
settings = { package = "settings2", path = "../settings2" } settings = { package = "settings2", path = "../settings2" }
sum_tree = { path = "../sum_tree" } sum_tree = { path = "../sum_tree" }
text = { path = "../text" } text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" } theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" } util = { path = "../util" }
@ -64,7 +64,7 @@ client = { package = "client2", path = "../client2", features = ["test-support"]
collections = { path = "../collections", features = ["test-support"] } collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
text = { path = "../text", features = ["test-support"] } text = { package = "text2", path = "../text2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] } settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] } util = { path = "../util", features = ["test-support"] }
ctor.workspace = true ctor.workspace = true

View file

@ -234,7 +234,6 @@ impl SyntaxMap {
self.snapshot.interpolate(text); self.snapshot.interpolate(text);
} }
#[allow(dead_code)] // todo!()
#[cfg(test)] #[cfg(test)]
pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) { pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
self.snapshot self.snapshot
@ -786,7 +785,6 @@ impl SyntaxSnapshot {
) )
} }
#[allow(dead_code)] // todo!()
#[cfg(test)] #[cfg(test)]
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> { pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
self.layers_for_range(0..buffer.len(), buffer).collect() self.layers_for_range(0..buffer.len(), buffer).collect()

View file

@ -23,15 +23,15 @@ test-support = [
client = { package = "client2", path = "../client2" } client = { package = "client2", path = "../client2" }
clock = { path = "../clock" } clock = { path = "../clock" }
collections = { path = "../collections" } collections = { path = "../collections" }
git = { path = "../git" } git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" } language = { package = "language2", path = "../language2" }
lsp = { package = "lsp2", path = "../lsp2" } lsp = { package = "lsp2", path = "../lsp2" }
rich_text = { path = "../rich_text" } rich_text = { package = "rich_text2", path = "../rich_text2" }
settings = { package = "settings2", path = "../settings2" } settings = { package = "settings2", path = "../settings2" }
snippet = { path = "../snippet" } snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" } sum_tree = { path = "../sum_tree" }
text = { path = "../text" } text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" } theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" } util = { path = "../util" }
@ -60,7 +60,7 @@ tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies] [dev-dependencies]
copilot = { package = "copilot2", path = "../copilot2", features = ["test-support"] } copilot = { package = "copilot2", path = "../copilot2", features = ["test-support"] }
text = { path = "../text", features = ["test-support"] } text = { package = "text2", path = "../text2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] } language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }

View file

@ -1,9 +1,8 @@
use std::collections::VecDeque;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
use anyhow::Context; use anyhow::Context;
use collections::HashMap; use collections::{HashMap, HashSet};
use fs::Fs; use fs::Fs;
use gpui::{AsyncAppContext, ModelHandle}; use gpui::{AsyncAppContext, ModelHandle};
use language::language_settings::language_settings; use language::language_settings::language_settings;
@ -11,7 +10,7 @@ use language::{Buffer, Diff};
use lsp::{LanguageServer, LanguageServerId}; use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use util::paths::DEFAULT_PRETTIER_DIR; use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier { pub enum Prettier {
Real(RealPrettier), Real(RealPrettier),
@ -20,7 +19,6 @@ pub enum Prettier {
} }
pub struct RealPrettier { pub struct RealPrettier {
worktree_id: Option<usize>,
default: bool, default: bool,
prettier_dir: PathBuf, prettier_dir: PathBuf,
server: Arc<LanguageServer>, server: Arc<LanguageServer>,
@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier { pub struct TestPrettier {
worktree_id: Option<usize>,
prettier_dir: PathBuf, prettier_dir: PathBuf,
default: bool, default: bool,
} }
#[derive(Debug)]
pub struct LocateStart {
pub worktree_root_path: Arc<Path>,
pub starting_path: Arc<Path>,
}
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js"; pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js"); pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier"; const PRETTIER_PACKAGE_NAME: &str = "prettier";
@ -63,79 +54,106 @@ impl Prettier {
".editorconfig", ".editorconfig",
]; ];
pub async fn locate( pub async fn locate_prettier_installation(
starting_path: Option<LocateStart>, fs: &dyn Fs,
fs: Arc<dyn Fs>, installed_prettiers: &HashSet<PathBuf>,
) -> anyhow::Result<PathBuf> { locate_from: &Path,
fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { ) -> anyhow::Result<Option<PathBuf>> {
path_component.as_os_str().to_string_lossy() == "node_modules" let mut path_to_check = locate_from
.components()
.take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
.collect::<PathBuf>();
let path_to_check_metadata = fs
.metadata(&path_to_check)
.await
.with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
.with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
if !path_to_check_metadata.is_dir {
path_to_check.pop();
} }
let paths_to_check = match starting_path.as_ref() { let mut project_path_with_prettier_dependency = None;
Some(starting_path) => { loop {
let worktree_root = starting_path if installed_prettiers.contains(&path_to_check) {
.worktree_root_path log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
.components() return Ok(Some(path_to_check));
.into_iter() } else if let Some(package_json_contents) =
.take_while(|path_component| !is_node_modules(path_component)) read_package_json(fs, &path_to_check).await?
.collect::<PathBuf>(); {
if worktree_root != starting_path.worktree_root_path.as_ref() { if has_prettier_in_package_json(&package_json_contents) {
vec![worktree_root] if has_prettier_in_node_modules(fs, &path_to_check).await? {
log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
return Ok(Some(path_to_check));
} else if project_path_with_prettier_dependency.is_none() {
project_path_with_prettier_dependency = Some(path_to_check.clone());
}
} else { } else {
if starting_path.starting_path.as_ref() == Path::new("") { match package_json_contents.get("workspaces") {
worktree_root Some(serde_json::Value::Array(workspaces)) => {
.parent() match &project_path_with_prettier_dependency {
.map(|path| vec![path.to_path_buf()]) Some(project_path_with_prettier_dependency) => {
.unwrap_or_default() let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
} else { if workspaces.iter().filter_map(|value| {
let file_to_format = starting_path.starting_path.as_ref(); if let serde_json::Value::String(s) = value {
let mut paths_to_check = VecDeque::new(); Some(s.clone())
let mut current_path = worktree_root; } else {
for path_component in file_to_format.components().into_iter() { log::warn!("Skipping non-string 'workspaces' value: {value:?}");
let new_path = current_path.join(path_component); None
let old_path = std::mem::replace(&mut current_path, new_path); }
paths_to_check.push_front(old_path); }).any(|workspace_definition| {
if is_node_modules(&path_component) { if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
break; path_matcher.is_match(subproject_path)
} else {
workspace_definition == subproject_path.to_string_lossy()
}
}) {
anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
return Ok(Some(path_to_check));
} else {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
}
}
None => {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
}
} }
} },
Vec::from(paths_to_check) Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
} }
} }
} }
None => Vec::new(),
};
match find_closest_prettier_dir(paths_to_check, fs.as_ref()) if !path_to_check.pop() {
.await match project_path_with_prettier_dependency {
.with_context(|| format!("finding prettier starting with {starting_path:?}"))? Some(closest_prettier_discovered) => {
{ anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
Some(prettier_dir) => Ok(prettier_dir), }
None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()), None => {
log::debug!("Found no prettier in ancestors of {locate_from:?}");
return Ok(None);
}
}
}
} }
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub async fn start( pub async fn start(
worktree_id: Option<usize>,
_: LanguageServerId, _: LanguageServerId,
prettier_dir: PathBuf, prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>, _: Arc<dyn NodeRuntime>,
_: AsyncAppContext, _: AsyncAppContext,
) -> anyhow::Result<Self> { ) -> anyhow::Result<Self> {
Ok( Ok(Self::Test(TestPrettier {
#[cfg(any(test, feature = "test-support"))] default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
Self::Test(TestPrettier { prettier_dir,
worktree_id, }))
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}),
)
} }
#[cfg(not(any(test, feature = "test-support")))] #[cfg(not(any(test, feature = "test-support")))]
pub async fn start( pub async fn start(
worktree_id: Option<usize>,
server_id: LanguageServerId, server_id: LanguageServerId,
prettier_dir: PathBuf, prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>, node: Arc<dyn NodeRuntime>,
@ -143,7 +161,7 @@ impl Prettier {
) -> anyhow::Result<Self> { ) -> anyhow::Result<Self> {
use lsp::LanguageServerBinary; use lsp::LanguageServerBinary;
let backgroud = cx.background(); let background = cx.background();
anyhow::ensure!( anyhow::ensure!(
prettier_dir.is_dir(), prettier_dir.is_dir(),
"Prettier dir {prettier_dir:?} is not a directory" "Prettier dir {prettier_dir:?} is not a directory"
@ -154,7 +172,7 @@ impl Prettier {
"no prettier server package found at {prettier_server:?}" "no prettier server package found at {prettier_server:?}"
); );
let node_path = backgroud let node_path = background
.spawn(async move { node.binary_path().await }) .spawn(async move { node.binary_path().await })
.await?; .await?;
let server = LanguageServer::new( let server = LanguageServer::new(
@ -169,12 +187,11 @@ impl Prettier {
cx, cx,
) )
.context("prettier server creation")?; .context("prettier server creation")?;
let server = backgroud let server = background
.spawn(server.initialize(None)) .spawn(server.initialize(None))
.await .await
.context("prettier server initialization")?; .context("prettier server initialization")?;
Ok(Self::Real(RealPrettier { Ok(Self::Real(RealPrettier {
worktree_id,
server, server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir, prettier_dir,
@ -340,64 +357,61 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir, Self::Test(test_prettier) => &test_prettier.prettier_dir,
} }
} }
pub fn worktree_id(&self) -> Option<usize> {
match self {
Self::Real(local) => local.worktree_id,
#[cfg(any(test, feature = "test-support"))]
Self::Test(test_prettier) => test_prettier.worktree_id,
}
}
} }
async fn find_closest_prettier_dir( async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
paths_to_check: Vec<PathBuf>, let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
fs: &dyn Fs, if let Some(node_modules_location_metadata) = fs
) -> anyhow::Result<Option<PathBuf>> { .metadata(&possible_node_modules_location)
for path in paths_to_check { .await
let possible_package_json = path.join("package.json"); .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
if let Some(package_json_metadata) = fs {
.metadata(&possible_package_json) return Ok(node_modules_location_metadata.is_dir);
.await }
.with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? Ok(false)
{ }
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
) {
if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
{
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
}
}
}
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); async fn read_package_json(
if let Some(node_modules_location_metadata) = fs fs: &dyn Fs,
.metadata(&possible_node_modules_location) path: &Path,
.await ) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? let possible_package_json = path.join("package.json");
{ if let Some(package_json_metadata) = fs
if node_modules_location_metadata.is_dir { .metadata(&possible_package_json)
return Ok(Some(path)); .await
} .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
return serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
)
.map(Some)
.with_context(|| format!("parsing {possible_package_json:?} file contents"));
} }
} }
Ok(None) Ok(None)
} }
fn has_prettier_in_package_json(
package_json_contents: &HashMap<String, serde_json::Value>,
) -> bool {
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
false
}
enum Format {} enum Format {}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
@ -436,3 +450,316 @@ impl lsp::request::Request for ClearCache {
type Result = (); type Result = ();
const METHOD: &'static str = "prettier/clear_cache"; const METHOD: &'static str = "prettier/clear_cache";
} }
#[cfg(test)]
mod tests {
use fs::FakeFs;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
".config": {
"zed": {
"settings.json": r#"{ "formatter": "auto" }"#,
},
},
"work": {
"project": {
"src": {
"index.js": "// index.js file contents",
},
"node_modules": {
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
"package.json": r#"{}"#
},
}
}),
)
.await;
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/.config/zed/settings.json"),
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy without it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/src/index.js")
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/node_modules/expect/build/print.js")
)
.await
.unwrap()
.is_none(),
"Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"web_blog": {
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
},
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root"
);
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/node_modules/expect/build/print.js")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root even for node_modules files"
);
}
#[gpui::test]
async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"web_blog": {
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}
}),
)
.await;
let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new(path)
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains(path), "Error message should mention which start file was used for location");
assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::from_iter(
[PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
),
Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/work")),
"Should return first cached value found without path checks"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
},
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
).await.unwrap(),
Some(PathBuf::from("/root/work/full-stack-foundations")),
"Should ascend to the multi-workspace root and find the prettier there",
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
cx: &mut gpui::TestAppContext,
) {
let fs = FakeFs::new(cx.background());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
},
}
}),
)
.await;
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
}
}

View file

@ -1,11 +1,13 @@
const { Buffer } = require('buffer'); const { Buffer } = require("buffer");
const fs = require("fs"); const fs = require("fs");
const path = require("path"); const path = require("path");
const { once } = require('events'); const { once } = require("events");
const prettierContainerPath = process.argv[2]; const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) { if (prettierContainerPath == null || prettierContainerPath.length == 0) {
process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`); process.stderr.write(
`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
);
process.exit(1); process.exit(1);
} }
fs.stat(prettierContainerPath, (err, stats) => { fs.stat(prettierContainerPath, (err, stats) => {
@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1); process.exit(1);
} }
}); });
const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier'); const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier { class Prettier {
constructor(path, prettier, config) { constructor(path, prettier, config) {
@ -34,7 +36,7 @@ class Prettier {
let config; let config;
try { try {
prettier = await loadPrettier(prettierPath); prettier = await loadPrettier(prettierPath);
config = await prettier.resolveConfig(prettierPath) || {}; config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) { } catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`); process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1); process.exit(1);
@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`); process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume(); process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config)); handleBuffer(new Prettier(prettierPath, prettier, config));
})() })();
async function handleBuffer(prettier) { async function handleBuffer(prettier) {
for await (const messageText of readStdin()) { for await (const messageText of readStdin()) {
@ -54,25 +56,29 @@ async function handleBuffer(prettier) {
continue; continue;
} }
// allow concurrent request handling by not `await`ing the message handling promise (async function) // allow concurrent request handling by not `await`ing the message handling promise (async function)
handleMessage(message, prettier).catch(e => { handleMessage(message, prettier).catch((e) => {
const errorMessage = message; const errorMessage = message;
if ((errorMessage.params || {}).text !== undefined) { if ((errorMessage.params || {}).text !== undefined) {
errorMessage.params.text = "..snip.."; errorMessage.params.text = "..snip..";
} }
sendResponse({ id: message.id, ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`) }); }); sendResponse({
id: message.id,
...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
});
});
} }
} }
const headerSeparator = "\r\n"; const headerSeparator = "\r\n";
const contentLengthHeaderName = 'Content-Length'; const contentLengthHeaderName = "Content-Length";
async function* readStdin() { async function* readStdin() {
let buffer = Buffer.alloc(0); let buffer = Buffer.alloc(0);
let streamEnded = false; let streamEnded = false;
process.stdin.on('end', () => { process.stdin.on("end", () => {
streamEnded = true; streamEnded = true;
}); });
process.stdin.on('data', (data) => { process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]); buffer = Buffer.concat([buffer, data]);
}); });
@ -80,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage)); sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0); buffer = Buffer.alloc(0);
messageLength = null; messageLength = null;
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
streamEnded = false; streamEnded = false;
} }
@ -91,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) { if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) { while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) { if (streamEnded) {
await handleStreamEnded('Unexpected end of stream: headers not found'); await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop; continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) { } else if (buffer.length > contentLengthHeaderName.length * 10) {
await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`); await handleStreamEnded(
`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
);
continue main_loop; continue main_loop;
} }
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
} }
const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii'); const headers = buffer
const contentLengthHeader = headers.split(headerSeparator) .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
.map(header => header.split(':')) .toString("ascii");
.filter(header => header[2] === undefined) const contentLengthHeader = headers
.filter(header => (header[1] || '').length > 0) .split(headerSeparator)
.find(header => (header[0] || '').trim() === contentLengthHeaderName); .map((header) => header.split(":"))
.filter((header) => header[2] === undefined)
.filter((header) => (header[1] || "").length > 0)
.find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1]; const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) { if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`); await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@ -114,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10); messageLength = parseInt(contentLength, 10);
} }
while (buffer.length < (headersLength + messageLength)) { while (buffer.length < headersLength + messageLength) {
if (streamEnded) { if (streamEnded) {
await handleStreamEnded( await handleStreamEnded(
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`); `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
);
continue main_loop; continue main_loop;
} }
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
} }
const messageEnd = headersLength + messageLength; const messageEnd = headersLength + messageLength;
@ -128,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd); buffer = buffer.subarray(messageEnd);
headersLength = null; headersLength = null;
messageLength = null; messageLength = null;
yield message.toString('utf8'); yield message.toString("utf8");
} }
} catch (e) { } catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`)); sendResponse(makeError(`Error reading stdin: ${e}`));
} finally { } finally {
process.stdin.off('data', () => { }); process.stdin.off("data", () => {});
} }
} }
@ -146,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`); throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
} }
if (method === 'prettier/format') { if (method === "prettier/format") {
if (params === undefined || params.text === undefined) { if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`); throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
} }
@ -156,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {}; let resolvedConfig = {};
if (params.options.filepath !== undefined) { if (params.options.filepath !== undefined) {
resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {}; resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
} }
const options = { const options = {
@ -164,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig, ...resolvedConfig,
parser: params.options.parser, parser: params.options.parser,
plugins: params.options.plugins, plugins: params.options.plugins,
path: params.options.filepath path: params.options.filepath,
}; };
process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`); process.stderr.write(
`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
params.options.filepath || ""
}' with options: ${JSON.stringify(options)}\n`,
);
const formattedText = await prettier.prettier.format(params.text, options); const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } }); sendResponse({ id, result: { text: formattedText } });
} else if (method === 'prettier/clear_cache') { } else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache(); prettier.prettier.clearConfigCache();
prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {}; prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null }); sendResponse({ id, result: null });
} else if (method === 'initialize') { } else if (method === "initialize") {
sendResponse({ sendResponse({
id: id || 0, id,
result: { result: {
"capabilities": {} capabilities: {},
} },
}); });
} else { } else {
throw new Error(`Unknown method: ${method}`); throw new Error(`Unknown method: ${method}`);
@ -188,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) { function makeError(message) {
return { return {
error: { error: {
"code": -32600, // invalid request code code: -32600, // invalid request code
message, message,
} },
}; };
} }
function sendResponse(response) { function sendResponse(response) {
const responsePayloadString = JSON.stringify({ const responsePayloadString = JSON.stringify({
jsonrpc: "2.0", jsonrpc: "2.0",
...response ...response,
}); });
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`; const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
responsePayloadString,
)}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString); process.stdout.write(headers + responsePayloadString);
} }

View file

@ -1,5 +1,5 @@
use anyhow::Context; use anyhow::Context;
use collections::HashMap; use collections::{HashMap, HashSet};
use fs::Fs; use fs::Fs;
use gpui::{AsyncAppContext, Model}; use gpui::{AsyncAppContext, Model};
use language::{language_settings::language_settings, Buffer, Diff}; use language::{language_settings::language_settings, Buffer, Diff};
@ -7,11 +7,10 @@ use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::{ use std::{
collections::VecDeque,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use util::paths::DEFAULT_PRETTIER_DIR; use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier { pub enum Prettier {
Real(RealPrettier), Real(RealPrettier),
@ -20,7 +19,6 @@ pub enum Prettier {
} }
pub struct RealPrettier { pub struct RealPrettier {
worktree_id: Option<usize>,
default: bool, default: bool,
prettier_dir: PathBuf, prettier_dir: PathBuf,
server: Arc<LanguageServer>, server: Arc<LanguageServer>,
@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier { pub struct TestPrettier {
worktree_id: Option<usize>,
prettier_dir: PathBuf, prettier_dir: PathBuf,
default: bool, default: bool,
} }
#[derive(Debug)]
pub struct LocateStart {
pub worktree_root_path: Arc<Path>,
pub starting_path: Arc<Path>,
}
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js"; pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js"); pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier"; const PRETTIER_PACKAGE_NAME: &str = "prettier";
@ -63,79 +54,106 @@ impl Prettier {
".editorconfig", ".editorconfig",
]; ];
pub async fn locate( pub async fn locate_prettier_installation(
starting_path: Option<LocateStart>, fs: &dyn Fs,
fs: Arc<dyn Fs>, installed_prettiers: &HashSet<PathBuf>,
) -> anyhow::Result<PathBuf> { locate_from: &Path,
fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { ) -> anyhow::Result<Option<PathBuf>> {
path_component.as_os_str().to_string_lossy() == "node_modules" let mut path_to_check = locate_from
.components()
.take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
.collect::<PathBuf>();
let path_to_check_metadata = fs
.metadata(&path_to_check)
.await
.with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
.with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
if !path_to_check_metadata.is_dir {
path_to_check.pop();
} }
let paths_to_check = match starting_path.as_ref() { let mut project_path_with_prettier_dependency = None;
Some(starting_path) => { loop {
let worktree_root = starting_path if installed_prettiers.contains(&path_to_check) {
.worktree_root_path log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
.components() return Ok(Some(path_to_check));
.into_iter() } else if let Some(package_json_contents) =
.take_while(|path_component| !is_node_modules(path_component)) read_package_json(fs, &path_to_check).await?
.collect::<PathBuf>(); {
if worktree_root != starting_path.worktree_root_path.as_ref() { if has_prettier_in_package_json(&package_json_contents) {
vec![worktree_root] if has_prettier_in_node_modules(fs, &path_to_check).await? {
log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
return Ok(Some(path_to_check));
} else if project_path_with_prettier_dependency.is_none() {
project_path_with_prettier_dependency = Some(path_to_check.clone());
}
} else { } else {
if starting_path.starting_path.as_ref() == Path::new("") { match package_json_contents.get("workspaces") {
worktree_root Some(serde_json::Value::Array(workspaces)) => {
.parent() match &project_path_with_prettier_dependency {
.map(|path| vec![path.to_path_buf()]) Some(project_path_with_prettier_dependency) => {
.unwrap_or_default() let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
} else { if workspaces.iter().filter_map(|value| {
let file_to_format = starting_path.starting_path.as_ref(); if let serde_json::Value::String(s) = value {
let mut paths_to_check = VecDeque::new(); Some(s.clone())
let mut current_path = worktree_root; } else {
for path_component in file_to_format.components().into_iter() { log::warn!("Skipping non-string 'workspaces' value: {value:?}");
let new_path = current_path.join(path_component); None
let old_path = std::mem::replace(&mut current_path, new_path); }
paths_to_check.push_front(old_path); }).any(|workspace_definition| {
if is_node_modules(&path_component) { if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
break; path_matcher.is_match(subproject_path)
} } else {
workspace_definition == subproject_path.to_string_lossy()
}
}) {
anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
return Ok(Some(path_to_check));
} else {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
}
}
None => {
log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
}
}
},
Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
} }
Vec::from(paths_to_check) }
}
if !path_to_check.pop() {
match project_path_with_prettier_dependency {
Some(closest_prettier_discovered) => {
anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
}
None => {
log::debug!("Found no prettier in ancestors of {locate_from:?}");
return Ok(None);
} }
} }
} }
None => Vec::new(),
};
match find_closest_prettier_dir(paths_to_check, fs.as_ref())
.await
.with_context(|| format!("finding prettier starting with {starting_path:?}"))?
{
Some(prettier_dir) => Ok(prettier_dir),
None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
} }
} }
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub async fn start( pub async fn start(
worktree_id: Option<usize>,
_: LanguageServerId, _: LanguageServerId,
prettier_dir: PathBuf, prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>, _: Arc<dyn NodeRuntime>,
_: AsyncAppContext, _: AsyncAppContext,
) -> anyhow::Result<Self> { ) -> anyhow::Result<Self> {
Ok( Ok(Self::Test(TestPrettier {
#[cfg(any(test, feature = "test-support"))] default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
Self::Test(TestPrettier { prettier_dir,
worktree_id, }))
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
}),
)
} }
#[cfg(not(any(test, feature = "test-support")))] #[cfg(not(any(test, feature = "test-support")))]
pub async fn start( pub async fn start(
worktree_id: Option<usize>,
server_id: LanguageServerId, server_id: LanguageServerId,
prettier_dir: PathBuf, prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>, node: Arc<dyn NodeRuntime>,
@ -174,7 +192,6 @@ impl Prettier {
.await .await
.context("prettier server initialization")?; .context("prettier server initialization")?;
Ok(Self::Real(RealPrettier { Ok(Self::Real(RealPrettier {
worktree_id,
server, server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir, prettier_dir,
@ -370,64 +387,61 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir, Self::Test(test_prettier) => &test_prettier.prettier_dir,
} }
} }
pub fn worktree_id(&self) -> Option<usize> {
match self {
Self::Real(local) => local.worktree_id,
#[cfg(any(test, feature = "test-support"))]
Self::Test(test_prettier) => test_prettier.worktree_id,
}
}
} }
async fn find_closest_prettier_dir( async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
paths_to_check: Vec<PathBuf>, let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
fs: &dyn Fs, if let Some(node_modules_location_metadata) = fs
) -> anyhow::Result<Option<PathBuf>> { .metadata(&possible_node_modules_location)
for path in paths_to_check { .await
let possible_package_json = path.join("package.json"); .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
if let Some(package_json_metadata) = fs {
.metadata(&possible_package_json) return Ok(node_modules_location_metadata.is_dir);
.await }
.with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? Ok(false)
{ }
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
) {
if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
{
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return Ok(Some(path));
}
}
}
}
}
let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); async fn read_package_json(
if let Some(node_modules_location_metadata) = fs fs: &dyn Fs,
.metadata(&possible_node_modules_location) path: &Path,
.await ) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
.with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? let possible_package_json = path.join("package.json");
{ if let Some(package_json_metadata) = fs
if node_modules_location_metadata.is_dir { .metadata(&possible_package_json)
return Ok(Some(path)); .await
} .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
{
if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
let package_json_contents = fs
.load(&possible_package_json)
.await
.with_context(|| format!("reading {possible_package_json:?} file contents"))?;
return serde_json::from_str::<HashMap<String, serde_json::Value>>(
&package_json_contents,
)
.map(Some)
.with_context(|| format!("parsing {possible_package_json:?} file contents"));
} }
} }
Ok(None) Ok(None)
} }
fn has_prettier_in_package_json(
package_json_contents: &HashMap<String, serde_json::Value>,
) -> bool {
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
if o.contains_key(PRETTIER_PACKAGE_NAME) {
return true;
}
}
false
}
enum Format {} enum Format {}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
@ -466,3 +480,316 @@ impl lsp::request::Request for ClearCache {
type Result = (); type Result = ();
const METHOD: &'static str = "prettier/clear_cache"; const METHOD: &'static str = "prettier/clear_cache";
} }
#[cfg(test)]
mod tests {
use fs::FakeFs;
use serde_json::json;
use super::*;
#[gpui::test]
async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
".config": {
"zed": {
"settings.json": r#"{ "formatter": "auto" }"#,
},
},
"work": {
"project": {
"src": {
"index.js": "// index.js file contents",
},
"node_modules": {
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
"package.json": r#"{}"#
},
}
}),
)
.await;
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/.config/zed/settings.json"),
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy without it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/src/index.js")
)
.await
.unwrap()
.is_none(),
"Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
);
assert!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/project/node_modules/expect/build/print.js")
)
.await
.unwrap()
.is_none(),
"Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"web_blog": {
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
"expect": {
"build": {
"print.js": "// print.js file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.5.1"
}
}"#,
},
},
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root"
);
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/web_blog/node_modules/expect/build/print.js")
)
.await
.unwrap(),
Some(PathBuf::from("/root/web_blog")),
"Should find a preinstalled prettier in the project root even for node_modules files"
);
}
#[gpui::test]
async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"web_blog": {
"pages": {
"[slug].tsx": "// [slug].tsx file contents",
},
"package.json": r#"{
"devDependencies": {
"prettier": "2.3.0"
},
"prettier": {
"semi": false,
"printWidth": 80,
"htmlWhitespaceSensitivity": "strict",
"tabWidth": 4
}
}"#
}
}
}),
)
.await;
let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new(path)
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains(path), "Error message should mention which start file was used for location");
assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::from_iter(
[PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
),
Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
)
.await
.unwrap(),
Some(PathBuf::from("/root/work")),
"Should return first cached value found without path checks"
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
"node_modules": {
"prettier": {
"index.js": "// Dummy prettier package file",
},
},
},
}
}),
)
.await;
assert_eq!(
Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
).await.unwrap(),
Some(PathBuf::from("/root/work/full-stack-foundations")),
"Should ascend to the multi-workspace root and find the prettier there",
);
}
#[gpui::test]
async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
cx: &mut gpui::TestAppContext,
) {
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root",
json!({
"work": {
"full-stack-foundations": {
"exercises": {
"03.loading": {
"01.problem.loader": {
"app": {
"routes": {
"users+": {
"$username_+": {
"notes.tsx": "// notes.tsx file contents",
},
},
},
},
"node_modules": {},
"package.json": r#"{
"devDependencies": {
"prettier": "^3.0.3"
}
}"#
},
},
},
"package.json": r#"{
"workspaces": ["exercises/*/*", "examples/*"]
}"#,
},
}
}),
)
.await;
match Prettier::locate_prettier_installation(
fs.as_ref(),
&HashSet::default(),
Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
)
.await {
Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
Err(e) => {
let message = e.to_string();
assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
},
};
}
}

View file

@ -1,11 +1,13 @@
const { Buffer } = require('buffer'); const { Buffer } = require("buffer");
const fs = require("fs"); const fs = require("fs");
const path = require("path"); const path = require("path");
const { once } = require('events'); const { once } = require("events");
const prettierContainerPath = process.argv[2]; const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) { if (prettierContainerPath == null || prettierContainerPath.length == 0) {
process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`); process.stderr.write(
`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
);
process.exit(1); process.exit(1);
} }
fs.stat(prettierContainerPath, (err, stats) => { fs.stat(prettierContainerPath, (err, stats) => {
@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1); process.exit(1);
} }
}); });
const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier'); const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier { class Prettier {
constructor(path, prettier, config) { constructor(path, prettier, config) {
@ -34,7 +36,7 @@ class Prettier {
let config; let config;
try { try {
prettier = await loadPrettier(prettierPath); prettier = await loadPrettier(prettierPath);
config = await prettier.resolveConfig(prettierPath) || {}; config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) { } catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`); process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1); process.exit(1);
@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`); process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume(); process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config)); handleBuffer(new Prettier(prettierPath, prettier, config));
})() })();
async function handleBuffer(prettier) { async function handleBuffer(prettier) {
for await (const messageText of readStdin()) { for await (const messageText of readStdin()) {
@ -54,22 +56,29 @@ async function handleBuffer(prettier) {
continue; continue;
} }
// allow concurrent request handling by not `await`ing the message handling promise (async function) // allow concurrent request handling by not `await`ing the message handling promise (async function)
handleMessage(message, prettier).catch(e => { handleMessage(message, prettier).catch((e) => {
sendResponse({ id: message.id, ...makeError(`error during message handling: ${e}`) }); const errorMessage = message;
if ((errorMessage.params || {}).text !== undefined) {
errorMessage.params.text = "..snip..";
}
sendResponse({
id: message.id,
...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
});
}); });
} }
} }
const headerSeparator = "\r\n"; const headerSeparator = "\r\n";
const contentLengthHeaderName = 'Content-Length'; const contentLengthHeaderName = "Content-Length";
async function* readStdin() { async function* readStdin() {
let buffer = Buffer.alloc(0); let buffer = Buffer.alloc(0);
let streamEnded = false; let streamEnded = false;
process.stdin.on('end', () => { process.stdin.on("end", () => {
streamEnded = true; streamEnded = true;
}); });
process.stdin.on('data', (data) => { process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]); buffer = Buffer.concat([buffer, data]);
}); });
@ -77,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage)); sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0); buffer = Buffer.alloc(0);
messageLength = null; messageLength = null;
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
streamEnded = false; streamEnded = false;
} }
@ -88,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) { if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) { while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) { if (streamEnded) {
await handleStreamEnded('Unexpected end of stream: headers not found'); await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop; continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) { } else if (buffer.length > contentLengthHeaderName.length * 10) {
await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`); await handleStreamEnded(
`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
);
continue main_loop; continue main_loop;
} }
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
} }
const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii'); const headers = buffer
const contentLengthHeader = headers.split(headerSeparator) .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
.map(header => header.split(':')) .toString("ascii");
.filter(header => header[2] === undefined) const contentLengthHeader = headers
.filter(header => (header[1] || '').length > 0) .split(headerSeparator)
.find(header => (header[0] || '').trim() === contentLengthHeaderName); .map((header) => header.split(":"))
.filter((header) => header[2] === undefined)
.filter((header) => (header[1] || "").length > 0)
.find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1]; const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) { if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`); await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@ -111,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10); messageLength = parseInt(contentLength, 10);
} }
while (buffer.length < (headersLength + messageLength)) { while (buffer.length < headersLength + messageLength) {
if (streamEnded) { if (streamEnded) {
await handleStreamEnded( await handleStreamEnded(
`Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`); `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
);
continue main_loop; continue main_loop;
} }
await once(process.stdin, 'readable'); await once(process.stdin, "readable");
} }
const messageEnd = headersLength + messageLength; const messageEnd = headersLength + messageLength;
@ -125,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd); buffer = buffer.subarray(messageEnd);
headersLength = null; headersLength = null;
messageLength = null; messageLength = null;
yield message.toString('utf8'); yield message.toString("utf8");
} }
} catch (e) { } catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`)); sendResponse(makeError(`Error reading stdin: ${e}`));
} finally { } finally {
process.stdin.off('data', () => { }); process.stdin.off("data", () => {});
} }
} }
@ -143,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`); throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
} }
if (method === 'prettier/format') { if (method === "prettier/format") {
if (params === undefined || params.text === undefined) { if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`); throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
} }
@ -153,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {}; let resolvedConfig = {};
if (params.options.filepath !== undefined) { if (params.options.filepath !== undefined) {
resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {}; resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
} }
const options = { const options = {
@ -161,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig, ...resolvedConfig,
parser: params.options.parser, parser: params.options.parser,
plugins: params.options.plugins, plugins: params.options.plugins,
path: params.options.filepath path: params.options.filepath,
}; };
process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`); process.stderr.write(
`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
params.options.filepath || ""
}' with options: ${JSON.stringify(options)}\n`,
);
const formattedText = await prettier.prettier.format(params.text, options); const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } }); sendResponse({ id, result: { text: formattedText } });
} else if (method === 'prettier/clear_cache') { } else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache(); prettier.prettier.clearConfigCache();
prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {}; prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null }); sendResponse({ id, result: null });
} else if (method === 'initialize') { } else if (method === "initialize") {
sendResponse({ sendResponse({
id, id,
result: { result: {
"capabilities": {} capabilities: {},
} },
}); });
} else { } else {
throw new Error(`Unknown method: ${method}`); throw new Error(`Unknown method: ${method}`);
@ -185,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) { function makeError(message) {
return { return {
error: { error: {
"code": -32600, // invalid request code code: -32600, // invalid request code
message, message,
} },
}; };
} }
function sendResponse(response) { function sendResponse(response) {
const responsePayloadString = JSON.stringify({ const responsePayloadString = JSON.stringify({
jsonrpc: "2.0", jsonrpc: "2.0",
...response ...response,
}); });
const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`; const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
responsePayloadString,
)}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString); process.stdout.write(headers + responsePayloadString);
} }

View file

@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use parking_lot::Mutex; use parking_lot::Mutex;
use postage::watch; use postage::watch;
use prettier::{LocateStart, Prettier}; use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings}; use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*; use rand::prelude::*;
use search::SearchQuery; use search::SearchQuery;
@ -82,8 +82,11 @@ use std::{
use terminals::Terminals; use terminals::Terminals;
use text::Anchor; use text::Anchor;
use util::{ use util::{
debug_panic, defer, http::HttpClient, merge_json_value_into, debug_panic, defer,
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, http::HttpClient,
merge_json_value_into,
paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
post_inc, ResultExt, TryFutureExt as _,
}; };
pub use fs::*; pub use fs::*;
@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>, copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>, current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>, node: Option<Arc<dyn NodeRuntime>>,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>, default_prettier: Option<DefaultPrettier>,
prettier_instances: HashMap< prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
(Option<WorktreeId>, PathBuf), prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
>,
} }
#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier { struct DefaultPrettier {
installation_process: Option<Shared<Task<()>>>, instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>, installed_plugins: HashSet<&'static str>,
} }
@ -685,8 +686,8 @@ impl Project {
copilot_log_subscription: None, copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(), current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: Some(node), node: Some(node),
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None, default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(), prettier_instances: HashMap::default(),
} }
}) })
@ -786,8 +787,8 @@ impl Project {
copilot_log_subscription: None, copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(), current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: None, node: None,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None, default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(), prettier_instances: HashMap::default(),
}; };
for worktree in worktrees { for worktree in worktrees {
@ -924,8 +925,7 @@ impl Project {
} }
for (worktree, language, settings) in language_formatters_to_check { for (worktree, language, settings) in language_formatters_to_check {
self.install_default_formatters(worktree, &language, &settings, cx) self.install_default_formatters(worktree, &language, &settings, cx);
.detach_and_log_err(cx);
} }
// Start all the newly-enabled language servers. // Start all the newly-enabled language servers.
@ -2681,20 +2681,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref()); let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
let task_buffer = buffer.clone(); self.install_default_formatters(worktree, &new_language, &settings, cx);
let prettier_installation_task =
self.install_default_formatters(worktree, &new_language, &settings, cx);
cx.spawn(|project, mut cx| async move {
prettier_installation_task.await?;
let _ = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(&task_buffer, cx)
})
.await;
anyhow::Ok(())
})
.detach_and_log_err(cx);
if let Some(file) = buffer_file { if let Some(file) = buffer_file {
let worktree = file.worktree.clone(); let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() { if let Some(tree) = worktree.read(cx).as_local() {
@ -4029,7 +4016,7 @@ impl Project {
} }
pub fn format( pub fn format(
&self, &mut self,
buffers: HashSet<ModelHandle<Buffer>>, buffers: HashSet<ModelHandle<Buffer>>,
push_to_history: bool, push_to_history: bool,
trigger: FormatTrigger, trigger: FormatTrigger,
@ -4049,10 +4036,10 @@ impl Project {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
cx.spawn(|this, mut cx| async move { cx.spawn(|project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the // Do not allow multiple concurrent formatting requests for the
// same buffer. // same buffer.
this.update(&mut cx, |this, cx| { project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| { buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted this.buffers_being_formatted
.insert(buffer.read(cx).remote_id()) .insert(buffer.read(cx).remote_id())
@ -4060,7 +4047,7 @@ impl Project {
}); });
let _cleanup = defer({ let _cleanup = defer({
let this = this.clone(); let this = project.clone();
let mut cx = cx.clone(); let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers; let buffers = &buffers_with_paths_and_servers;
move || { move || {
@ -4128,7 +4115,7 @@ impl Project {
{ {
format_operation = Some(FormatOperation::Lsp( format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp( Self::format_via_lsp(
&this, &project,
&buffer, &buffer,
buffer_abs_path, buffer_abs_path,
&language_server, &language_server,
@ -4163,14 +4150,14 @@ impl Project {
} }
} }
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => { (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx) project.prettier_instance_for_buffer(buffer, cx)
}).await { }).await {
match prettier_task.await match prettier_task.await
{ {
Ok(prettier) => { Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| { let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
}); });
format_operation = Some(FormatOperation::Prettier( format_operation = Some(FormatOperation::Prettier(
@ -4180,16 +4167,35 @@ impl Project {
.context("formatting via prettier")?, .context("formatting via prettier")?,
)); ));
} }
Err(e) => anyhow::bail!( Err(e) => {
"Failed to create prettier instance for buffer during autoformatting: {e:#}" project.update(&mut cx, |project, _| {
), match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
});
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
} }
} else if let Some((language_server, buffer_abs_path)) = } else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref()) language_server.as_ref().zip(buffer_abs_path.as_ref())
{ {
format_operation = Some(FormatOperation::Lsp( format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp( Self::format_via_lsp(
&this, &project,
&buffer, &buffer,
buffer_abs_path, buffer_abs_path,
&language_server, &language_server,
@ -4202,14 +4208,14 @@ impl Project {
} }
} }
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => { (Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx) project.prettier_instance_for_buffer(buffer, cx)
}).await { }).await {
match prettier_task.await match prettier_task.await
{ {
Ok(prettier) => { Ok(prettier) => {
let buffer_path = buffer.read_with(&cx, |buffer, cx| { let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
}); });
format_operation = Some(FormatOperation::Prettier( format_operation = Some(FormatOperation::Prettier(
@ -4219,9 +4225,28 @@ impl Project {
.context("formatting via prettier")?, .context("formatting via prettier")?,
)); ));
} }
Err(e) => anyhow::bail!( Err(e) => {
"Failed to create prettier instance for buffer during formatting: {e:#}" project.update(&mut cx, |project, _| {
), match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
});
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
} }
} }
} }
@ -6431,15 +6456,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}" "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
); );
let prettiers_to_reload = self let prettiers_to_reload = self
.prettier_instances .prettiers_per_worktree
.get(&current_worktree_id)
.iter() .iter()
.filter_map(|((worktree_id, prettier_path), prettier_task)| { .flat_map(|prettier_paths| prettier_paths.iter())
if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) { .flatten()
Some((*worktree_id, prettier_path.clone(), prettier_task.clone())) .filter_map(|prettier_path| {
} else { Some((
None current_worktree_id,
} Some(prettier_path.clone()),
self.prettier_instances.get(prettier_path)?.clone(),
))
}) })
.chain(self.default_prettier.iter().filter_map(|default_prettier| {
Some((
current_worktree_id,
None,
default_prettier.instance.clone()?,
))
}))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
cx.background() cx.background()
@ -6450,9 +6485,15 @@ impl Project {
.clear_cache() .clear_cache()
.await .await
.with_context(|| { .with_context(|| {
format!( match prettier_path {
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" Some(prettier_path) => format!(
) "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
),
None => format!(
"clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
),
}
}) })
.map_err(Arc::new) .map_err(Arc::new)
} }
@ -8364,7 +8405,12 @@ impl Project {
&mut self, &mut self,
buffer: &ModelHandle<Buffer>, buffer: &ModelHandle<Buffer>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> { ) -> Task<
Option<(
Option<PathBuf>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
)>,
> {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let buffer_file = buffer.file(); let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else { let Some(buffer_language) = buffer.language() else {
@ -8374,136 +8420,119 @@ impl Project {
return Task::ready(None); return Task::ready(None);
} }
let buffer_file = File::from_dyn(buffer_file); if self.is_local() {
let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
let worktree_path = buffer_file
.as_ref()
.and_then(|file| Some(file.worktree.read(cx).abs_path()));
let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
let Some(node) = self.node.as_ref().map(Arc::clone) else { let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None); return Task::ready(None);
}; };
cx.spawn(|this, mut cx| async move { match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs)); {
let prettier_dir = match cx Some((worktree_id, buffer_path)) => {
.background() let fs = Arc::clone(&self.fs);
.spawn(Prettier::locate( let installed_prettiers = self.prettier_instances.keys().cloned().collect();
worktree_path.zip(buffer_path).map( return cx.spawn(|project, mut cx| async move {
|(worktree_root_path, starting_path)| LocateStart { match cx
worktree_root_path, .background()
starting_path, .spawn(async move {
}, Prettier::locate_prettier_installation(
), fs.as_ref(),
fs, &installed_prettiers,
)) &buffer_path,
.await )
{ .await
Ok(path) => path, })
Err(e) => { .await
return Some(
Task::ready(Err(Arc::new(e.context(
"determining prettier path for worktree {worktree_path:?}",
))))
.shared(),
);
}
};
if let Some(existing_prettier) = this.update(&mut cx, |project, _| {
project
.prettier_instances
.get(&(worktree_id, prettier_dir.clone()))
.cloned()
}) {
return Some(existing_prettier);
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let task_prettier_dir = prettier_dir.clone();
let weak_project = this.downgrade();
let new_server_id =
this.update(&mut cx, |this, _| this.languages.next_language_server_id());
let new_prettier_task = cx
.spawn(|mut cx| async move {
let prettier = Prettier::start(
worktree_id.map(|id| id.to_usize()),
new_server_id,
task_prettier_dir,
node,
cx.clone(),
)
.await
.context("prettier start")
.map_err(Arc::new)?;
log::info!("Started prettier in {:?}", prettier.prettier_dir());
if let Some((project, prettier_server)) =
weak_project.upgrade(&mut cx).zip(prettier.server())
{ {
project.update(&mut cx, |project, cx| { Ok(None) => {
let name = if prettier.is_default() { let started_default_prettier =
LanguageServerName(Arc::from("prettier (default)")) project.update(&mut cx, |project, _| {
} else { project
let prettier_dir = prettier.prettier_dir(); .prettiers_per_worktree
let worktree_path = prettier .entry(worktree_id)
.worktree_id() .or_default()
.map(WorktreeId::from_usize) .insert(None);
.and_then(|id| project.worktree_for_id(id, cx)) project.default_prettier.as_ref().and_then(
.map(|worktree| worktree.read(cx).abs_path()); |default_prettier| default_prettier.instance.clone(),
match worktree_path { )
Some(worktree_path) => { });
if worktree_path.as_ref() == prettier_dir { match started_default_prettier {
LanguageServerName(Arc::from(format!( Some(old_task) => return Some((None, old_task)),
"prettier ({})", None => {
prettier_dir let new_default_prettier = project
.file_name() .update(&mut cx, |_, cx| {
.and_then(|name| name.to_str()) start_default_prettier(node, Some(worktree_id), cx)
.unwrap_or_default() })
))) .await;
} else { return Some((None, new_default_prettier));
let dir_to_display = match prettier_dir
.strip_prefix(&worktree_path)
.ok()
{
Some(relative_path) => relative_path,
None => prettier_dir,
};
LanguageServerName(Arc::from(format!(
"prettier ({})",
dir_to_display.display(),
)))
}
}
None => LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir.display(),
))),
} }
}; }
}
Ok(Some(prettier_dir)) => {
project.update(&mut cx, |project, _| {
project
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(Some(prettier_dir.clone()))
});
if let Some(existing_prettier) =
project.update(&mut cx, |project, _| {
project.prettier_instances.get(&prettier_dir).cloned()
})
{
log::debug!(
"Found already started prettier in {prettier_dir:?}"
);
return Some((Some(prettier_dir), existing_prettier));
}
project log::info!("Found prettier in {prettier_dir:?}, starting.");
.supplementary_language_servers let new_prettier_task = project.update(&mut cx, |project, cx| {
.insert(new_server_id, (name, Arc::clone(prettier_server))); let new_prettier_task = start_prettier(
cx.emit(Event::LanguageServerAdded(new_server_id)); node,
}); prettier_dir.clone(),
Some(worktree_id),
cx,
);
project
.prettier_instances
.insert(prettier_dir.clone(), new_prettier_task.clone());
new_prettier_task
});
Some((Some(prettier_dir), new_prettier_task))
}
Err(e) => {
return Some((
None,
Task::ready(Err(Arc::new(
e.context("determining prettier path"),
)))
.shared(),
));
}
} }
Ok(Arc::new(prettier)).map_err(Arc::new) });
}) }
.shared(); None => {
this.update(&mut cx, |project, _| { let started_default_prettier = self
project .default_prettier
.prettier_instances .as_ref()
.insert((worktree_id, prettier_dir), new_prettier_task.clone()); .and_then(|default_prettier| default_prettier.instance.clone());
}); match started_default_prettier {
Some(new_prettier_task) Some(old_task) => return Task::ready(Some((None, old_task))),
}) None => {
let new_task = start_default_prettier(node, None, cx);
return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
}
}
}
}
} else if self.remote_id().is_some() { } else if self.remote_id().is_some() {
return Task::ready(None); return Task::ready(None);
} else { } else {
Task::ready(Some( Task::ready(Some((
None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(), Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
)) )))
} }
} }
@ -8514,8 +8543,7 @@ impl Project {
_new_language: &Language, _new_language: &Language,
_language_settings: &LanguageSettings, _language_settings: &LanguageSettings,
_cx: &mut ModelContext<Self>, _cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> { ) {
return Task::ready(Ok(()));
} }
#[cfg(not(any(test, feature = "test-support")))] #[cfg(not(any(test, feature = "test-support")))]
@ -8525,19 +8553,19 @@ impl Project {
new_language: &Language, new_language: &Language,
language_settings: &LanguageSettings, language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> { ) {
match &language_settings.formatter { match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {} Formatter::Prettier { .. } | Formatter::Auto => {}
Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())), Formatter::LanguageServer | Formatter::External { .. } => return,
}; };
let Some(node) = self.node.as_ref().cloned() else { let Some(node) = self.node.as_ref().cloned() else {
return Task::ready(Ok(())); return;
}; };
let mut prettier_plugins = None; let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() { if new_language.prettier_parser_name().is_some() {
prettier_plugins prettier_plugins
.get_or_insert_with(|| HashSet::default()) .get_or_insert_with(|| HashSet::<&'static str>::default())
.extend( .extend(
new_language new_language
.lsp_adapters() .lsp_adapters()
@ -8546,114 +8574,270 @@ impl Project {
) )
} }
let Some(prettier_plugins) = prettier_plugins else { let Some(prettier_plugins) = prettier_plugins else {
return Task::ready(Ok(())); return;
}; };
let fs = Arc::clone(&self.fs);
let locate_prettier_installation = match worktree.and_then(|worktree_id| {
self.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
}) {
Some(locate_from) => {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.background().spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
locate_from.as_ref(),
)
.await
})
}
None => Task::ready(Ok(None)),
};
let mut plugins_to_install = prettier_plugins; let mut plugins_to_install = prettier_plugins;
let (mut install_success_tx, mut install_success_rx) =
futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
let new_installation_process = cx
.spawn(|this, mut cx| async move {
if let Some(installed_plugins) = install_success_rx.next().await {
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
installation_process: None,
installed_plugins: HashSet::default(),
});
if !installed_plugins.is_empty() {
log::info!("Installed new prettier plugins: {installed_plugins:?}");
default_prettier.installed_plugins.extend(installed_plugins);
}
})
}
})
.shared();
let previous_installation_process = let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier { if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); .retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() { if plugins_to_install.is_empty() {
return Task::ready(Ok(())); return;
} }
std::mem::replace( default_prettier.installation_process.clone()
&mut default_prettier.installation_process,
Some(new_installation_process.clone()),
)
} else { } else {
None None
}; };
let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
let already_running_prettier = self
.prettier_instances
.get(&(worktree, default_prettier_dir.to_path_buf()))
.cloned();
let fs = Arc::clone(&self.fs); let fs = Arc::clone(&self.fs);
cx.spawn(|this, mut cx| async move { let default_prettier = self
if let Some(previous_installation_process) = previous_installation_process { .default_prettier
previous_installation_process.await; .get_or_insert_with(|| DefaultPrettier {
} instance: None,
let mut everything_was_installed = false; installation_process: None,
this.update(&mut cx, |this, _| { installed_plugins: HashSet::default(),
match &mut this.default_prettier {
Some(default_prettier) => {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
everything_was_installed = plugins_to_install.is_empty();
},
None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }),
}
}); });
if everything_was_installed { default_prettier.installation_process = Some(
return Ok(()); cx.spawn(|this, mut cx| async move {
} match locate_prettier_installation
cx.background()
.spawn(async move {
let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await
.with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?;
let packages_to_versions = future::try_join_all(
plugins_to_install
.iter()
.chain(Some(&"prettier"))
.map(|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
}),
)
.await .await
.context("fetching latest npm versions")?; .context("locate prettier installation")
.map_err(Arc::new)?
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); {
let borrowed_packages = packages_to_versions.iter().map(|(package, version)| { Some(_non_default_prettier) => return Ok(()),
(package.as_str(), version.as_str()) None => {
}).collect::<Vec<_>>(); let mut needs_install = match previous_installation_process {
node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?; Some(previous_installation_process) => {
let installed_packages = !plugins_to_install.is_empty(); previous_installation_process.await.is_err()
install_success_tx.try_send(plugins_to_install).ok(); }
None => true,
if !installed_packages { };
if let Some(prettier) = already_running_prettier { this.update(&mut cx, |this, _| {
prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?; if let Some(default_prettier) = &mut this.default_prettier {
plugins_to_install.retain(|plugin| {
!default_prettier.installed_plugins.contains(plugin)
});
needs_install |= !plugins_to_install.is_empty();
}
});
if needs_install {
let installed_plugins = plugins_to_install.clone();
cx.background()
.spawn(async move {
install_default_prettier(plugins_to_install, node, fs).await
})
.await
.context("prettier & plugins install")
.map_err(Arc::new)?;
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: Some(
Task::ready(Ok(())).shared(),
),
installed_plugins: HashSet::default(),
});
default_prettier.instance = None;
default_prettier.installed_plugins.extend(installed_plugins);
});
} }
} }
}
anyhow::Ok(()) Ok(())
}).await })
}) .shared(),
);
} }
} }
fn start_default_prettier(
node: Arc<dyn NodeRuntime>,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
cx.spawn(|project, mut cx| async move {
loop {
let default_prettier_installing = project.update(&mut cx, |project, _| {
project
.default_prettier
.as_ref()
.and_then(|default_prettier| default_prettier.installation_process.clone())
});
match default_prettier_installing {
Some(installation_task) => {
if installation_task.await.is_ok() {
break;
}
}
None => break,
}
}
project.update(&mut cx, |project, cx| {
match project
.default_prettier
.as_mut()
.and_then(|default_prettier| default_prettier.instance.as_mut())
{
Some(default_prettier) => default_prettier.clone(),
None => {
let new_default_prettier =
start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
project
.default_prettier
.get_or_insert_with(|| DefaultPrettier {
instance: None,
installation_process: None,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet::default(),
})
.instance = Some(new_default_prettier.clone());
new_default_prettier
}
}
})
})
}
fn start_prettier(
node: Arc<dyn NodeRuntime>,
prettier_dir: PathBuf,
worktree_id: Option<WorktreeId>,
cx: &mut ModelContext<'_, Project>,
) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
cx.spawn(|project, mut cx| async move {
let new_server_id = project.update(&mut cx, |project, _| {
project.languages.next_language_server_id()
});
let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
.await
.context("default prettier spawn")
.map(Arc::new)
.map_err(Arc::new)?;
register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
Ok(new_prettier)
})
.shared()
}
fn register_new_prettier(
project: &ModelHandle<Project>,
prettier: &Prettier,
worktree_id: Option<WorktreeId>,
new_server_id: LanguageServerId,
cx: &mut AsyncAppContext,
) {
let prettier_dir = prettier.prettier_dir();
let is_default = prettier.is_default();
if is_default {
log::info!("Started default prettier in {prettier_dir:?}");
} else {
log::info!("Started prettier in {prettier_dir:?}");
}
if let Some(prettier_server) = prettier.server() {
project.update(cx, |project, cx| {
let name = if is_default {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let worktree_path = worktree_id
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
let name = match worktree_path {
Some(worktree_path) => {
if prettier_dir == worktree_path.as_ref() {
let name = prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default();
format!("prettier ({name})")
} else {
let dir_to_display = prettier_dir
.strip_prefix(worktree_path.as_ref())
.ok()
.unwrap_or(prettier_dir);
format!("prettier ({})", dir_to_display.display())
}
}
None => format!("prettier ({})", prettier_dir.display()),
};
LanguageServerName(Arc::from(name))
};
project
.supplementary_language_servers
.insert(new_server_id, (name, Arc::clone(prettier_server)));
cx.emit(Event::LanguageServerAdded(new_server_id));
});
}
}
#[cfg(not(any(test, feature = "test-support")))]
async fn install_default_prettier(
plugins_to_install: HashSet<&'static str>,
node: Arc<dyn NodeRuntime>,
fs: Arc<dyn Fs>,
) -> anyhow::Result<()> {
let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
// method creates parent directory if it doesn't exist
fs.save(
&prettier_wrapper_path,
&text::Rope::from(prettier::PRETTIER_SERVER_JS),
text::LineEnding::Unix,
)
.await
.with_context(|| {
format!(
"writing {} file at {prettier_wrapper_path:?}",
prettier::PRETTIER_SERVER_FILE
)
})?;
let packages_to_versions =
future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
|package_name| async {
let returned_package_name = package_name.to_string();
let latest_version = node
.npm_package_latest_version(package_name)
.await
.with_context(|| {
format!("fetching latest npm version for package {returned_package_name}")
})?;
anyhow::Ok((returned_package_name, latest_version))
},
))
.await
.context("fetching latest npm versions")?;
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
let borrowed_packages = packages_to_versions
.iter()
.map(|(package, version)| (package.as_str(), version.as_str()))
.collect::<Vec<_>>();
node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
.await
.context("fetching formatter packages")?;
anyhow::Ok(())
}
fn subscribe_for_copilot_events( fn subscribe_for_copilot_events(
copilot: &ModelHandle<Copilot>, copilot: &ModelHandle<Copilot>,
cx: &mut ModelContext<'_, Project>, cx: &mut ModelContext<'_, Project>,

View file

@ -1,4 +1,4 @@
use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *}; use crate::{worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, RealFs}; use fs::{FakeFs, RealFs};
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext}; use gpui::{executor::Deterministic, test::subscribe, AppContext};
@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json; use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll}; use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _; use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree}; use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[cfg(test)] #[cfg(test)]
#[ctor::ctor] #[ctor::ctor]

View file

@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use client::proto; use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools; use itertools::Itertools;
use language::{char_kind, BufferSnapshot}; use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder}; use regex::{Regex, RegexBuilder};
@ -10,9 +9,10 @@ use std::{
borrow::Cow, borrow::Cow,
io::{BufRead, BufReader, Read}, io::{BufRead, BufReader, Read},
ops::Range, ops::Range,
path::{Path, PathBuf}, path::Path,
sync::Arc, sync::Arc,
}; };
use util::paths::PathMatcher;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SearchInputs { pub struct SearchInputs {
@ -52,31 +52,6 @@ pub enum SearchQuery {
}, },
} }
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery { impl SearchQuery {
pub fn text( pub fn text(
query: impl ToString, query: impl ToString,

View file

@ -20,7 +20,7 @@ test-support = [
] ]
[dependencies] [dependencies]
text = { path = "../text" } text = { package = "text2", path = "../text2" }
copilot = { package = "copilot2", path = "../copilot2" } copilot = { package = "copilot2", path = "../copilot2" }
client = { package = "client2", path = "../client2" } client = { package = "client2", path = "../client2" }
clock = { path = "../clock" } clock = { path = "../clock" }
@ -29,7 +29,7 @@ db = { package = "db2", path = "../db2" }
fs = { package = "fs2", path = "../fs2" } fs = { package = "fs2", path = "../fs2" }
fsevent = { path = "../fsevent" } fsevent = { path = "../fsevent" }
fuzzy = { package = "fuzzy2", path = "../fuzzy2" } fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
git = { path = "../git" } git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" } language = { package = "language2", path = "../language2" }
lsp = { package = "lsp2", path = "../lsp2" } lsp = { package = "lsp2", path = "../lsp2" }

View file

@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use parking_lot::Mutex; use parking_lot::Mutex;
use postage::watch; use postage::watch;
use prettier::{LocateStart, Prettier}; use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings}; use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*; use rand::prelude::*;
use search::SearchQuery; use search::SearchQuery;
@ -82,8 +82,11 @@ use std::{
use terminals::Terminals; use terminals::Terminals;
use text::Anchor; use text::Anchor;
use util::{ use util::{
debug_panic, defer, http::HttpClient, merge_json_value_into, debug_panic, defer,
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, http::HttpClient,
merge_json_value_into,
paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
post_inc, ResultExt, TryFutureExt as _,
}; };
pub use fs::*; pub use fs::*;
@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>, copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>, current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>, node: Option<Arc<dyn NodeRuntime>>,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>, default_prettier: Option<DefaultPrettier>,
prettier_instances: HashMap< prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
(Option<WorktreeId>, PathBuf), prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
>,
} }
#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier { struct DefaultPrettier {
installation_process: Option<Shared<Task<()>>>, instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
#[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>, installed_plugins: HashSet<&'static str>,
} }
@ -686,8 +687,8 @@ impl Project {
copilot_log_subscription: None, copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: Some(node), node: Some(node),
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None, default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(), prettier_instances: HashMap::default(),
} }
}) })
@ -789,8 +790,8 @@ impl Project {
copilot_log_subscription: None, copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: None, node: None,
#[cfg(not(any(test, feature = "test-support")))]
default_prettier: None, default_prettier: None,
prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(), prettier_instances: HashMap::default(),
}; };
for worktree in worktrees { for worktree in worktrees {
@ -963,8 +964,7 @@ impl Project {
} }
for (worktree, language, settings) in language_formatters_to_check { for (worktree, language, settings) in language_formatters_to_check {
self.install_default_formatters(worktree, &language, &settings, cx) self.install_default_formatters(worktree, &language, &settings, cx);
.detach_and_log_err(cx);
} }
// Start all the newly-enabled language servers. // Start all the newly-enabled language servers.
@ -2720,20 +2720,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref()); let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
let task_buffer = buffer.clone(); self.install_default_formatters(worktree, &new_language, &settings, cx);
let prettier_installation_task =
self.install_default_formatters(worktree, &new_language, &settings, cx);
cx.spawn(move |project, mut cx| async move {
prettier_installation_task.await?;
let _ = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(&task_buffer, cx)
})?
.await;
anyhow::Ok(())
})
.detach_and_log_err(cx);
if let Some(file) = buffer_file { if let Some(file) = buffer_file {
let worktree = file.worktree.clone(); let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() { if let Some(tree) = worktree.read(cx).as_local() {
@ -4096,7 +4083,7 @@ impl Project {
} }
pub fn format( pub fn format(
&self, &mut self,
buffers: HashSet<Model<Buffer>>, buffers: HashSet<Model<Buffer>>,
push_to_history: bool, push_to_history: bool,
trigger: FormatTrigger, trigger: FormatTrigger,
@ -4116,10 +4103,10 @@ impl Project {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
cx.spawn(move |this, mut cx| async move { cx.spawn(move |project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the // Do not allow multiple concurrent formatting requests for the
// same buffer. // same buffer.
this.update(&mut cx, |this, cx| { project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| { buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted this.buffers_being_formatted
.insert(buffer.read(cx).remote_id()) .insert(buffer.read(cx).remote_id())
@ -4127,7 +4114,7 @@ impl Project {
})?; })?;
let _cleanup = defer({ let _cleanup = defer({
let this = this.clone(); let this = project.clone();
let mut cx = cx.clone(); let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers; let buffers = &buffers_with_paths_and_servers;
move || { move || {
@ -4195,7 +4182,7 @@ impl Project {
{ {
format_operation = Some(FormatOperation::Lsp( format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp( Self::format_via_lsp(
&this, &project,
&buffer, &buffer,
buffer_abs_path, buffer_abs_path,
&language_server, &language_server,
@ -4230,7 +4217,7 @@ impl Project {
} }
} }
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => { (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx) project.prettier_instance_for_buffer(buffer, cx)
})?.await { })?.await {
@ -4247,16 +4234,35 @@ impl Project {
.context("formatting via prettier")?, .context("formatting via prettier")?,
)); ));
} }
Err(e) => anyhow::bail!( Err(e) => {
"Failed to create prettier instance for buffer during autoformatting: {e:#}" project.update(&mut cx, |project, _| {
), match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
})?;
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
} }
} else if let Some((language_server, buffer_abs_path)) = } else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref()) language_server.as_ref().zip(buffer_abs_path.as_ref())
{ {
format_operation = Some(FormatOperation::Lsp( format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp( Self::format_via_lsp(
&this, &project,
&buffer, &buffer,
buffer_abs_path, buffer_abs_path,
&language_server, &language_server,
@ -4269,7 +4275,7 @@ impl Project {
} }
} }
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => { (Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
if let Some(prettier_task) = this if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| { .update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx) project.prettier_instance_for_buffer(buffer, cx)
})?.await { })?.await {
@ -4286,9 +4292,28 @@ impl Project {
.context("formatting via prettier")?, .context("formatting via prettier")?,
)); ));
} }
Err(e) => anyhow::bail!( Err(e) => {
"Failed to create prettier instance for buffer during formatting: {e:#}" project.update(&mut cx, |project, _| {
), match &prettier_path {
Some(prettier_path) => {
project.prettier_instances.remove(prettier_path);
},
None => {
if let Some(default_prettier) = project.default_prettier.as_mut() {
default_prettier.instance = None;
}
},
}
})?;
match &prettier_path {
Some(prettier_path) => {
log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
},
None => {
log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
},
}
}
} }
} }
} }
@ -6506,15 +6531,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}" "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
); );
let prettiers_to_reload = self let prettiers_to_reload = self
.prettier_instances .prettiers_per_worktree
.get(&current_worktree_id)
.iter() .iter()
.filter_map(|((worktree_id, prettier_path), prettier_task)| { .flat_map(|prettier_paths| prettier_paths.iter())
if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) { .flatten()
Some((*worktree_id, prettier_path.clone(), prettier_task.clone())) .filter_map(|prettier_path| {
} else { Some((
None current_worktree_id,
} Some(prettier_path.clone()),
self.prettier_instances.get(prettier_path)?.clone(),
))
}) })
.chain(self.default_prettier.iter().filter_map(|default_prettier| {
Some((
current_worktree_id,
None,
default_prettier.instance.clone()?,
))
}))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
cx.background_executor() cx.background_executor()
@ -6525,9 +6560,14 @@ impl Project {
.clear_cache() .clear_cache()
.await .await
.with_context(|| { .with_context(|| {
format!( match prettier_path {
"clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" Some(prettier_path) => format!(
) "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
),
None => format!(
"clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
),
}
}) })
.map_err(Arc::new) .map_err(Arc::new)
} }
@ -8411,7 +8451,12 @@ impl Project {
&mut self, &mut self,
buffer: &Model<Buffer>, buffer: &Model<Buffer>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> { ) -> Task<
Option<(
Option<PathBuf>,
Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
)>,
> {
let buffer = buffer.read(cx); let buffer = buffer.read(cx);
let buffer_file = buffer.file(); let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else { let Some(buffer_language) = buffer.language() else {
@ -8421,142 +8466,142 @@ impl Project {
return Task::ready(None); return Task::ready(None);
} }
let buffer_file = File::from_dyn(buffer_file); if self.is_local() {
let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
let worktree_path = buffer_file
.as_ref()
.and_then(|file| Some(file.worktree.read(cx).abs_path()));
let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
let Some(node) = self.node.as_ref().map(Arc::clone) else { let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None); return Task::ready(None);
}; };
let fs = self.fs.clone(); match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
cx.spawn(move |this, mut cx| async move { {
let prettier_dir = match cx Some((worktree_id, buffer_path)) => {
.background_executor() let fs = Arc::clone(&self.fs);
.spawn(Prettier::locate( let installed_prettiers = self.prettier_instances.keys().cloned().collect();
worktree_path.zip(buffer_path).map( return cx.spawn(|project, mut cx| async move {
|(worktree_root_path, starting_path)| LocateStart { match cx
worktree_root_path, .background_executor()
starting_path, .spawn(async move {
}, Prettier::locate_prettier_installation(
), fs.as_ref(),
fs, &installed_prettiers,
)) &buffer_path,
.await )
{ .await
Ok(path) => path, })
Err(e) => {
return Some(
Task::ready(Err(Arc::new(e.context(
"determining prettier path for worktree {worktree_path:?}",
))))
.shared(),
);
}
};
if let Some(existing_prettier) = this
.update(&mut cx, |project, _| {
project
.prettier_instances
.get(&(worktree_id, prettier_dir.clone()))
.cloned()
})
.ok()
.flatten()
{
return Some(existing_prettier);
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let task_prettier_dir = prettier_dir.clone();
let new_prettier_task = cx
.spawn({
let this = this.clone();
move |mut cx| async move {
let new_server_id = this.update(&mut cx, |this, _| {
this.languages.next_language_server_id()
})?;
let prettier = Prettier::start(
worktree_id.map(|id| id.to_usize()),
new_server_id,
task_prettier_dir,
node,
cx.clone(),
)
.await .await
.context("prettier start") {
.map_err(Arc::new)?; Ok(None) => {
log::info!("Started prettier in {:?}", prettier.prettier_dir()); match project.update(&mut cx, |project, _| {
if let Some(prettier_server) = prettier.server() {
this.update(&mut cx, |project, cx| {
let name = if prettier.is_default() {
LanguageServerName(Arc::from("prettier (default)"))
} else {
let prettier_dir = prettier.prettier_dir();
let worktree_path = prettier
.worktree_id()
.map(WorktreeId::from_usize)
.and_then(|id| project.worktree_for_id(id, cx))
.map(|worktree| worktree.read(cx).abs_path());
match worktree_path {
Some(worktree_path) => {
if worktree_path.as_ref() == prettier_dir {
LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir
.file_name()
.and_then(|name| name.to_str())
.unwrap_or_default()
)))
} else {
let dir_to_display = match prettier_dir
.strip_prefix(&worktree_path)
.ok()
{
Some(relative_path) => relative_path,
None => prettier_dir,
};
LanguageServerName(Arc::from(format!(
"prettier ({})",
dir_to_display.display(),
)))
}
}
None => LanguageServerName(Arc::from(format!(
"prettier ({})",
prettier_dir.display(),
))),
}
};
project project
.supplementary_language_servers .prettiers_per_worktree
.insert(new_server_id, (name, Arc::clone(prettier_server))); .entry(worktree_id)
cx.emit(Event::LanguageServerAdded(new_server_id)); .or_default()
})?; .insert(None);
project.default_prettier.as_ref().and_then(
|default_prettier| default_prettier.instance.clone(),
)
}) {
Ok(Some(old_task)) => Some((None, old_task)),
Ok(None) => {
match project.update(&mut cx, |_, cx| {
start_default_prettier(node, Some(worktree_id), cx)
}) {
Ok(new_default_prettier) => {
return Some((None, new_default_prettier.await))
}
Err(e) => {
Some((
None,
Task::ready(Err(Arc::new(e.context("project is gone during default prettier startup"))))
.shared(),
))
}
}
}
Err(e) => Some((None, Task::ready(Err(Arc::new(e.context("project is gone during default prettier checks"))))
.shared())),
}
}
Ok(Some(prettier_dir)) => {
match project.update(&mut cx, |project, _| {
project
.prettiers_per_worktree
.entry(worktree_id)
.or_default()
.insert(Some(prettier_dir.clone()));
project.prettier_instances.get(&prettier_dir).cloned()
}) {
Ok(Some(existing_prettier)) => {
log::debug!(
"Found already started prettier in {prettier_dir:?}"
);
return Some((Some(prettier_dir), existing_prettier));
}
Err(e) => {
return Some((
Some(prettier_dir),
Task::ready(Err(Arc::new(e.context("project is gone during custom prettier checks"))))
.shared(),
))
}
_ => {},
}
log::info!("Found prettier in {prettier_dir:?}, starting.");
let new_prettier_task =
match project.update(&mut cx, |project, cx| {
let new_prettier_task = start_prettier(
node,
prettier_dir.clone(),
Some(worktree_id),
cx,
);
project.prettier_instances.insert(
prettier_dir.clone(),
new_prettier_task.clone(),
);
new_prettier_task
}) {
Ok(task) => task,
Err(e) => return Some((
Some(prettier_dir),
Task::ready(Err(Arc::new(e.context("project is gone during custom prettier startup"))))
.shared()
)),
};
Some((Some(prettier_dir), new_prettier_task))
}
Err(e) => {
return Some((
None,
Task::ready(Err(Arc::new(
e.context("determining prettier path"),
)))
.shared(),
));
} }
Ok(Arc::new(prettier)).map_err(Arc::new)
} }
}) });
.shared(); }
this.update(&mut cx, |project, _| { None => {
project let started_default_prettier = self
.prettier_instances .default_prettier
.insert((worktree_id, prettier_dir), new_prettier_task.clone()); .as_ref()
}) .and_then(|default_prettier| default_prettier.instance.clone());
.ok(); match started_default_prettier {
Some(new_prettier_task) Some(old_task) => return Task::ready(Some((None, old_task))),
}) None => {
let new_task = start_default_prettier(node, None, cx);
return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
}
}
}
}
} else if self.remote_id().is_some() { } else if self.remote_id().is_some() {
return Task::ready(None); return Task::ready(None);
} else { } else {
Task::ready(Some( Task::ready(Some((
None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(), Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
)) )))
} }
} }
@ -8567,8 +8612,7 @@ impl Project {
_: &Language, _: &Language,
_: &LanguageSettings, _: &LanguageSettings,
_: &mut ModelContext<Self>, _: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> { ) {
Task::ready(Ok(()))
} }
#[cfg(not(any(test, feature = "test-support")))] #[cfg(not(any(test, feature = "test-support")))]
@ -8578,19 +8622,19 @@ impl Project {
new_language: &Language, new_language: &Language,
language_settings: &LanguageSettings, language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<()>> { ) {
match &language_settings.formatter { match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {} Formatter::Prettier { .. } | Formatter::Auto => {}
Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())), Formatter::LanguageServer | Formatter::External { .. } => return,
}; };
let Some(node) = self.node.as_ref().cloned() else { let Some(node) = self.node.as_ref().cloned() else {
return Task::ready(Ok(())); return;
}; };
let mut prettier_plugins = None; let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() { if new_language.prettier_parser_name().is_some() {
prettier_plugins prettier_plugins
.get_or_insert_with(|| HashSet::default()) .get_or_insert_with(|| HashSet::<&'static str>::default())
.extend( .extend(
new_language new_language
.lsp_adapters() .lsp_adapters()
@ -8599,114 +8643,287 @@ impl Project {
) )
} }
let Some(prettier_plugins) = prettier_plugins else { let Some(prettier_plugins) = prettier_plugins else {
return Task::ready(Ok(())); return;
}; };
let fs = Arc::clone(&self.fs);
let locate_prettier_installation = match worktree.and_then(|worktree_id| {
self.worktree_for_id(worktree_id, cx)
.map(|worktree| worktree.read(cx).abs_path())
}) {
Some(locate_from) => {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.background_executor().spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
locate_from.as_ref(),
)
.await
})
}
None => Task::ready(Ok(None)),
};
let mut plugins_to_install = prettier_plugins; let mut plugins_to_install = prettier_plugins;
let (mut install_success_tx, mut install_success_rx) =
futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
let new_installation_process = cx
.spawn(|this, mut cx| async move {
if let Some(installed_plugins) = install_success_rx.next().await {
this.update(&mut cx, |this, _| {
let default_prettier =
this.default_prettier
.get_or_insert_with(|| DefaultPrettier {
installation_process: None,
installed_plugins: HashSet::default(),
});
if !installed_plugins.is_empty() {
log::info!("Installed new prettier plugins: {installed_plugins:?}");
default_prettier.installed_plugins.extend(installed_plugins);
}
})
.ok();
}
})
.shared();
let previous_installation_process = let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier { if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); .retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() { if plugins_to_install.is_empty() {
return Task::ready(Ok(())); return;
} }
std::mem::replace( default_prettier.installation_process.clone()
&mut default_prettier.installation_process,
Some(new_installation_process.clone()),
)
} else { } else {
None None
}; };
let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
let already_running_prettier = self
.prettier_instances
.get(&(worktree, default_prettier_dir.to_path_buf()))
.cloned();
let fs = Arc::clone(&self.fs); let fs = Arc::clone(&self.fs);
cx.spawn(move |this, mut cx| async move { let default_prettier = self
if let Some(previous_installation_process) = previous_installation_process { .default_prettier
previous_installation_process.await; .get_or_insert_with(|| DefaultPrettier {
} instance: None,
let mut everything_was_installed = false; installation_process: None,
this.update(&mut cx, |this, _| { installed_plugins: HashSet::default(),
match &mut this.default_prettier { });
Some(default_prettier) => { default_prettier.installation_process = Some(
plugins_to_install cx.spawn(|this, mut cx| async move {
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); match locate_prettier_installation
everything_was_installed = plugins_to_install.is_empty(); .await
}, .context("locate prettier installation")
None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }), .map_err(Arc::new)?
} {
})?; Some(_non_default_prettier) => return Ok(()),
if everything_was_installed { None => {
return Ok(()); let mut needs_install = match previous_installation_process {
} Some(previous_installation_process) => {
previous_installation_process.await.is_err()
cx.spawn(move |_| async move { }
let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE); None => true,
// method creates parent directory if it doesn't exist };
fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await this.update(&mut cx, |this, _| {
.with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?; if let Some(default_prettier) = &mut this.default_prettier {
plugins_to_install.retain(|plugin| {
let packages_to_versions = future::try_join_all( !default_prettier.installed_plugins.contains(plugin)
plugins_to_install });
.iter() needs_install |= !plugins_to_install.is_empty();
.chain(Some(&"prettier")) }
.map(|package_name| async { })?;
let returned_package_name = package_name.to_string(); if needs_install {
let latest_version = node.npm_package_latest_version(package_name) let installed_plugins = plugins_to_install.clone();
cx.background_executor()
.spawn(async move {
install_default_prettier(plugins_to_install, node, fs).await
})
.await .await
.with_context(|| { .context("prettier & plugins install")
format!("fetching latest npm version for package {returned_package_name}") .map_err(Arc::new)?;
})?; this.update(&mut cx, |this, _| {
anyhow::Ok((returned_package_name, latest_version)) let default_prettier =
}), this.default_prettier
) .get_or_insert_with(|| DefaultPrettier {
.await instance: None,
.context("fetching latest npm versions")?; installation_process: Some(
Task::ready(Ok(())).shared(),
log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); ),
let borrowed_packages = packages_to_versions.iter().map(|(package, version)| { installed_plugins: HashSet::default(),
(package.as_str(), version.as_str()) });
}).collect::<Vec<_>>(); default_prettier.instance = None;
node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?; default_prettier.installed_plugins.extend(installed_plugins);
let installed_packages = !plugins_to_install.is_empty(); })?;
install_success_tx.try_send(plugins_to_install).ok(); }
if !installed_packages {
if let Some(prettier) = already_running_prettier {
prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
} }
} }
Ok(())
anyhow::Ok(()) })
}).await .shared(),
}) );
} }
} }
/// Returns (as a task) the shared handle to the default prettier instance,
/// starting one in `DEFAULT_PRETTIER_DIR` if none is running yet.
///
/// Before touching the instance, waits for any in-flight plugin
/// `installation_process` to finish so a freshly-started prettier sees the
/// installed plugins.
///
/// NOTE(review): when an installation task fails, the loop re-reads
/// `installation_process`; if the same failed `Shared` task is still stored,
/// re-awaiting it returns `Err` immediately and the loop spins — confirm the
/// process is replaced or cleared on failure elsewhere.
fn start_default_prettier(
    node: Arc<dyn NodeRuntime>,
    worktree_id: Option<WorktreeId>,
    cx: &mut ModelContext<'_, Project>,
) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
    cx.spawn(|project, mut cx| async move {
        loop {
            // Snapshot the current installation task (if any) under the model lock.
            let default_prettier_installing = match project.update(&mut cx, |project, _| {
                project
                    .default_prettier
                    .as_ref()
                    .and_then(|default_prettier| default_prettier.installation_process.clone())
            }) {
                Ok(installation) => installation,
                Err(e) => {
                    return Task::ready(Err(Arc::new(
                        e.context("project is gone during default prettier installation"),
                    )))
                    .shared()
                }
            };
            match default_prettier_installing {
                Some(installation_task) => {
                    // Successful install: proceed to (re)use the instance.
                    if installation_task.await.is_ok() {
                        break;
                    }
                }
                None => break,
            }
        }
        // Reuse the cached instance, or start a new prettier in the default dir
        // and cache its shared task before returning it.
        match project.update(&mut cx, |project, cx| {
            match project
                .default_prettier
                .as_mut()
                .and_then(|default_prettier| default_prettier.instance.as_mut())
            {
                Some(default_prettier) => default_prettier.clone(),
                None => {
                    let new_default_prettier =
                        start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
                    project
                        .default_prettier
                        .get_or_insert_with(|| DefaultPrettier {
                            instance: None,
                            installation_process: None,
                            // `installed_plugins` only exists outside test builds.
                            #[cfg(not(any(test, feature = "test-support")))]
                            installed_plugins: HashSet::default(),
                        })
                        .instance = Some(new_default_prettier.clone());
                    new_default_prettier
                }
            }
        }) {
            Ok(task) => task,
            Err(e) => Task::ready(Err(Arc::new(
                e.context("project is gone during default prettier startup"),
            )))
            .shared(),
        }
    })
}
/// Spawns a prettier server in `prettier_dir` and returns the shared task
/// resolving to the started instance.
///
/// Allocates a fresh language-server id from the project, launches the
/// prettier process, then registers it (logging + supplementary language
/// server bookkeeping) via [`register_new_prettier`].
fn start_prettier(
    node: Arc<dyn NodeRuntime>,
    prettier_dir: PathBuf,
    worktree_id: Option<WorktreeId>,
    cx: &mut ModelContext<'_, Project>,
) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
    cx.spawn(|project, mut cx| async move {
        // A unique server id so the prettier shows up alongside real language servers.
        let server_id = project.update(&mut cx, |project, _| {
            project.languages.next_language_server_id()
        })?;
        let started = Prettier::start(server_id, prettier_dir, node, cx.clone())
            .await
            .context("default prettier spawn")
            .map_err(Arc::new)?;
        let prettier = Arc::new(started);
        register_new_prettier(&project, &prettier, worktree_id, server_id, &mut cx);
        Ok(prettier)
    })
    .shared()
}
/// Records a freshly-started prettier on the project: logs the start, and if
/// the instance exposes a language-server handle, registers it as a
/// supplementary language server under a human-readable name derived from its
/// directory relative to the worktree.
fn register_new_prettier(
    project: &WeakModel<Project>,
    prettier: &Prettier,
    worktree_id: Option<WorktreeId>,
    new_server_id: LanguageServerId,
    cx: &mut AsyncAppContext,
) {
    let prettier_dir = prettier.prettier_dir();
    let is_default = prettier.is_default();
    if !is_default {
        log::info!("Started prettier in {prettier_dir:?}");
    } else {
        log::info!("Started default prettier in {prettier_dir:?}");
    }
    // Prettier instances without a server handle need no registration.
    let Some(server) = prettier.server() else {
        return;
    };
    project
        .update(cx, |project, cx| {
            let server_name = if is_default {
                LanguageServerName(Arc::from("prettier (default)"))
            } else {
                let worktree_root = worktree_id
                    .and_then(|id| project.worktree_for_id(id, cx))
                    .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
                // Display the prettier dir relative to the worktree root when possible.
                let label = match worktree_root {
                    Some(root) if prettier_dir == root.as_ref() => {
                        let name = prettier_dir
                            .file_name()
                            .and_then(|name| name.to_str())
                            .unwrap_or_default();
                        format!("prettier ({name})")
                    }
                    Some(root) => {
                        let shown_dir = prettier_dir
                            .strip_prefix(root.as_ref())
                            .ok()
                            .unwrap_or(prettier_dir);
                        format!("prettier ({})", shown_dir.display())
                    }
                    None => format!("prettier ({})", prettier_dir.display()),
                };
                LanguageServerName(Arc::from(label))
            };
            project
                .supplementary_language_servers
                .insert(new_server_id, (server_name, Arc::clone(server)));
            cx.emit(Event::LanguageServerAdded(new_server_id));
        })
        .ok();
}
/// Installs (or refreshes) the default prettier plus the given plugins into
/// the shared `DEFAULT_PRETTIER_DIR` via npm.
///
/// Writes the wrapper server script first, resolves latest published versions
/// for prettier and every plugin concurrently, then installs them all in one
/// npm invocation.
///
/// # Errors
/// Fails when writing the wrapper file, querying npm for versions, or the
/// install itself fails.
#[cfg(not(any(test, feature = "test-support")))]
async fn install_default_prettier(
    plugins_to_install: HashSet<&'static str>,
    node: Arc<dyn NodeRuntime>,
    fs: Arc<dyn Fs>,
) -> anyhow::Result<()> {
    let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
    // `save` creates the parent directory if it doesn't exist.
    fs.save(
        &prettier_wrapper_path,
        &text::Rope::from(prettier::PRETTIER_SERVER_JS),
        text::LineEnding::Unix,
    )
    .await
    .with_context(|| {
        format!(
            "writing {} file at {prettier_wrapper_path:?}",
            prettier::PRETTIER_SERVER_FILE
        )
    })?;

    // Look up the latest npm version of prettier itself and of each plugin,
    // all concurrently; any single failure aborts the install.
    let version_lookups = plugins_to_install
        .iter()
        .chain(std::iter::once(&"prettier"))
        .map(|package_name| async {
            let returned_package_name = package_name.to_string();
            let latest_version = node
                .npm_package_latest_version(package_name)
                .await
                .with_context(|| {
                    format!("fetching latest npm version for package {returned_package_name}")
                })?;
            anyhow::Ok((returned_package_name, latest_version))
        });
    let packages_to_versions = future::try_join_all(version_lookups)
        .await
        .context("fetching latest npm versions")?;

    log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
    let borrowed_packages = packages_to_versions
        .iter()
        .map(|(package, version)| (package.as_str(), version.as_str()))
        .collect::<Vec<_>>();
    node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
        .await
        .context("fetching formatter packages")?;
    Ok(())
}
fn subscribe_for_copilot_events( fn subscribe_for_copilot_events(
copilot: &Model<Copilot>, copilot: &Model<Copilot>,
cx: &mut ModelContext<'_, Project>, cx: &mut ModelContext<'_, Project>,

View file

@ -1,4 +1,4 @@
use crate::{search::PathMatcher, Event, *}; use crate::{Event, *};
use fs::FakeFs; use fs::FakeFs;
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use gpui::AppContext; use gpui::AppContext;
@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json; use serde_json::json;
use std::{os, task::Poll}; use std::{os, task::Poll};
use unindent::Unindent as _; use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree}; use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[gpui::test] #[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) { async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {

View file

@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use client::proto; use client::proto;
use globset::{Glob, GlobMatcher};
use itertools::Itertools; use itertools::Itertools;
use language::{char_kind, BufferSnapshot}; use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder}; use regex::{Regex, RegexBuilder};
@ -10,9 +9,10 @@ use std::{
borrow::Cow, borrow::Cow,
io::{BufRead, BufReader, Read}, io::{BufRead, BufReader, Read},
ops::Range, ops::Range,
path::{Path, PathBuf}, path::Path,
sync::Arc, sync::Arc,
}; };
use util::paths::PathMatcher;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SearchInputs { pub struct SearchInputs {
@ -52,31 +52,6 @@ pub enum SearchQuery {
}, },
} }
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
impl SearchQuery { impl SearchQuery {
pub fn text( pub fn text(
query: impl ToString, query: impl ToString,

View file

@ -0,0 +1,29 @@
[package]
name = "rich_text2"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/rich_text.rs"
doctest = false
[features]
test-support = [
"gpui/test-support",
"util/test-support",
]
[dependencies]
collections = { path = "../collections" }
gpui = { package = "gpui2", path = "../gpui2" }
sum_tree = { path = "../sum_tree" }
theme = { package = "theme2", path = "../theme2" }
language = { package = "language2", path = "../language2" }
util = { path = "../util" }
anyhow.workspace = true
futures.workspace = true
lazy_static.workspace = true
pulldown-cmark = { version = "0.9.2", default-features = false }
smallvec.workspace = true
smol.workspace = true

View file

@ -0,0 +1,373 @@
use std::{ops::Range, sync::Arc};
use anyhow::bail;
use futures::FutureExt;
use gpui::{AnyElement, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
use language::{HighlightId, Language, LanguageRegistry};
use util::RangeExt;
/// One source of styling applied to a byte range of rendered rich text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Highlight {
    /// A theme syntax-highlight id, resolved to a concrete style later.
    Id(HighlightId),
    /// An explicit, already-resolved style.
    Highlight(HighlightStyle),
    /// A mention of another user.
    Mention,
    /// A mention of the current user.
    SelfMention,
}
impl From<HighlightStyle> for Highlight {
    /// Wraps a concrete style as [`Highlight::Highlight`].
    fn from(style: HighlightStyle) -> Self {
        Self::Highlight(style)
    }
}

impl From<HighlightId> for Highlight {
    /// Wraps a syntax-highlight id as [`Highlight::Id`].
    // Parameter renamed from `style` — it is an id, not a style.
    fn from(id: HighlightId) -> Self {
        Self::Id(id)
    }
}
/// Markdown rendered to plain text plus styling/interaction metadata.
#[derive(Debug, Clone)]
pub struct RichText {
    /// The rendered plain text.
    pub text: String,
    /// Byte ranges of `text` with the highlight to apply to each.
    pub highlights: Vec<(Range<usize>, Highlight)>,
    /// Byte ranges of `text` for regions; parallel to `regions`
    /// (`region_ranges[i]` describes `regions[i]`).
    pub region_ranges: Vec<Range<usize>>,
    /// Background/link metadata for each entry of `region_ranges`.
    pub regions: Vec<RenderedRegion>,
}
/// Which kind of background a rendered region should receive.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BackgroundKind {
    /// Background for inline code / code blocks.
    Code,
    /// A mention background for non-self user.
    Mention,
    /// Background for a mention of the current user.
    SelfMention,
}
/// Per-region rendering metadata: optional background and optional link target.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RenderedRegion {
    /// Background to draw behind the region, if any.
    pub background_kind: Option<BackgroundKind>,
    /// URL to open when the region is clicked, if it is a link.
    pub link_url: Option<String>,
}
/// Allows one to specify extra links to the rendered markdown, which can be used
/// for e.g. mentions.
pub struct Mention {
    /// Byte range of the mention within the source markdown block.
    pub range: Range<usize>,
    /// Whether the mention refers to the current user.
    pub is_self_mention: bool,
}
impl RichText {
    /// Renders this rich text as a GPUI element.
    ///
    /// Currently unimplemented (`todo!`); the commented-out body below is the
    /// pre-port renderer kept as a reference for the gpui2 migration.
    pub fn element<V: 'static>(
        &self,
        // syntax: Arc<SyntaxTheme>,
        // style: RichTextStyle,
        // cx: &mut ViewContext<V>,
    ) -> AnyElement<V> {
        todo!();
        // let mut region_id = 0;
        // let view_id = cx.view_id();

        // let regions = self.regions.clone();

        // enum Markdown {}
        // Text::new(self.text.clone(), style.text.clone())
        //     .with_highlights(
        //         self.highlights
        //             .iter()
        //             .filter_map(|(range, highlight)| {
        //                 let style = match highlight {
        //                     Highlight::Id(id) => id.style(&syntax)?,
        //                     Highlight::Highlight(style) => style.clone(),
        //                     Highlight::Mention => style.mention_highlight,
        //                     Highlight::SelfMention => style.self_mention_highlight,
        //                 };
        //                 Some((range.clone(), style))
        //             })
        //             .collect::<Vec<_>>(),
        //     )
        //     .with_custom_runs(self.region_ranges.clone(), move |ix, bounds, cx| {
        //         region_id += 1;
        //         let region = regions[ix].clone();
        //         if let Some(url) = region.link_url {
        //             cx.scene().push_cursor_region(CursorRegion {
        //                 bounds,
        //                 style: CursorStyle::PointingHand,
        //             });
        //             cx.scene().push_mouse_region(
        //                 MouseRegion::new::<Markdown>(view_id, region_id, bounds)
        //                     .on_click::<V, _>(MouseButton::Left, move |_, _, cx| {
        //                         cx.platform().open_url(&url)
        //                     }),
        //             );
        //         }
        //         if let Some(region_kind) = &region.background_kind {
        //             let background = match region_kind {
        //                 BackgroundKind::Code => style.code_background,
        //                 BackgroundKind::Mention => style.mention_background,
        //                 BackgroundKind::SelfMention => style.self_mention_background,
        //             };
        //             if background.is_some() {
        //                 cx.scene().push_quad(gpui::Quad {
        //                     bounds,
        //                     background,
        //                     border: Default::default(),
        //                     corner_radii: (2.0).into(),
        //                 });
        //             }
        //         }
        //     })
        //     .with_soft_wrap(true)
        //     .into_any()
    }

    /// Applies `mention_style` as a highlight over `range`, and — only when
    /// the mention targets the current user — also records a
    /// [`BackgroundKind::Mention`] region for it.
    ///
    /// # Errors
    /// Fails when `range.end` lies beyond the end of `self.text`.
    pub fn add_mention(
        &mut self,
        range: Range<usize>,
        is_current_user: bool,
        mention_style: HighlightStyle,
    ) -> anyhow::Result<()> {
        if range.end > self.text.len() {
            bail!(
                "Mention in range {range:?} is outside of bounds for a message of length {}",
                self.text.len()
            );
        }

        if is_current_user {
            self.region_ranges.push(range.clone());
            self.regions.push(RenderedRegion {
                background_kind: Some(BackgroundKind::Mention),
                link_url: None,
            });
        }
        self.highlights
            .push((range, Highlight::Highlight(mention_style)));
        Ok(())
    }
}
/// Parses `block` as Markdown and appends its rendered form to `data`,
/// recording highlight ranges and background/link regions as it goes.
///
/// Fenced code blocks are highlighted via `language_registry`; unfenced ones
/// fall back to `language`. Assumes `mentions` is ordered by source position —
/// entries are consumed front-to-back as their ranges are reached (TODO:
/// confirm at call sites).
pub fn render_markdown_mut(
    block: &str,
    mut mentions: &[Mention],
    language_registry: &Arc<LanguageRegistry>,
    language: Option<&Arc<Language>>,
    data: &mut RichText,
) {
    use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};

    // Nested emphasis/strong tags are counted so overlapping spans still style.
    let mut bold_depth = 0;
    let mut italic_depth = 0;
    let mut link_url = None;
    let mut current_language = None;
    // (list number, whether the current item already produced content).
    let mut list_stack = Vec::new();

    let options = Options::all();
    for (event, source_range) in Parser::new_ext(&block, options).into_offset_iter() {
        // Output length before this event; used to map source offsets to output.
        let prev_len = data.text.len();
        match event {
            Event::Text(t) => {
                if let Some(language) = &current_language {
                    // Inside a code block: delegate to syntax highlighting.
                    render_code(&mut data.text, &mut data.highlights, t.as_ref(), language);
                } else {
                    // Splice in the next pending mention if this text event
                    // covers its source range.
                    if let Some(mention) = mentions.first() {
                        if source_range.contains_inclusive(&mention.range) {
                            mentions = &mentions[1..];
                            let range = (prev_len + mention.range.start - source_range.start)
                                ..(prev_len + mention.range.end - source_range.start);
                            data.highlights.push((
                                range.clone(),
                                if mention.is_self_mention {
                                    Highlight::SelfMention
                                } else {
                                    Highlight::Mention
                                },
                            ));
                            data.region_ranges.push(range);
                            data.regions.push(RenderedRegion {
                                background_kind: Some(if mention.is_self_mention {
                                    BackgroundKind::SelfMention
                                } else {
                                    BackgroundKind::Mention
                                }),
                                link_url: None,
                            });
                        }
                    }

                    data.text.push_str(t.as_ref());

                    // Combine the currently-open bold/italic/link state into
                    // one style for this text run.
                    let mut style = HighlightStyle::default();
                    if bold_depth > 0 {
                        style.font_weight = Some(FontWeight::BOLD);
                    }
                    if italic_depth > 0 {
                        style.font_style = Some(FontStyle::Italic);
                    }
                    if let Some(link_url) = link_url.clone() {
                        data.region_ranges.push(prev_len..data.text.len());
                        data.regions.push(RenderedRegion {
                            link_url: Some(link_url),
                            background_kind: None,
                        });
                        style.underline = Some(UnderlineStyle {
                            thickness: 1.0.into(),
                            ..Default::default()
                        });
                    }

                    if style != HighlightStyle::default() {
                        // Extend the previous highlight when it is adjacent and
                        // identical, instead of pushing a duplicate entry.
                        let mut new_highlight = true;
                        if let Some((last_range, last_style)) = data.highlights.last_mut() {
                            if last_range.end == prev_len
                                && last_style == &Highlight::Highlight(style)
                            {
                                last_range.end = data.text.len();
                                new_highlight = false;
                            }
                        }
                        if new_highlight {
                            data.highlights
                                .push((prev_len..data.text.len(), Highlight::Highlight(style)));
                        }
                    }
                }
            }
            Event::Code(t) => {
                // Inline code: code background, underlined when inside a link.
                data.text.push_str(t.as_ref());
                data.region_ranges.push(prev_len..data.text.len());
                if link_url.is_some() {
                    data.highlights.push((
                        prev_len..data.text.len(),
                        Highlight::Highlight(HighlightStyle {
                            underline: Some(UnderlineStyle {
                                thickness: 1.0.into(),
                                ..Default::default()
                            }),
                            ..Default::default()
                        }),
                    ));
                }
                data.regions.push(RenderedRegion {
                    background_kind: Some(BackgroundKind::Code),
                    link_url: link_url.clone(),
                });
            }
            Event::Start(tag) => match tag {
                Tag::Paragraph => new_paragraph(&mut data.text, &mut list_stack),
                Tag::Heading(_, _, _) => {
                    new_paragraph(&mut data.text, &mut list_stack);
                    bold_depth += 1;
                }
                Tag::CodeBlock(kind) => {
                    new_paragraph(&mut data.text, &mut list_stack);
                    // Fenced blocks name their language; resolve it without
                    // blocking (`now_or_never`), else fall back to `language`.
                    current_language = if let CodeBlockKind::Fenced(language) = kind {
                        language_registry
                            .language_for_name(language.as_ref())
                            .now_or_never()
                            .and_then(Result::ok)
                    } else {
                        language.cloned()
                    }
                }
                Tag::Emphasis => italic_depth += 1,
                Tag::Strong => bold_depth += 1,
                Tag::Link(_, url, _) => link_url = Some(url.to_string()),
                Tag::List(number) => {
                    list_stack.push((number, false));
                }
                Tag::Item => {
                    // Emit the list marker ("1. " / "- ") indented by depth;
                    // numbered lists advance their counter per item.
                    let len = list_stack.len();
                    if let Some((list_number, has_content)) = list_stack.last_mut() {
                        *has_content = false;
                        if !data.text.is_empty() && !data.text.ends_with('\n') {
                            data.text.push('\n');
                        }
                        for _ in 0..len - 1 {
                            data.text.push_str("  ");
                        }
                        if let Some(number) = list_number {
                            data.text.push_str(&format!("{}. ", number));
                            *number += 1;
                            *has_content = false;
                        } else {
                            data.text.push_str("- ");
                        }
                    }
                }
                _ => {}
            },
            Event::End(tag) => match tag {
                Tag::Heading(_, _, _) => bold_depth -= 1,
                Tag::CodeBlock(_) => current_language = None,
                Tag::Emphasis => italic_depth -= 1,
                Tag::Strong => bold_depth -= 1,
                Tag::Link(_, _, _) => link_url = None,
                Tag::List(_) => drop(list_stack.pop()),
                _ => {}
            },
            Event::HardBreak => data.text.push('\n'),
            Event::SoftBreak => data.text.push(' '),
            _ => {}
        }
    }
}
/// Renders a Markdown `block` into a fresh [`RichText`], trimming surrounding
/// whitespace from the final text.
///
/// See [`render_markdown_mut`] for how `mentions`, `language_registry`, and
/// `language` are used.
pub fn render_markdown(
    block: String,
    mentions: &[Mention],
    language_registry: &Arc<LanguageRegistry>,
    language: Option<&Arc<Language>>,
) -> RichText {
    let mut rich_text = RichText {
        text: String::new(),
        highlights: Vec::new(),
        region_ranges: Vec::new(),
        regions: Vec::new(),
    };
    render_markdown_mut(&block, mentions, language_registry, language, &mut rich_text);
    rich_text.text = rich_text.text.trim().to_string();
    rich_text
}
/// Appends `content` to `text` and records syntax highlights for it, shifting
/// the language's highlight ranges by the text's previous length so they point
/// into the combined output.
pub fn render_code(
    text: &mut String,
    highlights: &mut Vec<(Range<usize>, Highlight)>,
    content: &str,
    language: &Arc<Language>,
) {
    let offset = text.len();
    text.push_str(content);
    highlights.extend(
        language
            .highlight_text(&content.into(), 0..content.len())
            .into_iter()
            .map(|(range, highlight_id)| {
                (
                    offset + range.start..offset + range.end,
                    Highlight::Id(highlight_id),
                )
            }),
    );
}
/// Starts a new paragraph in `text`, honoring list context.
///
/// The first paragraph of a list item only marks the item as having content
/// (it stays on the marker's line). Otherwise a blank line separates
/// paragraphs, and nested list depth is reproduced with two-space indents —
/// one extra level for a continuation paragraph inside an item.
pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
    let mut continuing_list_item = false;
    if let Some((_, has_content)) = list_stack.last_mut() {
        if !*has_content {
            // First paragraph of the item: it follows the marker directly.
            *has_content = true;
            return;
        }
        continuing_list_item = true;
    }

    if !text.is_empty() {
        if !text.ends_with('\n') {
            text.push('\n');
        }
        text.push('\n');
    }

    let mut indent_levels = list_stack.len().saturating_sub(1);
    if continuing_list_item {
        indent_levels += 1;
    }
    for _ in 0..indent_levels {
        text.push_str("  ");
    }
}

View file

@ -29,7 +29,6 @@ serde.workspace = true
serde_derive.workspace = true serde_derive.workspace = true
smallvec.workspace = true smallvec.workspace = true
smol.workspace = true smol.workspace = true
globset.workspace = true
serde_json.workspace = true serde_json.workspace = true
[dev-dependencies] [dev-dependencies]
client = { path = "../client", features = ["test-support"] } client = { path = "../client", features = ["test-support"] }

View file

@ -22,7 +22,7 @@ use gpui::{
}; };
use menu::Confirm; use menu::Confirm;
use project::{ use project::{
search::{PathMatcher, SearchInputs, SearchQuery}, search::{SearchInputs, SearchQuery},
Entry, Project, Entry, Project,
}; };
use semantic_index::{SemanticIndex, SemanticIndexStatus}; use semantic_index::{SemanticIndex, SemanticIndexStatus};
@ -37,7 +37,7 @@ use std::{
sync::Arc, sync::Arc,
time::{Duration, Instant}, time::{Duration, Instant},
}; };
use util::ResultExt as _; use util::{paths::PathMatcher, ResultExt as _};
use workspace::{ use workspace::{
item::{BreadcrumbText, Item, ItemEvent, ItemHandle}, item::{BreadcrumbText, Item, ItemEvent, ItemHandle},
searchable::{Direction, SearchableItem, SearchableItemHandle}, searchable::{Direction, SearchableItem, SearchableItemHandle},

View file

@ -9,7 +9,7 @@ use futures::channel::oneshot;
use gpui::executor; use gpui::executor;
use ndarray::{Array1, Array2}; use ndarray::{Array1, Array2};
use ordered_float::OrderedFloat; use ordered_float::OrderedFloat;
use project::{search::PathMatcher, Fs}; use project::Fs;
use rpc::proto::Timestamp; use rpc::proto::Timestamp;
use rusqlite::params; use rusqlite::params;
use rusqlite::types::Value; use rusqlite::types::Value;
@ -21,7 +21,7 @@ use std::{
sync::Arc, sync::Arc,
time::SystemTime, time::SystemTime,
}; };
use util::TryFutureExt; use util::{paths::PathMatcher, TryFutureExt};
pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> { pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
let mut indices = (0..data.len()).collect::<Vec<_>>(); let mut indices = (0..data.len()).collect::<Vec<_>>();

View file

@ -21,7 +21,7 @@ use ordered_float::OrderedFloat;
use parking_lot::Mutex; use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES}; use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch; use postage::watch;
use project::{search::PathMatcher, Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId}; use project::{Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
use smol::channel; use smol::channel;
use std::{ use std::{
cmp::Reverse, cmp::Reverse,
@ -33,6 +33,7 @@ use std::{
sync::{Arc, Weak}, sync::{Arc, Weak},
time::{Duration, Instant, SystemTime}, time::{Duration, Instant, SystemTime},
}; };
use util::paths::PathMatcher;
use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt}; use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt};
use workspace::WorkspaceCreated; use workspace::WorkspaceCreated;

View file

@ -10,13 +10,13 @@ use gpui::{executor::Deterministic, Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset}; use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use parking_lot::Mutex; use parking_lot::Mutex;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project}; use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng}; use rand::{rngs::StdRng, Rng};
use serde_json::json; use serde_json::json;
use settings::SettingsStore; use settings::SettingsStore;
use std::{path::Path, sync::Arc, time::SystemTime}; use std::{path::Path, sync::Arc, time::SystemTime};
use unindent::Unindent; use unindent::Unindent;
use util::RandomCharIter; use util::{paths::PathMatcher, RandomCharIter};
#[ctor::ctor] #[ctor::ctor]
fn init_logger() { fn init_logger() {
@ -289,12 +289,12 @@ async fn test_code_context_retrieval_rust() {
impl E { impl E {
// This is also a preceding comment // This is also a preceding comment
pub fn function_1() -> Option<()> { pub fn function_1() -> Option<()> {
todo!(); unimplemented!();
} }
// This is a preceding comment // This is a preceding comment
fn function_2() -> Result<()> { fn function_2() -> Result<()> {
todo!(); unimplemented!();
} }
} }
@ -344,7 +344,7 @@ async fn test_code_context_retrieval_rust() {
" "
// This is also a preceding comment // This is also a preceding comment
pub fn function_1() -> Option<()> { pub fn function_1() -> Option<()> {
todo!(); unimplemented!();
}" }"
.unindent(), .unindent(),
text.find("pub fn function_1").unwrap(), text.find("pub fn function_1").unwrap(),
@ -353,7 +353,7 @@ async fn test_code_context_retrieval_rust() {
" "
// This is a preceding comment // This is a preceding comment
fn function_2() -> Result<()> { fn function_2() -> Result<()> {
todo!(); unimplemented!();
}" }"
.unindent(), .unindent(),
text.find("fn function_2").unwrap(), text.find("fn function_2").unwrap(),

View file

@ -17,7 +17,7 @@ doctest = false
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
fs = { path = "../fs" } fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
indexmap = "1.6.2" indexmap = "1.6.2"
parking_lot.workspace = true parking_lot.workspace = true
@ -32,5 +32,5 @@ util = { path = "../util" }
[dev-dependencies] [dev-dependencies]
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] } fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] } settings = { package = "settings2", path = "../settings2", features = ["test-support"] }

View file

@ -14,6 +14,7 @@ test-support = ["tempdir", "git2"]
[dependencies] [dependencies]
anyhow.workspace = true anyhow.workspace = true
backtrace = "0.3" backtrace = "0.3"
globset.workspace = true
log.workspace = true log.workspace = true
lazy_static.workspace = true lazy_static.workspace = true
futures.workspace = true futures.workspace = true

View file

@ -1,5 +1,6 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use globset::{Glob, GlobMatcher};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
lazy_static::lazy_static! { lazy_static::lazy_static! {
@ -189,6 +190,31 @@ impl<P> PathLikeWithPosition<P> {
} }
} }
#[derive(Clone, Debug)]
pub struct PathMatcher {
maybe_path: PathBuf,
glob: GlobMatcher,
}
impl std::fmt::Display for PathMatcher {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.maybe_path.to_string_lossy().fmt(f)
}
}
impl PathMatcher {
pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
Ok(PathMatcher {
glob: Glob::new(&maybe_glob)?.compile_matcher(),
maybe_path: PathBuf::from(maybe_glob),
})
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View file

@ -14,7 +14,7 @@ test-support = [
"client2/test-support", "client2/test-support",
"project2/test-support", "project2/test-support",
"settings2/test-support", "settings2/test-support",
"gpui2/test-support", "gpui/test-support",
"fs2/test-support" "fs2/test-support"
] ]
@ -25,7 +25,7 @@ client2 = { path = "../client2" }
collections = { path = "../collections" } collections = { path = "../collections" }
# context_menu = { path = "../context_menu" } # context_menu = { path = "../context_menu" }
fs2 = { path = "../fs2" } fs2 = { path = "../fs2" }
gpui2 = { path = "../gpui2" } gpui = { package = "gpui2", path = "../gpui2" }
install_cli2 = { path = "../install_cli2" } install_cli2 = { path = "../install_cli2" }
language2 = { path = "../language2" } language2 = { path = "../language2" }
#menu = { path = "../menu" } #menu = { path = "../menu" }
@ -56,7 +56,7 @@ uuid.workspace = true
[dev-dependencies] [dev-dependencies]
call2 = { path = "../call2", features = ["test-support"] } call2 = { path = "../call2", features = ["test-support"] }
client2 = { path = "../client2", features = ["test-support"] } client2 = { path = "../client2", features = ["test-support"] }
gpui2 = { path = "../gpui2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
project2 = { path = "../project2", features = ["test-support"] } project2 = { path = "../project2", features = ["test-support"] }
settings2 = { path = "../settings2", features = ["test-support"] } settings2 = { path = "../settings2", features = ["test-support"] }
fs2 = { path = "../fs2", features = ["test-support"] } fs2 = { path = "../fs2", features = ["test-support"] }

View file

@ -1,5 +1,5 @@
use crate::{status_bar::StatusItemView, Axis, Workspace}; use crate::{status_bar::StatusItemView, Axis, Workspace};
use gpui2::{ use gpui::{
div, Action, AnyView, AppContext, Div, Entity, EntityId, EventEmitter, ParentElement, Render, div, Action, AnyView, AppContext, Div, Entity, EntityId, EventEmitter, ParentElement, Render,
Subscription, View, ViewContext, WeakView, WindowContext, Subscription, View, ViewContext, WeakView, WindowContext,
}; };
@ -226,9 +226,9 @@ impl Dock {
// }) // })
} }
// pub fn active_panel_index(&self) -> usize { pub fn active_panel_index(&self) -> usize {
// self.active_panel_index self.active_panel_index
// } }
pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext<Self>) { pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext<Self>) {
if open != self.is_open { if open != self.is_open {
@ -241,84 +241,87 @@ impl Dock {
} }
} }
// pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) { // todo!()
// for entry in &mut self.panel_entries { // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
// if entry.panel.as_any() == panel { // for entry in &mut self.panel_entries {
// if zoomed != entry.panel.is_zoomed(cx) { // if entry.panel.as_any() == panel {
// entry.panel.set_zoomed(zoomed, cx); // if zoomed != entry.panel.is_zoomed(cx) {
// } // entry.panel.set_zoomed(zoomed, cx);
// } else if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// }
// }
// cx.notify();
// }
// pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
// for entry in &mut self.panel_entries {
// if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// } // }
// } else if entry.panel.is_zoomed(cx) {
// entry.panel.set_zoomed(false, cx);
// } // }
// } // }
// pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) { // cx.notify();
// let subscriptions = [ // }
// cx.observe(&panel, |_, _, cx| cx.notify()),
// cx.subscribe(&panel, |this, panel, event, cx| {
// if T::should_activate_on_event(event) {
// if let Some(ix) = this
// .panel_entries
// .iter()
// .position(|entry| entry.panel.id() == panel.id())
// {
// this.set_open(true, cx);
// this.activate_panel(ix, cx);
// cx.focus(&panel);
// }
// } else if T::should_close_on_event(event)
// && this.visible_panel().map_or(false, |p| p.id() == panel.id())
// {
// this.set_open(false, cx);
// }
// }),
// ];
// let dock_view_id = cx.view_id(); pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
// self.panel_entries.push(PanelEntry { for entry in &mut self.panel_entries {
// panel: Arc::new(panel), if entry.panel.is_zoomed(cx) {
// // todo!() entry.panel.set_zoomed(false, cx);
// // context_menu: cx.add_view(|cx| { }
// // let mut menu = ContextMenu::new(dock_view_id, cx); }
// // menu.set_position_mode(OverlayPositionMode::Local); }
// // menu
// // }),
// _subscriptions: subscriptions,
// });
// cx.notify()
// }
// pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) { pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
// if let Some(panel_ix) = self let subscriptions = [
// .panel_entries cx.observe(&panel, |_, _, cx| cx.notify()),
// .iter() cx.subscribe(&panel, |this, panel, event, cx| {
// .position(|entry| entry.panel.id() == panel.id()) if T::should_activate_on_event(event) {
// { if let Some(ix) = this
// if panel_ix == self.active_panel_index { .panel_entries
// self.active_panel_index = 0; .iter()
// self.set_open(false, cx); .position(|entry| entry.panel.id() == panel.id())
// } else if panel_ix < self.active_panel_index { {
// self.active_panel_index -= 1; this.set_open(true, cx);
// } this.activate_panel(ix, cx);
// self.panel_entries.remove(panel_ix); // todo!()
// cx.notify(); // cx.focus(&panel);
// } }
// } } else if T::should_close_on_event(event)
&& this.visible_panel().map_or(false, |p| p.id() == panel.id())
{
this.set_open(false, cx);
}
}),
];
// pub fn panels_len(&self) -> usize { // todo!()
// self.panel_entries.len() // let dock_view_id = cx.view_id();
// } self.panel_entries.push(PanelEntry {
panel: Arc::new(panel),
// todo!()
// context_menu: cx.add_view(|cx| {
// let mut menu = ContextMenu::new(dock_view_id, cx);
// menu.set_position_mode(OverlayPositionMode::Local);
// menu
// }),
_subscriptions: subscriptions,
});
cx.notify()
}
pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) {
if let Some(panel_ix) = self
.panel_entries
.iter()
.position(|entry| entry.panel.id() == panel.id())
{
if panel_ix == self.active_panel_index {
self.active_panel_index = 0;
self.set_open(false, cx);
} else if panel_ix < self.active_panel_index {
self.active_panel_index -= 1;
}
self.panel_entries.remove(panel_ix);
cx.notify();
}
}
pub fn panels_len(&self) -> usize {
self.panel_entries.len()
}
pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext<Self>) { pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext<Self>) {
if panel_ix != self.active_panel_index { if panel_ix != self.active_panel_index {
@ -352,38 +355,38 @@ impl Dock {
} }
} }
// pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> { pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> {
// let entry = self.visible_entry()?; let entry = self.visible_entry()?;
// if entry.panel.is_zoomed(cx) { if entry.panel.is_zoomed(cx) {
// Some(entry.panel.clone()) Some(entry.panel.clone())
// } else { } else {
// None None
// } }
// } }
// pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> { pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
// self.panel_entries self.panel_entries
// .iter() .iter()
// .find(|entry| entry.panel.id() == panel.id()) .find(|entry| entry.panel.id() == panel.id())
// .map(|entry| entry.panel.size(cx)) .map(|entry| entry.panel.size(cx))
// } }
// pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> { pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> {
// if self.is_open { if self.is_open {
// self.panel_entries self.panel_entries
// .get(self.active_panel_index) .get(self.active_panel_index)
// .map(|entry| entry.panel.size(cx)) .map(|entry| entry.panel.size(cx))
// } else { } else {
// None None
// } }
// } }
// pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) { pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) {
// if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) { if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) {
// entry.panel.set_size(size, cx); entry.panel.set_size(size, cx);
// cx.notify(); cx.notify();
// } }
// } }
// pub fn render_placeholder(&self, cx: &WindowContext) -> AnyElement<Workspace> { // pub fn render_placeholder(&self, cx: &WindowContext) -> AnyElement<Workspace> {
// todo!() // todo!()
@ -629,7 +632,7 @@ impl StatusItemView for PanelButtons {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub mod test { pub mod test {
use super::*; use super::*;
use gpui2::{div, Div, ViewContext, WindowContext}; use gpui::{div, Div, ViewContext, WindowContext};
#[derive(Debug)] #[derive(Debug)]
pub enum TestPanelEvent { pub enum TestPanelEvent {
@ -678,7 +681,7 @@ pub mod test {
"TestPanel" "TestPanel"
} }
fn position(&self, _: &gpui2::WindowContext) -> super::DockPosition { fn position(&self, _: &gpui::WindowContext) -> super::DockPosition {
self.position self.position
} }

View file

@ -11,7 +11,7 @@ use client2::{
proto::{self, PeerId}, proto::{self, PeerId},
Client, Client,
}; };
use gpui2::{ use gpui::{
AnyElement, AnyView, AppContext, Entity, EntityId, EventEmitter, HighlightStyle, Model, Pixels, AnyElement, AnyView, AppContext, Entity, EntityId, EventEmitter, HighlightStyle, Model, Pixels,
Point, Render, SharedString, Task, View, ViewContext, WeakView, WindowContext, Point, Render, SharedString, Task, View, ViewContext, WeakView, WindowContext,
}; };
@ -212,7 +212,7 @@ pub trait ItemHandle: 'static + Send {
&self, &self,
cx: &mut WindowContext, cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>, handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
) -> gpui2::Subscription; ) -> gpui::Subscription;
fn tab_tooltip_text(&self, cx: &AppContext) -> Option<SharedString>; fn tab_tooltip_text(&self, cx: &AppContext) -> Option<SharedString>;
fn tab_description(&self, detail: usize, cx: &AppContext) -> Option<SharedString>; fn tab_description(&self, detail: usize, cx: &AppContext) -> Option<SharedString>;
fn tab_content(&self, detail: Option<usize>, cx: &AppContext) -> AnyElement<Pane>; fn tab_content(&self, detail: Option<usize>, cx: &AppContext) -> AnyElement<Pane>;
@ -256,7 +256,7 @@ pub trait ItemHandle: 'static + Send {
&mut self, &mut self,
cx: &mut AppContext, cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>, callback: Box<dyn FnOnce(&mut AppContext) + Send>,
) -> gpui2::Subscription; ) -> gpui::Subscription;
fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>; fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation; fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
fn breadcrumbs(&self, theme: &ThemeVariant, cx: &AppContext) -> Option<Vec<BreadcrumbText>>; fn breadcrumbs(&self, theme: &ThemeVariant, cx: &AppContext) -> Option<Vec<BreadcrumbText>>;
@ -286,7 +286,7 @@ impl<T: Item> ItemHandle for View<T> {
&self, &self,
cx: &mut WindowContext, cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>, handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
) -> gpui2::Subscription { ) -> gpui::Subscription {
cx.subscribe(self, move |_, event, cx| { cx.subscribe(self, move |_, event, cx| {
for item_event in T::to_item_events(event) { for item_event in T::to_item_events(event) {
handler(item_event, cx) handler(item_event, cx)
@ -573,7 +573,7 @@ impl<T: Item> ItemHandle for View<T> {
&mut self, &mut self,
cx: &mut AppContext, cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>, callback: Box<dyn FnOnce(&mut AppContext) + Send>,
) -> gpui2::Subscription { ) -> gpui::Subscription {
cx.observe_release(self, move |_, cx| callback(cx)) cx.observe_release(self, move |_, cx| callback(cx))
} }
@ -747,7 +747,7 @@ impl<T: FollowableItem> FollowableItemHandle for View<T> {
// pub mod test { // pub mod test {
// use super::{Item, ItemEvent}; // use super::{Item, ItemEvent};
// use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId}; // use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
// use gpui2::{ // use gpui::{
// elements::Empty, AnyElement, AppContext, Element, Entity, Model, Task, View, // elements::Empty, AnyElement, AppContext, Element, Entity, Model, Task, View,
// ViewContext, View, WeakViewHandle, // ViewContext, View, WeakViewHandle,
// }; // };

View file

@ -1,6 +1,6 @@
use crate::{Toast, Workspace}; use crate::{Toast, Workspace};
use collections::HashMap; use collections::HashMap;
use gpui2::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext}; use gpui::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext};
use std::{any::TypeId, ops::DerefMut}; use std::{any::TypeId, ops::DerefMut};
pub fn init(cx: &mut AppContext) { pub fn init(cx: &mut AppContext) {
@ -160,7 +160,7 @@ impl Workspace {
pub mod simple_message_notification { pub mod simple_message_notification {
use super::Notification; use super::Notification;
use gpui2::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext}; use gpui::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
use serde::Deserialize; use serde::Deserialize;
use std::{borrow::Cow, sync::Arc}; use std::{borrow::Cow, sync::Arc};
@ -220,36 +220,36 @@ pub mod simple_message_notification {
} }
} }
pub fn new_element(
message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>,
) -> MessageNotification {
Self {
message: NotificationMessage::Element(message),
on_click: None,
click_message: None,
}
}
pub fn with_click_message<S>(mut self, message: S) -> Self
where
S: Into<Cow<'static, str>>,
{
self.click_message = Some(message.into());
self
}
pub fn on_click<F>(mut self, on_click: F) -> Self
where
F: 'static + Send + Sync + Fn(&mut ViewContext<Self>),
{
self.on_click = Some(Arc::new(on_click));
self
}
// todo!() // todo!()
// pub fn new_element( // pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
// message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>, // cx.emit(MessageNotificationEvent::Dismiss);
// ) -> MessageNotification { // }
// Self {
// message: NotificationMessage::Element(message),
// on_click: None,
// click_message: None,
// }
// }
// pub fn with_click_message<S>(mut self, message: S) -> Self
// where
// S: Into<Cow<'static, str>>,
// {
// self.click_message = Some(message.into());
// self
// }
// pub fn on_click<F>(mut self, on_click: F) -> Self
// where
// F: 'static + Fn(&mut ViewContext<Self>),
// {
// self.on_click = Some(Arc::new(on_click));
// self
// }
// pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
// cx.emit(MessageNotificationEvent::Dismiss);
// }
} }
impl Render for MessageNotification { impl Render for MessageNotification {
@ -265,7 +265,7 @@ pub mod simple_message_notification {
// "MessageNotification" // "MessageNotification"
// } // }
// fn render(&mut self, cx: &mut gpui2::ViewContext<Self>) -> gpui::AnyElement<Self> { // fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
// let theme = theme2::current(cx).clone(); // let theme = theme2::current(cx).clone();
// let theme = &theme.simple_message_notification; // let theme = &theme.simple_message_notification;

View file

@ -8,7 +8,7 @@ use crate::{
}; };
use anyhow::Result; use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque}; use collections::{HashMap, HashSet, VecDeque};
use gpui2::{ use gpui::{
AppContext, AsyncWindowContext, Component, Div, EntityId, EventEmitter, Model, PromptLevel, AppContext, AsyncWindowContext, Component, Div, EntityId, EventEmitter, Model, PromptLevel,
Render, Task, View, ViewContext, VisualContext, WeakView, WindowContext, Render, Task, View, ViewContext, VisualContext, WeakView, WindowContext,
}; };
@ -416,17 +416,17 @@ impl Pane {
} }
} }
// pub(crate) fn workspace(&self) -> &WeakView<Workspace> { pub(crate) fn workspace(&self) -> &WeakView<Workspace> {
// &self.workspace &self.workspace
// } }
pub fn has_focus(&self) -> bool { pub fn has_focus(&self) -> bool {
self.has_focus self.has_focus
} }
// pub fn active_item_index(&self) -> usize { pub fn active_item_index(&self) -> usize {
// self.active_item_index self.active_item_index
// } }
// pub fn on_can_drop<F>(&mut self, can_drop: F) // pub fn on_can_drop<F>(&mut self, can_drop: F)
// where // where
@ -1865,14 +1865,14 @@ impl Pane {
// .into_any() // .into_any()
// } // }
// pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) { pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) {
// self.zoomed = zoomed; self.zoomed = zoomed;
// cx.notify(); cx.notify();
// } }
// pub fn is_zoomed(&self) -> bool { pub fn is_zoomed(&self) -> bool {
// self.zoomed self.zoomed
// } }
} }
// impl Entity for Pane { // impl Entity for Pane {
@ -2907,6 +2907,6 @@ impl Render for DraggedTab {
type Element = Div<Self>; type Element = Div<Self>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element { fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
div().w_8().h_4().bg(gpui2::red()) div().w_8().h_4().bg(gpui::red())
} }
} }

View file

@ -1,6 +1,6 @@
use super::DraggedItem; use super::DraggedItem;
use crate::{Pane, SplitDirection, Workspace}; use crate::{Pane, SplitDirection, Workspace};
use gpui2::{ use gpui::{
color::Color, color::Color,
elements::{Canvas, MouseEventHandler, ParentElement, Stack}, elements::{Canvas, MouseEventHandler, ParentElement, Stack},
geometry::{rect::RectF, vector::Vector2F}, geometry::{rect::RectF, vector::Vector2F},

View file

@ -6,9 +6,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount}, bindable::{Bind, Column, StaticColumnCount},
statement::Statement, statement::Statement,
}; };
use gpui2::{ use gpui::{point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext};
point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext,
};
use parking_lot::Mutex; use parking_lot::Mutex;
use project2::Project; use project2::Project;
use serde::Deserialize; use serde::Deserialize;

View file

@ -6,7 +6,7 @@ use std::path::Path;
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use db2::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; use db2::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
use gpui2::WindowBounds; use gpui::WindowBounds;
use util::{unzip_option, ResultExt}; use util::{unzip_option, ResultExt};
use uuid::Uuid; use uuid::Uuid;
@ -549,425 +549,425 @@ impl WorkspaceDb {
} }
} }
// todo!() #[cfg(test)]
// #[cfg(test)] mod tests {
// mod tests { use super::*;
// use super::*; use db2::open_test_db;
// use db::open_test_db; use gpui;
// #[gpui::test] #[gpui::test]
// async fn test_next_id_stability() { async fn test_next_id_stability() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_next_id_stability").await); let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
// db.write(|conn| { db.write(|conn| {
// conn.migrate( conn.migrate(
// "test_table", "test_table",
// &[sql!( &[sql!(
// CREATE TABLE test_table( CREATE TABLE test_table(
// text TEXT, text TEXT,
// workspace_id INTEGER, workspace_id INTEGER,
// FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
// ON DELETE CASCADE ON DELETE CASCADE
// ) STRICT; ) STRICT;
// )], )],
// ) )
// .unwrap(); .unwrap();
// }) })
// .await; .await;
// let id = db.next_id().await.unwrap(); let id = db.next_id().await.unwrap();
// // Assert the empty row got inserted // Assert the empty row got inserted
// assert_eq!( assert_eq!(
// Some(id), Some(id),
// db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!( db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
// SELECT workspace_id FROM workspaces WHERE workspace_id = ? SELECT workspace_id FROM workspaces WHERE workspace_id = ?
// )) ))
// .unwrap()(id) .unwrap()(id)
// .unwrap() .unwrap()
// ); );
// db.write(move |conn| { db.write(move |conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-1", id)) .unwrap()(("test-text-1", id))
// .unwrap() .unwrap()
// }) })
// .await; .await;
// let test_text_1 = db let test_text_1 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(1) .unwrap()(1)
// .unwrap() .unwrap()
// .unwrap(); .unwrap();
// assert_eq!(test_text_1, "test-text-1"); assert_eq!(test_text_1, "test-text-1");
// } }
// #[gpui::test] #[gpui::test]
// async fn test_workspace_id_stability() { async fn test_workspace_id_stability() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await); let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
// db.write(|conn| { db.write(|conn| {
// conn.migrate( conn.migrate(
// "test_table", "test_table",
// &[sql!( &[sql!(
// CREATE TABLE test_table( CREATE TABLE test_table(
// text TEXT, text TEXT,
// workspace_id INTEGER, workspace_id INTEGER,
// FOREIGN KEY(workspace_id) FOREIGN KEY(workspace_id)
// REFERENCES workspaces(workspace_id) REFERENCES workspaces(workspace_id)
// ON DELETE CASCADE ON DELETE CASCADE
// ) STRICT;)], ) STRICT;)],
// ) )
// }) })
// .await .await
// .unwrap(); .unwrap();
// let mut workspace_1 = SerializedWorkspace { let mut workspace_1 = SerializedWorkspace {
// id: 1, id: 1,
// location: (["/tmp", "/tmp2"]).into(), location: (["/tmp", "/tmp2"]).into(),
// center_group: Default::default(), center_group: Default::default(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// let workspace_2 = SerializedWorkspace { let workspace_2 = SerializedWorkspace {
// id: 2, id: 2,
// location: (["/tmp"]).into(), location: (["/tmp"]).into(),
// center_group: Default::default(), center_group: Default::default(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// db.save_workspace(workspace_1.clone()).await; db.save_workspace(workspace_1.clone()).await;
// db.write(|conn| { db.write(|conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-1", 1)) .unwrap()(("test-text-1", 1))
// .unwrap(); .unwrap();
// }) })
// .await; .await;
// db.save_workspace(workspace_2.clone()).await; db.save_workspace(workspace_2.clone()).await;
// db.write(|conn| { db.write(|conn| {
// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
// .unwrap()(("test-text-2", 2)) .unwrap()(("test-text-2", 2))
// .unwrap(); .unwrap();
// }) })
// .await; .await;
// workspace_1.location = (["/tmp", "/tmp3"]).into(); workspace_1.location = (["/tmp", "/tmp3"]).into();
// db.save_workspace(workspace_1.clone()).await; db.save_workspace(workspace_1.clone()).await;
// db.save_workspace(workspace_1).await; db.save_workspace(workspace_1).await;
// db.save_workspace(workspace_2).await; db.save_workspace(workspace_2).await;
// let test_text_2 = db let test_text_2 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(2) .unwrap()(2)
// .unwrap() .unwrap()
// .unwrap(); .unwrap();
// assert_eq!(test_text_2, "test-text-2"); assert_eq!(test_text_2, "test-text-2");
// let test_text_1 = db let test_text_1 = db
// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
// .unwrap()(1) .unwrap()(1)
// .unwrap() .unwrap()
// .unwrap(); .unwrap();
// assert_eq!(test_text_1, "test-text-1"); assert_eq!(test_text_1, "test-text-1");
// } }
// fn group(axis: gpui::Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup { fn group(axis: Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
// SerializedPaneGroup::Group { SerializedPaneGroup::Group {
// axis, axis,
// flexes: None, flexes: None,
// children, children,
// } }
// } }
// #[gpui::test] #[gpui::test]
// async fn test_full_workspace_serialization() { async fn test_full_workspace_serialization() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await); let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
// // ----------------- // -----------------
// // | 1,2 | 5,6 | // | 1,2 | 5,6 |
// // | - - - | | // | - - - | |
// // | 3,4 | | // | 3,4 | |
// // ----------------- // -----------------
// let center_group = group( let center_group = group(
// gpui::Axis::Horizontal, Axis::Horizontal,
// vec![ vec![
// group( group(
// gpui::Axis::Vertical, Axis::Vertical,
// vec![ vec![
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 5, false), SerializedItem::new("Terminal", 5, false),
// SerializedItem::new("Terminal", 6, true), SerializedItem::new("Terminal", 6, true),
// ], ],
// false, false,
// )), )),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 7, true), SerializedItem::new("Terminal", 7, true),
// SerializedItem::new("Terminal", 8, false), SerializedItem::new("Terminal", 8, false),
// ], ],
// false, false,
// )), )),
// ], ],
// ), ),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 9, false), SerializedItem::new("Terminal", 9, false),
// SerializedItem::new("Terminal", 10, true), SerializedItem::new("Terminal", 10, true),
// ], ],
// false, false,
// )), )),
// ], ],
// ); );
// let workspace = SerializedWorkspace { let workspace = SerializedWorkspace {
// id: 5, id: 5,
// location: (["/tmp", "/tmp2"]).into(), location: (["/tmp", "/tmp2"]).into(),
// center_group, center_group,
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]); let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
// assert_eq!(workspace, round_trip_workspace.unwrap()); assert_eq!(workspace, round_trip_workspace.unwrap());
// // Test guaranteed duplicate IDs // Test guaranteed duplicate IDs
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]); let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
// assert_eq!(workspace, round_trip_workspace.unwrap()); assert_eq!(workspace, round_trip_workspace.unwrap());
// } }
// #[gpui::test] #[gpui::test]
// async fn test_workspace_assignment() { async fn test_workspace_assignment() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_basic_functionality").await); let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
// let workspace_1 = SerializedWorkspace { let workspace_1 = SerializedWorkspace {
// id: 1, id: 1,
// location: (["/tmp", "/tmp2"]).into(), location: (["/tmp", "/tmp2"]).into(),
// center_group: Default::default(), center_group: Default::default(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// let mut workspace_2 = SerializedWorkspace { let mut workspace_2 = SerializedWorkspace {
// id: 2, id: 2,
// location: (["/tmp"]).into(), location: (["/tmp"]).into(),
// center_group: Default::default(), center_group: Default::default(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// db.save_workspace(workspace_1.clone()).await; db.save_workspace(workspace_1.clone()).await;
// db.save_workspace(workspace_2.clone()).await; db.save_workspace(workspace_2.clone()).await;
// // Test that paths are treated as a set // Test that paths are treated as a set
// assert_eq!( assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_1 workspace_1
// ); );
// assert_eq!( assert_eq!(
// db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(), db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
// workspace_1 workspace_1
// ); );
// // Make sure that other keys work // Make sure that other keys work
// assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2); assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
// assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None); assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
// // Test 'mutate' case of updating a pre-existing id // Test 'mutate' case of updating a pre-existing id
// workspace_2.location = (["/tmp", "/tmp2"]).into(); workspace_2.location = (["/tmp", "/tmp2"]).into();
// db.save_workspace(workspace_2.clone()).await; db.save_workspace(workspace_2.clone()).await;
// assert_eq!( assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_2 workspace_2
// ); );
// // Test other mechanism for mutating // Test other mechanism for mutating
// let mut workspace_3 = SerializedWorkspace { let mut workspace_3 = SerializedWorkspace {
// id: 3, id: 3,
// location: (&["/tmp", "/tmp2"]).into(), location: (&["/tmp", "/tmp2"]).into(),
// center_group: Default::default(), center_group: Default::default(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// }; };
// db.save_workspace(workspace_3.clone()).await; db.save_workspace(workspace_3.clone()).await;
// assert_eq!( assert_eq!(
// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
// workspace_3 workspace_3
// ); );
// // Make sure that updating paths differently also works // Make sure that updating paths differently also works
// workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into(); workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
// db.save_workspace(workspace_3.clone()).await; db.save_workspace(workspace_3.clone()).await;
// assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None); assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
// assert_eq!( assert_eq!(
// db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"]) db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
// .unwrap(), .unwrap(),
// workspace_3 workspace_3
// ); );
// } }
// use crate::persistence::model::SerializedWorkspace; use crate::persistence::model::SerializedWorkspace;
// use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup}; use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
// fn default_workspace<P: AsRef<Path>>( fn default_workspace<P: AsRef<Path>>(
// workspace_id: &[P], workspace_id: &[P],
// center_group: &SerializedPaneGroup, center_group: &SerializedPaneGroup,
// ) -> SerializedWorkspace { ) -> SerializedWorkspace {
// SerializedWorkspace { SerializedWorkspace {
// id: 4, id: 4,
// location: workspace_id.into(), location: workspace_id.into(),
// center_group: center_group.clone(), center_group: center_group.clone(),
// bounds: Default::default(), bounds: Default::default(),
// display: Default::default(), display: Default::default(),
// docks: Default::default(), docks: Default::default(),
// } }
// } }
// #[gpui::test] #[gpui::test]
// async fn test_simple_split() { async fn test_simple_split() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("simple_split").await); let db = WorkspaceDb(open_test_db("simple_split").await);
// // ----------------- // -----------------
// // | 1,2 | 5,6 | // | 1,2 | 5,6 |
// // | - - - | | // | - - - | |
// // | 3,4 | | // | 3,4 | |
// // ----------------- // -----------------
// let center_pane = group( let center_pane = group(
// gpui::Axis::Horizontal, Axis::Horizontal,
// vec![ vec![
// group( group(
// gpui::Axis::Vertical, Axis::Vertical,
// vec![ vec![
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true), SerializedItem::new("Terminal", 2, true),
// ], ],
// false, false,
// )), )),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 4, false), SerializedItem::new("Terminal", 4, false),
// SerializedItem::new("Terminal", 3, true), SerializedItem::new("Terminal", 3, true),
// ], ],
// true, true,
// )), )),
// ], ],
// ), ),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 5, true), SerializedItem::new("Terminal", 5, true),
// SerializedItem::new("Terminal", 6, false), SerializedItem::new("Terminal", 6, false),
// ], ],
// false, false,
// )), )),
// ], ],
// ); );
// let workspace = default_workspace(&["/tmp"], &center_pane); let workspace = default_workspace(&["/tmp"], &center_pane);
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap(); let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
// assert_eq!(workspace.center_group, new_workspace.center_group); assert_eq!(workspace.center_group, new_workspace.center_group);
// } }
// #[gpui::test] #[gpui::test]
// async fn test_cleanup_panes() { async fn test_cleanup_panes() {
// env_logger::try_init().ok(); env_logger::try_init().ok();
// let db = WorkspaceDb(open_test_db("test_cleanup_panes").await); let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
// let center_pane = group( let center_pane = group(
// gpui::Axis::Horizontal, Axis::Horizontal,
// vec![ vec![
// group( group(
// gpui::Axis::Vertical, Axis::Vertical,
// vec![ vec![
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true), SerializedItem::new("Terminal", 2, true),
// ], ],
// false, false,
// )), )),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 4, false), SerializedItem::new("Terminal", 4, false),
// SerializedItem::new("Terminal", 3, true), SerializedItem::new("Terminal", 3, true),
// ], ],
// true, true,
// )), )),
// ], ],
// ), ),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 5, false), SerializedItem::new("Terminal", 5, false),
// SerializedItem::new("Terminal", 6, true), SerializedItem::new("Terminal", 6, true),
// ], ],
// false, false,
// )), )),
// ], ],
// ); );
// let id = &["/tmp"]; let id = &["/tmp"];
// let mut workspace = default_workspace(id, &center_pane); let mut workspace = default_workspace(id, &center_pane);
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// workspace.center_group = group( workspace.center_group = group(
// gpui::Axis::Vertical, Axis::Vertical,
// vec![ vec![
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 1, false), SerializedItem::new("Terminal", 1, false),
// SerializedItem::new("Terminal", 2, true), SerializedItem::new("Terminal", 2, true),
// ], ],
// false, false,
// )), )),
// SerializedPaneGroup::Pane(SerializedPane::new( SerializedPaneGroup::Pane(SerializedPane::new(
// vec![ vec![
// SerializedItem::new("Terminal", 4, true), SerializedItem::new("Terminal", 4, true),
// SerializedItem::new("Terminal", 3, false), SerializedItem::new("Terminal", 3, false),
// ], ],
// true, true,
// )), )),
// ], ],
// ); );
// db.save_workspace(workspace.clone()).await; db.save_workspace(workspace.clone()).await;
// let new_workspace = db.workspace_for_roots(id).unwrap(); let new_workspace = db.workspace_for_roots(id).unwrap();
// assert_eq!(workspace.center_group, new_workspace.center_group); assert_eq!(workspace.center_group, new_workspace.center_group);
// } }
// } }

View file

@ -7,7 +7,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount}, bindable::{Bind, Column, StaticColumnCount},
statement::Statement, statement::Statement,
}; };
use gpui2::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds}; use gpui::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds};
use project2::Project; use project2::Project;
use std::{ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
@ -55,7 +55,7 @@ impl Column for WorkspaceLocation {
} }
} }
#[derive(PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct SerializedWorkspace { pub struct SerializedWorkspace {
pub id: WorkspaceId, pub id: WorkspaceId,
pub location: WorkspaceLocation, pub location: WorkspaceLocation,
@ -127,7 +127,7 @@ impl Bind for DockData {
} }
} }
#[derive(PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum SerializedPaneGroup { pub enum SerializedPaneGroup {
Group { Group {
axis: Axis, axis: Axis,
@ -286,15 +286,15 @@ pub struct SerializedItem {
pub active: bool, pub active: bool,
} }
// impl SerializedItem { impl SerializedItem {
// pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self { pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
// Self { Self {
// kind: Arc::from(kind.as_ref()), kind: Arc::from(kind.as_ref()),
// item_id, item_id,
// active, active,
// } }
// } }
// } }
#[cfg(test)] #[cfg(test)]
impl Default for SerializedItem { impl Default for SerializedItem {

View file

@ -1,6 +1,6 @@
use std::{any::Any, sync::Arc}; use std::{any::Any, sync::Arc};
use gpui2::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext}; use gpui::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext};
use project2::search::SearchQuery; use project2::search::SearchQuery;
use crate::{ use crate::{

View file

@ -1,7 +1,7 @@
use std::any::TypeId; use std::any::TypeId;
use crate::{ItemHandle, Pane}; use crate::{ItemHandle, Pane};
use gpui2::{ use gpui::{
div, AnyView, Component, Div, ParentElement, Render, Styled, Subscription, View, ViewContext, div, AnyView, Component, Div, ParentElement, Render, Styled, Subscription, View, ViewContext,
WindowContext, WindowContext,
}; };

View file

@ -1,5 +1,5 @@
use crate::ItemHandle; use crate::ItemHandle;
use gpui2::{ use gpui::{
AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext, WindowContext, AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext, WindowContext,
}; };

View file

@ -8,6 +8,7 @@ pub mod pane;
pub mod pane_group; pub mod pane_group;
mod persistence; mod persistence;
pub mod searchable; pub mod searchable;
// todo!()
// pub mod shared_screen; // pub mod shared_screen;
mod status_bar; mod status_bar;
mod toolbar; mod toolbar;
@ -23,14 +24,14 @@ use client2::{
proto::{self, PeerId}, proto::{self, PeerId},
Client, TypedEnvelope, UserStore, Client, TypedEnvelope, UserStore,
}; };
use collections::{HashMap, HashSet}; use collections::{hash_map, HashMap, HashSet};
use dock::{Dock, DockPosition, PanelButtons}; use dock::{Dock, DockPosition, PanelButtons};
use futures::{ use futures::{
channel::{mpsc, oneshot}, channel::{mpsc, oneshot},
future::try_join_all, future::try_join_all,
Future, FutureExt, StreamExt, Future, FutureExt, StreamExt,
}; };
use gpui2::{ use gpui::{
div, point, size, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, div, point, size, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext,
AsyncWindowContext, Bounds, Component, Div, EntityId, EventEmitter, GlobalPixels, Model, AsyncWindowContext, Bounds, Component, Div, EntityId, EventEmitter, GlobalPixels, Model,
ModelContext, ParentElement, Point, Render, Size, StatefulInteractive, Styled, Subscription, ModelContext, ParentElement, Point, Render, Size, StatefulInteractive, Styled, Subscription,
@ -38,6 +39,7 @@ use gpui2::{
WindowOptions, WindowOptions,
}; };
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem};
use itertools::Itertools;
use language2::LanguageRegistry; use language2::LanguageRegistry;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
@ -174,42 +176,42 @@ pub struct Toast {
on_click: Option<(Cow<'static, str>, Arc<dyn Fn(&mut WindowContext)>)>, on_click: Option<(Cow<'static, str>, Arc<dyn Fn(&mut WindowContext)>)>,
} }
// impl Toast { impl Toast {
// pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self { pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self {
// Toast { Toast {
// id, id,
// msg: msg.into(), msg: msg.into(),
// on_click: None, on_click: None,
// } }
// } }
// pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self
// where where
// M: Into<Cow<'static, str>>, M: Into<Cow<'static, str>>,
// F: Fn(&mut WindowContext) + 'static, F: Fn(&mut WindowContext) + 'static,
// { {
// self.on_click = Some((message.into(), Arc::new(on_click))); self.on_click = Some((message.into(), Arc::new(on_click)));
// self self
// } }
// } }
// impl PartialEq for Toast { impl PartialEq for Toast {
// fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
// self.id == other.id self.id == other.id
// && self.msg == other.msg && self.msg == other.msg
// && self.on_click.is_some() == other.on_click.is_some() && self.on_click.is_some() == other.on_click.is_some()
// } }
// } }
// impl Clone for Toast { impl Clone for Toast {
// fn clone(&self) -> Self { fn clone(&self) -> Self {
// Toast { Toast {
// id: self.id, id: self.id,
// msg: self.msg.to_owned(), msg: self.msg.to_owned(),
// on_click: self.on_click.clone(), on_click: self.on_click.clone(),
// } }
// } }
// } }
// #[derive(Clone, Deserialize, PartialEq)] // #[derive(Clone, Deserialize, PartialEq)]
// pub struct OpenTerminal { // pub struct OpenTerminal {
@ -460,7 +462,7 @@ struct Follower {
impl AppState { impl AppState {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut AppContext) -> Arc<Self> { pub fn test(cx: &mut AppContext) -> Arc<Self> {
use gpui2::Context; use gpui::Context;
use node_runtime::FakeNodeRuntime; use node_runtime::FakeNodeRuntime;
use settings2::SettingsStore; use settings2::SettingsStore;
@ -476,8 +478,7 @@ impl AppState {
let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx)); let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
let workspace_store = cx.build_model(|cx| WorkspaceStore::new(client.clone(), cx)); let workspace_store = cx.build_model(|cx| WorkspaceStore::new(client.clone(), cx));
// todo!() theme2::init(cx);
// theme::init((), cx);
client2::init(&client, cx); client2::init(&client, cx);
crate::init_settings(cx); crate::init_settings(cx);
@ -549,7 +550,7 @@ pub struct Workspace {
weak_self: WeakView<Self>, weak_self: WeakView<Self>,
// modal: Option<ActiveModal>, // modal: Option<ActiveModal>,
zoomed: Option<AnyWeakView>, zoomed: Option<AnyWeakView>,
// zoomed_position: Option<DockPosition>, zoomed_position: Option<DockPosition>,
center: PaneGroup, center: PaneGroup,
left_dock: View<Dock>, left_dock: View<Dock>,
bottom_dock: View<Dock>, bottom_dock: View<Dock>,
@ -626,7 +627,7 @@ impl Workspace {
} }
project2::Event::Closed => { project2::Event::Closed => {
// cx.remove_window(); cx.remove_window();
} }
project2::Event::DeletedEntry(entry_id) => { project2::Event::DeletedEntry(entry_id) => {
@ -768,7 +769,7 @@ impl Workspace {
weak_self: weak_handle.clone(), weak_self: weak_handle.clone(),
// modal: None, // modal: None,
zoomed: None, zoomed: None,
// zoomed_position: None, zoomed_position: None,
center: PaneGroup::new(center_pane.clone()), center: PaneGroup::new(center_pane.clone()),
panes: vec![center_pane.clone()], panes: vec![center_pane.clone()],
panes_by_item: Default::default(), panes_by_item: Default::default(),
@ -1059,183 +1060,185 @@ impl Workspace {
&self.project &self.project
} }
// pub fn recent_navigation_history( pub fn recent_navigation_history(
// &self, &self,
// limit: Option<usize>, limit: Option<usize>,
// cx: &AppContext, cx: &AppContext,
// ) -> Vec<(ProjectPath, Option<PathBuf>)> { ) -> Vec<(ProjectPath, Option<PathBuf>)> {
// let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default(); let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
// let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default(); let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
// for pane in &self.panes { for pane in &self.panes {
// let pane = pane.read(cx); let pane = pane.read(cx);
// pane.nav_history() pane.nav_history()
// .for_each_entry(cx, |entry, (project_path, fs_path)| { .for_each_entry(cx, |entry, (project_path, fs_path)| {
// if let Some(fs_path) = &fs_path { if let Some(fs_path) = &fs_path {
// abs_paths_opened abs_paths_opened
// .entry(fs_path.clone()) .entry(fs_path.clone())
// .or_default() .or_default()
// .insert(project_path.clone()); .insert(project_path.clone());
// } }
// let timestamp = entry.timestamp; let timestamp = entry.timestamp;
// match history.entry(project_path) { match history.entry(project_path) {
// hash_map::Entry::Occupied(mut entry) => { hash_map::Entry::Occupied(mut entry) => {
// let (_, old_timestamp) = entry.get(); let (_, old_timestamp) = entry.get();
// if &timestamp > old_timestamp { if &timestamp > old_timestamp {
// entry.insert((fs_path, timestamp)); entry.insert((fs_path, timestamp));
// } }
// } }
// hash_map::Entry::Vacant(entry) => { hash_map::Entry::Vacant(entry) => {
// entry.insert((fs_path, timestamp)); entry.insert((fs_path, timestamp));
// } }
// } }
// }); });
// } }
// history history
// .into_iter() .into_iter()
// .sorted_by_key(|(_, (_, timestamp))| *timestamp) .sorted_by_key(|(_, (_, timestamp))| *timestamp)
// .map(|(project_path, (fs_path, _))| (project_path, fs_path)) .map(|(project_path, (fs_path, _))| (project_path, fs_path))
// .rev() .rev()
// .filter(|(history_path, abs_path)| { .filter(|(history_path, abs_path)| {
// let latest_project_path_opened = abs_path let latest_project_path_opened = abs_path
// .as_ref() .as_ref()
// .and_then(|abs_path| abs_paths_opened.get(abs_path)) .and_then(|abs_path| abs_paths_opened.get(abs_path))
// .and_then(|project_paths| { .and_then(|project_paths| {
// project_paths project_paths
// .iter() .iter()
// .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id)) .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
// }); });
// match latest_project_path_opened { match latest_project_path_opened {
// Some(latest_project_path_opened) => latest_project_path_opened == history_path, Some(latest_project_path_opened) => latest_project_path_opened == history_path,
// None => true, None => true,
// } }
// }) })
// .take(limit.unwrap_or(usize::MAX)) .take(limit.unwrap_or(usize::MAX))
// .collect() .collect()
// } }
// fn navigate_history( fn navigate_history(
// &mut self, &mut self,
// pane: WeakView<Pane>, pane: WeakView<Pane>,
// mode: NavigationMode, mode: NavigationMode,
// cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> { ) -> Task<Result<()>> {
// let to_load = if let Some(pane) = pane.upgrade(cx) { let to_load = if let Some(pane) = pane.upgrade() {
// cx.focus(&pane); // todo!("focus")
// cx.focus(&pane);
// pane.update(cx, |pane, cx| { pane.update(cx, |pane, cx| {
// loop { loop {
// // Retrieve the weak item handle from the history. // Retrieve the weak item handle from the history.
// let entry = pane.nav_history_mut().pop(mode, cx)?; let entry = pane.nav_history_mut().pop(mode, cx)?;
// // If the item is still present in this pane, then activate it. // If the item is still present in this pane, then activate it.
// if let Some(index) = entry if let Some(index) = entry
// .item .item
// .upgrade(cx) .upgrade()
// .and_then(|v| pane.index_for_item(v.as_ref())) .and_then(|v| pane.index_for_item(v.as_ref()))
// { {
// let prev_active_item_index = pane.active_item_index(); let prev_active_item_index = pane.active_item_index();
// pane.nav_history_mut().set_mode(mode); pane.nav_history_mut().set_mode(mode);
// pane.activate_item(index, true, true, cx); pane.activate_item(index, true, true, cx);
// pane.nav_history_mut().set_mode(NavigationMode::Normal); pane.nav_history_mut().set_mode(NavigationMode::Normal);
// let mut navigated = prev_active_item_index != pane.active_item_index(); let mut navigated = prev_active_item_index != pane.active_item_index();
// if let Some(data) = entry.data { if let Some(data) = entry.data {
// navigated |= pane.active_item()?.navigate(data, cx); navigated |= pane.active_item()?.navigate(data, cx);
// } }
// if navigated { if navigated {
// break None; break None;
// } }
// } }
// // If the item is no longer present in this pane, then retrieve its // If the item is no longer present in this pane, then retrieve its
// // project path in order to reopen it. // project path in order to reopen it.
// else { else {
// break pane break pane
// .nav_history() .nav_history()
// .path_for_item(entry.item.id()) .path_for_item(entry.item.id())
// .map(|(project_path, _)| (project_path, entry)); .map(|(project_path, _)| (project_path, entry));
// } }
// } }
// }) })
// } else { } else {
// None None
// }; };
// if let Some((project_path, entry)) = to_load { if let Some((project_path, entry)) = to_load {
// // If the item was no longer present, then load it again from its previous path. // If the item was no longer present, then load it again from its previous path.
// let task = self.load_path(project_path, cx); let task = self.load_path(project_path, cx);
// cx.spawn(|workspace, mut cx| async move { cx.spawn(|workspace, mut cx| async move {
// let task = task.await; let task = task.await;
// let mut navigated = false; let mut navigated = false;
// if let Some((project_entry_id, build_item)) = task.log_err() { if let Some((project_entry_id, build_item)) = task.log_err() {
// let prev_active_item_id = pane.update(&mut cx, |pane, _| { let prev_active_item_id = pane.update(&mut cx, |pane, _| {
// pane.nav_history_mut().set_mode(mode); pane.nav_history_mut().set_mode(mode);
// pane.active_item().map(|p| p.id()) pane.active_item().map(|p| p.id())
// })?; })?;
// pane.update(&mut cx, |pane, cx| { pane.update(&mut cx, |pane, cx| {
// let item = pane.open_item(project_entry_id, true, cx, build_item); let item = pane.open_item(project_entry_id, true, cx, build_item);
// navigated |= Some(item.id()) != prev_active_item_id; navigated |= Some(item.id()) != prev_active_item_id;
// pane.nav_history_mut().set_mode(NavigationMode::Normal); pane.nav_history_mut().set_mode(NavigationMode::Normal);
// if let Some(data) = entry.data { if let Some(data) = entry.data {
// navigated |= item.navigate(data, cx); navigated |= item.navigate(data, cx);
// } }
// })?; })?;
// } }
// if !navigated { if !navigated {
// workspace workspace
// .update(&mut cx, |workspace, cx| { .update(&mut cx, |workspace, cx| {
// Self::navigate_history(workspace, pane, mode, cx) Self::navigate_history(workspace, pane, mode, cx)
// })? })?
// .await?; .await?;
// } }
// Ok(()) Ok(())
// }) })
// } else { } else {
// Task::ready(Ok(())) Task::ready(Ok(()))
// } }
// } }
// pub fn go_back( pub fn go_back(
// &mut self, &mut self,
// pane: WeakView<Pane>, pane: WeakView<Pane>,
// cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> { ) -> Task<Result<()>> {
// self.navigate_history(pane, NavigationMode::GoingBack, cx) self.navigate_history(pane, NavigationMode::GoingBack, cx)
// } }
// pub fn go_forward( pub fn go_forward(
// &mut self, &mut self,
// pane: WeakView<Pane>, pane: WeakView<Pane>,
// cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
// ) -> Task<Result<()>> { ) -> Task<Result<()>> {
// self.navigate_history(pane, NavigationMode::GoingForward, cx) self.navigate_history(pane, NavigationMode::GoingForward, cx)
// } }
// pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> { pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
// self.navigate_history( self.navigate_history(
// self.active_pane().downgrade(), self.active_pane().downgrade(),
// NavigationMode::ReopeningClosedItem, NavigationMode::ReopeningClosedItem,
// cx, cx,
// ) )
// } }
// pub fn client(&self) -> &Client { pub fn client(&self) -> &Client {
// &self.app_state.client &self.app_state.client
// } }
// pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) { // todo!()
// self.titlebar_item = Some(item); // pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) {
// cx.notify(); // self.titlebar_item = Some(item);
// } // cx.notify();
// }
// pub fn titlebar_item(&self) -> Option<AnyViewHandle> { // pub fn titlebar_item(&self) -> Option<AnyViewHandle> {
// self.titlebar_item.clone() // self.titlebar_item.clone()
// } // }
// /// Call the given callback with a workspace whose project is local. // /// Call the given callback with a workspace whose project is local.
// /// // ///
@ -1261,32 +1264,29 @@ impl Workspace {
// } // }
// } // }
// pub fn worktrees<'a>( pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Model<Worktree>> {
// &self, self.project.read(cx).worktrees()
// cx: &'a AppContext, }
// ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
// self.project.read(cx).worktrees(cx)
// }
// pub fn visible_worktrees<'a>( pub fn visible_worktrees<'a>(
// &self, &self,
// cx: &'a AppContext, cx: &'a AppContext,
// ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> { ) -> impl 'a + Iterator<Item = Model<Worktree>> {
// self.project.read(cx).visible_worktrees(cx) self.project.read(cx).visible_worktrees(cx)
// } }
// pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static { pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static {
// let futures = self let futures = self
// .worktrees(cx) .worktrees(cx)
// .filter_map(|worktree| worktree.read(cx).as_local()) .filter_map(|worktree| worktree.read(cx).as_local())
// .map(|worktree| worktree.scan_complete()) .map(|worktree| worktree.scan_complete())
// .collect::<Vec<_>>(); .collect::<Vec<_>>();
// async move { async move {
// for future in futures { for future in futures {
// future.await; future.await;
// } }
// } }
// } }
// pub fn close_global(_: &CloseWindow, cx: &mut AppContext) { // pub fn close_global(_: &CloseWindow, cx: &mut AppContext) {
// cx.spawn(|mut cx| async move { // cx.spawn(|mut cx| async move {
@ -1699,31 +1699,31 @@ impl Workspace {
self.active_pane().read(cx).active_item() self.active_pane().read(cx).active_item()
} }
// fn active_project_path(&self, cx: &ViewContext<Self>) -> Option<ProjectPath> { fn active_project_path(&self, cx: &ViewContext<Self>) -> Option<ProjectPath> {
// self.active_item(cx).and_then(|item| item.project_path(cx)) self.active_item(cx).and_then(|item| item.project_path(cx))
// } }
// pub fn save_active_item( pub fn save_active_item(
// &mut self, &mut self,
// save_intent: SaveIntent, save_intent: SaveIntent,
// cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
// ) -> Task<Result<()>> { ) -> Task<Result<()>> {
// let project = self.project.clone(); let project = self.project.clone();
// let pane = self.active_pane(); let pane = self.active_pane();
// let item_ix = pane.read(cx).active_item_index(); let item_ix = pane.read(cx).active_item_index();
// let item = pane.read(cx).active_item(); let item = pane.read(cx).active_item();
// let pane = pane.downgrade(); let pane = pane.downgrade();
// cx.spawn(|_, mut cx| async move { cx.spawn(|_, mut cx| async move {
// if let Some(item) = item { if let Some(item) = item {
// Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx) Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx)
// .await .await
// .map(|_| ()) .map(|_| ())
// } else { } else {
// Ok(()) Ok(())
// } }
// }) })
// } }
// pub fn close_inactive_items_and_panes( // pub fn close_inactive_items_and_panes(
// &mut self, // &mut self,
@ -1825,19 +1825,20 @@ impl Workspace {
// self.serialize_workspace(cx); // self.serialize_workspace(cx);
// } // }
// pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) { pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) {
// let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock]; let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock];
// for dock in docks { for dock in docks {
// dock.update(cx, |dock, cx| { dock.update(cx, |dock, cx| {
// dock.set_open(false, cx); dock.set_open(false, cx);
// }); });
// } }
// cx.focus_self(); // todo!("focus")
// cx.notify(); // cx.focus_self();
// self.serialize_workspace(cx); cx.notify();
// } self.serialize_workspace(cx);
}
// /// Transfer focus to the panel of the given type. // /// Transfer focus to the panel of the given type.
// pub fn focus_panel<T: Panel>(&mut self, cx: &mut ViewContext<Self>) -> Option<View<T>> { // pub fn focus_panel<T: Panel>(&mut self, cx: &mut ViewContext<Self>) -> Option<View<T>> {
@ -1904,19 +1905,19 @@ impl Workspace {
// None // None
// } // }
// fn zoom_out(&mut self, cx: &mut ViewContext<Self>) { fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
// for pane in &self.panes { for pane in &self.panes {
// pane.update(cx, |pane, cx| pane.set_zoomed(false, cx)); pane.update(cx, |pane, cx| pane.set_zoomed(false, cx));
// } }
// self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx)); self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx)); self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx)); self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx));
// self.zoomed = None; self.zoomed = None;
// self.zoomed_position = None; self.zoomed_position = None;
// cx.notify(); cx.notify();
// } }
// #[cfg(any(test, feature = "test-support"))] // #[cfg(any(test, feature = "test-support"))]
// pub fn zoomed_view(&self, cx: &AppContext) -> Option<AnyViewHandle> { // pub fn zoomed_view(&self, cx: &AppContext) -> Option<AnyViewHandle> {
@ -1962,22 +1963,21 @@ impl Workspace {
// cx.notify(); // cx.notify();
// } // }
fn add_pane(&mut self, _cx: &mut ViewContext<Self>) -> View<Pane> { fn add_pane(&mut self, cx: &mut ViewContext<Self>) -> View<Pane> {
todo!() let pane = cx.build_view(|cx| {
// let pane = cx.build_view(|cx| { Pane::new(
// Pane::new( self.weak_handle(),
// self.weak_handle(), self.project.clone(),
// self.project.clone(), self.pane_history_timestamp.clone(),
// self.pane_history_timestamp.clone(), cx,
// cx, )
// ) });
// }); cx.subscribe(&pane, Self::handle_pane_event).detach();
// cx.subscribe(&pane, Self::handle_pane_event).detach(); self.panes.push(pane.clone());
// self.panes.push(pane.clone());
// todo!() // todo!()
// cx.focus(&pane); // cx.focus(&pane);
// cx.emit(Event::PaneAdded(pane.clone())); cx.emit(Event::PaneAdded(pane.clone()));
// pane pane
} }
// pub fn add_item_to_center( // pub fn add_item_to_center(
@ -3122,6 +3122,7 @@ impl Workspace {
None None
} }
// todo!()
// fn shared_screen_for_peer( // fn shared_screen_for_peer(
// &self, // &self,
// peer_id: PeerId, // peer_id: PeerId,
@ -3498,6 +3499,7 @@ impl Workspace {
}) })
} }
// todo!()
// #[cfg(any(test, feature = "test-support"))] // #[cfg(any(test, feature = "test-support"))]
// pub fn test_new(project: ModelHandle<Project>, cx: &mut ViewContext<Self>) -> Self { // pub fn test_new(project: ModelHandle<Project>, cx: &mut ViewContext<Self>) -> Self {
// use node_runtime::FakeNodeRuntime; // use node_runtime::FakeNodeRuntime;
@ -3658,6 +3660,7 @@ fn open_items(
}) })
} }
// todo!()
// fn notify_of_new_dock(workspace: &WeakView<Workspace>, cx: &mut AsyncAppContext) { // fn notify_of_new_dock(workspace: &WeakView<Workspace>, cx: &mut AsyncAppContext) {
// const NEW_PANEL_BLOG_POST: &str = "https://zed.dev/blog/new-panel-system"; // const NEW_PANEL_BLOG_POST: &str = "https://zed.dev/blog/new-panel-system";
// const NEW_DOCK_HINT_KEY: &str = "show_new_dock_key"; // const NEW_DOCK_HINT_KEY: &str = "show_new_dock_key";
@ -3738,23 +3741,22 @@ fn open_items(
// }) // })
// .ok(); // .ok();
fn notify_if_database_failed(_workspace: WindowHandle<Workspace>, _cx: &mut AsyncAppContext) { fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncAppContext) {
const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml"; const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
// todo!() workspace
// workspace .update(cx, |workspace, cx| {
// .update(cx, |workspace, cx| { if (*db2::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
// if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) { workspace.show_notification_once(0, cx, |cx| {
// workspace.show_notification_once(0, cx, |cx| { cx.build_view(|_| {
// cx.build_view(|_| { MessageNotification::new("Failed to load the database file.")
// MessageNotification::new("Failed to load the database file.") .with_click_message("Click to let us know about this error")
// .with_click_message("Click to let us know about this error") .on_click(|cx| cx.open_url(REPORT_ISSUE_URL))
// .on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL)) })
// }) });
// }); }
// } })
// }) .log_err();
// .log_err();
} }
impl EventEmitter for Workspace { impl EventEmitter for Workspace {
@ -4176,36 +4178,32 @@ impl WorkspaceStore {
} }
async fn handle_update_followers( async fn handle_update_followers(
_this: Model<Self>, this: Model<Self>,
_envelope: TypedEnvelope<proto::UpdateFollowers>, envelope: TypedEnvelope<proto::UpdateFollowers>,
_: Arc<Client>, _: Arc<Client>,
mut _cx: AsyncWindowContext, mut cx: AsyncWindowContext,
) -> Result<()> { ) -> Result<()> {
// let leader_id = envelope.original_sender_id()?; let leader_id = envelope.original_sender_id()?;
// let update = envelope.payload; let update = envelope.payload;
// this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
// for workspace in &this.workspaces { for workspace in &this.workspaces {
// let Some(workspace) = workspace.upgrade() else { workspace.update(cx, |workspace, cx| {
// continue; let project_id = workspace.project.read(cx).remote_id();
// }; if update.project_id != project_id && update.project_id.is_some() {
// workspace.update(cx, |workspace, cx| { return;
// let project_id = workspace.project.read(cx).remote_id(); }
// if update.project_id != project_id && update.project_id.is_some() { workspace.handle_update_followers(leader_id, update.clone(), cx);
// return; })?;
// } }
// workspace.handle_update_followers(leader_id, update.clone(), cx); Ok(())
// }); })?
// }
// Ok(())
// })?
todo!()
} }
} }
// impl Entity for WorkspaceStore { impl EventEmitter for WorkspaceStore {
// type Event = (); type Event = ();
// } }
impl ViewId { impl ViewId {
pub(crate) fn from_proto(message: proto::ViewId) -> Result<Self> { pub(crate) fn from_proto(message: proto::ViewId) -> Result<Self> {

View file

@ -49,7 +49,7 @@ impl Settings for WorkspaceSettings {
fn load( fn load(
default_value: &Self::FileContent, default_value: &Self::FileContent,
user_values: &[&Self::FileContent], user_values: &[&Self::FileContent],
_: &mut gpui2::AppContext, _: &mut gpui::AppContext,
) -> anyhow::Result<Self> { ) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values) Self::load_via_json_merge(default_value, user_values)
} }

View file

@ -12,6 +12,7 @@ use cli::{
CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME, CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME,
}; };
use client::UserStore; use client::UserStore;
use collections::HashMap;
use db::kvp::KEY_VALUE_STORE; use db::kvp::KEY_VALUE_STORE;
use fs::RealFs; use fs::RealFs;
use futures::{channel::mpsc, SinkExt, StreamExt}; use futures::{channel::mpsc, SinkExt, StreamExt};
@ -42,11 +43,13 @@ use std::{
thread, thread,
time::{SystemTime, UNIX_EPOCH}, time::{SystemTime, UNIX_EPOCH},
}; };
use text::Point;
use util::{ use util::{
async_maybe, async_maybe,
channel::{parse_zed_link, ReleaseChannel, RELEASE_CHANNEL}, channel::{parse_zed_link, ReleaseChannel, RELEASE_CHANNEL},
http::{self, HttpClient}, http::{self, HttpClient},
paths, ResultExt, paths::{self, PathLikeWithPosition},
ResultExt,
}; };
use uuid::Uuid; use uuid::Uuid;
use workspace2::{AppState, WorkspaceStore}; use workspace2::{AppState, WorkspaceStore};
@ -228,10 +231,8 @@ fn main() {
let mut _triggered_authentication = false; let mut _triggered_authentication = false;
match open_rx.try_next() { match open_rx.try_next() {
Ok(Some(OpenRequest::Paths { paths: _ })) => { Ok(Some(OpenRequest::Paths { paths })) => {
// todo!("workspace") workspace2::open_paths(&paths, &app_state, None, cx).detach();
// cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx))
// .detach();
} }
Ok(Some(OpenRequest::CliConnection { connection })) => { Ok(Some(OpenRequest::CliConnection { connection })) => {
let app_state = app_state.clone(); let app_state = app_state.clone();
@ -263,10 +264,10 @@ fn main() {
async move { async move {
while let Some(request) = open_rx.next().await { while let Some(request) = open_rx.next().await {
match request { match request {
OpenRequest::Paths { paths: _ } => { OpenRequest::Paths { paths } => {
// todo!("workspace") cx.update(|cx| workspace2::open_paths(&paths, &app_state, None, cx))
// cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx)) .ok()
// .detach(); .map(|t| t.detach());
} }
OpenRequest::CliConnection { connection } => { OpenRequest::CliConnection { connection } => {
let app_state = app_state.clone(); let app_state = app_state.clone();
@ -781,45 +782,45 @@ async fn handle_cli_connection(
) { ) {
if let Some(request) = requests.next().await { if let Some(request) = requests.next().await {
match request { match request {
CliRequest::Open { paths: _, wait: _ } => { CliRequest::Open { paths, wait } => {
// let mut caret_positions = HashMap::new(); let mut caret_positions = HashMap::default();
// todo!("workspace") let paths = if paths.is_empty() {
// let paths = if paths.is_empty() { workspace2::last_opened_workspace_paths()
// workspace::last_opened_workspace_paths() .await
// .await .map(|location| location.paths().to_vec())
// .map(|location| location.paths().to_vec()) .unwrap_or_default()
// .unwrap_or_default() } else {
// } else { paths
// paths .into_iter()
// .into_iter() .filter_map(|path_with_position_string| {
// .filter_map(|path_with_position_string| { let path_with_position = PathLikeWithPosition::parse_str(
// let path_with_position = PathLikeWithPosition::parse_str( &path_with_position_string,
// &path_with_position_string, |path_str| {
// |path_str| { Ok::<_, std::convert::Infallible>(
// Ok::<_, std::convert::Infallible>( Path::new(path_str).to_path_buf(),
// Path::new(path_str).to_path_buf(), )
// ) },
// }, )
// ) .expect("Infallible");
// .expect("Infallible"); let path = path_with_position.path_like;
// let path = path_with_position.path_like; if let Some(row) = path_with_position.row {
// if let Some(row) = path_with_position.row { if path.is_file() {
// if path.is_file() { let row = row.saturating_sub(1);
// let row = row.saturating_sub(1); let col =
// let col = path_with_position.column.unwrap_or(0).saturating_sub(1);
// path_with_position.column.unwrap_or(0).saturating_sub(1); caret_positions.insert(path.clone(), Point::new(row, col));
// caret_positions.insert(path.clone(), Point::new(row, col)); }
// } }
// } Some(path)
// Some(path) })
// }) .collect()
// .collect() };
// };
// todo!("editor")
// let mut errored = false; // let mut errored = false;
// match cx // match cx
// .update(|cx| workspace::open_paths(&paths, &app_state, None, cx)) // .update(|cx| workspace2::open_paths(&paths, &app_state, None, cx))
// .await // .await
// { // {
// Ok((workspace, items)) => { // Ok((workspace, items)) => {

View file

@ -37,10 +37,9 @@ pub enum IsOnlyInstance {
} }
pub fn ensure_only_instance() -> IsOnlyInstance { pub fn ensure_only_instance() -> IsOnlyInstance {
// todo!("zed_stateless") if *db::ZED_STATELESS {
// if *db::ZED_STATELESS { return IsOnlyInstance::Yes;
// return IsOnlyInstance::Yes; }
// }
if check_got_handshake() { if check_got_handshake() {
return IsOnlyInstance::No; return IsOnlyInstance::No;

View file

@ -69,11 +69,10 @@ pub async fn handle_cli_connection(
let mut caret_positions = HashMap::default(); let mut caret_positions = HashMap::default();
let paths = if paths.is_empty() { let paths = if paths.is_empty() {
todo!() workspace2::last_opened_workspace_paths()
// workspace::last_opened_workspace_paths() .await
// .await .map(|location| location.paths().to_vec())
// .map(|location| location.paths().to_vec()) .unwrap_or_default()
// .unwrap_or_default()
} else { } else {
paths paths
.into_iter() .into_iter()
@ -115,7 +114,7 @@ pub async fn handle_cli_connection(
match item { match item {
Some(Ok(mut item)) => { Some(Ok(mut item)) => {
if let Some(point) = caret_positions.remove(path) { if let Some(point) = caret_positions.remove(path) {
todo!() todo!("editor")
// if let Some(active_editor) = item.downcast::<Editor>() { // if let Some(active_editor) = item.downcast::<Editor>() {
// active_editor // active_editor
// .downgrade() // .downgrade()
@ -260,33 +259,33 @@ pub fn initialize_workspace(
move |workspace, _, event, cx| { move |workspace, _, event, cx| {
if let workspace2::Event::PaneAdded(pane) = event { if let workspace2::Event::PaneAdded(pane) = event {
pane.update(cx, |pane, cx| { pane.update(cx, |pane, cx| {
// todo!() pane.toolbar().update(cx, |toolbar, cx| {
// pane.toolbar().update(cx, |toolbar, cx| { // todo!()
// let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace)); // let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace));
// toolbar.add_item(breadcrumbs, cx); // toolbar.add_item(breadcrumbs, cx);
// let buffer_search_bar = cx.add_view(BufferSearchBar::new); // let buffer_search_bar = cx.add_view(BufferSearchBar::new);
// toolbar.add_item(buffer_search_bar.clone(), cx); // toolbar.add_item(buffer_search_bar.clone(), cx);
// let quick_action_bar = cx.add_view(|_| { // let quick_action_bar = cx.add_view(|_| {
// QuickActionBar::new(buffer_search_bar, workspace) // QuickActionBar::new(buffer_search_bar, workspace)
// }); // });
// toolbar.add_item(quick_action_bar, cx); // toolbar.add_item(quick_action_bar, cx);
// let diagnostic_editor_controls = // let diagnostic_editor_controls =
// cx.add_view(|_| diagnostics2::ToolbarControls::new()); // cx.add_view(|_| diagnostics2::ToolbarControls::new());
// toolbar.add_item(diagnostic_editor_controls, cx); // toolbar.add_item(diagnostic_editor_controls, cx);
// let project_search_bar = cx.add_view(|_| ProjectSearchBar::new()); // let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
// toolbar.add_item(project_search_bar, cx); // toolbar.add_item(project_search_bar, cx);
// let submit_feedback_button = // let submit_feedback_button =
// cx.add_view(|_| SubmitFeedbackButton::new()); // cx.add_view(|_| SubmitFeedbackButton::new());
// toolbar.add_item(submit_feedback_button, cx); // toolbar.add_item(submit_feedback_button, cx);
// let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new()); // let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
// toolbar.add_item(feedback_info_text, cx); // toolbar.add_item(feedback_info_text, cx);
// let lsp_log_item = // let lsp_log_item =
// cx.add_view(|_| language_tools::LspLogToolbarItemView::new()); // cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
// toolbar.add_item(lsp_log_item, cx); // toolbar.add_item(lsp_log_item, cx);
// let syntax_tree_item = cx // let syntax_tree_item = cx
// .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new()); // .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
// toolbar.add_item(syntax_tree_item, cx); // toolbar.add_item(syntax_tree_item, cx);
// }) })
}); });
} }
} }