From cbd902658c879f057ad842689c2b681bf67ae7af Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 2 Nov 2023 19:18:01 -0600 Subject: [PATCH 01/15] git -> git3 This is needed for the editor. --- Cargo.lock | 34 ++- crates/client2/Cargo.toml | 2 +- crates/fs2/Cargo.toml | 2 +- crates/git3/Cargo.toml | 30 +++ crates/git3/src/diff.rs | 412 ++++++++++++++++++++++++++++++++ crates/git3/src/git.rs | 11 + crates/language2/Cargo.toml | 6 +- crates/multi_buffer2/Cargo.toml | 6 +- crates/project2/Cargo.toml | 4 +- 9 files changed, 490 insertions(+), 17 deletions(-) create mode 100644 crates/git3/Cargo.toml create mode 100644 crates/git3/src/diff.rs create mode 100644 crates/git3/src/git.rs diff --git a/Cargo.lock b/Cargo.lock index db8e88cb1c..8b7ea1c641 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3079,7 +3079,7 @@ dependencies = [ "smol", "sum_tree", "tempfile", - "text", + "text2", "time", "util", ] @@ -3371,6 +3371,26 @@ dependencies = [ "url", ] +[[package]] +name = "git3" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "clock", + "collections", + "futures 0.3.28", + "git2", + "lazy_static", + "log", + "parking_lot 0.11.2", + "smol", + "sum_tree", + "text2", + "unindent", + "util", +] + [[package]] name = "glob" version = "0.3.1" @@ -4345,7 +4365,7 @@ dependencies = [ "env_logger 0.9.3", "futures 0.3.28", "fuzzy2", - "git", + "git3", "globset", "gpui2", "indoc", @@ -4366,7 +4386,7 @@ dependencies = [ "smallvec", "smol", "sum_tree", - "text", + "text2", "theme2", "tree-sitter", "tree-sitter-elixir", @@ -5081,7 +5101,7 @@ dependencies = [ "ctor", "env_logger 0.9.3", "futures 0.3.28", - "git", + "git3", "gpui2", "indoc", "itertools 0.10.5", @@ -5104,7 +5124,7 @@ dependencies = [ "smol", "snippet", "sum_tree", - "text", + "text2", "theme2", "tree-sitter", "tree-sitter-html", @@ -6284,8 +6304,8 @@ dependencies = [ "fsevent", "futures 0.3.28", "fuzzy2", - "git", "git2", + "git3", "globset", "gpui2", "ignore", @@ -6313,7 +6333,7 @@ dependencies = [ "sum_tree", "tempdir", "terminal2", - "text", + "text2", "thiserror", "toml 0.5.11", "unindent", diff --git a/crates/client2/Cargo.toml b/crates/client2/Cargo.toml index 45e1f618d2..ace229bc21 100644 --- a/crates/client2/Cargo.toml +++ b/crates/client2/Cargo.toml @@ -17,7 +17,7 @@ db = { package = "db2", path = "../db2" } gpui = { package = "gpui2", path = "../gpui2" } util = { path = "../util" } rpc = { package = "rpc2", path = "../rpc2" } -text = { path = "../text" } +text = { package = "text2", path = "../text2" } settings = { package = "settings2", path = "../settings2" } feature_flags = { package = "feature_flags2", path = "../feature_flags2" } sum_tree = { path = "../sum_tree" } diff --git a/crates/fs2/Cargo.toml b/crates/fs2/Cargo.toml index 636def05ec..ca525afe5f 100644 --- a/crates/fs2/Cargo.toml +++ b/crates/fs2/Cargo.toml @@ -10,7 +10,7 @@ path = "src/fs2.rs" [dependencies] collections = { path = "../collections" } rope = { path = "../rope" } -text = { path = "../text" } +text = { package = "text2", path = "../text2" } util = { path = "../util" } sum_tree = { path = "../sum_tree" } diff --git a/crates/git3/Cargo.toml b/crates/git3/Cargo.toml new file mode 100644 index 0000000000..e88fa6574d --- /dev/null +++ b/crates/git3/Cargo.toml @@ -0,0 +1,30 @@ +[package] +# git2 was already taken. 
+name = "git3" +version = "0.1.0" +edition = "2021" +publish = false + +[lib] +path = "src/git.rs" + +[dependencies] +anyhow.workspace = true +clock = { path = "../clock" } +lazy_static.workspace = true +sum_tree = { path = "../sum_tree" } +text = { package = "text2", path = "../text2" } +collections = { path = "../collections" } +util = { path = "../util" } +log.workspace = true +smol.workspace = true +parking_lot.workspace = true +async-trait.workspace = true +futures.workspace = true +git2.workspace = true + +[dev-dependencies] +unindent.workspace = true + +[features] +test-support = [] diff --git a/crates/git3/src/diff.rs b/crates/git3/src/diff.rs new file mode 100644 index 0000000000..39383cfc78 --- /dev/null +++ b/crates/git3/src/diff.rs @@ -0,0 +1,412 @@ +use std::{iter, ops::Range}; +use sum_tree::SumTree; +use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point}; + +pub use git2 as libgit; +use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum DiffHunkStatus { + Added, + Modified, + Removed, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DiffHunk { + pub buffer_range: Range, + pub diff_base_byte_range: Range, +} + +impl DiffHunk { + pub fn status(&self) -> DiffHunkStatus { + if self.diff_base_byte_range.is_empty() { + DiffHunkStatus::Added + } else if self.buffer_range.is_empty() { + DiffHunkStatus::Removed + } else { + DiffHunkStatus::Modified + } + } +} + +impl sum_tree::Item for DiffHunk { + type Summary = DiffHunkSummary; + + fn summary(&self) -> Self::Summary { + DiffHunkSummary { + buffer_range: self.buffer_range.clone(), + } + } +} + +#[derive(Debug, Default, Clone)] +pub struct DiffHunkSummary { + buffer_range: Range, +} + +impl sum_tree::Summary for DiffHunkSummary { + type Context = text::BufferSnapshot; + + fn add_summary(&mut self, other: &Self, buffer: &Self::Context) { + self.buffer_range.start = self + .buffer_range + .start + .min(&other.buffer_range.start, buffer); + self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer); + } +} + +#[derive(Clone)] +pub struct BufferDiff { + last_buffer_version: Option, + tree: SumTree>, +} + +impl BufferDiff { + pub fn new() -> BufferDiff { + BufferDiff { + last_buffer_version: None, + tree: SumTree::new(), + } + } + + pub fn is_empty(&self) -> bool { + self.tree.is_empty() + } + + pub fn hunks_in_row_range<'a>( + &'a self, + range: Range, + buffer: &'a BufferSnapshot, + ) -> impl 'a + Iterator> { + let start = buffer.anchor_before(Point::new(range.start, 0)); + let end = buffer.anchor_after(Point::new(range.end, 0)); + + self.hunks_intersecting_range(start..end, buffer) + } + + pub fn hunks_intersecting_range<'a>( + &'a self, + range: Range, + buffer: &'a BufferSnapshot, + ) -> impl 'a + Iterator> { + let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + !before_start && !after_end + }); + + let anchor_iter = std::iter::from_fn(move || { + cursor.next(buffer); + cursor.item() + }) + .flat_map(move |hunk| { + [ + (&hunk.buffer_range.start, hunk.diff_base_byte_range.start), + (&hunk.buffer_range.end, hunk.diff_base_byte_range.end), + ] + .into_iter() + }); + + let mut summaries = buffer.summaries_for_anchors_with_payload::(anchor_iter); + iter::from_fn(move || { + let (start_point, start_base) = summaries.next()?; + let 
(end_point, end_base) = summaries.next()?; + + let end_row = if end_point.column > 0 { + end_point.row + 1 + } else { + end_point.row + }; + + Some(DiffHunk { + buffer_range: start_point.row..end_row, + diff_base_byte_range: start_base..end_base, + }) + }) + } + + pub fn hunks_intersecting_range_rev<'a>( + &'a self, + range: Range, + buffer: &'a BufferSnapshot, + ) -> impl 'a + Iterator> { + let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| { + let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt(); + let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt(); + !before_start && !after_end + }); + + std::iter::from_fn(move || { + cursor.prev(buffer); + + let hunk = cursor.item()?; + let range = hunk.buffer_range.to_point(buffer); + let end_row = if range.end.column > 0 { + range.end.row + 1 + } else { + range.end.row + }; + + Some(DiffHunk { + buffer_range: range.start.row..end_row, + diff_base_byte_range: hunk.diff_base_byte_range.clone(), + }) + }) + } + + pub fn clear(&mut self, buffer: &text::BufferSnapshot) { + self.last_buffer_version = Some(buffer.version().clone()); + self.tree = SumTree::new(); + } + + pub async fn update(&mut self, diff_base: &str, buffer: &text::BufferSnapshot) { + let mut tree = SumTree::new(); + + let buffer_text = buffer.as_rope().to_string(); + let patch = Self::diff(&diff_base, &buffer_text); + + if let Some(patch) = patch { + let mut divergence = 0; + for hunk_index in 0..patch.num_hunks() { + let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence); + tree.push(hunk, buffer); + } + } + + self.tree = tree; + self.last_buffer_version = Some(buffer.version().clone()); + } + + #[cfg(test)] + fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator> { + let start = text.anchor_before(Point::new(0, 0)); + let end = text.anchor_after(Point::new(u32::MAX, u32::MAX)); + self.hunks_intersecting_range(start..end, text) + } + + fn diff<'a>(head: &'a str, current: &'a str) -> Option> { + let mut options = GitOptions::default(); + options.context_lines(0); + + let patch = GitPatch::from_buffers( + head.as_bytes(), + None, + current.as_bytes(), + None, + Some(&mut options), + ); + + match patch { + Ok(patch) => Some(patch), + + Err(err) => { + log::error!("`GitPatch::from_buffers` failed: {}", err); + None + } + } + } + + fn process_patch_hunk<'a>( + patch: &GitPatch<'a>, + hunk_index: usize, + buffer: &text::BufferSnapshot, + buffer_row_divergence: &mut i64, + ) -> DiffHunk { + let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap(); + assert!(line_item_count > 0); + + let mut first_deletion_buffer_row: Option = None; + let mut buffer_row_range: Option> = None; + let mut diff_base_byte_range: Option> = None; + + for line_index in 0..line_item_count { + let line = patch.line_in_hunk(hunk_index, line_index).unwrap(); + let kind = line.origin_value(); + let content_offset = line.content_offset() as isize; + let content_len = line.content().len() as isize; + + if kind == GitDiffLineType::Addition { + *buffer_row_divergence += 1; + let row = line.new_lineno().unwrap().saturating_sub(1); + + match &mut buffer_row_range { + Some(buffer_row_range) => buffer_row_range.end = row + 1, + None => buffer_row_range = Some(row..row + 1), + } + } + + if kind == GitDiffLineType::Deletion { + let end = content_offset + content_len; + + match &mut diff_base_byte_range { + Some(head_byte_range) => head_byte_range.end = end as usize, + None => diff_base_byte_range = 
Some(content_offset as usize..end as usize), + } + + if first_deletion_buffer_row.is_none() { + let old_row = line.old_lineno().unwrap().saturating_sub(1); + let row = old_row as i64 + *buffer_row_divergence; + first_deletion_buffer_row = Some(row as u32); + } + + *buffer_row_divergence -= 1; + } + } + + //unwrap_or deletion without addition + let buffer_row_range = buffer_row_range.unwrap_or_else(|| { + //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk + let row = first_deletion_buffer_row.unwrap(); + row..row + }); + + //unwrap_or addition without deletion + let diff_base_byte_range = diff_base_byte_range.unwrap_or(0..0); + + let start = Point::new(buffer_row_range.start, 0); + let end = Point::new(buffer_row_range.end, 0); + let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end); + DiffHunk { + buffer_range, + diff_base_byte_range, + } + } +} + +/// Range (crossing new lines), old, new +#[cfg(any(test, feature = "test-support"))] +#[track_caller] +pub fn assert_hunks( + diff_hunks: Iter, + buffer: &BufferSnapshot, + diff_base: &str, + expected_hunks: &[(Range, &str, &str)], +) where + Iter: Iterator>, +{ + let actual_hunks = diff_hunks + .map(|hunk| { + ( + hunk.buffer_range.clone(), + &diff_base[hunk.diff_base_byte_range], + buffer + .text_for_range( + Point::new(hunk.buffer_range.start, 0) + ..Point::new(hunk.buffer_range.end, 0), + ) + .collect::(), + ) + }) + .collect::>(); + + let expected_hunks: Vec<_> = expected_hunks + .iter() + .map(|(r, s, h)| (r.clone(), *s, h.to_string())) + .collect(); + + assert_eq!(actual_hunks, expected_hunks); +} + +#[cfg(test)] +mod tests { + use std::assert_eq; + + use super::*; + use text::Buffer; + use unindent::Unindent as _; + + #[test] + fn test_buffer_diff_simple() { + let diff_base = " + one + two + three + " + .unindent(); + + let buffer_text = " + one + HELLO + three + " + .unindent(); + + let mut buffer = Buffer::new(0, 0, buffer_text); + let mut diff = BufferDiff::new(); + smol::block_on(diff.update(&diff_base, &buffer)); + assert_hunks( + diff.hunks(&buffer), + &buffer, + &diff_base, + &[(1..2, "two\n", "HELLO\n")], + ); + + buffer.edit([(0..0, "point five\n")]); + smol::block_on(diff.update(&diff_base, &buffer)); + assert_hunks( + diff.hunks(&buffer), + &buffer, + &diff_base, + &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")], + ); + + diff.clear(&buffer); + assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]); + } + + #[test] + fn test_buffer_diff_range() { + let diff_base = " + one + two + three + four + five + six + seven + eight + nine + ten + " + .unindent(); + + let buffer_text = " + A + one + B + two + C + three + HELLO + four + five + SIXTEEN + seven + eight + WORLD + nine + + ten + + " + .unindent(); + + let buffer = Buffer::new(0, 0, buffer_text); + let mut diff = BufferDiff::new(); + smol::block_on(diff.update(&diff_base, &buffer)); + assert_eq!(diff.hunks(&buffer).count(), 8); + + assert_hunks( + diff.hunks_in_row_range(7..12, &buffer), + &buffer, + &diff_base, + &[ + (6..7, "", "HELLO\n"), + (9..10, "six\n", "SIXTEEN\n"), + (12..13, "", "WORLD\n"), + ], + ); + } +} diff --git a/crates/git3/src/git.rs b/crates/git3/src/git.rs new file mode 100644 index 0000000000..b1b885eca2 --- /dev/null +++ b/crates/git3/src/git.rs @@ -0,0 +1,11 @@ +use std::ffi::OsStr; + +pub use git2 as libgit; +pub use lazy_static::lazy_static; + +pub mod diff; + +lazy_static! 
{ + pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git"); + pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore"); +} diff --git a/crates/language2/Cargo.toml b/crates/language2/Cargo.toml index 4fca16bcb5..0e4d9addfa 100644 --- a/crates/language2/Cargo.toml +++ b/crates/language2/Cargo.toml @@ -25,13 +25,13 @@ test-support = [ clock = { path = "../clock" } collections = { path = "../collections" } fuzzy = { package = "fuzzy2", path = "../fuzzy2" } -git = { path = "../git" } +git = { package = "git3", path = "../git3" } gpui = { package = "gpui2", path = "../gpui2" } lsp = { package = "lsp2", path = "../lsp2" } rpc = { package = "rpc2", path = "../rpc2" } settings = { package = "settings2", path = "../settings2" } sum_tree = { path = "../sum_tree" } -text = { path = "../text" } +text = { package = "text2", path = "../text2" } theme = { package = "theme2", path = "../theme2" } util = { path = "../util" } @@ -64,7 +64,7 @@ client = { package = "client2", path = "../client2", features = ["test-support"] collections = { path = "../collections", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } -text = { path = "../text", features = ["test-support"] } +text = { package = "text2", path = "../text2", features = ["test-support"] } settings = { package = "settings2", path = "../settings2", features = ["test-support"] } util = { path = "../util", features = ["test-support"] } ctor.workspace = true diff --git a/crates/multi_buffer2/Cargo.toml b/crates/multi_buffer2/Cargo.toml index a57ef29531..4b69edd5a8 100644 --- a/crates/multi_buffer2/Cargo.toml +++ b/crates/multi_buffer2/Cargo.toml @@ -23,7 +23,7 @@ test-support = [ client = { package = "client2", path = "../client2" } clock = { path = "../clock" } collections = { path = "../collections" } -git = { path = "../git" } +git = { package = "git3", path = "../git3" } gpui = { package = "gpui2", path = "../gpui2" } language = { package = "language2", path = "../language2" } lsp = { package = "lsp2", path = "../lsp2" } @@ -31,7 +31,7 @@ rich_text = { path = "../rich_text" } settings = { package = "settings2", path = "../settings2" } snippet = { path = "../snippet" } sum_tree = { path = "../sum_tree" } -text = { path = "../text" } +text = { package = "text2", path = "../text2" } theme = { package = "theme2", path = "../theme2" } util = { path = "../util" } @@ -60,7 +60,7 @@ tree-sitter-typescript = { workspace = true, optional = true } [dev-dependencies] copilot = { package = "copilot2", path = "../copilot2", features = ["test-support"] } -text = { path = "../text", features = ["test-support"] } +text = { package = "text2", path = "../text2", features = ["test-support"] } language = { package = "language2", path = "../language2", features = ["test-support"] } lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] } gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } diff --git a/crates/project2/Cargo.toml b/crates/project2/Cargo.toml index 7aae9fb007..892ddb91c7 100644 --- a/crates/project2/Cargo.toml +++ b/crates/project2/Cargo.toml @@ -20,7 +20,7 @@ test-support = [ ] [dependencies] -text = { path = "../text" } +text = { package = "text2", path = "../text2" } copilot = { package = "copilot2", path = "../copilot2" } client = { package = "client2", path = "../client2" } clock = { path = "../clock" } @@ -29,7 +29,7 @@ db = { package = "db2", path = "../db2" } fs = { 
package = "fs2", path = "../fs2" } fsevent = { path = "../fsevent" } fuzzy = { package = "fuzzy2", path = "../fuzzy2" } -git = { path = "../git" } +git = { package = "git3", path = "../git3" } gpui = { package = "gpui2", path = "../gpui2" } language = { package = "language2", path = "../language2" } lsp = { package = "lsp2", path = "../lsp2" } From 6ecb4805f76891344286c18eb81f7aa8e464adf4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 2 Nov 2023 19:19:18 -0600 Subject: [PATCH 02/15] Add rich_text2 --- Cargo.lock | 2 +- crates/multi_buffer2/Cargo.toml | 2 +- crates/rich_text2/src/rich_text.rs | 379 +++++++++++++++++++++++++++++ 3 files changed, 381 insertions(+), 2 deletions(-) create mode 100644 crates/rich_text2/src/rich_text.rs diff --git a/Cargo.lock b/Cargo.lock index 8b7ea1c641..70586ed6f4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1546,7 +1546,7 @@ dependencies = [ "sum_tree", "sysinfo", "tempfile", - "text", + "text2", "thiserror", "time", "tiny_http", diff --git a/crates/multi_buffer2/Cargo.toml b/crates/multi_buffer2/Cargo.toml index 4b69edd5a8..98b96dfa1d 100644 --- a/crates/multi_buffer2/Cargo.toml +++ b/crates/multi_buffer2/Cargo.toml @@ -27,7 +27,7 @@ git = { package = "git3", path = "../git3" } gpui = { package = "gpui2", path = "../gpui2" } language = { package = "language2", path = "../language2" } lsp = { package = "lsp2", path = "../lsp2" } -rich_text = { path = "../rich_text" } +rich_text = { package = "rich_text2", path = "../rich_text2" } settings = { package = "settings2", path = "../settings2" } snippet = { path = "../snippet" } sum_tree = { path = "../sum_tree" } diff --git a/crates/rich_text2/src/rich_text.rs b/crates/rich_text2/src/rich_text.rs new file mode 100644 index 0000000000..9a8f4a1457 --- /dev/null +++ b/crates/rich_text2/src/rich_text.rs @@ -0,0 +1,379 @@ +use std::{ops::Range, sync::Arc}; + +use anyhow::bail; +use futures::FutureExt; +use gpui::{ + elements::Text, + fonts::{HighlightStyle, Underline, Weight}, + platform::{CursorStyle, MouseButton}, + AnyElement, CursorRegion, Element, MouseRegion, ViewContext, +}; +use language::{HighlightId, Language, LanguageRegistry}; +use theme::{RichTextStyle, SyntaxTheme}; +use util::RangeExt; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Highlight { + Id(HighlightId), + Highlight(HighlightStyle), + Mention, + SelfMention, +} + +impl From for Highlight { + fn from(style: HighlightStyle) -> Self { + Self::Highlight(style) + } +} + +impl From for Highlight { + fn from(style: HighlightId) -> Self { + Self::Id(style) + } +} + +#[derive(Debug, Clone)] +pub struct RichText { + pub text: String, + pub highlights: Vec<(Range, Highlight)>, + pub region_ranges: Vec>, + pub regions: Vec, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum BackgroundKind { + Code, + /// A mention background for non-self user. + Mention, + SelfMention, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RenderedRegion { + pub background_kind: Option, + pub link_url: Option, +} + +/// Allows one to specify extra links to the rendered markdown, which can be used +/// for e.g. mentions. 
+pub struct Mention { + pub range: Range, + pub is_self_mention: bool, +} + +impl RichText { + pub fn element( + &self, + syntax: Arc, + style: RichTextStyle, + cx: &mut ViewContext, + ) -> AnyElement { + todo!(); + + // let mut region_id = 0; + // let view_id = cx.view_id(); + + // let regions = self.regions.clone(); + + // enum Markdown {} + // Text::new(self.text.clone(), style.text.clone()) + // .with_highlights( + // self.highlights + // .iter() + // .filter_map(|(range, highlight)| { + // let style = match highlight { + // Highlight::Id(id) => id.style(&syntax)?, + // Highlight::Highlight(style) => style.clone(), + // Highlight::Mention => style.mention_highlight, + // Highlight::SelfMention => style.self_mention_highlight, + // }; + // Some((range.clone(), style)) + // }) + // .collect::>(), + // ) + // .with_custom_runs(self.region_ranges.clone(), move |ix, bounds, cx| { + // region_id += 1; + // let region = regions[ix].clone(); + // if let Some(url) = region.link_url { + // cx.scene().push_cursor_region(CursorRegion { + // bounds, + // style: CursorStyle::PointingHand, + // }); + // cx.scene().push_mouse_region( + // MouseRegion::new::(view_id, region_id, bounds) + // .on_click::(MouseButton::Left, move |_, _, cx| { + // cx.platform().open_url(&url) + // }), + // ); + // } + // if let Some(region_kind) = ®ion.background_kind { + // let background = match region_kind { + // BackgroundKind::Code => style.code_background, + // BackgroundKind::Mention => style.mention_background, + // BackgroundKind::SelfMention => style.self_mention_background, + // }; + // if background.is_some() { + // cx.scene().push_quad(gpui::Quad { + // bounds, + // background, + // border: Default::default(), + // corner_radii: (2.0).into(), + // }); + // } + // } + // }) + // .with_soft_wrap(true) + // .into_any() + } + + pub fn add_mention( + &mut self, + range: Range, + is_current_user: bool, + mention_style: HighlightStyle, + ) -> anyhow::Result<()> { + if range.end > self.text.len() { + bail!( + "Mention in range {range:?} is outside of bounds for a message of length {}", + self.text.len() + ); + } + + if is_current_user { + self.region_ranges.push(range.clone()); + self.regions.push(RenderedRegion { + background_kind: Some(BackgroundKind::Mention), + link_url: None, + }); + } + self.highlights + .push((range, Highlight::Highlight(mention_style))); + Ok(()) + } +} + +pub fn render_markdown_mut( + block: &str, + mut mentions: &[Mention], + language_registry: &Arc, + language: Option<&Arc>, + data: &mut RichText, +) { + use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag}; + + let mut bold_depth = 0; + let mut italic_depth = 0; + let mut link_url = None; + let mut current_language = None; + let mut list_stack = Vec::new(); + + let options = Options::all(); + for (event, source_range) in Parser::new_ext(&block, options).into_offset_iter() { + let prev_len = data.text.len(); + match event { + Event::Text(t) => { + if let Some(language) = ¤t_language { + render_code(&mut data.text, &mut data.highlights, t.as_ref(), language); + } else { + if let Some(mention) = mentions.first() { + if source_range.contains_inclusive(&mention.range) { + mentions = &mentions[1..]; + let range = (prev_len + mention.range.start - source_range.start) + ..(prev_len + mention.range.end - source_range.start); + data.highlights.push(( + range.clone(), + if mention.is_self_mention { + Highlight::SelfMention + } else { + Highlight::Mention + }, + )); + data.region_ranges.push(range); + data.regions.push(RenderedRegion { + 
background_kind: Some(if mention.is_self_mention { + BackgroundKind::SelfMention + } else { + BackgroundKind::Mention + }), + link_url: None, + }); + } + } + + data.text.push_str(t.as_ref()); + let mut style = HighlightStyle::default(); + if bold_depth > 0 { + style.weight = Some(Weight::BOLD); + } + if italic_depth > 0 { + style.italic = Some(true); + } + if let Some(link_url) = link_url.clone() { + data.region_ranges.push(prev_len..data.text.len()); + data.regions.push(RenderedRegion { + link_url: Some(link_url), + background_kind: None, + }); + style.underline = Some(Underline { + thickness: 1.0.into(), + ..Default::default() + }); + } + + if style != HighlightStyle::default() { + let mut new_highlight = true; + if let Some((last_range, last_style)) = data.highlights.last_mut() { + if last_range.end == prev_len + && last_style == &Highlight::Highlight(style) + { + last_range.end = data.text.len(); + new_highlight = false; + } + } + if new_highlight { + data.highlights + .push((prev_len..data.text.len(), Highlight::Highlight(style))); + } + } + } + } + Event::Code(t) => { + data.text.push_str(t.as_ref()); + data.region_ranges.push(prev_len..data.text.len()); + if link_url.is_some() { + data.highlights.push(( + prev_len..data.text.len(), + Highlight::Highlight(HighlightStyle { + underline: Some(Underline { + thickness: 1.0.into(), + ..Default::default() + }), + ..Default::default() + }), + )); + } + data.regions.push(RenderedRegion { + background_kind: Some(BackgroundKind::Code), + link_url: link_url.clone(), + }); + } + Event::Start(tag) => match tag { + Tag::Paragraph => new_paragraph(&mut data.text, &mut list_stack), + Tag::Heading(_, _, _) => { + new_paragraph(&mut data.text, &mut list_stack); + bold_depth += 1; + } + Tag::CodeBlock(kind) => { + new_paragraph(&mut data.text, &mut list_stack); + current_language = if let CodeBlockKind::Fenced(language) = kind { + language_registry + .language_for_name(language.as_ref()) + .now_or_never() + .and_then(Result::ok) + } else { + language.cloned() + } + } + Tag::Emphasis => italic_depth += 1, + Tag::Strong => bold_depth += 1, + Tag::Link(_, url, _) => link_url = Some(url.to_string()), + Tag::List(number) => { + list_stack.push((number, false)); + } + Tag::Item => { + let len = list_stack.len(); + if let Some((list_number, has_content)) = list_stack.last_mut() { + *has_content = false; + if !data.text.is_empty() && !data.text.ends_with('\n') { + data.text.push('\n'); + } + for _ in 0..len - 1 { + data.text.push_str(" "); + } + if let Some(number) = list_number { + data.text.push_str(&format!("{}. 
", number)); + *number += 1; + *has_content = false; + } else { + data.text.push_str("- "); + } + } + } + _ => {} + }, + Event::End(tag) => match tag { + Tag::Heading(_, _, _) => bold_depth -= 1, + Tag::CodeBlock(_) => current_language = None, + Tag::Emphasis => italic_depth -= 1, + Tag::Strong => bold_depth -= 1, + Tag::Link(_, _, _) => link_url = None, + Tag::List(_) => drop(list_stack.pop()), + _ => {} + }, + Event::HardBreak => data.text.push('\n'), + Event::SoftBreak => data.text.push(' '), + _ => {} + } + } +} + +pub fn render_markdown( + block: String, + mentions: &[Mention], + language_registry: &Arc, + language: Option<&Arc>, +) -> RichText { + let mut data = RichText { + text: Default::default(), + highlights: Default::default(), + region_ranges: Default::default(), + regions: Default::default(), + }; + + render_markdown_mut(&block, mentions, language_registry, language, &mut data); + + data.text = data.text.trim().to_string(); + + data +} + +pub fn render_code( + text: &mut String, + highlights: &mut Vec<(Range, Highlight)>, + content: &str, + language: &Arc, +) { + let prev_len = text.len(); + text.push_str(content); + for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) { + highlights.push(( + prev_len + range.start..prev_len + range.end, + Highlight::Id(highlight_id), + )); + } +} + +pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option, bool)>) { + let mut is_subsequent_paragraph_of_list = false; + if let Some((_, has_content)) = list_stack.last_mut() { + if *has_content { + is_subsequent_paragraph_of_list = true; + } else { + *has_content = true; + return; + } + } + + if !text.is_empty() { + if !text.ends_with('\n') { + text.push('\n'); + } + text.push('\n'); + } + for _ in 0..list_stack.len().saturating_sub(1) { + text.push_str(" "); + } + if is_subsequent_paragraph_of_list { + text.push_str(" "); + } +} From 0b3932f38cd9ad53df7513b30b9a7fde5cd07fbc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 2 Nov 2023 19:30:33 -0600 Subject: [PATCH 03/15] Convert rich_text2 --- Cargo.lock | 22 ++++++++++++++++++++-- crates/rich_text2/Cargo.toml | 29 +++++++++++++++++++++++++++++ crates/rich_text2/src/rich_text.rs | 22 ++++++++-------------- crates/theme2/Cargo.toml | 4 ++-- 4 files changed, 59 insertions(+), 18 deletions(-) create mode 100644 crates/rich_text2/Cargo.toml diff --git a/Cargo.lock b/Cargo.lock index 70586ed6f4..64bfe4edf8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5115,7 +5115,7 @@ dependencies = [ "project2", "pulldown-cmark", "rand 0.8.5", - "rich_text", + "rich_text2", "schemars", "serde", "serde_derive", @@ -6947,6 +6947,24 @@ dependencies = [ "util", ] +[[package]] +name = "rich_text2" +version = "0.1.0" +dependencies = [ + "anyhow", + "collections", + "futures 0.3.28", + "gpui2", + "language2", + "lazy_static", + "pulldown-cmark", + "smallvec", + "smol", + "sum_tree", + "theme2", + "util", +] + [[package]] name = "ring" version = "0.16.20" @@ -8876,7 +8894,7 @@ name = "theme2" version = "0.1.0" dependencies = [ "anyhow", - "fs", + "fs2", "gpui2", "indexmap 1.9.3", "parking_lot 0.11.2", diff --git a/crates/rich_text2/Cargo.toml b/crates/rich_text2/Cargo.toml new file mode 100644 index 0000000000..4eee1e107b --- /dev/null +++ b/crates/rich_text2/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "rich_text2" +version = "0.1.0" +edition = "2021" +publish = false + +[lib] +path = "src/rich_text.rs" +doctest = false + +[features] +test-support = [ + "gpui/test-support", + "util/test-support", +] + +[dependencies] 
+collections = { path = "../collections" } +gpui = { package = "gpui2", path = "../gpui2" } +sum_tree = { path = "../sum_tree" } +theme = { package = "theme2", path = "../theme2" } +language = { package = "language2", path = "../language2" } +util = { path = "../util" } +anyhow.workspace = true +futures.workspace = true +lazy_static.workspace = true +pulldown-cmark = { version = "0.9.2", default-features = false } +smallvec.workspace = true +smol.workspace = true diff --git a/crates/rich_text2/src/rich_text.rs b/crates/rich_text2/src/rich_text.rs index 9a8f4a1457..48b530b7c5 100644 --- a/crates/rich_text2/src/rich_text.rs +++ b/crates/rich_text2/src/rich_text.rs @@ -2,14 +2,8 @@ use std::{ops::Range, sync::Arc}; use anyhow::bail; use futures::FutureExt; -use gpui::{ - elements::Text, - fonts::{HighlightStyle, Underline, Weight}, - platform::{CursorStyle, MouseButton}, - AnyElement, CursorRegion, Element, MouseRegion, ViewContext, -}; +use gpui::{AnyElement, FontStyle, FontWeight, HighlightStyle, UnderlineStyle}; use language::{HighlightId, Language, LanguageRegistry}; -use theme::{RichTextStyle, SyntaxTheme}; use util::RangeExt; #[derive(Debug, Clone, PartialEq, Eq)] @@ -64,9 +58,9 @@ pub struct Mention { impl RichText { pub fn element( &self, - syntax: Arc, - style: RichTextStyle, - cx: &mut ViewContext, + // syntax: Arc, + // style: RichTextStyle, + // cx: &mut ViewContext, ) -> AnyElement { todo!(); @@ -203,10 +197,10 @@ pub fn render_markdown_mut( data.text.push_str(t.as_ref()); let mut style = HighlightStyle::default(); if bold_depth > 0 { - style.weight = Some(Weight::BOLD); + style.font_weight = Some(FontWeight::BOLD); } if italic_depth > 0 { - style.italic = Some(true); + style.font_style = Some(FontStyle::Italic); } if let Some(link_url) = link_url.clone() { data.region_ranges.push(prev_len..data.text.len()); @@ -214,7 +208,7 @@ pub fn render_markdown_mut( link_url: Some(link_url), background_kind: None, }); - style.underline = Some(Underline { + style.underline = Some(UnderlineStyle { thickness: 1.0.into(), ..Default::default() }); @@ -244,7 +238,7 @@ pub fn render_markdown_mut( data.highlights.push(( prev_len..data.text.len(), Highlight::Highlight(HighlightStyle { - underline: Some(Underline { + underline: Some(UnderlineStyle { thickness: 1.0.into(), ..Default::default() }), diff --git a/crates/theme2/Cargo.toml b/crates/theme2/Cargo.toml index a051468b00..5a8448372c 100644 --- a/crates/theme2/Cargo.toml +++ b/crates/theme2/Cargo.toml @@ -17,7 +17,7 @@ doctest = false [dependencies] anyhow.workspace = true -fs = { path = "../fs" } +fs = { package = "fs2", path = "../fs2" } gpui = { package = "gpui2", path = "../gpui2" } indexmap = "1.6.2" parking_lot.workspace = true @@ -32,5 +32,5 @@ util = { path = "../util" } [dev-dependencies] gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } -fs = { path = "../fs", features = ["test-support"] } +fs = { package = "fs2", path = "../fs2", features = ["test-support"] } settings = { package = "settings2", path = "../settings2", features = ["test-support"] } From d673efebd2fccfc3500f66ebf0c6887b85ca6b47 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 1 Nov 2023 11:53:00 +0200 Subject: [PATCH 04/15] Add prettier workspace resolution test --- Cargo.lock | 2 +- crates/prettier/src/prettier.rs | 396 +++++++++++++++++- crates/project/src/project_tests.rs | 4 +- crates/project/src/search.rs | 26 +- crates/search/Cargo.toml | 1 - crates/search/src/project_search.rs | 4 +- crates/semantic_index/src/db.rs | 4 +- 
crates/semantic_index/src/semantic_index.rs | 3 +- .../src/semantic_index_tests.rs | 4 +- crates/util/Cargo.toml | 1 + crates/util/src/paths.rs | 26 ++ 11 files changed, 426 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 64bfe4edf8..17d19258e4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7580,7 +7580,6 @@ dependencies = [ "collections", "editor", "futures 0.3.28", - "globset", "gpui", "language", "log", @@ -9887,6 +9886,7 @@ dependencies = [ "dirs 3.0.2", "futures 0.3.28", "git2", + "globset", "isahc", "lazy_static", "log", diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 7517b4ee43..58de2fc913 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -3,7 +3,7 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use anyhow::Context; -use collections::HashMap; +use collections::{HashMap, HashSet}; use fs::Fs; use gpui::{AsyncAppContext, ModelHandle}; use language::language_settings::language_settings; @@ -11,7 +11,7 @@ use language::{Buffer, Diff}; use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use serde::{Deserialize, Serialize}; -use util::paths::DEFAULT_PRETTIER_DIR; +use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR}; pub enum Prettier { Real(RealPrettier), @@ -63,14 +63,77 @@ impl Prettier { ".editorconfig", ]; + pub async fn locate_prettier_installation( + fs: &dyn Fs, + installed_prettiers: &HashSet, + locate_from: &Path, + ) -> anyhow::Result> { + let mut path_to_check = locate_from + .components() + .take_while(|component| !is_node_modules(component)) + .collect::(); + let mut project_path_with_prettier_dependency = None; + loop { + if installed_prettiers.contains(&path_to_check) { + return Ok(Some(path_to_check)); + } else if let Some(package_json_contents) = + read_package_json(fs, &path_to_check).await? + { + if has_prettier_in_package_json(&package_json_contents) { + if has_prettier_in_node_modules(fs, &path_to_check).await? { + return Ok(Some(path_to_check)); + } else if project_path_with_prettier_dependency.is_none() { + project_path_with_prettier_dependency = Some(path_to_check.clone()); + } + } else { + match package_json_contents.get("workspaces") { + Some(serde_json::Value::Array(workspaces)) => { + match &project_path_with_prettier_dependency { + Some(project_path_with_prettier_dependency) => { + let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix"); + if workspaces.iter().filter_map(|value| { + if let serde_json::Value::String(s) = value { + Some(s.clone()) + } else { + log::warn!("Skipping non-string 'workspaces' value: {value:?}"); + None + } + }).any(|workspace_definition| { + if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() { + path_matcher.is_match(subproject_path) + } else { + workspace_definition == subproject_path.to_string_lossy() + } + }) { + return Ok(Some(path_to_check)); + } else { + log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}"); + } + } + None => { + log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json"); + } + } + }, + Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. 
Skipping."), + None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"), + } + } + } + + if !path_to_check.pop() { + match project_path_with_prettier_dependency { + Some(closest_prettier_discovered) => anyhow::bail!("No prettier found in ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}"), + None => return Ok(None), + } + } + } + } + pub async fn locate( starting_path: Option, fs: Arc, ) -> anyhow::Result { - fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { - path_component.as_os_str().to_string_lossy() == "node_modules" - } - let paths_to_check = match starting_path.as_ref() { Some(starting_path) => { let worktree_root = starting_path @@ -106,7 +169,7 @@ impl Prettier { None => Vec::new(), }; - match find_closest_prettier_dir(paths_to_check, fs.as_ref()) + match find_closest_prettier_dir(fs.as_ref(), paths_to_check) .await .with_context(|| format!("finding prettier starting with {starting_path:?}"))? { @@ -350,9 +413,62 @@ impl Prettier { } } -async fn find_closest_prettier_dir( - paths_to_check: Vec, +async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result { + let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); + if let Some(node_modules_location_metadata) = fs + .metadata(&possible_node_modules_location) + .await + .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? + { + return Ok(node_modules_location_metadata.is_dir); + } + Ok(false) +} + +async fn read_package_json( fs: &dyn Fs, + path: &Path, +) -> anyhow::Result>> { + let possible_package_json = path.join("package.json"); + if let Some(package_json_metadata) = fs + .metadata(&possible_package_json) + .await + .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? 
+ { + if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { + let package_json_contents = fs + .load(&possible_package_json) + .await + .with_context(|| format!("reading {possible_package_json:?} file contents"))?; + return serde_json::from_str::>( + &package_json_contents, + ) + .map(Some) + .with_context(|| format!("parsing {possible_package_json:?} file contents")); + } + } + Ok(None) +} + +fn has_prettier_in_package_json( + package_json_contents: &HashMap, +) -> bool { + if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") { + if o.contains_key(PRETTIER_PACKAGE_NAME) { + return true; + } + } + if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") { + if o.contains_key(PRETTIER_PACKAGE_NAME) { + return true; + } + } + false +} + +async fn find_closest_prettier_dir( + fs: &dyn Fs, + paths_to_check: Vec, ) -> anyhow::Result> { for path in paths_to_check { let possible_package_json = path.join("package.json"); @@ -436,3 +552,265 @@ impl lsp::request::Request for ClearCache { type Result = (); const METHOD: &'static str = "prettier/clear_cache"; } + +#[cfg(test)] +mod tests { + use fs::FakeFs; + use serde_json::json; + + use super::*; + + #[gpui::test] + async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + ".config": { + "zed": { + "settings.json": r#"{ "formatter": "auto" }"#, + }, + }, + "work": { + "project": { + "src": { + "index.js": "// index.js file contents", + }, + "node_modules": { + "expect": { + "build": { + "print.js": "// print.js file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.5.1" + } + }"#, + }, + "prettier": { + "index.js": "// Dummy prettier package file", + }, + }, + "package.json": r#"{}"# + }, + } + }), + ) + .await; + + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/.config/zed/settings.json"), + ) + .await + .unwrap() + .is_none(), + "Should successfully find no prettier for path hierarchy without it" + ); + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/project/src/index.js") + ) + .await + .unwrap() + .is_none(), + "Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it" + ); + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/project/node_modules/expect/build/print.js") + ) + .await + .unwrap() + .is_none(), + "Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "web_blog": { + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + }, + "expect": { + "build": { + "print.js": "// print.js file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.5.1" + } + }"#, + }, + }, + "pages": { + "[slug].tsx": "// [slug].tsx file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.3.0" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "htmlWhitespaceSensitivity": "strict", + "tabWidth": 4 + } + }"# + } + }), 
+ ) + .await; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/web_blog/pages/[slug].tsx") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/web_blog")), + "Should find a preinstalled prettier in the project root" + ); + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/web_blog/node_modules/expect/build/print.js") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/web_blog")), + "Should find a preinstalled prettier in the project root even for node_modules files" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "work": { + "web_blog": { + "pages": { + "[slug].tsx": "// [slug].tsx file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.3.0" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "htmlWhitespaceSensitivity": "strict", + "tabWidth": 4 + } + }"# + } + } + }), + ) + .await; + + let path = "/root/work/web_blog/node_modules/pages/[slug].tsx"; + match Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new(path) + ) + .await { + Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"), + Err(e) => { + let message = e.to_string(); + assert!(message.contains(path), "Error message should mention which start file was used for location"); + assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents"); + }, + }; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::from_iter( + [PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter() + ), + Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/work")), + "Should return first cached value found without path checks" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "work": { + "full-stack-foundations": { + "exercises": { + "03.loading": { + "01.problem.loader": { + "app": { + "routes": { + "users+": { + "$username_+": { + "notes.tsx": "// notes.tsx file contents", + }, + }, + }, + }, + "node_modules": {}, + "package.json": r#"{ + "devDependencies": { + "prettier": "^3.0.3" + } + }"# + }, + }, + }, + "package.json": r#"{ + "workspaces": ["exercises/*/*", "examples/*"] + }"#, + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + }, + }, + }, + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"), + ).await.unwrap(), + Some(PathBuf::from("/root/work/full-stack-foundations")), + "Should ascend to the multi-workspace root and find the prettier there", + ); + } +} + +fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { + path_component.as_os_str().to_string_lossy() == "node_modules" +} diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index d5ff1e08d7..32dc542c20 100644 --- a/crates/project/src/project_tests.rs +++ 
b/crates/project/src/project_tests.rs @@ -1,4 +1,4 @@ -use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *}; +use crate::{worktree::WorktreeModelHandle, Event, *}; use fs::{FakeFs, RealFs}; use futures::{future, StreamExt}; use gpui::{executor::Deterministic, test::subscribe, AppContext}; @@ -13,7 +13,7 @@ use pretty_assertions::assert_eq; use serde_json::json; use std::{cell::RefCell, os::unix, rc::Rc, task::Poll}; use unindent::Unindent as _; -use util::{assert_set_eq, test::temp_tree}; +use util::{assert_set_eq, test::temp_tree, paths::PathMatcher}; #[cfg(test)] #[ctor::ctor] diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index 46dd30c8a0..f626f15d12 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -13,6 +13,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; +use util::paths::PathMatcher; #[derive(Clone, Debug)] pub struct SearchInputs { @@ -52,31 +53,6 @@ pub enum SearchQuery { }, } -#[derive(Clone, Debug)] -pub struct PathMatcher { - maybe_path: PathBuf, - glob: GlobMatcher, -} - -impl std::fmt::Display for PathMatcher { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.maybe_path.to_string_lossy().fmt(f) - } -} - -impl PathMatcher { - pub fn new(maybe_glob: &str) -> Result { - Ok(PathMatcher { - glob: Glob::new(&maybe_glob)?.compile_matcher(), - maybe_path: PathBuf::from(maybe_glob), - }) - } - - pub fn is_match>(&self, other: P) -> bool { - other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) - } -} - impl SearchQuery { pub fn text( query: impl ToString, diff --git a/crates/search/Cargo.toml b/crates/search/Cargo.toml index 64421f5431..4ebd31a2bc 100644 --- a/crates/search/Cargo.toml +++ b/crates/search/Cargo.toml @@ -29,7 +29,6 @@ serde.workspace = true serde_derive.workspace = true smallvec.workspace = true smol.workspace = true -globset.workspace = true serde_json.workspace = true [dev-dependencies] client = { path = "../client", features = ["test-support"] } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 55e3f6babd..f6e17bbee5 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -22,7 +22,7 @@ use gpui::{ }; use menu::Confirm; use project::{ - search::{PathMatcher, SearchInputs, SearchQuery}, + search::{SearchInputs, SearchQuery}, Entry, Project, }; use semantic_index::{SemanticIndex, SemanticIndexStatus}; @@ -37,7 +37,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use util::ResultExt as _; +use util::{paths::PathMatcher, ResultExt as _}; use workspace::{ item::{BreadcrumbText, Item, ItemEvent, ItemHandle}, searchable::{Direction, SearchableItem, SearchableItemHandle}, diff --git a/crates/semantic_index/src/db.rs b/crates/semantic_index/src/db.rs index 63527cea1c..5b416f7a64 100644 --- a/crates/semantic_index/src/db.rs +++ b/crates/semantic_index/src/db.rs @@ -9,7 +9,7 @@ use futures::channel::oneshot; use gpui::executor; use ndarray::{Array1, Array2}; use ordered_float::OrderedFloat; -use project::{search::PathMatcher, Fs}; +use project::Fs; use rpc::proto::Timestamp; use rusqlite::params; use rusqlite::types::Value; @@ -21,7 +21,7 @@ use std::{ sync::Arc, time::SystemTime, }; -use util::TryFutureExt; +use util::{paths::PathMatcher, TryFutureExt}; pub fn argsort(data: &[T]) -> Vec { let mut indices = (0..data.len()).collect::>(); diff --git a/crates/semantic_index/src/semantic_index.rs b/crates/semantic_index/src/semantic_index.rs index 
818faa0444..7d1eacd7fa 100644 --- a/crates/semantic_index/src/semantic_index.rs +++ b/crates/semantic_index/src/semantic_index.rs @@ -21,7 +21,7 @@ use ordered_float::OrderedFloat; use parking_lot::Mutex; use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES}; use postage::watch; -use project::{search::PathMatcher, Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId}; +use project::{Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId}; use smol::channel; use std::{ cmp::Reverse, @@ -33,6 +33,7 @@ use std::{ sync::{Arc, Weak}, time::{Duration, Instant, SystemTime}, }; +use util::paths::PathMatcher; use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt}; use workspace::WorkspaceCreated; diff --git a/crates/semantic_index/src/semantic_index_tests.rs b/crates/semantic_index/src/semantic_index_tests.rs index 7a91d1e100..044ded2682 100644 --- a/crates/semantic_index/src/semantic_index_tests.rs +++ b/crates/semantic_index/src/semantic_index_tests.rs @@ -10,13 +10,13 @@ use gpui::{executor::Deterministic, Task, TestAppContext}; use language::{Language, LanguageConfig, LanguageRegistry, ToOffset}; use parking_lot::Mutex; use pretty_assertions::assert_eq; -use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project}; +use project::{project_settings::ProjectSettings, FakeFs, Fs, Project}; use rand::{rngs::StdRng, Rng}; use serde_json::json; use settings::SettingsStore; use std::{path::Path, sync::Arc, time::SystemTime}; use unindent::Unindent; -use util::RandomCharIter; +use util::{paths::PathMatcher, RandomCharIter}; #[ctor::ctor] fn init_logger() { diff --git a/crates/util/Cargo.toml b/crates/util/Cargo.toml index 6ab76b0850..cfbd7551f9 100644 --- a/crates/util/Cargo.toml +++ b/crates/util/Cargo.toml @@ -14,6 +14,7 @@ test-support = ["tempdir", "git2"] [dependencies] anyhow.workspace = true backtrace = "0.3" +globset.workspace = true log.workspace = true lazy_static.workspace = true futures.workspace = true diff --git a/crates/util/src/paths.rs b/crates/util/src/paths.rs index 96d77236a9..d54e0b1cd6 100644 --- a/crates/util/src/paths.rs +++ b/crates/util/src/paths.rs @@ -1,5 +1,6 @@ use std::path::{Path, PathBuf}; +use globset::{Glob, GlobMatcher}; use serde::{Deserialize, Serialize}; lazy_static::lazy_static! { @@ -189,6 +190,31 @@ impl

<P> PathLikeWithPosition<P>
{ } } +#[derive(Clone, Debug)] +pub struct PathMatcher { + maybe_path: PathBuf, + glob: GlobMatcher, +} + +impl std::fmt::Display for PathMatcher { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.maybe_path.to_string_lossy().fmt(f) + } +} + +impl PathMatcher { + pub fn new(maybe_glob: &str) -> Result { + Ok(PathMatcher { + glob: Glob::new(&maybe_glob)?.compile_matcher(), + maybe_path: PathBuf::from(maybe_glob), + }) + } + + pub fn is_match>(&self, other: P) -> bool { + other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) + } +} + #[cfg(test)] mod tests { use super::*; From 6bbb79a9f55beba80c3e6e776411cf3f3fc63a54 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 21:21:41 +0200 Subject: [PATCH 05/15] Rework prettier installation and start --- crates/prettier/src/prettier.rs | 134 +------ crates/project/src/project.rs | 622 ++++++++++++++++++-------------- crates/project/src/search.rs | 3 +- 3 files changed, 366 insertions(+), 393 deletions(-) diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 58de2fc913..3e846db66e 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -1,4 +1,3 @@ -use std::collections::VecDeque; use std::path::{Path, PathBuf}; use std::sync::Arc; @@ -20,7 +19,6 @@ pub enum Prettier { } pub struct RealPrettier { - worktree_id: Option, default: bool, prettier_dir: PathBuf, server: Arc, @@ -28,17 +26,10 @@ pub struct RealPrettier { #[cfg(any(test, feature = "test-support"))] pub struct TestPrettier { - worktree_id: Option, prettier_dir: PathBuf, default: bool, } -#[derive(Debug)] -pub struct LocateStart { - pub worktree_root_path: Arc, - pub starting_path: Arc, -} - pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js"; pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js"); const PRETTIER_PACKAGE_NAME: &str = "prettier"; @@ -130,75 +121,21 @@ impl Prettier { } } - pub async fn locate( - starting_path: Option, - fs: Arc, - ) -> anyhow::Result { - let paths_to_check = match starting_path.as_ref() { - Some(starting_path) => { - let worktree_root = starting_path - .worktree_root_path - .components() - .into_iter() - .take_while(|path_component| !is_node_modules(path_component)) - .collect::(); - if worktree_root != starting_path.worktree_root_path.as_ref() { - vec![worktree_root] - } else { - if starting_path.starting_path.as_ref() == Path::new("") { - worktree_root - .parent() - .map(|path| vec![path.to_path_buf()]) - .unwrap_or_default() - } else { - let file_to_format = starting_path.starting_path.as_ref(); - let mut paths_to_check = VecDeque::new(); - let mut current_path = worktree_root; - for path_component in file_to_format.components().into_iter() { - let new_path = current_path.join(path_component); - let old_path = std::mem::replace(&mut current_path, new_path); - paths_to_check.push_front(old_path); - if is_node_modules(&path_component) { - break; - } - } - Vec::from(paths_to_check) - } - } - } - None => Vec::new(), - }; - - match find_closest_prettier_dir(fs.as_ref(), paths_to_check) - .await - .with_context(|| format!("finding prettier starting with {starting_path:?}"))? 
- { - Some(prettier_dir) => Ok(prettier_dir), - None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()), - } - } - #[cfg(any(test, feature = "test-support"))] pub async fn start( - worktree_id: Option, _: LanguageServerId, prettier_dir: PathBuf, _: Arc, _: AsyncAppContext, ) -> anyhow::Result { - Ok( - #[cfg(any(test, feature = "test-support"))] - Self::Test(TestPrettier { - worktree_id, - default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), - prettier_dir, - }), - ) + Ok(Self::Test(TestPrettier { + default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), + prettier_dir, + })) } #[cfg(not(any(test, feature = "test-support")))] pub async fn start( - worktree_id: Option, server_id: LanguageServerId, prettier_dir: PathBuf, node: Arc, @@ -206,7 +143,7 @@ impl Prettier { ) -> anyhow::Result { use lsp::LanguageServerBinary; - let backgroud = cx.background(); + let background = cx.background(); anyhow::ensure!( prettier_dir.is_dir(), "Prettier dir {prettier_dir:?} is not a directory" @@ -217,7 +154,7 @@ impl Prettier { "no prettier server package found at {prettier_server:?}" ); - let node_path = backgroud + let node_path = background .spawn(async move { node.binary_path().await }) .await?; let server = LanguageServer::new( @@ -232,12 +169,11 @@ impl Prettier { cx, ) .context("prettier server creation")?; - let server = backgroud + let server = background .spawn(server.initialize(None)) .await .context("prettier server initialization")?; Ok(Self::Real(RealPrettier { - worktree_id, server, default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), prettier_dir, @@ -403,14 +339,6 @@ impl Prettier { Self::Test(test_prettier) => &test_prettier.prettier_dir, } } - - pub fn worktree_id(&self) -> Option { - match self { - Self::Real(local) => local.worktree_id, - #[cfg(any(test, feature = "test-support"))] - Self::Test(test_prettier) => test_prettier.worktree_id, - } - } } async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result { @@ -466,54 +394,6 @@ fn has_prettier_in_package_json( false } -async fn find_closest_prettier_dir( - fs: &dyn Fs, - paths_to_check: Vec, -) -> anyhow::Result> { - for path in paths_to_check { - let possible_package_json = path.join("package.json"); - if let Some(package_json_metadata) = fs - .metadata(&possible_package_json) - .await - .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? - { - if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { - let package_json_contents = fs - .load(&possible_package_json) - .await - .with_context(|| format!("reading {possible_package_json:?} file contents"))?; - if let Ok(json_contents) = serde_json::from_str::>( - &package_json_contents, - ) { - if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") { - if o.contains_key(PRETTIER_PACKAGE_NAME) { - return Ok(Some(path)); - } - } - if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies") - { - if o.contains_key(PRETTIER_PACKAGE_NAME) { - return Ok(Some(path)); - } - } - } - } - } - - let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); - if let Some(node_modules_location_metadata) = fs - .metadata(&possible_node_modules_location) - .await - .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? 
- { - if node_modules_location_metadata.is_dir { - return Ok(Some(path)); - } - } - } - Ok(None) -} - enum Format {} #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b38bcd1db2..a5c26bbb41 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -54,7 +54,7 @@ use lsp_command::*; use node_runtime::NodeRuntime; use parking_lot::Mutex; use postage::watch; -use prettier::{LocateStart, Prettier}; +use prettier::Prettier; use project_settings::{LspSettings, ProjectSettings}; use rand::prelude::*; use search::SearchQuery; @@ -82,8 +82,11 @@ use std::{ use terminals::Terminals; use text::Anchor; use util::{ - debug_panic, defer, http::HttpClient, merge_json_value_into, - paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, + debug_panic, defer, + http::HttpClient, + merge_json_value_into, + paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH}, + post_inc, ResultExt, TryFutureExt as _, }; pub use fs::*; @@ -162,17 +165,14 @@ pub struct Project { copilot_log_subscription: Option, current_lsp_settings: HashMap, LspSettings>, node: Option>, - #[cfg(not(any(test, feature = "test-support")))] default_prettier: Option, - prettier_instances: HashMap< - (Option, PathBuf), - Shared, Arc>>>, - >, + prettiers_per_worktree: HashMap>>, + prettier_instances: HashMap, Arc>>>>, } -#[cfg(not(any(test, feature = "test-support")))] struct DefaultPrettier { - installation_process: Option>>, + instance: Option, Arc>>>>, + #[cfg(not(any(test, feature = "test-support")))] installed_plugins: HashSet<&'static str>, } @@ -685,8 +685,8 @@ impl Project { copilot_log_subscription: None, current_lsp_settings: settings::get::(cx).lsp.clone(), node: Some(node), - #[cfg(not(any(test, feature = "test-support")))] default_prettier: None, + prettiers_per_worktree: HashMap::default(), prettier_instances: HashMap::default(), } }) @@ -786,8 +786,8 @@ impl Project { copilot_log_subscription: None, current_lsp_settings: settings::get::(cx).lsp.clone(), node: None, - #[cfg(not(any(test, feature = "test-support")))] default_prettier: None, + prettiers_per_worktree: HashMap::default(), prettier_instances: HashMap::default(), }; for worktree in worktrees { @@ -2681,20 +2681,8 @@ impl Project { let buffer_file = File::from_dyn(buffer_file.as_ref()); let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); - let task_buffer = buffer.clone(); - let prettier_installation_task = - self.install_default_formatters(worktree, &new_language, &settings, cx); - cx.spawn(|project, mut cx| async move { - prettier_installation_task.await?; - let _ = project - .update(&mut cx, |project, cx| { - project.prettier_instance_for_buffer(&task_buffer, cx) - }) - .await; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - + self.install_default_formatters(worktree, &new_language, &settings, cx) + .detach_and_log_err(cx); if let Some(file) = buffer_file { let worktree = file.worktree.clone(); if let Some(tree) = worktree.read(cx).as_local() { @@ -4029,7 +4017,7 @@ impl Project { } pub fn format( - &self, + &mut self, buffers: HashSet>, push_to_history: bool, trigger: FormatTrigger, @@ -4049,10 +4037,10 @@ impl Project { }) .collect::>(); - cx.spawn(|this, mut cx| async move { + cx.spawn(|project, mut cx| async move { // Do not allow multiple concurrent formatting requests for the // same buffer. 
- this.update(&mut cx, |this, cx| { + project.update(&mut cx, |this, cx| { buffers_with_paths_and_servers.retain(|(buffer, _, _)| { this.buffers_being_formatted .insert(buffer.read(cx).remote_id()) @@ -4060,7 +4048,7 @@ impl Project { }); let _cleanup = defer({ - let this = this.clone(); + let this = project.clone(); let mut cx = cx.clone(); let buffers = &buffers_with_paths_and_servers; move || { @@ -4128,7 +4116,7 @@ impl Project { { format_operation = Some(FormatOperation::Lsp( Self::format_via_lsp( - &this, + &project, &buffer, buffer_abs_path, &language_server, @@ -4163,16 +4151,24 @@ impl Project { } } (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => { - if let Some(prettier_task) = this + if let Some((prettier_path, prettier_task)) = project .update(&mut cx, |project, cx| { project.prettier_instance_for_buffer(buffer, cx) }).await { match prettier_task.await { Ok(prettier) => { - let buffer_path = buffer.read_with(&cx, |buffer, cx| { - File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) - }); + let buffer_file = buffer.update(&mut cx, |buffer, _| buffer.file().cloned()); + let buffer_path= { + File::from_dyn(buffer_file.as_ref()).map(|file| { + cx.update(|cx| { + let worktree_id = file.worktree_id(cx); + let file_abs_path = file.abs_path(cx); + project.update(cx, |project, _| project.prettiers_per_worktree.entry(worktree_id).or_default().insert(prettier_path)); + file_abs_path + }) + }) + }; format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) @@ -4180,16 +4176,29 @@ impl Project { .context("formatting via prettier")?, )); } - Err(e) => anyhow::bail!( - "Failed to create prettier instance for buffer during autoformatting: {e:#}" - ), + Err(e) => { + project.update(&mut cx, |project, _| { + match &prettier_path { + Some(prettier_path) => { + project.prettier_instances.remove(prettier_path); + }, + None => { + if let Some(default_prettier) = project.default_prettier.as_mut() { + default_prettier.instance = None; + } + }, + } + }); + anyhow::bail!( + "Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}" + )}, } } else if let Some((language_server, buffer_abs_path)) = language_server.as_ref().zip(buffer_abs_path.as_ref()) { format_operation = Some(FormatOperation::Lsp( Self::format_via_lsp( - &this, + &project, &buffer, buffer_abs_path, &language_server, @@ -4202,16 +4211,24 @@ impl Project { } } (Formatter::Prettier { .. 
}, FormatOnSave::On | FormatOnSave::Off) => { - if let Some(prettier_task) = this + if let Some((prettier_path, prettier_task)) = project .update(&mut cx, |project, cx| { project.prettier_instance_for_buffer(buffer, cx) }).await { match prettier_task.await { Ok(prettier) => { - let buffer_path = buffer.read_with(&cx, |buffer, cx| { - File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) - }); + let buffer_file = buffer.update(&mut cx, |buffer, _| buffer.file().cloned()); + let buffer_path= { + File::from_dyn(buffer_file.as_ref()).map(|file| { + cx.update(|cx| { + let worktree_id = file.worktree_id(cx); + let file_abs_path = file.abs_path(cx); + project.update(cx, |project, _| project.prettiers_per_worktree.entry(worktree_id).or_default().insert(prettier_path)); + file_abs_path + }) + }) + }; format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) @@ -4219,9 +4236,22 @@ impl Project { .context("formatting via prettier")?, )); } - Err(e) => anyhow::bail!( - "Failed to create prettier instance for buffer during formatting: {e:#}" - ), + Err(e) => { + project.update(&mut cx, |project, _| { + match &prettier_path { + Some(prettier_path) => { + project.prettier_instances.remove(prettier_path); + }, + None => { + if let Some(default_prettier) = project.default_prettier.as_mut() { + default_prettier.instance = None; + } + }, + } + }); + anyhow::bail!( + "Failed to create prettier instance from {prettier_path:?} for buffer during formatting: {e:#}" + )}, } } } @@ -6431,15 +6461,25 @@ impl Project { "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}" ); let prettiers_to_reload = self - .prettier_instances + .prettiers_per_worktree + .get(¤t_worktree_id) .iter() - .filter_map(|((worktree_id, prettier_path), prettier_task)| { - if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) { - Some((*worktree_id, prettier_path.clone(), prettier_task.clone())) - } else { - None - } + .flat_map(|prettier_paths| prettier_paths.iter()) + .flatten() + .filter_map(|prettier_path| { + Some(( + current_worktree_id, + Some(prettier_path.clone()), + self.prettier_instances.get(prettier_path)?.clone(), + )) }) + .chain(self.default_prettier.iter().filter_map(|default_prettier| { + Some(( + current_worktree_id, + None, + default_prettier.instance.clone()?, + )) + })) .collect::>(); cx.background() @@ -6450,9 +6490,15 @@ impl Project { .clear_cache() .await .with_context(|| { - format!( - "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" - ) + match prettier_path { + Some(prettier_path) => format!( + "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" + ), + None => format!( + "clearing default prettier cache for worktree {worktree_id:?} on prettier settings update" + ), + } + }) .map_err(Arc::new) } @@ -8364,7 +8410,12 @@ impl Project { &mut self, buffer: &ModelHandle, cx: &mut ModelContext, - ) -> Task, Arc>>>>> { + ) -> Task< + Option<( + Option, + Shared, Arc>>>, + )>, + > { let buffer = buffer.read(cx); let buffer_file = buffer.file(); let Some(buffer_language) = buffer.language() else { @@ -8374,136 +8425,136 @@ impl Project { return Task::ready(None); } - let buffer_file = File::from_dyn(buffer_file); - let buffer_path = buffer_file.map(|file| Arc::clone(file.path())); - let worktree_path = buffer_file - .as_ref() - .and_then(|file| Some(file.worktree.read(cx).abs_path())); - let worktree_id = 
buffer_file.map(|file| file.worktree_id(cx)); - if self.is_local() || worktree_id.is_none() || worktree_path.is_none() { + if self.is_local() { let Some(node) = self.node.as_ref().map(Arc::clone) else { return Task::ready(None); }; - cx.spawn(|this, mut cx| async move { - let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs)); - let prettier_dir = match cx - .background() - .spawn(Prettier::locate( - worktree_path.zip(buffer_path).map( - |(worktree_root_path, starting_path)| LocateStart { - worktree_root_path, - starting_path, - }, - ), - fs, - )) - .await - { - Ok(path) => path, - Err(e) => { - return Some( - Task::ready(Err(Arc::new(e.context( - "determining prettier path for worktree {worktree_path:?}", - )))) - .shared(), - ); - } - }; - - if let Some(existing_prettier) = this.update(&mut cx, |project, _| { - project - .prettier_instances - .get(&(worktree_id, prettier_dir.clone())) - .cloned() - }) { - return Some(existing_prettier); - } - - log::info!("Found prettier in {prettier_dir:?}, starting."); - let task_prettier_dir = prettier_dir.clone(); - let weak_project = this.downgrade(); - let new_server_id = - this.update(&mut cx, |this, _| this.languages.next_language_server_id()); - let new_prettier_task = cx - .spawn(|mut cx| async move { - let prettier = Prettier::start( - worktree_id.map(|id| id.to_usize()), - new_server_id, - task_prettier_dir, - node, - cx.clone(), - ) - .await - .context("prettier start") - .map_err(Arc::new)?; - log::info!("Started prettier in {:?}", prettier.prettier_dir()); - - if let Some((project, prettier_server)) = - weak_project.upgrade(&mut cx).zip(prettier.server()) + match File::from_dyn(buffer_file).map(|file| file.abs_path(cx)) { + Some(buffer_path) => { + let fs = Arc::clone(&self.fs); + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + return cx.spawn(|project, mut cx| async move { + match cx + .background() + .spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + &buffer_path, + ) + .await + }) + .await { - project.update(&mut cx, |project, cx| { - let name = if prettier.is_default() { - LanguageServerName(Arc::from("prettier (default)")) - } else { - let prettier_dir = prettier.prettier_dir(); - let worktree_path = prettier - .worktree_id() - .map(WorktreeId::from_usize) - .and_then(|id| project.worktree_for_id(id, cx)) - .map(|worktree| worktree.read(cx).abs_path()); - match worktree_path { - Some(worktree_path) => { - if worktree_path.as_ref() == prettier_dir { - LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier_dir - .file_name() - .and_then(|name| name.to_str()) - .unwrap_or_default() - ))) - } else { - let dir_to_display = match prettier_dir - .strip_prefix(&worktree_path) - .ok() - { - Some(relative_path) => relative_path, - None => prettier_dir, - }; - LanguageServerName(Arc::from(format!( - "prettier ({})", - dir_to_display.display(), - ))) - } - } - None => LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier_dir.display(), - ))), - } - }; + Ok(None) => { + let new_task = project.update(&mut cx, |project, cx| { + let new_task = spawn_default_prettier(node, cx); + project + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + #[cfg(not(any(test, feature = "test-support")))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_task.clone()); + new_task + }); + return Some((None, new_task)); + } + Err(e) => { + return Some(( + None, + Task::ready(Err(Arc::new(e.context( + 
"determining prettier path for worktree {worktree_path:?}", + )))) + .shared(), + )); + } + Ok(Some(prettier_dir)) => { + if let Some(existing_prettier) = + project.update(&mut cx, |project, _| { + project.prettier_instances.get(&prettier_dir).cloned() + }) + { + return Some((Some(prettier_dir), existing_prettier)); + } - project - .supplementary_language_servers - .insert(new_server_id, (name, Arc::clone(prettier_server))); - cx.emit(Event::LanguageServerAdded(new_server_id)); - }); + log::info!("Found prettier in {prettier_dir:?}, starting."); + let task_prettier_dir = prettier_dir.clone(); + let weak_project = project.downgrade(); + let new_server_id = project.update(&mut cx, |this, _| { + this.languages.next_language_server_id() + }); + let new_prettier_task = cx + .spawn(|mut cx| async move { + let prettier = Prettier::start( + new_server_id, + task_prettier_dir, + node, + cx.clone(), + ) + .await + .context("prettier start") + .map_err(Arc::new)?; + log::info!( + "Started prettier in {:?}", + prettier.prettier_dir() + ); + + if let Some((project, prettier_server)) = + weak_project.upgrade(&mut cx).zip(prettier.server()) + { + project.update(&mut cx, |project, cx| { + let name = if prettier.is_default() { + LanguageServerName(Arc::from( + "prettier (default)", + )) + } else { + LanguageServerName(Arc::from(format!( + "prettier ({})", + prettier.prettier_dir().display(), + ))) + }; + + project.supplementary_language_servers.insert( + new_server_id, + (name, Arc::clone(prettier_server)), + ); + cx.emit(Event::LanguageServerAdded(new_server_id)); + }); + } + Ok(Arc::new(prettier)).map_err(Arc::new) + }) + .shared(); + project.update(&mut cx, |project, _| { + project + .prettier_instances + .insert(prettier_dir.clone(), new_prettier_task.clone()); + }); + Some((Some(prettier_dir), new_prettier_task)) + } } - Ok(Arc::new(prettier)).map_err(Arc::new) - }) - .shared(); - this.update(&mut cx, |project, _| { - project - .prettier_instances - .insert((worktree_id, prettier_dir), new_prettier_task.clone()); - }); - Some(new_prettier_task) - }) + }); + } + None => { + let new_task = spawn_default_prettier(node, cx); + self.default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + #[cfg(not(any(test, feature = "test-support")))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_task.clone()); + return Task::ready(Some((None, new_task))); + } + } } else if self.remote_id().is_some() { return Task::ready(None); } else { - Task::ready(Some( + Task::ready(Some(( + None, Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(), - )) + ))) } } @@ -8537,7 +8588,7 @@ impl Project { let mut prettier_plugins = None; if new_language.prettier_parser_name().is_some() { prettier_plugins - .get_or_insert_with(|| HashSet::default()) + .get_or_insert_with(|| HashSet::<&'static str>::default()) .extend( new_language .lsp_adapters() @@ -8549,27 +8600,25 @@ impl Project { return Task::ready(Ok(())); }; + let fs = Arc::clone(&self.fs); + let locate_prettier_installation = match worktree.and_then(|worktree_id| { + self.worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }) { + Some(locate_from) => { + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + cx.background().spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + locate_from.as_ref(), + ) + .await + }) + } + None => Task::ready(Ok(None)), + }; let mut plugins_to_install = prettier_plugins; - 
let (mut install_success_tx, mut install_success_rx) = - futures::channel::mpsc::channel::>(1); - let new_installation_process = cx - .spawn(|this, mut cx| async move { - if let Some(installed_plugins) = install_success_rx.next().await { - this.update(&mut cx, |this, _| { - let default_prettier = - this.default_prettier - .get_or_insert_with(|| DefaultPrettier { - installation_process: None, - installed_plugins: HashSet::default(), - }); - if !installed_plugins.is_empty() { - log::info!("Installed new prettier plugins: {installed_plugins:?}"); - default_prettier.installed_plugins.extend(installed_plugins); - } - }) - } - }) - .shared(); let previous_installation_process = if let Some(default_prettier) = &mut self.default_prettier { plugins_to_install @@ -8577,83 +8626,128 @@ impl Project { if plugins_to_install.is_empty() { return Task::ready(Ok(())); } - std::mem::replace( - &mut default_prettier.installation_process, - Some(new_installation_process.clone()), - ) + default_prettier.instance.clone() } else { None }; - - let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path(); - let already_running_prettier = self - .prettier_instances - .get(&(worktree, default_prettier_dir.to_path_buf())) - .cloned(); let fs = Arc::clone(&self.fs); cx.spawn(|this, mut cx| async move { - if let Some(previous_installation_process) = previous_installation_process { - previous_installation_process.await; - } - let mut everything_was_installed = false; - this.update(&mut cx, |this, _| { - match &mut this.default_prettier { - Some(default_prettier) => { - plugins_to_install - .retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); - everything_was_installed = plugins_to_install.is_empty(); - }, - None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }), - } - }); - if everything_was_installed { - return Ok(()); - } - - cx.background() - .spawn(async move { - let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE); - // method creates parent directory if it doesn't exist - fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await - .with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?; - - let packages_to_versions = future::try_join_all( - plugins_to_install - .iter() - .chain(Some(&"prettier")) - .map(|package_name| async { - let returned_package_name = package_name.to_string(); - let latest_version = node.npm_package_latest_version(package_name) - .await - .with_context(|| { - format!("fetching latest npm version for package {returned_package_name}") - })?; - anyhow::Ok((returned_package_name, latest_version)) - }), - ) - .await - .context("fetching latest npm versions")?; - - log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); - let borrowed_packages = packages_to_versions.iter().map(|(package, version)| { - (package.as_str(), version.as_str()) - }).collect::>(); - node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?; - let installed_packages = !plugins_to_install.is_empty(); - install_success_tx.try_send(plugins_to_install).ok(); - - if !installed_packages { - if let Some(prettier) = already_running_prettier { - prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache 
after plugins install")?; + match locate_prettier_installation + .await + .context("locate prettier installation")? + { + Some(_non_default_prettier) => return Ok(()), + None => { + let mut needs_restart = match previous_installation_process { + Some(previous_installation_process) => { + previous_installation_process.await.is_err() } + None => true, + }; + this.update(&mut cx, |this, _| { + if let Some(default_prettier) = &mut this.default_prettier { + plugins_to_install.retain(|plugin| { + !default_prettier.installed_plugins.contains(plugin) + }); + needs_restart |= !plugins_to_install.is_empty(); + } + }); + if needs_restart { + let installed_plugins = plugins_to_install.clone(); + cx.background() + .spawn(async move { + install_default_prettier(plugins_to_install, node, fs).await + }) + .await + .context("prettier & plugins install")?; + this.update(&mut cx, |this, _| { + let default_prettier = + this.default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installed_plugins: HashSet::default(), + }); + default_prettier.instance = None; + default_prettier.installed_plugins.extend(installed_plugins); + }); } - - anyhow::Ok(()) - }).await + } + } + Ok(()) }) } } +fn spawn_default_prettier( + node: Arc, + cx: &mut ModelContext<'_, Project>, +) -> Shared, Arc>>> { + cx.spawn(|project, mut cx| async move { + let new_server_id = project.update(&mut cx, |project, _| { + project.languages.next_language_server_id() + }); + Prettier::start( + new_server_id, + DEFAULT_PRETTIER_DIR.clone(), + node, + cx.clone(), + ) + .await + .context("default prettier spawn") + .map(Arc::new) + .map_err(Arc::new) + }) + .shared() +} + +#[cfg(not(any(test, feature = "test-support")))] +async fn install_default_prettier( + plugins_to_install: HashSet<&'static str>, + node: Arc, + fs: Arc, +) -> anyhow::Result<()> { + let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE); + // method creates parent directory if it doesn't exist + fs.save( + &prettier_wrapper_path, + &text::Rope::from(prettier::PRETTIER_SERVER_JS), + text::LineEnding::Unix, + ) + .await + .with_context(|| { + format!( + "writing {} file at {prettier_wrapper_path:?}", + prettier::PRETTIER_SERVER_FILE + ) + })?; + + let packages_to_versions = + future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map( + |package_name| async { + let returned_package_name = package_name.to_string(); + let latest_version = node + .npm_package_latest_version(package_name) + .await + .with_context(|| { + format!("fetching latest npm version for package {returned_package_name}") + })?; + anyhow::Ok((returned_package_name, latest_version)) + }, + )) + .await + .context("fetching latest npm versions")?; + + log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); + let borrowed_packages = packages_to_versions + .iter() + .map(|(package, version)| (package.as_str(), version.as_str())) + .collect::>(); + node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages) + .await + .context("fetching formatter packages")?; + anyhow::Ok(()) +} + fn subscribe_for_copilot_events( copilot: &ModelHandle, cx: &mut ModelContext<'_, Project>, diff --git a/crates/project/src/search.rs b/crates/project/src/search.rs index f626f15d12..7e360e22ee 100644 --- a/crates/project/src/search.rs +++ b/crates/project/src/search.rs @@ -1,7 +1,6 @@ use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use anyhow::{Context, Result}; use client::proto; -use globset::{Glob, GlobMatcher}; use 
itertools::Itertools; use language::{char_kind, BufferSnapshot}; use regex::{Regex, RegexBuilder}; @@ -10,7 +9,7 @@ use std::{ borrow::Cow, io::{BufRead, BufReader, Read}, ops::Range, - path::{Path, PathBuf}, + path::Path, sync::Arc, }; use util::paths::PathMatcher; From ff144def6343bb2776a120ee80d9bdbd8d7ebf40 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 21:42:33 +0200 Subject: [PATCH 06/15] Fix the bugs --- crates/prettier/src/prettier.rs | 11 ++++- crates/project/src/project.rs | 72 ++++++++++++++++++--------------- 2 files changed, 50 insertions(+), 33 deletions(-) diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 3e846db66e..685a3ae7a5 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -63,6 +63,15 @@ impl Prettier { .components() .take_while(|component| !is_node_modules(component)) .collect::(); + let path_to_check_metadata = fs + .metadata(&path_to_check) + .await + .with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))? + .with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?; + if !path_to_check_metadata.is_dir { + path_to_check.pop(); + } + let mut project_path_with_prettier_dependency = None; loop { if installed_prettiers.contains(&path_to_check) { @@ -361,7 +370,7 @@ async fn read_package_json( if let Some(package_json_metadata) = fs .metadata(&possible_package_json) .await - .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? + .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))? { if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { let package_json_contents = fs diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index a5c26bbb41..b861291338 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8464,9 +8464,9 @@ impl Project { Err(e) => { return Some(( None, - Task::ready(Err(Arc::new(e.context( - "determining prettier path for worktree {worktree_path:?}", - )))) + Task::ready(Err(Arc::new( + e.context("determining prettier path"), + ))) .shared(), )); } @@ -8481,7 +8481,7 @@ impl Project { log::info!("Found prettier in {prettier_dir:?}, starting."); let task_prettier_dir = prettier_dir.clone(); - let weak_project = project.downgrade(); + let task_project = project.clone(); let new_server_id = project.update(&mut cx, |this, _| { this.languages.next_language_server_id() }); @@ -8496,33 +8496,12 @@ impl Project { .await .context("prettier start") .map_err(Arc::new)?; - log::info!( - "Started prettier in {:?}", - prettier.prettier_dir() + register_new_prettier( + &task_project, + &prettier, + new_server_id, + &mut cx, ); - - if let Some((project, prettier_server)) = - weak_project.upgrade(&mut cx).zip(prettier.server()) - { - project.update(&mut cx, |project, cx| { - let name = if prettier.is_default() { - LanguageServerName(Arc::from( - "prettier (default)", - )) - } else { - LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier.prettier_dir().display(), - ))) - }; - - project.supplementary_language_servers.insert( - new_server_id, - (name, Arc::clone(prettier_server)), - ); - cx.emit(Event::LanguageServerAdded(new_server_id)); - }); - } Ok(Arc::new(prettier)).map_err(Arc::new) }) .shared(); @@ -8678,6 +8657,31 @@ impl Project { } } +fn register_new_prettier( + project: &ModelHandle, + prettier: &Prettier, + new_server_id: LanguageServerId, + cx: &mut AsyncAppContext, +) { + 
log::info!("Started prettier in {:?}", prettier.prettier_dir()); + if let Some(prettier_server) = prettier.server() { + project.update(cx, |project, cx| { + let name = if prettier.is_default() { + LanguageServerName(Arc::from("prettier (default)")) + } else { + LanguageServerName(Arc::from(format!( + "prettier ({})", + prettier.prettier_dir().display(), + ))) + }; + project + .supplementary_language_servers + .insert(new_server_id, (name, Arc::clone(prettier_server))); + cx.emit(Event::LanguageServerAdded(new_server_id)); + }); + } +} + fn spawn_default_prettier( node: Arc, cx: &mut ModelContext<'_, Project>, @@ -8686,7 +8690,7 @@ fn spawn_default_prettier( let new_server_id = project.update(&mut cx, |project, _| { project.languages.next_language_server_id() }); - Prettier::start( + let new_prettier = Prettier::start( new_server_id, DEFAULT_PRETTIER_DIR.clone(), node, @@ -8695,7 +8699,11 @@ fn spawn_default_prettier( .await .context("default prettier spawn") .map(Arc::new) - .map_err(Arc::new) + .map_err(Arc::new)?; + + register_new_prettier(&project, &new_prettier, new_server_id, &mut cx); + + Ok(new_prettier) }) .shared() } From b75d8a60a84c737b822eef49d95a2dae19389a3e Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 21:50:47 +0200 Subject: [PATCH 07/15] Simplify --- crates/project/src/project.rs | 81 ++++++++++++----------------------- 1 file changed, 27 insertions(+), 54 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b861291338..215c8d15ee 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8448,7 +8448,8 @@ impl Project { { Ok(None) => { let new_task = project.update(&mut cx, |project, cx| { - let new_task = spawn_default_prettier(node, cx); + let new_task = + start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), cx); project .default_prettier .get_or_insert_with(|| DefaultPrettier { @@ -8480,35 +8481,13 @@ impl Project { } log::info!("Found prettier in {prettier_dir:?}, starting."); - let task_prettier_dir = prettier_dir.clone(); - let task_project = project.clone(); - let new_server_id = project.update(&mut cx, |this, _| { - this.languages.next_language_server_id() - }); - let new_prettier_task = cx - .spawn(|mut cx| async move { - let prettier = Prettier::start( - new_server_id, - task_prettier_dir, - node, - cx.clone(), - ) - .await - .context("prettier start") - .map_err(Arc::new)?; - register_new_prettier( - &task_project, - &prettier, - new_server_id, - &mut cx, - ); - Ok(Arc::new(prettier)).map_err(Arc::new) - }) - .shared(); - project.update(&mut cx, |project, _| { + let new_prettier_task = project.update(&mut cx, |project, cx| { + let new_prettier_task = + start_prettier(node, prettier_dir.clone(), cx); project .prettier_instances .insert(prettier_dir.clone(), new_prettier_task.clone()); + new_prettier_task }); Some((Some(prettier_dir), new_prettier_task)) } @@ -8516,7 +8495,7 @@ impl Project { }); } None => { - let new_task = spawn_default_prettier(node, cx); + let new_task = start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), cx); self.default_prettier .get_or_insert_with(|| DefaultPrettier { instance: None, @@ -8657,6 +8636,26 @@ impl Project { } } +fn start_prettier( + node: Arc, + prettier_dir: PathBuf, + cx: &mut ModelContext<'_, Project>, +) -> Shared, Arc>>> { + cx.spawn(|project, mut cx| async move { + let new_server_id = project.update(&mut cx, |project, _| { + project.languages.next_language_server_id() + }); + let new_prettier = Prettier::start(new_server_id, 
prettier_dir, node, cx.clone()) + .await + .context("default prettier spawn") + .map(Arc::new) + .map_err(Arc::new)?; + register_new_prettier(&project, &new_prettier, new_server_id, &mut cx); + Ok(new_prettier) + }) + .shared() +} + fn register_new_prettier( project: &ModelHandle, prettier: &Prettier, @@ -8682,32 +8681,6 @@ fn register_new_prettier( } } -fn spawn_default_prettier( - node: Arc, - cx: &mut ModelContext<'_, Project>, -) -> Shared, Arc>>> { - cx.spawn(|project, mut cx| async move { - let new_server_id = project.update(&mut cx, |project, _| { - project.languages.next_language_server_id() - }); - let new_prettier = Prettier::start( - new_server_id, - DEFAULT_PRETTIER_DIR.clone(), - node, - cx.clone(), - ) - .await - .context("default prettier spawn") - .map(Arc::new) - .map_err(Arc::new)?; - - register_new_prettier(&project, &new_prettier, new_server_id, &mut cx); - - Ok(new_prettier) - }) - .shared() -} - #[cfg(not(any(test, feature = "test-support")))] async fn install_default_prettier( plugins_to_install: HashSet<&'static str>, From 369b5140fb56b20ac972f18f6bbb062c6f6fc73a Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 22:12:19 +0200 Subject: [PATCH 08/15] Restore LSP names for prettier servers --- crates/project/src/project.rs | 119 +++++++++++++++++++++------------- 1 file changed, 74 insertions(+), 45 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 215c8d15ee..dc009b7468 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4158,17 +4158,7 @@ impl Project { match prettier_task.await { Ok(prettier) => { - let buffer_file = buffer.update(&mut cx, |buffer, _| buffer.file().cloned()); - let buffer_path= { - File::from_dyn(buffer_file.as_ref()).map(|file| { - cx.update(|cx| { - let worktree_id = file.worktree_id(cx); - let file_abs_path = file.abs_path(cx); - project.update(cx, |project, _| project.prettiers_per_worktree.entry(worktree_id).or_default().insert(prettier_path)); - file_abs_path - }) - }) - }; + let buffer_path = buffer.update(&mut cx, |buffer, cx| File::from_dyn(buffer.file()).map(|f| f.abs_path(cx))); format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) @@ -4218,17 +4208,7 @@ impl Project { match prettier_task.await { Ok(prettier) => { - let buffer_file = buffer.update(&mut cx, |buffer, _| buffer.file().cloned()); - let buffer_path= { - File::from_dyn(buffer_file.as_ref()).map(|file| { - cx.update(|cx| { - let worktree_id = file.worktree_id(cx); - let file_abs_path = file.abs_path(cx); - project.update(cx, |project, _| project.prettiers_per_worktree.entry(worktree_id).or_default().insert(prettier_path)); - file_abs_path - }) - }) - }; + let buffer_path = buffer.update(&mut cx, |buffer, cx| File::from_dyn(buffer.file()).map(|f| f.abs_path(cx))); format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) @@ -8429,8 +8409,9 @@ impl Project { let Some(node) = self.node.as_ref().map(Arc::clone) else { return Task::ready(None); }; - match File::from_dyn(buffer_file).map(|file| file.abs_path(cx)) { - Some(buffer_path) => { + match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) + { + Some((worktree_id, buffer_path)) => { let fs = Arc::clone(&self.fs); let installed_prettiers = self.prettier_instances.keys().cloned().collect(); return cx.spawn(|project, mut cx| async move { @@ -8447,9 +8428,20 @@ impl Project { .await { Ok(None) => { + project.update(&mut cx, |project, 
_| { + project + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(None) + }); let new_task = project.update(&mut cx, |project, cx| { - let new_task = - start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), cx); + let new_task = start_prettier( + node, + DEFAULT_PRETTIER_DIR.clone(), + Some(worktree_id), + cx, + ); project .default_prettier .get_or_insert_with(|| DefaultPrettier { @@ -8462,16 +8454,14 @@ impl Project { }); return Some((None, new_task)); } - Err(e) => { - return Some(( - None, - Task::ready(Err(Arc::new( - e.context("determining prettier path"), - ))) - .shared(), - )); - } Ok(Some(prettier_dir)) => { + project.update(&mut cx, |project, _| { + project + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(Some(prettier_dir.clone())) + }); if let Some(existing_prettier) = project.update(&mut cx, |project, _| { project.prettier_instances.get(&prettier_dir).cloned() @@ -8482,8 +8472,12 @@ impl Project { log::info!("Found prettier in {prettier_dir:?}, starting."); let new_prettier_task = project.update(&mut cx, |project, cx| { - let new_prettier_task = - start_prettier(node, prettier_dir.clone(), cx); + let new_prettier_task = start_prettier( + node, + prettier_dir.clone(), + Some(worktree_id), + cx, + ); project .prettier_instances .insert(prettier_dir.clone(), new_prettier_task.clone()); @@ -8491,11 +8485,20 @@ impl Project { }); Some((Some(prettier_dir), new_prettier_task)) } + Err(e) => { + return Some(( + None, + Task::ready(Err(Arc::new( + e.context("determining prettier path"), + ))) + .shared(), + )); + } } }); } None => { - let new_task = start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), cx); + let new_task = start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), None, cx); self.default_prettier .get_or_insert_with(|| DefaultPrettier { instance: None, @@ -8639,6 +8642,7 @@ impl Project { fn start_prettier( node: Arc, prettier_dir: PathBuf, + worktree_id: Option, cx: &mut ModelContext<'_, Project>, ) -> Shared, Arc>>> { cx.spawn(|project, mut cx| async move { @@ -8650,7 +8654,7 @@ fn start_prettier( .context("default prettier spawn") .map(Arc::new) .map_err(Arc::new)?; - register_new_prettier(&project, &new_prettier, new_server_id, &mut cx); + register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx); Ok(new_prettier) }) .shared() @@ -8659,19 +8663,44 @@ fn start_prettier( fn register_new_prettier( project: &ModelHandle, prettier: &Prettier, + worktree_id: Option, new_server_id: LanguageServerId, cx: &mut AsyncAppContext, ) { - log::info!("Started prettier in {:?}", prettier.prettier_dir()); + let prettier_dir = prettier.prettier_dir(); + let is_default = prettier.is_default(); + if is_default { + log::info!("Started default prettier in {prettier_dir:?}"); + } else { + log::info!("Started prettier in {prettier_dir:?}"); + } if let Some(prettier_server) = prettier.server() { project.update(cx, |project, cx| { - let name = if prettier.is_default() { + let name = if is_default { LanguageServerName(Arc::from("prettier (default)")) } else { - LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier.prettier_dir().display(), - ))) + let worktree_path = worktree_id + .and_then(|id| project.worktree_for_id(id, cx)) + .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); + let name = match worktree_path { + Some(worktree_path) => { + if prettier_dir == worktree_path.as_ref() { + let name = prettier_dir + .file_name() + .and_then(|name| name.to_str()) + .unwrap_or_default(); + 
format!("prettier ({name})") + } else { + let dir_to_display = prettier_dir + .strip_prefix(worktree_path.as_ref()) + .ok() + .unwrap_or(prettier_dir); + format!("prettier ({})", dir_to_display.display()) + } + } + None => format!("prettier ({})", prettier_dir.display()), + }; + LanguageServerName(Arc::from(name)) }; project .supplementary_language_servers From cf95f9b08279709fafc88a187c17dfc1f7fefd36 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 22:49:32 +0200 Subject: [PATCH 09/15] Make it more clear that missing prettier is to blame --- crates/prettier/src/prettier.rs | 68 ++++++++++++++++++++++++++++++++- crates/project/src/project.rs | 24 +++++++++--- 2 files changed, 84 insertions(+), 8 deletions(-) diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 685a3ae7a5..857bec3939 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -75,12 +75,14 @@ impl Prettier { let mut project_path_with_prettier_dependency = None; loop { if installed_prettiers.contains(&path_to_check) { + log::debug!("Found prettier path {path_to_check:?} in installed prettiers"); return Ok(Some(path_to_check)); } else if let Some(package_json_contents) = read_package_json(fs, &path_to_check).await? { if has_prettier_in_package_json(&package_json_contents) { if has_prettier_in_node_modules(fs, &path_to_check).await? { + log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules"); return Ok(Some(path_to_check)); } else if project_path_with_prettier_dependency.is_none() { project_path_with_prettier_dependency = Some(path_to_check.clone()); @@ -105,6 +107,8 @@ impl Prettier { workspace_definition == subproject_path.to_string_lossy() } }) { + anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules"); + log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}"); return Ok(Some(path_to_check)); } else { log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}"); @@ -123,8 +127,13 @@ impl Prettier { if !path_to_check.pop() { match project_path_with_prettier_dependency { - Some(closest_prettier_discovered) => anyhow::bail!("No prettier found in ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}"), - None => return Ok(None), + Some(closest_prettier_discovered) => { + anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}") + } + None => { + log::debug!("Found no prettier in ancestors of {locate_from:?}"); + return Ok(None); + } } } } @@ -698,6 +707,61 @@ mod tests { "Should ascend to the multi-workspace root and find the prettier there", ); } + + #[gpui::test] + async fn test_prettier_lookup_in_npm_workspaces_for_not_installed( + cx: &mut gpui::TestAppContext, + ) { + let fs = FakeFs::new(cx.background()); + fs.insert_tree( + "/root", + json!({ + "work": { + "full-stack-foundations": { + "exercises": { + "03.loading": { + "01.problem.loader": { + "app": { + "routes": { + "users+": { + "$username_+": { + "notes.tsx": "// notes.tsx file contents", + }, + }, + }, + }, + 
"node_modules": {}, + "package.json": r#"{ + "devDependencies": { + "prettier": "^3.0.3" + } + }"# + }, + }, + }, + "package.json": r#"{ + "workspaces": ["exercises/*/*", "examples/*"] + }"#, + }, + } + }), + ) + .await; + + match Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx") + ) + .await { + Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"), + Err(e) => { + let message = e.to_string(); + assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined"); + assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents"); + }, + }; + } } fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index dc009b7468..147981c944 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4179,9 +4179,15 @@ impl Project { }, } }); - anyhow::bail!( - "Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}" - )}, + match &prettier_path { + Some(prettier_path) => { + log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}"); + }, + None => { + log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}"); + }, + } + } } } else if let Some((language_server, buffer_abs_path)) = language_server.as_ref().zip(buffer_abs_path.as_ref()) @@ -4229,9 +4235,15 @@ impl Project { }, } }); - anyhow::bail!( - "Failed to create prettier instance from {prettier_path:?} for buffer during formatting: {e:#}" - )}, + match &prettier_path { + Some(prettier_path) => { + log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}"); + }, + None => { + log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}"); + }, + } + } } } } From 244c6939681cd347f9eb35bae3c14ca8f04bf014 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Thu, 2 Nov 2023 23:01:05 +0200 Subject: [PATCH 10/15] Reuse already running default prettiers --- crates/project/src/project.rs | 93 ++++++++++++++++++++++------------- 1 file changed, 59 insertions(+), 34 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 147981c944..7623f84c6d 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -8440,31 +8440,43 @@ impl Project { .await { Ok(None) => { - project.update(&mut cx, |project, _| { - project - .prettiers_per_worktree - .entry(worktree_id) - .or_default() - .insert(None) - }); - let new_task = project.update(&mut cx, |project, cx| { - let new_task = start_prettier( - node, - DEFAULT_PRETTIER_DIR.clone(), - Some(worktree_id), - cx, - ); - project - .default_prettier - .get_or_insert_with(|| DefaultPrettier { - instance: None, - #[cfg(not(any(test, feature = "test-support")))] - installed_plugins: HashSet::default(), - }) - .instance = Some(new_task.clone()); - new_task - }); - return Some((None, new_task)); + let started_default_prettier = + project.update(&mut cx, |project, _| { + project + .prettiers_per_worktree + .entry(worktree_id) + 
.or_default() + .insert(None); + project.default_prettier.as_ref().and_then( + |default_prettier| default_prettier.instance.clone(), + ) + }); + match started_default_prettier { + Some(old_task) => return Some((None, old_task)), + None => { + let new_task = project.update(&mut cx, |project, cx| { + let new_task = start_prettier( + node, + DEFAULT_PRETTIER_DIR.clone(), + Some(worktree_id), + cx, + ); + project + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + #[cfg(not(any( + test, + feature = "test-support" + )))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_task.clone()); + new_task + }); + return Some((None, new_task)); + } + } } Ok(Some(prettier_dir)) => { project.update(&mut cx, |project, _| { @@ -8479,6 +8491,9 @@ impl Project { project.prettier_instances.get(&prettier_dir).cloned() }) { + log::debug!( + "Found already started prettier in {prettier_dir:?}" + ); return Some((Some(prettier_dir), existing_prettier)); } @@ -8510,15 +8525,25 @@ impl Project { }); } None => { - let new_task = start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), None, cx); - self.default_prettier - .get_or_insert_with(|| DefaultPrettier { - instance: None, - #[cfg(not(any(test, feature = "test-support")))] - installed_plugins: HashSet::default(), - }) - .instance = Some(new_task.clone()); - return Task::ready(Some((None, new_task))); + let started_default_prettier = self + .default_prettier + .as_ref() + .and_then(|default_prettier| default_prettier.instance.clone()); + match started_default_prettier { + Some(old_task) => return Task::ready(Some((None, old_task))), + None => { + let new_task = + start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), None, cx); + self.default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + #[cfg(not(any(test, feature = "test-support")))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_task.clone()); + return Task::ready(Some((None, new_task))); + } + } } } } else if self.remote_id().is_some() { From 24dd1c581290f3f7df572e79e11ad2d597cc2dd1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 3 Nov 2023 00:38:18 +0200 Subject: [PATCH 11/15] Properly order default prettier installations and startups --- crates/project/src/project.rs | 205 +++++++++++++++++----------- crates/project/src/project_tests.rs | 2 +- 2 files changed, 123 insertions(+), 84 deletions(-) diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7623f84c6d..919b563cfb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -172,6 +172,7 @@ pub struct Project { struct DefaultPrettier { instance: Option, Arc>>>>, + installation_process: Option>>>>, #[cfg(not(any(test, feature = "test-support")))] installed_plugins: HashSet<&'static str>, } @@ -924,8 +925,7 @@ impl Project { } for (worktree, language, settings) in language_formatters_to_check { - self.install_default_formatters(worktree, &language, &settings, cx) - .detach_and_log_err(cx); + self.install_default_formatters(worktree, &language, &settings, cx); } // Start all the newly-enabled language servers. 
@@ -2681,8 +2681,7 @@ impl Project { let buffer_file = File::from_dyn(buffer_file.as_ref()); let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); - self.install_default_formatters(worktree, &new_language, &settings, cx) - .detach_and_log_err(cx); + self.install_default_formatters(worktree, &new_language, &settings, cx); if let Some(file) = buffer_file { let worktree = file.worktree.clone(); if let Some(tree) = worktree.read(cx).as_local() { @@ -8454,27 +8453,12 @@ impl Project { match started_default_prettier { Some(old_task) => return Some((None, old_task)), None => { - let new_task = project.update(&mut cx, |project, cx| { - let new_task = start_prettier( - node, - DEFAULT_PRETTIER_DIR.clone(), - Some(worktree_id), - cx, - ); - project - .default_prettier - .get_or_insert_with(|| DefaultPrettier { - instance: None, - #[cfg(not(any( - test, - feature = "test-support" - )))] - installed_plugins: HashSet::default(), - }) - .instance = Some(new_task.clone()); - new_task - }); - return Some((None, new_task)); + let new_default_prettier = project + .update(&mut cx, |_, cx| { + start_default_prettier(node, Some(worktree_id), cx) + }) + .await; + return Some((None, new_default_prettier)); } } } @@ -8532,16 +8516,8 @@ impl Project { match started_default_prettier { Some(old_task) => return Task::ready(Some((None, old_task))), None => { - let new_task = - start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), None, cx); - self.default_prettier - .get_or_insert_with(|| DefaultPrettier { - instance: None, - #[cfg(not(any(test, feature = "test-support")))] - installed_plugins: HashSet::default(), - }) - .instance = Some(new_task.clone()); - return Task::ready(Some((None, new_task))); + let new_task = start_default_prettier(node, None, cx); + return cx.spawn(|_, _| async move { Some((None, new_task.await)) }); } } } @@ -8563,8 +8539,7 @@ impl Project { _new_language: &Language, _language_settings: &LanguageSettings, _cx: &mut ModelContext, - ) -> Task> { - return Task::ready(Ok(())); + ) { } #[cfg(not(any(test, feature = "test-support")))] @@ -8574,13 +8549,13 @@ impl Project { new_language: &Language, language_settings: &LanguageSettings, cx: &mut ModelContext, - ) -> Task> { + ) { match &language_settings.formatter { Formatter::Prettier { .. } | Formatter::Auto => {} - Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())), + Formatter::LanguageServer | Formatter::External { .. } => return, }; let Some(node) = self.node.as_ref().cloned() else { - return Task::ready(Ok(())); + return; }; let mut prettier_plugins = None; @@ -8595,7 +8570,7 @@ impl Project { ) } let Some(prettier_plugins) = prettier_plugins else { - return Task::ready(Ok(())); + return; }; let fs = Arc::clone(&self.fs); @@ -8622,60 +8597,124 @@ impl Project { plugins_to_install .retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); if plugins_to_install.is_empty() { - return Task::ready(Ok(())); + return; } - default_prettier.instance.clone() + default_prettier.installation_process.clone() } else { None }; let fs = Arc::clone(&self.fs); - cx.spawn(|this, mut cx| async move { - match locate_prettier_installation - .await - .context("locate prettier installation")? 
- { - Some(_non_default_prettier) => return Ok(()), - None => { - let mut needs_restart = match previous_installation_process { - Some(previous_installation_process) => { - previous_installation_process.await.is_err() - } - None => true, - }; - this.update(&mut cx, |this, _| { - if let Some(default_prettier) = &mut this.default_prettier { - plugins_to_install.retain(|plugin| { - !default_prettier.installed_plugins.contains(plugin) - }); - needs_restart |= !plugins_to_install.is_empty(); - } - }); - if needs_restart { - let installed_plugins = plugins_to_install.clone(); - cx.background() - .spawn(async move { - install_default_prettier(plugins_to_install, node, fs).await - }) - .await - .context("prettier & plugins install")?; + let default_prettier = self + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: None, + installed_plugins: HashSet::default(), + }); + default_prettier.installation_process = Some( + cx.spawn(|this, mut cx| async move { + match locate_prettier_installation + .await + .context("locate prettier installation") + .map_err(Arc::new)? + { + Some(_non_default_prettier) => return Ok(()), + None => { + let mut needs_install = match previous_installation_process { + Some(previous_installation_process) => { + previous_installation_process.await.is_err() + } + None => true, + }; this.update(&mut cx, |this, _| { - let default_prettier = - this.default_prettier - .get_or_insert_with(|| DefaultPrettier { - instance: None, - installed_plugins: HashSet::default(), - }); - default_prettier.instance = None; - default_prettier.installed_plugins.extend(installed_plugins); + if let Some(default_prettier) = &mut this.default_prettier { + plugins_to_install.retain(|plugin| { + !default_prettier.installed_plugins.contains(plugin) + }); + needs_install |= !plugins_to_install.is_empty(); + } }); + if needs_install { + let installed_plugins = plugins_to_install.clone(); + cx.background() + .spawn(async move { + install_default_prettier(plugins_to_install, node, fs).await + }) + .await + .context("prettier & plugins install") + .map_err(Arc::new)?; + this.update(&mut cx, |this, _| { + let default_prettier = + this.default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: Some( + Task::ready(Ok(())).shared(), + ), + installed_plugins: HashSet::default(), + }); + default_prettier.instance = None; + default_prettier.installed_plugins.extend(installed_plugins); + }); + } } } - } - Ok(()) - }) + Ok(()) + }) + .shared(), + ); } } +fn start_default_prettier( + node: Arc, + worktree_id: Option, + cx: &mut ModelContext<'_, Project>, +) -> Task, Arc>>>> { + cx.spawn(|project, mut cx| async move { + loop { + let default_prettier_installing = project.update(&mut cx, |project, _| { + project + .default_prettier + .as_ref() + .and_then(|default_prettier| default_prettier.installation_process.clone()) + }); + match default_prettier_installing { + Some(installation_task) => { + if installation_task.await.is_ok() { + break; + } + } + None => break, + } + } + + project.update(&mut cx, |project, cx| { + match project + .default_prettier + .as_mut() + .and_then(|default_prettier| default_prettier.instance.as_mut()) + { + Some(default_prettier) => default_prettier.clone(), + None => { + let new_default_prettier = + start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx); + project + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: None, + 
#[cfg(not(any(test, feature = "test-support")))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_default_prettier.clone()); + new_default_prettier + } + } + }) + }) +} + fn start_prettier( node: Arc, prettier_dir: PathBuf, diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 32dc542c20..90d32643d5 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -13,7 +13,7 @@ use pretty_assertions::assert_eq; use serde_json::json; use std::{cell::RefCell, os::unix, rc::Rc, task::Poll}; use unindent::Unindent as _; -use util::{assert_set_eq, test::temp_tree, paths::PathMatcher}; +use util::{assert_set_eq, paths::PathMatcher, test::temp_tree}; #[cfg(test)] #[ctor::ctor] From 09346fb9f1eb4391390dbbf726bccc4109deb1bc Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 3 Nov 2023 11:02:38 +0200 Subject: [PATCH 12/15] Port changes to zed2 --- crates/prettier/src/prettier.rs | 6 +- crates/prettier/src/prettier_server.js | 98 +-- crates/prettier2/src/prettier2.rs | 555 +++++++++++++---- crates/prettier2/src/prettier_server.js | 99 +-- crates/project/src/project.rs | 8 +- crates/project2/src/project2.rs | 761 +++++++++++++++--------- crates/project2/src/project_tests.rs | 4 +- crates/project2/src/search.rs | 29 +- 8 files changed, 1059 insertions(+), 501 deletions(-) diff --git a/crates/prettier/src/prettier.rs b/crates/prettier/src/prettier.rs index 857bec3939..06c1b66977 100644 --- a/crates/prettier/src/prettier.rs +++ b/crates/prettier/src/prettier.rs @@ -61,7 +61,7 @@ impl Prettier { ) -> anyhow::Result> { let mut path_to_check = locate_from .components() - .take_while(|component| !is_node_modules(component)) + .take_while(|component| component.as_os_str().to_string_lossy() != "node_modules") .collect::(); let path_to_check_metadata = fs .metadata(&path_to_check) @@ -763,7 +763,3 @@ mod tests { }; } } - -fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { - path_component.as_os_str().to_string_lossy() == "node_modules" -} diff --git a/crates/prettier/src/prettier_server.js b/crates/prettier/src/prettier_server.js index 9967aec50f..191431da0b 100644 --- a/crates/prettier/src/prettier_server.js +++ b/crates/prettier/src/prettier_server.js @@ -1,11 +1,13 @@ -const { Buffer } = require('buffer'); +const { Buffer } = require("buffer"); const fs = require("fs"); const path = require("path"); -const { once } = require('events'); +const { once } = require("events"); const prettierContainerPath = process.argv[2]; if (prettierContainerPath == null || prettierContainerPath.length == 0) { - process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`); + process.stderr.write( + `Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`, + ); process.exit(1); } fs.stat(prettierContainerPath, (err, stats) => { @@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => { process.exit(1); } }); -const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier'); +const prettierPath = path.join(prettierContainerPath, "node_modules/prettier"); class Prettier { constructor(path, prettier, config) { @@ -34,7 +36,7 @@ class Prettier { let config; try { prettier = await loadPrettier(prettierPath); - config = await prettier.resolveConfig(prettierPath) || {}; + config = (await prettier.resolveConfig(prettierPath)) || {}; } catch (e) { 
process.stderr.write(`Failed to load prettier: ${e}\n`); process.exit(1); @@ -42,7 +44,7 @@ class Prettier { process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`); process.stdin.resume(); handleBuffer(new Prettier(prettierPath, prettier, config)); -})() +})(); async function handleBuffer(prettier) { for await (const messageText of readStdin()) { @@ -54,25 +56,29 @@ async function handleBuffer(prettier) { continue; } // allow concurrent request handling by not `await`ing the message handling promise (async function) - handleMessage(message, prettier).catch(e => { + handleMessage(message, prettier).catch((e) => { const errorMessage = message; if ((errorMessage.params || {}).text !== undefined) { errorMessage.params.text = "..snip.."; } - sendResponse({ id: message.id, ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`) }); }); + sendResponse({ + id: message.id, + ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`), + }); + }); } } const headerSeparator = "\r\n"; -const contentLengthHeaderName = 'Content-Length'; +const contentLengthHeaderName = "Content-Length"; async function* readStdin() { let buffer = Buffer.alloc(0); let streamEnded = false; - process.stdin.on('end', () => { + process.stdin.on("end", () => { streamEnded = true; }); - process.stdin.on('data', (data) => { + process.stdin.on("data", (data) => { buffer = Buffer.concat([buffer, data]); }); @@ -80,7 +86,7 @@ async function* readStdin() { sendResponse(makeError(errorMessage)); buffer = Buffer.alloc(0); messageLength = null; - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); streamEnded = false; } @@ -91,20 +97,25 @@ async function* readStdin() { if (messageLength === null) { while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) { if (streamEnded) { - await handleStreamEnded('Unexpected end of stream: headers not found'); + await handleStreamEnded("Unexpected end of stream: headers not found"); continue main_loop; } else if (buffer.length > contentLengthHeaderName.length * 10) { - await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`); + await handleStreamEnded( + `Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`, + ); continue main_loop; } - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); } - const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii'); - const contentLengthHeader = headers.split(headerSeparator) - .map(header => header.split(':')) - .filter(header => header[2] === undefined) - .filter(header => (header[1] || '').length > 0) - .find(header => (header[0] || '').trim() === contentLengthHeaderName); + const headers = buffer + .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)) + .toString("ascii"); + const contentLengthHeader = headers + .split(headerSeparator) + .map((header) => header.split(":")) + .filter((header) => header[2] === undefined) + .filter((header) => (header[1] || "").length > 0) + .find((header) => (header[0] || "").trim() === contentLengthHeaderName); const contentLength = (contentLengthHeader || [])[1]; if (contentLength === undefined) { await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`); @@ -114,13 +125,14 @@ async function* readStdin() { messageLength = parseInt(contentLength, 10); } - 
while (buffer.length < (headersLength + messageLength)) { + while (buffer.length < headersLength + messageLength) { if (streamEnded) { await handleStreamEnded( - `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`); + `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`, + ); continue main_loop; } - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); } const messageEnd = headersLength + messageLength; @@ -128,12 +140,12 @@ async function* readStdin() { buffer = buffer.subarray(messageEnd); headersLength = null; messageLength = null; - yield message.toString('utf8'); + yield message.toString("utf8"); } } catch (e) { sendResponse(makeError(`Error reading stdin: ${e}`)); } finally { - process.stdin.off('data', () => { }); + process.stdin.off("data", () => {}); } } @@ -146,7 +158,7 @@ async function handleMessage(message, prettier) { throw new Error(`Message id is undefined: ${JSON.stringify(message)}`); } - if (method === 'prettier/format') { + if (method === "prettier/format") { if (params === undefined || params.text === undefined) { throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`); } @@ -156,7 +168,7 @@ async function handleMessage(message, prettier) { let resolvedConfig = {}; if (params.options.filepath !== undefined) { - resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {}; + resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {}; } const options = { @@ -164,21 +176,25 @@ async function handleMessage(message, prettier) { ...resolvedConfig, parser: params.options.parser, plugins: params.options.plugins, - path: params.options.filepath + path: params.options.filepath, }; - process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`); + process.stderr.write( + `Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${ + params.options.filepath || "" + }' with options: ${JSON.stringify(options)}\n`, + ); const formattedText = await prettier.prettier.format(params.text, options); sendResponse({ id, result: { text: formattedText } }); - } else if (method === 'prettier/clear_cache') { + } else if (method === "prettier/clear_cache") { prettier.prettier.clearConfigCache(); - prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {}; + prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {}; sendResponse({ id, result: null }); - } else if (method === 'initialize') { + } else if (method === "initialize") { sendResponse({ - id: id || 0, + id, result: { - "capabilities": {} - } + capabilities: {}, + }, }); } else { throw new Error(`Unknown method: ${method}`); @@ -188,18 +204,20 @@ async function handleMessage(message, prettier) { function makeError(message) { return { error: { - "code": -32600, // invalid request code + code: -32600, // invalid request code message, - } + }, }; } function sendResponse(response) { const responsePayloadString = JSON.stringify({ jsonrpc: "2.0", - ...response + ...response, }); - const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`; + const headers = `${contentLengthHeaderName}: ${Buffer.byteLength( + responsePayloadString, + 
)}${headerSeparator}${headerSeparator}`; process.stdout.write(headers + responsePayloadString); } diff --git a/crates/prettier2/src/prettier2.rs b/crates/prettier2/src/prettier2.rs index d9b6f9eab7..44151774ae 100644 --- a/crates/prettier2/src/prettier2.rs +++ b/crates/prettier2/src/prettier2.rs @@ -1,5 +1,5 @@ use anyhow::Context; -use collections::HashMap; +use collections::{HashMap, HashSet}; use fs::Fs; use gpui::{AsyncAppContext, Model}; use language::{language_settings::language_settings, Buffer, Diff}; @@ -7,11 +7,10 @@ use lsp::{LanguageServer, LanguageServerId}; use node_runtime::NodeRuntime; use serde::{Deserialize, Serialize}; use std::{ - collections::VecDeque, path::{Path, PathBuf}, sync::Arc, }; -use util::paths::DEFAULT_PRETTIER_DIR; +use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR}; pub enum Prettier { Real(RealPrettier), @@ -20,7 +19,6 @@ pub enum Prettier { } pub struct RealPrettier { - worktree_id: Option, default: bool, prettier_dir: PathBuf, server: Arc, @@ -28,17 +26,10 @@ pub struct RealPrettier { #[cfg(any(test, feature = "test-support"))] pub struct TestPrettier { - worktree_id: Option, prettier_dir: PathBuf, default: bool, } -#[derive(Debug)] -pub struct LocateStart { - pub worktree_root_path: Arc, - pub starting_path: Arc, -} - pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js"; pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js"); const PRETTIER_PACKAGE_NAME: &str = "prettier"; @@ -63,79 +54,106 @@ impl Prettier { ".editorconfig", ]; - pub async fn locate( - starting_path: Option, - fs: Arc, - ) -> anyhow::Result { - fn is_node_modules(path_component: &std::path::Component<'_>) -> bool { - path_component.as_os_str().to_string_lossy() == "node_modules" + pub async fn locate_prettier_installation( + fs: &dyn Fs, + installed_prettiers: &HashSet, + locate_from: &Path, + ) -> anyhow::Result> { + let mut path_to_check = locate_from + .components() + .take_while(|component| component.as_os_str().to_string_lossy() != "node_modules") + .collect::(); + let path_to_check_metadata = fs + .metadata(&path_to_check) + .await + .with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))? + .with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?; + if !path_to_check_metadata.is_dir { + path_to_check.pop(); } - let paths_to_check = match starting_path.as_ref() { - Some(starting_path) => { - let worktree_root = starting_path - .worktree_root_path - .components() - .into_iter() - .take_while(|path_component| !is_node_modules(path_component)) - .collect::(); - if worktree_root != starting_path.worktree_root_path.as_ref() { - vec![worktree_root] + let mut project_path_with_prettier_dependency = None; + loop { + if installed_prettiers.contains(&path_to_check) { + log::debug!("Found prettier path {path_to_check:?} in installed prettiers"); + return Ok(Some(path_to_check)); + } else if let Some(package_json_contents) = + read_package_json(fs, &path_to_check).await? + { + if has_prettier_in_package_json(&package_json_contents) { + if has_prettier_in_node_modules(fs, &path_to_check).await? 
{ + log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules"); + return Ok(Some(path_to_check)); + } else if project_path_with_prettier_dependency.is_none() { + project_path_with_prettier_dependency = Some(path_to_check.clone()); + } } else { - if starting_path.starting_path.as_ref() == Path::new("") { - worktree_root - .parent() - .map(|path| vec![path.to_path_buf()]) - .unwrap_or_default() - } else { - let file_to_format = starting_path.starting_path.as_ref(); - let mut paths_to_check = VecDeque::new(); - let mut current_path = worktree_root; - for path_component in file_to_format.components().into_iter() { - let new_path = current_path.join(path_component); - let old_path = std::mem::replace(&mut current_path, new_path); - paths_to_check.push_front(old_path); - if is_node_modules(&path_component) { - break; - } + match package_json_contents.get("workspaces") { + Some(serde_json::Value::Array(workspaces)) => { + match &project_path_with_prettier_dependency { + Some(project_path_with_prettier_dependency) => { + let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix"); + if workspaces.iter().filter_map(|value| { + if let serde_json::Value::String(s) = value { + Some(s.clone()) + } else { + log::warn!("Skipping non-string 'workspaces' value: {value:?}"); + None + } + }).any(|workspace_definition| { + if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() { + path_matcher.is_match(subproject_path) + } else { + workspace_definition == subproject_path.to_string_lossy() + } + }) { + anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules"); + log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}"); + return Ok(Some(path_to_check)); + } else { + log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}"); + } + } + None => { + log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json"); + } + } + }, + Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."), + None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"), } - Vec::from(paths_to_check) + } + } + + if !path_to_check.pop() { + match project_path_with_prettier_dependency { + Some(closest_prettier_discovered) => { + anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}") + } + None => { + log::debug!("Found no prettier in ancestors of {locate_from:?}"); + return Ok(None); } } } - None => Vec::new(), - }; - - match find_closest_prettier_dir(paths_to_check, fs.as_ref()) - .await - .with_context(|| format!("finding prettier starting with {starting_path:?}"))? 
- { - Some(prettier_dir) => Ok(prettier_dir), - None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()), } } #[cfg(any(test, feature = "test-support"))] pub async fn start( - worktree_id: Option, _: LanguageServerId, prettier_dir: PathBuf, _: Arc, _: AsyncAppContext, ) -> anyhow::Result { - Ok( - #[cfg(any(test, feature = "test-support"))] - Self::Test(TestPrettier { - worktree_id, - default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), - prettier_dir, - }), - ) + Ok(Self::Test(TestPrettier { + default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), + prettier_dir, + })) } #[cfg(not(any(test, feature = "test-support")))] pub async fn start( - worktree_id: Option, server_id: LanguageServerId, prettier_dir: PathBuf, node: Arc, @@ -174,7 +192,6 @@ impl Prettier { .await .context("prettier server initialization")?; Ok(Self::Real(RealPrettier { - worktree_id, server, default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(), prettier_dir, @@ -370,64 +387,61 @@ impl Prettier { Self::Test(test_prettier) => &test_prettier.prettier_dir, } } - - pub fn worktree_id(&self) -> Option { - match self { - Self::Real(local) => local.worktree_id, - #[cfg(any(test, feature = "test-support"))] - Self::Test(test_prettier) => test_prettier.worktree_id, - } - } } -async fn find_closest_prettier_dir( - paths_to_check: Vec, - fs: &dyn Fs, -) -> anyhow::Result> { - for path in paths_to_check { - let possible_package_json = path.join("package.json"); - if let Some(package_json_metadata) = fs - .metadata(&possible_package_json) - .await - .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))? - { - if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { - let package_json_contents = fs - .load(&possible_package_json) - .await - .with_context(|| format!("reading {possible_package_json:?} file contents"))?; - if let Ok(json_contents) = serde_json::from_str::>( - &package_json_contents, - ) { - if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") { - if o.contains_key(PRETTIER_PACKAGE_NAME) { - return Ok(Some(path)); - } - } - if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies") - { - if o.contains_key(PRETTIER_PACKAGE_NAME) { - return Ok(Some(path)); - } - } - } - } - } +async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result { + let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); + if let Some(node_modules_location_metadata) = fs + .metadata(&possible_node_modules_location) + .await + .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? + { + return Ok(node_modules_location_metadata.is_dir); + } + Ok(false) +} - let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME); - if let Some(node_modules_location_metadata) = fs - .metadata(&possible_node_modules_location) - .await - .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))? - { - if node_modules_location_metadata.is_dir { - return Ok(Some(path)); - } +async fn read_package_json( + fs: &dyn Fs, + path: &Path, +) -> anyhow::Result>> { + let possible_package_json = path.join("package.json"); + if let Some(package_json_metadata) = fs + .metadata(&possible_package_json) + .await + .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))? 
+ { + if !package_json_metadata.is_dir && !package_json_metadata.is_symlink { + let package_json_contents = fs + .load(&possible_package_json) + .await + .with_context(|| format!("reading {possible_package_json:?} file contents"))?; + return serde_json::from_str::>( + &package_json_contents, + ) + .map(Some) + .with_context(|| format!("parsing {possible_package_json:?} file contents")); } } Ok(None) } +fn has_prettier_in_package_json( + package_json_contents: &HashMap, +) -> bool { + if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") { + if o.contains_key(PRETTIER_PACKAGE_NAME) { + return true; + } + } + if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") { + if o.contains_key(PRETTIER_PACKAGE_NAME) { + return true; + } + } + false +} + enum Format {} #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] @@ -466,3 +480,316 @@ impl lsp::request::Request for ClearCache { type Result = (); const METHOD: &'static str = "prettier/clear_cache"; } + +#[cfg(test)] +mod tests { + use fs::FakeFs; + use serde_json::json; + + use super::*; + + #[gpui::test] + async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + ".config": { + "zed": { + "settings.json": r#"{ "formatter": "auto" }"#, + }, + }, + "work": { + "project": { + "src": { + "index.js": "// index.js file contents", + }, + "node_modules": { + "expect": { + "build": { + "print.js": "// print.js file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.5.1" + } + }"#, + }, + "prettier": { + "index.js": "// Dummy prettier package file", + }, + }, + "package.json": r#"{}"# + }, + } + }), + ) + .await; + + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/.config/zed/settings.json"), + ) + .await + .unwrap() + .is_none(), + "Should successfully find no prettier for path hierarchy without it" + ); + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/project/src/index.js") + ) + .await + .unwrap() + .is_none(), + "Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it" + ); + assert!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/project/node_modules/expect/build/print.js") + ) + .await + .unwrap() + .is_none(), + "Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "web_blog": { + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + }, + "expect": { + "build": { + "print.js": "// print.js file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.5.1" + } + }"#, + }, + }, + "pages": { + "[slug].tsx": "// [slug].tsx file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.3.0" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "htmlWhitespaceSensitivity": "strict", + "tabWidth": 4 + } + }"# + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + 
&HashSet::default(), + Path::new("/root/web_blog/pages/[slug].tsx") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/web_blog")), + "Should find a preinstalled prettier in the project root" + ); + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/web_blog/node_modules/expect/build/print.js") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/web_blog")), + "Should find a preinstalled prettier in the project root even for node_modules files" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "work": { + "web_blog": { + "pages": { + "[slug].tsx": "// [slug].tsx file contents", + }, + "package.json": r#"{ + "devDependencies": { + "prettier": "2.3.0" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "htmlWhitespaceSensitivity": "strict", + "tabWidth": 4 + } + }"# + } + } + }), + ) + .await; + + let path = "/root/work/web_blog/node_modules/pages/[slug].tsx"; + match Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new(path) + ) + .await { + Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"), + Err(e) => { + let message = e.to_string(); + assert!(message.contains(path), "Error message should mention which start file was used for location"); + assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents"); + }, + }; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::from_iter( + [PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter() + ), + Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx") + ) + .await + .unwrap(), + Some(PathBuf::from("/root/work")), + "Should return first cached value found without path checks" + ); + } + + #[gpui::test] + async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) { + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "work": { + "full-stack-foundations": { + "exercises": { + "03.loading": { + "01.problem.loader": { + "app": { + "routes": { + "users+": { + "$username_+": { + "notes.tsx": "// notes.tsx file contents", + }, + }, + }, + }, + "node_modules": {}, + "package.json": r#"{ + "devDependencies": { + "prettier": "^3.0.3" + } + }"# + }, + }, + }, + "package.json": r#"{ + "workspaces": ["exercises/*/*", "examples/*"] + }"#, + "node_modules": { + "prettier": { + "index.js": "// Dummy prettier package file", + }, + }, + }, + } + }), + ) + .await; + + assert_eq!( + Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"), + ).await.unwrap(), + Some(PathBuf::from("/root/work/full-stack-foundations")), + "Should ascend to the multi-workspace root and find the prettier there", + ); + } + + #[gpui::test] + async fn test_prettier_lookup_in_npm_workspaces_for_not_installed( + cx: &mut gpui::TestAppContext, + ) { + let fs = FakeFs::new(cx.executor().clone()); + fs.insert_tree( + "/root", + json!({ + "work": { + "full-stack-foundations": { + "exercises": { + "03.loading": { + "01.problem.loader": { + "app": { + "routes": { + "users+": { + "$username_+": { + "notes.tsx": "// notes.tsx file contents", + }, + }, 
+ }, + }, + "node_modules": {}, + "package.json": r#"{ + "devDependencies": { + "prettier": "^3.0.3" + } + }"# + }, + }, + }, + "package.json": r#"{ + "workspaces": ["exercises/*/*", "examples/*"] + }"#, + }, + } + }), + ) + .await; + + match Prettier::locate_prettier_installation( + fs.as_ref(), + &HashSet::default(), + Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx") + ) + .await { + Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"), + Err(e) => { + let message = e.to_string(); + assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined"); + assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents"); + }, + }; + } +} diff --git a/crates/prettier2/src/prettier_server.js b/crates/prettier2/src/prettier_server.js index a56c220f20..191431da0b 100644 --- a/crates/prettier2/src/prettier_server.js +++ b/crates/prettier2/src/prettier_server.js @@ -1,11 +1,13 @@ -const { Buffer } = require('buffer'); +const { Buffer } = require("buffer"); const fs = require("fs"); const path = require("path"); -const { once } = require('events'); +const { once } = require("events"); const prettierContainerPath = process.argv[2]; if (prettierContainerPath == null || prettierContainerPath.length == 0) { - process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`); + process.stderr.write( + `Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`, + ); process.exit(1); } fs.stat(prettierContainerPath, (err, stats) => { @@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => { process.exit(1); } }); -const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier'); +const prettierPath = path.join(prettierContainerPath, "node_modules/prettier"); class Prettier { constructor(path, prettier, config) { @@ -34,7 +36,7 @@ class Prettier { let config; try { prettier = await loadPrettier(prettierPath); - config = await prettier.resolveConfig(prettierPath) || {}; + config = (await prettier.resolveConfig(prettierPath)) || {}; } catch (e) { process.stderr.write(`Failed to load prettier: ${e}\n`); process.exit(1); @@ -42,7 +44,7 @@ class Prettier { process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`); process.stdin.resume(); handleBuffer(new Prettier(prettierPath, prettier, config)); -})() +})(); async function handleBuffer(prettier) { for await (const messageText of readStdin()) { @@ -54,22 +56,29 @@ async function handleBuffer(prettier) { continue; } // allow concurrent request handling by not `await`ing the message handling promise (async function) - handleMessage(message, prettier).catch(e => { - sendResponse({ id: message.id, ...makeError(`error during message handling: ${e}`) }); + handleMessage(message, prettier).catch((e) => { + const errorMessage = message; + if ((errorMessage.params || {}).text !== undefined) { + errorMessage.params.text = "..snip.."; + } + sendResponse({ + id: message.id, + ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`), + }); }); } } const headerSeparator = "\r\n"; -const contentLengthHeaderName = 
'Content-Length'; +const contentLengthHeaderName = "Content-Length"; async function* readStdin() { let buffer = Buffer.alloc(0); let streamEnded = false; - process.stdin.on('end', () => { + process.stdin.on("end", () => { streamEnded = true; }); - process.stdin.on('data', (data) => { + process.stdin.on("data", (data) => { buffer = Buffer.concat([buffer, data]); }); @@ -77,7 +86,7 @@ async function* readStdin() { sendResponse(makeError(errorMessage)); buffer = Buffer.alloc(0); messageLength = null; - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); streamEnded = false; } @@ -88,20 +97,25 @@ async function* readStdin() { if (messageLength === null) { while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) { if (streamEnded) { - await handleStreamEnded('Unexpected end of stream: headers not found'); + await handleStreamEnded("Unexpected end of stream: headers not found"); continue main_loop; } else if (buffer.length > contentLengthHeaderName.length * 10) { - await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`); + await handleStreamEnded( + `Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`, + ); continue main_loop; } - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); } - const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii'); - const contentLengthHeader = headers.split(headerSeparator) - .map(header => header.split(':')) - .filter(header => header[2] === undefined) - .filter(header => (header[1] || '').length > 0) - .find(header => (header[0] || '').trim() === contentLengthHeaderName); + const headers = buffer + .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)) + .toString("ascii"); + const contentLengthHeader = headers + .split(headerSeparator) + .map((header) => header.split(":")) + .filter((header) => header[2] === undefined) + .filter((header) => (header[1] || "").length > 0) + .find((header) => (header[0] || "").trim() === contentLengthHeaderName); const contentLength = (contentLengthHeader || [])[1]; if (contentLength === undefined) { await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`); @@ -111,13 +125,14 @@ async function* readStdin() { messageLength = parseInt(contentLength, 10); } - while (buffer.length < (headersLength + messageLength)) { + while (buffer.length < headersLength + messageLength) { if (streamEnded) { await handleStreamEnded( - `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`); + `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`, + ); continue main_loop; } - await once(process.stdin, 'readable'); + await once(process.stdin, "readable"); } const messageEnd = headersLength + messageLength; @@ -125,12 +140,12 @@ async function* readStdin() { buffer = buffer.subarray(messageEnd); headersLength = null; messageLength = null; - yield message.toString('utf8'); + yield message.toString("utf8"); } } catch (e) { sendResponse(makeError(`Error reading stdin: ${e}`)); } finally { - process.stdin.off('data', () => { }); + process.stdin.off("data", () => {}); } } @@ -143,7 +158,7 @@ async function handleMessage(message, prettier) { throw new Error(`Message id is undefined: ${JSON.stringify(message)}`); } - if 
(method === 'prettier/format') { + if (method === "prettier/format") { if (params === undefined || params.text === undefined) { throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`); } @@ -153,7 +168,7 @@ async function handleMessage(message, prettier) { let resolvedConfig = {}; if (params.options.filepath !== undefined) { - resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {}; + resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {}; } const options = { @@ -161,21 +176,25 @@ async function handleMessage(message, prettier) { ...resolvedConfig, parser: params.options.parser, plugins: params.options.plugins, - path: params.options.filepath + path: params.options.filepath, }; - process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`); + process.stderr.write( + `Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${ + params.options.filepath || "" + }' with options: ${JSON.stringify(options)}\n`, + ); const formattedText = await prettier.prettier.format(params.text, options); sendResponse({ id, result: { text: formattedText } }); - } else if (method === 'prettier/clear_cache') { + } else if (method === "prettier/clear_cache") { prettier.prettier.clearConfigCache(); - prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {}; + prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {}; sendResponse({ id, result: null }); - } else if (method === 'initialize') { + } else if (method === "initialize") { sendResponse({ id, result: { - "capabilities": {} - } + capabilities: {}, + }, }); } else { throw new Error(`Unknown method: ${method}`); @@ -185,18 +204,20 @@ async function handleMessage(message, prettier) { function makeError(message) { return { error: { - "code": -32600, // invalid request code + code: -32600, // invalid request code message, - } + }, }; } function sendResponse(response) { const responsePayloadString = JSON.stringify({ jsonrpc: "2.0", - ...response + ...response, }); - const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`; + const headers = `${contentLengthHeaderName}: ${Buffer.byteLength( + responsePayloadString, + )}${headerSeparator}${headerSeparator}`; process.stdout.write(headers + responsePayloadString); } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 919b563cfb..f7a050e7e0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -4157,7 +4157,9 @@ impl Project { match prettier_task.await { Ok(prettier) => { - let buffer_path = buffer.update(&mut cx, |buffer, cx| File::from_dyn(buffer.file()).map(|f| f.abs_path(cx))); + let buffer_path = buffer.update(&mut cx, |buffer, cx| { + File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) + }); format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) @@ -4213,7 +4215,9 @@ impl Project { match prettier_task.await { Ok(prettier) => { - let buffer_path = buffer.update(&mut cx, |buffer, cx| File::from_dyn(buffer.file()).map(|f| f.abs_path(cx))); + let buffer_path = buffer.update(&mut cx, |buffer, cx| { + File::from_dyn(buffer.file()).map(|file| file.abs_path(cx)) + }); format_operation = Some(FormatOperation::Prettier( prettier .format(buffer, buffer_path, &cx) diff --git a/crates/project2/src/project2.rs 
b/crates/project2/src/project2.rs index 65d1aba820..5d7c976e77 100644 --- a/crates/project2/src/project2.rs +++ b/crates/project2/src/project2.rs @@ -54,7 +54,7 @@ use lsp_command::*; use node_runtime::NodeRuntime; use parking_lot::Mutex; use postage::watch; -use prettier::{LocateStart, Prettier}; +use prettier::Prettier; use project_settings::{LspSettings, ProjectSettings}; use rand::prelude::*; use search::SearchQuery; @@ -82,8 +82,11 @@ use std::{ use terminals::Terminals; use text::Anchor; use util::{ - debug_panic, defer, http::HttpClient, merge_json_value_into, - paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, + debug_panic, defer, + http::HttpClient, + merge_json_value_into, + paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH}, + post_inc, ResultExt, TryFutureExt as _, }; pub use fs::*; @@ -162,17 +165,15 @@ pub struct Project { copilot_log_subscription: Option, current_lsp_settings: HashMap, LspSettings>, node: Option>, - #[cfg(not(any(test, feature = "test-support")))] default_prettier: Option, - prettier_instances: HashMap< - (Option, PathBuf), - Shared, Arc>>>, - >, + prettiers_per_worktree: HashMap>>, + prettier_instances: HashMap, Arc>>>>, } -#[cfg(not(any(test, feature = "test-support")))] struct DefaultPrettier { - installation_process: Option>>, + instance: Option, Arc>>>>, + installation_process: Option>>>>, + #[cfg(not(any(test, feature = "test-support")))] installed_plugins: HashSet<&'static str>, } @@ -686,8 +687,8 @@ impl Project { copilot_log_subscription: None, current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), node: Some(node), - #[cfg(not(any(test, feature = "test-support")))] default_prettier: None, + prettiers_per_worktree: HashMap::default(), prettier_instances: HashMap::default(), } }) @@ -789,8 +790,8 @@ impl Project { copilot_log_subscription: None, current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(), node: None, - #[cfg(not(any(test, feature = "test-support")))] default_prettier: None, + prettiers_per_worktree: HashMap::default(), prettier_instances: HashMap::default(), }; for worktree in worktrees { @@ -963,8 +964,7 @@ impl Project { } for (worktree, language, settings) in language_formatters_to_check { - self.install_default_formatters(worktree, &language, &settings, cx) - .detach_and_log_err(cx); + self.install_default_formatters(worktree, &language, &settings, cx); } // Start all the newly-enabled language servers. @@ -2720,20 +2720,7 @@ impl Project { let buffer_file = File::from_dyn(buffer_file.as_ref()); let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx)); - let task_buffer = buffer.clone(); - let prettier_installation_task = - self.install_default_formatters(worktree, &new_language, &settings, cx); - cx.spawn(move |project, mut cx| async move { - prettier_installation_task.await?; - let _ = project - .update(&mut cx, |project, cx| { - project.prettier_instance_for_buffer(&task_buffer, cx) - })? 
- .await; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - + self.install_default_formatters(worktree, &new_language, &settings, cx); if let Some(file) = buffer_file { let worktree = file.worktree.clone(); if let Some(tree) = worktree.read(cx).as_local() { @@ -4096,7 +4083,7 @@ impl Project { } pub fn format( - &self, + &mut self, buffers: HashSet>, push_to_history: bool, trigger: FormatTrigger, @@ -4116,10 +4103,10 @@ impl Project { }) .collect::>(); - cx.spawn(move |this, mut cx| async move { + cx.spawn(move |project, mut cx| async move { // Do not allow multiple concurrent formatting requests for the // same buffer. - this.update(&mut cx, |this, cx| { + project.update(&mut cx, |this, cx| { buffers_with_paths_and_servers.retain(|(buffer, _, _)| { this.buffers_being_formatted .insert(buffer.read(cx).remote_id()) @@ -4127,7 +4114,7 @@ impl Project { })?; let _cleanup = defer({ - let this = this.clone(); + let this = project.clone(); let mut cx = cx.clone(); let buffers = &buffers_with_paths_and_servers; move || { @@ -4195,7 +4182,7 @@ impl Project { { format_operation = Some(FormatOperation::Lsp( Self::format_via_lsp( - &this, + &project, &buffer, buffer_abs_path, &language_server, @@ -4230,7 +4217,7 @@ impl Project { } } (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => { - if let Some(prettier_task) = this + if let Some((prettier_path, prettier_task)) = project .update(&mut cx, |project, cx| { project.prettier_instance_for_buffer(buffer, cx) })?.await { @@ -4247,16 +4234,35 @@ impl Project { .context("formatting via prettier")?, )); } - Err(e) => anyhow::bail!( - "Failed to create prettier instance for buffer during autoformatting: {e:#}" - ), + Err(e) => { + project.update(&mut cx, |project, _| { + match &prettier_path { + Some(prettier_path) => { + project.prettier_instances.remove(prettier_path); + }, + None => { + if let Some(default_prettier) = project.default_prettier.as_mut() { + default_prettier.instance = None; + } + }, + } + })?; + match &prettier_path { + Some(prettier_path) => { + log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}"); + }, + None => { + log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}"); + }, + } + } } } else if let Some((language_server, buffer_abs_path)) = language_server.as_ref().zip(buffer_abs_path.as_ref()) { format_operation = Some(FormatOperation::Lsp( Self::format_via_lsp( - &this, + &project, &buffer, buffer_abs_path, &language_server, @@ -4269,7 +4275,7 @@ impl Project { } } (Formatter::Prettier { .. 
}, FormatOnSave::On | FormatOnSave::Off) => { - if let Some(prettier_task) = this + if let Some((prettier_path, prettier_task)) = project .update(&mut cx, |project, cx| { project.prettier_instance_for_buffer(buffer, cx) })?.await { @@ -4286,9 +4292,28 @@ impl Project { .context("formatting via prettier")?, )); } - Err(e) => anyhow::bail!( - "Failed to create prettier instance for buffer during formatting: {e:#}" - ), + Err(e) => { + project.update(&mut cx, |project, _| { + match &prettier_path { + Some(prettier_path) => { + project.prettier_instances.remove(prettier_path); + }, + None => { + if let Some(default_prettier) = project.default_prettier.as_mut() { + default_prettier.instance = None; + } + }, + } + })?; + match &prettier_path { + Some(prettier_path) => { + log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}"); + }, + None => { + log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}"); + }, + } + } } } } @@ -6506,15 +6531,25 @@ impl Project { "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}" ); let prettiers_to_reload = self - .prettier_instances + .prettiers_per_worktree + .get(¤t_worktree_id) .iter() - .filter_map(|((worktree_id, prettier_path), prettier_task)| { - if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) { - Some((*worktree_id, prettier_path.clone(), prettier_task.clone())) - } else { - None - } + .flat_map(|prettier_paths| prettier_paths.iter()) + .flatten() + .filter_map(|prettier_path| { + Some(( + current_worktree_id, + Some(prettier_path.clone()), + self.prettier_instances.get(prettier_path)?.clone(), + )) }) + .chain(self.default_prettier.iter().filter_map(|default_prettier| { + Some(( + current_worktree_id, + None, + default_prettier.instance.clone()?, + )) + })) .collect::>(); cx.background_executor() @@ -6525,9 +6560,14 @@ impl Project { .clear_cache() .await .with_context(|| { - format!( - "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" - ) + match prettier_path { + Some(prettier_path) => format!( + "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update" + ), + None => format!( + "clearing default prettier cache for worktree {worktree_id:?} on prettier settings update" + ), + } }) .map_err(Arc::new) } @@ -8411,7 +8451,12 @@ impl Project { &mut self, buffer: &Model, cx: &mut ModelContext, - ) -> Task, Arc>>>>> { + ) -> Task< + Option<( + Option, + Shared, Arc>>>, + )>, + > { let buffer = buffer.read(cx); let buffer_file = buffer.file(); let Some(buffer_language) = buffer.language() else { @@ -8421,142 +8466,142 @@ impl Project { return Task::ready(None); } - let buffer_file = File::from_dyn(buffer_file); - let buffer_path = buffer_file.map(|file| Arc::clone(file.path())); - let worktree_path = buffer_file - .as_ref() - .and_then(|file| Some(file.worktree.read(cx).abs_path())); - let worktree_id = buffer_file.map(|file| file.worktree_id(cx)); - if self.is_local() || worktree_id.is_none() || worktree_path.is_none() { + if self.is_local() { let Some(node) = self.node.as_ref().map(Arc::clone) else { return Task::ready(None); }; - let fs = self.fs.clone(); - cx.spawn(move |this, mut cx| async move { - let prettier_dir = match cx - .background_executor() - .spawn(Prettier::locate( - worktree_path.zip(buffer_path).map( - |(worktree_root_path, starting_path)| LocateStart { - 
worktree_root_path, - starting_path, - }, - ), - fs, - )) - .await - { - Ok(path) => path, - Err(e) => { - return Some( - Task::ready(Err(Arc::new(e.context( - "determining prettier path for worktree {worktree_path:?}", - )))) - .shared(), - ); - } - }; - - if let Some(existing_prettier) = this - .update(&mut cx, |project, _| { - project - .prettier_instances - .get(&(worktree_id, prettier_dir.clone())) - .cloned() - }) - .ok() - .flatten() - { - return Some(existing_prettier); - } - - log::info!("Found prettier in {prettier_dir:?}, starting."); - let task_prettier_dir = prettier_dir.clone(); - let new_prettier_task = cx - .spawn({ - let this = this.clone(); - move |mut cx| async move { - let new_server_id = this.update(&mut cx, |this, _| { - this.languages.next_language_server_id() - })?; - let prettier = Prettier::start( - worktree_id.map(|id| id.to_usize()), - new_server_id, - task_prettier_dir, - node, - cx.clone(), - ) + match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx))) + { + Some((worktree_id, buffer_path)) => { + let fs = Arc::clone(&self.fs); + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + return cx.spawn(|project, mut cx| async move { + match cx + .background_executor() + .spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + &buffer_path, + ) + .await + }) .await - .context("prettier start") - .map_err(Arc::new)?; - log::info!("Started prettier in {:?}", prettier.prettier_dir()); - - if let Some(prettier_server) = prettier.server() { - this.update(&mut cx, |project, cx| { - let name = if prettier.is_default() { - LanguageServerName(Arc::from("prettier (default)")) - } else { - let prettier_dir = prettier.prettier_dir(); - let worktree_path = prettier - .worktree_id() - .map(WorktreeId::from_usize) - .and_then(|id| project.worktree_for_id(id, cx)) - .map(|worktree| worktree.read(cx).abs_path()); - match worktree_path { - Some(worktree_path) => { - if worktree_path.as_ref() == prettier_dir { - LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier_dir - .file_name() - .and_then(|name| name.to_str()) - .unwrap_or_default() - ))) - } else { - let dir_to_display = match prettier_dir - .strip_prefix(&worktree_path) - .ok() - { - Some(relative_path) => relative_path, - None => prettier_dir, - }; - LanguageServerName(Arc::from(format!( - "prettier ({})", - dir_to_display.display(), - ))) - } - } - None => LanguageServerName(Arc::from(format!( - "prettier ({})", - prettier_dir.display(), - ))), - } - }; - + { + Ok(None) => { + match project.update(&mut cx, |project, _| { project - .supplementary_language_servers - .insert(new_server_id, (name, Arc::clone(prettier_server))); - cx.emit(Event::LanguageServerAdded(new_server_id)); - })?; + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(None); + project.default_prettier.as_ref().and_then( + |default_prettier| default_prettier.instance.clone(), + ) + }) { + Ok(Some(old_task)) => Some((None, old_task)), + Ok(None) => { + match project.update(&mut cx, |_, cx| { + start_default_prettier(node, Some(worktree_id), cx) + }) { + Ok(new_default_prettier) => { + return Some((None, new_default_prettier.await)) + } + Err(e) => { + Some(( + None, + Task::ready(Err(Arc::new(e.context("project is gone during default prettier startup")))) + .shared(), + )) + } + } + } + Err(e) => Some((None, Task::ready(Err(Arc::new(e.context("project is gone during default prettier checks")))) + .shared())), + } + } + 
Ok(Some(prettier_dir)) => { + match project.update(&mut cx, |project, _| { + project + .prettiers_per_worktree + .entry(worktree_id) + .or_default() + .insert(Some(prettier_dir.clone())); + project.prettier_instances.get(&prettier_dir).cloned() + }) { + Ok(Some(existing_prettier)) => { + log::debug!( + "Found already started prettier in {prettier_dir:?}" + ); + return Some((Some(prettier_dir), existing_prettier)); + } + Err(e) => { + return Some(( + Some(prettier_dir), + Task::ready(Err(Arc::new(e.context("project is gone during custom prettier checks")))) + .shared(), + )) + } + _ => {}, + } + + log::info!("Found prettier in {prettier_dir:?}, starting."); + let new_prettier_task = + match project.update(&mut cx, |project, cx| { + let new_prettier_task = start_prettier( + node, + prettier_dir.clone(), + Some(worktree_id), + cx, + ); + project.prettier_instances.insert( + prettier_dir.clone(), + new_prettier_task.clone(), + ); + new_prettier_task + }) { + Ok(task) => task, + Err(e) => return Some(( + Some(prettier_dir), + Task::ready(Err(Arc::new(e.context("project is gone during custom prettier startup")))) + .shared() + )), + }; + Some((Some(prettier_dir), new_prettier_task)) + } + Err(e) => { + return Some(( + None, + Task::ready(Err(Arc::new( + e.context("determining prettier path"), + ))) + .shared(), + )); } - Ok(Arc::new(prettier)).map_err(Arc::new) } - }) - .shared(); - this.update(&mut cx, |project, _| { - project - .prettier_instances - .insert((worktree_id, prettier_dir), new_prettier_task.clone()); - }) - .ok(); - Some(new_prettier_task) - }) + }); + } + None => { + let started_default_prettier = self + .default_prettier + .as_ref() + .and_then(|default_prettier| default_prettier.instance.clone()); + match started_default_prettier { + Some(old_task) => return Task::ready(Some((None, old_task))), + None => { + let new_task = start_default_prettier(node, None, cx); + return cx.spawn(|_, _| async move { Some((None, new_task.await)) }); + } + } + } + } } else if self.remote_id().is_some() { return Task::ready(None); } else { - Task::ready(Some( + Task::ready(Some(( + None, Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(), - )) + ))) } } @@ -8567,8 +8612,7 @@ impl Project { _: &Language, _: &LanguageSettings, _: &mut ModelContext, - ) -> Task> { - Task::ready(Ok(())) + ) { } #[cfg(not(any(test, feature = "test-support")))] @@ -8578,19 +8622,19 @@ impl Project { new_language: &Language, language_settings: &LanguageSettings, cx: &mut ModelContext, - ) -> Task> { + ) { match &language_settings.formatter { Formatter::Prettier { .. } | Formatter::Auto => {} - Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())), + Formatter::LanguageServer | Formatter::External { .. 
} => return, }; let Some(node) = self.node.as_ref().cloned() else { - return Task::ready(Ok(())); + return; }; let mut prettier_plugins = None; if new_language.prettier_parser_name().is_some() { prettier_plugins - .get_or_insert_with(|| HashSet::default()) + .get_or_insert_with(|| HashSet::<&'static str>::default()) .extend( new_language .lsp_adapters() @@ -8599,114 +8643,287 @@ impl Project { ) } let Some(prettier_plugins) = prettier_plugins else { - return Task::ready(Ok(())); + return; }; + let fs = Arc::clone(&self.fs); + let locate_prettier_installation = match worktree.and_then(|worktree_id| { + self.worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }) { + Some(locate_from) => { + let installed_prettiers = self.prettier_instances.keys().cloned().collect(); + cx.background_executor().spawn(async move { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + locate_from.as_ref(), + ) + .await + }) + } + None => Task::ready(Ok(None)), + }; let mut plugins_to_install = prettier_plugins; - let (mut install_success_tx, mut install_success_rx) = - futures::channel::mpsc::channel::>(1); - let new_installation_process = cx - .spawn(|this, mut cx| async move { - if let Some(installed_plugins) = install_success_rx.next().await { - this.update(&mut cx, |this, _| { - let default_prettier = - this.default_prettier - .get_or_insert_with(|| DefaultPrettier { - installation_process: None, - installed_plugins: HashSet::default(), - }); - if !installed_plugins.is_empty() { - log::info!("Installed new prettier plugins: {installed_plugins:?}"); - default_prettier.installed_plugins.extend(installed_plugins); - } - }) - .ok(); - } - }) - .shared(); let previous_installation_process = if let Some(default_prettier) = &mut self.default_prettier { plugins_to_install .retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); if plugins_to_install.is_empty() { - return Task::ready(Ok(())); + return; } - std::mem::replace( - &mut default_prettier.installation_process, - Some(new_installation_process.clone()), - ) + default_prettier.installation_process.clone() } else { None }; - let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path(); - let already_running_prettier = self - .prettier_instances - .get(&(worktree, default_prettier_dir.to_path_buf())) - .cloned(); let fs = Arc::clone(&self.fs); - cx.spawn(move |this, mut cx| async move { - if let Some(previous_installation_process) = previous_installation_process { - previous_installation_process.await; - } - let mut everything_was_installed = false; - this.update(&mut cx, |this, _| { - match &mut this.default_prettier { - Some(default_prettier) => { - plugins_to_install - .retain(|plugin| !default_prettier.installed_plugins.contains(plugin)); - everything_was_installed = plugins_to_install.is_empty(); - }, - None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }), - } - })?; - if everything_was_installed { - return Ok(()); - } - - cx.spawn(move |_| async move { - let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE); - // method creates parent directory if it doesn't exist - fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await - .with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?; - - let packages_to_versions = future::try_join_all( - 
plugins_to_install - .iter() - .chain(Some(&"prettier")) - .map(|package_name| async { - let returned_package_name = package_name.to_string(); - let latest_version = node.npm_package_latest_version(package_name) + let default_prettier = self + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: None, + installed_plugins: HashSet::default(), + }); + default_prettier.installation_process = Some( + cx.spawn(|this, mut cx| async move { + match locate_prettier_installation + .await + .context("locate prettier installation") + .map_err(Arc::new)? + { + Some(_non_default_prettier) => return Ok(()), + None => { + let mut needs_install = match previous_installation_process { + Some(previous_installation_process) => { + previous_installation_process.await.is_err() + } + None => true, + }; + this.update(&mut cx, |this, _| { + if let Some(default_prettier) = &mut this.default_prettier { + plugins_to_install.retain(|plugin| { + !default_prettier.installed_plugins.contains(plugin) + }); + needs_install |= !plugins_to_install.is_empty(); + } + })?; + if needs_install { + let installed_plugins = plugins_to_install.clone(); + cx.background_executor() + .spawn(async move { + install_default_prettier(plugins_to_install, node, fs).await + }) .await - .with_context(|| { - format!("fetching latest npm version for package {returned_package_name}") - })?; - anyhow::Ok((returned_package_name, latest_version)) - }), - ) - .await - .context("fetching latest npm versions")?; - - log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); - let borrowed_packages = packages_to_versions.iter().map(|(package, version)| { - (package.as_str(), version.as_str()) - }).collect::>(); - node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?; - let installed_packages = !plugins_to_install.is_empty(); - install_success_tx.try_send(plugins_to_install).ok(); - - if !installed_packages { - if let Some(prettier) = already_running_prettier { - prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?; + .context("prettier & plugins install") + .map_err(Arc::new)?; + this.update(&mut cx, |this, _| { + let default_prettier = + this.default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: Some( + Task::ready(Ok(())).shared(), + ), + installed_plugins: HashSet::default(), + }); + default_prettier.instance = None; + default_prettier.installed_plugins.extend(installed_plugins); + })?; + } } } - - anyhow::Ok(()) - }).await - }) + Ok(()) + }) + .shared(), + ); } } +fn start_default_prettier( + node: Arc, + worktree_id: Option, + cx: &mut ModelContext<'_, Project>, +) -> Task, Arc>>>> { + cx.spawn(|project, mut cx| async move { + loop { + let default_prettier_installing = match project.update(&mut cx, |project, _| { + project + .default_prettier + .as_ref() + .and_then(|default_prettier| default_prettier.installation_process.clone()) + }) { + Ok(installation) => installation, + Err(e) => { + return Task::ready(Err(Arc::new( + e.context("project is gone during default prettier installation"), + ))) + .shared() + } + }; + match default_prettier_installing { + Some(installation_task) => { + if installation_task.await.is_ok() { + break; + } + } + None => break, + } + } + + match project.update(&mut cx, |project, cx| { + match project + .default_prettier + 
.as_mut() + .and_then(|default_prettier| default_prettier.instance.as_mut()) + { + Some(default_prettier) => default_prettier.clone(), + None => { + let new_default_prettier = + start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx); + project + .default_prettier + .get_or_insert_with(|| DefaultPrettier { + instance: None, + installation_process: None, + #[cfg(not(any(test, feature = "test-support")))] + installed_plugins: HashSet::default(), + }) + .instance = Some(new_default_prettier.clone()); + new_default_prettier + } + } + }) { + Ok(task) => task, + Err(e) => Task::ready(Err(Arc::new( + e.context("project is gone during default prettier startup"), + ))) + .shared(), + } + }) +} + +fn start_prettier( + node: Arc, + prettier_dir: PathBuf, + worktree_id: Option, + cx: &mut ModelContext<'_, Project>, +) -> Shared, Arc>>> { + cx.spawn(|project, mut cx| async move { + let new_server_id = project.update(&mut cx, |project, _| { + project.languages.next_language_server_id() + })?; + let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone()) + .await + .context("default prettier spawn") + .map(Arc::new) + .map_err(Arc::new)?; + register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx); + Ok(new_prettier) + }) + .shared() +} + +fn register_new_prettier( + project: &WeakModel, + prettier: &Prettier, + worktree_id: Option, + new_server_id: LanguageServerId, + cx: &mut AsyncAppContext, +) { + let prettier_dir = prettier.prettier_dir(); + let is_default = prettier.is_default(); + if is_default { + log::info!("Started default prettier in {prettier_dir:?}"); + } else { + log::info!("Started prettier in {prettier_dir:?}"); + } + if let Some(prettier_server) = prettier.server() { + project + .update(cx, |project, cx| { + let name = if is_default { + LanguageServerName(Arc::from("prettier (default)")) + } else { + let worktree_path = worktree_id + .and_then(|id| project.worktree_for_id(id, cx)) + .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path())); + let name = match worktree_path { + Some(worktree_path) => { + if prettier_dir == worktree_path.as_ref() { + let name = prettier_dir + .file_name() + .and_then(|name| name.to_str()) + .unwrap_or_default(); + format!("prettier ({name})") + } else { + let dir_to_display = prettier_dir + .strip_prefix(worktree_path.as_ref()) + .ok() + .unwrap_or(prettier_dir); + format!("prettier ({})", dir_to_display.display()) + } + } + None => format!("prettier ({})", prettier_dir.display()), + }; + LanguageServerName(Arc::from(name)) + }; + project + .supplementary_language_servers + .insert(new_server_id, (name, Arc::clone(prettier_server))); + cx.emit(Event::LanguageServerAdded(new_server_id)); + }) + .ok(); + } +} + +#[cfg(not(any(test, feature = "test-support")))] +async fn install_default_prettier( + plugins_to_install: HashSet<&'static str>, + node: Arc, + fs: Arc, +) -> anyhow::Result<()> { + let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE); + // method creates parent directory if it doesn't exist + fs.save( + &prettier_wrapper_path, + &text::Rope::from(prettier::PRETTIER_SERVER_JS), + text::LineEnding::Unix, + ) + .await + .with_context(|| { + format!( + "writing {} file at {prettier_wrapper_path:?}", + prettier::PRETTIER_SERVER_FILE + ) + })?; + + let packages_to_versions = + future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map( + |package_name| async { + let returned_package_name = package_name.to_string(); + let 
latest_version = node + .npm_package_latest_version(package_name) + .await + .with_context(|| { + format!("fetching latest npm version for package {returned_package_name}") + })?; + anyhow::Ok((returned_package_name, latest_version)) + }, + )) + .await + .context("fetching latest npm versions")?; + + log::info!("Fetching default prettier and plugins: {packages_to_versions:?}"); + let borrowed_packages = packages_to_versions + .iter() + .map(|(package, version)| (package.as_str(), version.as_str())) + .collect::>(); + node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages) + .await + .context("fetching formatter packages")?; + anyhow::Ok(()) +} + fn subscribe_for_copilot_events( copilot: &Model, cx: &mut ModelContext<'_, Project>, diff --git a/crates/project2/src/project_tests.rs b/crates/project2/src/project_tests.rs index 490b3a0788..19485b2306 100644 --- a/crates/project2/src/project_tests.rs +++ b/crates/project2/src/project_tests.rs @@ -1,4 +1,4 @@ -use crate::{search::PathMatcher, Event, *}; +use crate::{Event, *}; use fs::FakeFs; use futures::{future, StreamExt}; use gpui::AppContext; @@ -13,7 +13,7 @@ use pretty_assertions::assert_eq; use serde_json::json; use std::{os, task::Poll}; use unindent::Unindent as _; -use util::{assert_set_eq, test::temp_tree}; +use util::{assert_set_eq, paths::PathMatcher, test::temp_tree}; #[gpui::test] async fn test_block_via_channel(cx: &mut gpui::TestAppContext) { diff --git a/crates/project2/src/search.rs b/crates/project2/src/search.rs index 46dd30c8a0..7e360e22ee 100644 --- a/crates/project2/src/search.rs +++ b/crates/project2/src/search.rs @@ -1,7 +1,6 @@ use aho_corasick::{AhoCorasick, AhoCorasickBuilder}; use anyhow::{Context, Result}; use client::proto; -use globset::{Glob, GlobMatcher}; use itertools::Itertools; use language::{char_kind, BufferSnapshot}; use regex::{Regex, RegexBuilder}; @@ -10,9 +9,10 @@ use std::{ borrow::Cow, io::{BufRead, BufReader, Read}, ops::Range, - path::{Path, PathBuf}, + path::Path, sync::Arc, }; +use util::paths::PathMatcher; #[derive(Clone, Debug)] pub struct SearchInputs { @@ -52,31 +52,6 @@ pub enum SearchQuery { }, } -#[derive(Clone, Debug)] -pub struct PathMatcher { - maybe_path: PathBuf, - glob: GlobMatcher, -} - -impl std::fmt::Display for PathMatcher { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.maybe_path.to_string_lossy().fmt(f) - } -} - -impl PathMatcher { - pub fn new(maybe_glob: &str) -> Result { - Ok(PathMatcher { - glob: Glob::new(&maybe_glob)?.compile_matcher(), - maybe_path: PathBuf::from(maybe_glob), - }) - } - - pub fn is_match>(&self, other: P) -> bool { - other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other) - } -} - impl SearchQuery { pub fn text( query: impl ToString, From eb8a0e71487bfdf05072713c6e441a4ca8bca628 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 3 Nov 2023 12:38:09 +0200 Subject: [PATCH 13/15] Uncomment persistence tests --- crates/language2/src/syntax_map.rs | 2 - .../src/semantic_index_tests.rs | 8 +- crates/workspace2/Cargo.toml | 6 +- crates/workspace2/src/dock.rs | 6 +- crates/workspace2/src/item.rs | 12 +- crates/workspace2/src/notifications.rs | 6 +- crates/workspace2/src/pane.rs | 4 +- .../src/pane/dragged_item_receiver.rs | 2 +- crates/workspace2/src/pane_group.rs | 4 +- crates/workspace2/src/persistence.rs | 730 +++++++++--------- crates/workspace2/src/persistence/model.rs | 24 +- crates/workspace2/src/searchable.rs | 2 +- crates/workspace2/src/status_bar.rs | 2 +- 
crates/workspace2/src/toolbar.rs | 2 +- crates/workspace2/src/workspace2.rs | 4 +- crates/workspace2/src/workspace_settings.rs | 2 +- crates/zed2/src/zed2.rs | 63 +- 17 files changed, 437 insertions(+), 442 deletions(-) diff --git a/crates/language2/src/syntax_map.rs b/crates/language2/src/syntax_map.rs index 4abb9afe7e..18f2e9b264 100644 --- a/crates/language2/src/syntax_map.rs +++ b/crates/language2/src/syntax_map.rs @@ -234,7 +234,6 @@ impl SyntaxMap { self.snapshot.interpolate(text); } - #[allow(dead_code)] // todo!() #[cfg(test)] pub fn reparse(&mut self, language: Arc, text: &BufferSnapshot) { self.snapshot @@ -786,7 +785,6 @@ impl SyntaxSnapshot { ) } - #[allow(dead_code)] // todo!() #[cfg(test)] pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec { self.layers_for_range(0..buffer.len(), buffer).collect() diff --git a/crates/semantic_index/src/semantic_index_tests.rs b/crates/semantic_index/src/semantic_index_tests.rs index 044ded2682..2145d1f9e0 100644 --- a/crates/semantic_index/src/semantic_index_tests.rs +++ b/crates/semantic_index/src/semantic_index_tests.rs @@ -289,12 +289,12 @@ async fn test_code_context_retrieval_rust() { impl E { // This is also a preceding comment pub fn function_1() -> Option<()> { - todo!(); + unimplemented!(); } // This is a preceding comment fn function_2() -> Result<()> { - todo!(); + unimplemented!(); } } @@ -344,7 +344,7 @@ async fn test_code_context_retrieval_rust() { " // This is also a preceding comment pub fn function_1() -> Option<()> { - todo!(); + unimplemented!(); }" .unindent(), text.find("pub fn function_1").unwrap(), @@ -353,7 +353,7 @@ async fn test_code_context_retrieval_rust() { " // This is a preceding comment fn function_2() -> Result<()> { - todo!(); + unimplemented!(); }" .unindent(), text.find("fn function_2").unwrap(), diff --git a/crates/workspace2/Cargo.toml b/crates/workspace2/Cargo.toml index 5072f2b8f9..f3f10d2015 100644 --- a/crates/workspace2/Cargo.toml +++ b/crates/workspace2/Cargo.toml @@ -14,7 +14,7 @@ test-support = [ "client2/test-support", "project2/test-support", "settings2/test-support", - "gpui2/test-support", + "gpui/test-support", "fs2/test-support" ] @@ -25,7 +25,7 @@ client2 = { path = "../client2" } collections = { path = "../collections" } # context_menu = { path = "../context_menu" } fs2 = { path = "../fs2" } -gpui2 = { path = "../gpui2" } +gpui = { package = "gpui2", path = "../gpui2" } install_cli2 = { path = "../install_cli2" } language2 = { path = "../language2" } #menu = { path = "../menu" } @@ -56,7 +56,7 @@ uuid.workspace = true [dev-dependencies] call2 = { path = "../call2", features = ["test-support"] } client2 = { path = "../client2", features = ["test-support"] } -gpui2 = { path = "../gpui2", features = ["test-support"] } +gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] } project2 = { path = "../project2", features = ["test-support"] } settings2 = { path = "../settings2", features = ["test-support"] } fs2 = { path = "../fs2", features = ["test-support"] } diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index 9da9123a2f..9ade6278bb 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -1,5 +1,5 @@ use crate::{status_bar::StatusItemView, Axis, Workspace}; -use gpui2::{ +use gpui::{ div, Action, AnyView, AppContext, Div, Entity, EntityId, EventEmitter, ParentElement, Render, Subscription, View, ViewContext, WeakView, WindowContext, }; @@ -629,7 +629,7 @@ impl StatusItemView for PanelButtons { #[cfg(any(test, 
feature = "test-support"))] pub mod test { use super::*; - use gpui2::{div, Div, ViewContext, WindowContext}; + use gpui::{div, Div, ViewContext, WindowContext}; #[derive(Debug)] pub enum TestPanelEvent { @@ -678,7 +678,7 @@ pub mod test { "TestPanel" } - fn position(&self, _: &gpui2::WindowContext) -> super::DockPosition { + fn position(&self, _: &gpui::WindowContext) -> super::DockPosition { self.position } diff --git a/crates/workspace2/src/item.rs b/crates/workspace2/src/item.rs index c2d5c25781..15b387cbed 100644 --- a/crates/workspace2/src/item.rs +++ b/crates/workspace2/src/item.rs @@ -11,7 +11,7 @@ use client2::{ proto::{self, PeerId}, Client, }; -use gpui2::{ +use gpui::{ AnyElement, AnyView, AppContext, Entity, EntityId, EventEmitter, HighlightStyle, Model, Pixels, Point, Render, SharedString, Task, View, ViewContext, WeakView, WindowContext, }; @@ -212,7 +212,7 @@ pub trait ItemHandle: 'static + Send { &self, cx: &mut WindowContext, handler: Box, - ) -> gpui2::Subscription; + ) -> gpui::Subscription; fn tab_tooltip_text(&self, cx: &AppContext) -> Option; fn tab_description(&self, detail: usize, cx: &AppContext) -> Option; fn tab_content(&self, detail: Option, cx: &AppContext) -> AnyElement; @@ -256,7 +256,7 @@ pub trait ItemHandle: 'static + Send { &mut self, cx: &mut AppContext, callback: Box, - ) -> gpui2::Subscription; + ) -> gpui::Subscription; fn to_searchable_item_handle(&self, cx: &AppContext) -> Option>; fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation; fn breadcrumbs(&self, theme: &ThemeVariant, cx: &AppContext) -> Option>; @@ -286,7 +286,7 @@ impl ItemHandle for View { &self, cx: &mut WindowContext, handler: Box, - ) -> gpui2::Subscription { + ) -> gpui::Subscription { cx.subscribe(self, move |_, event, cx| { for item_event in T::to_item_events(event) { handler(item_event, cx) @@ -573,7 +573,7 @@ impl ItemHandle for View { &mut self, cx: &mut AppContext, callback: Box, - ) -> gpui2::Subscription { + ) -> gpui::Subscription { cx.observe_release(self, move |_, cx| callback(cx)) } @@ -747,7 +747,7 @@ impl FollowableItemHandle for View { // pub mod test { // use super::{Item, ItemEvent}; // use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId}; -// use gpui2::{ +// use gpui::{ // elements::Empty, AnyElement, AppContext, Element, Entity, Model, Task, View, // ViewContext, View, WeakViewHandle, // }; diff --git a/crates/workspace2/src/notifications.rs b/crates/workspace2/src/notifications.rs index 9922bcdd26..0e0b291926 100644 --- a/crates/workspace2/src/notifications.rs +++ b/crates/workspace2/src/notifications.rs @@ -1,6 +1,6 @@ use crate::{Toast, Workspace}; use collections::HashMap; -use gpui2::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext}; +use gpui::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext}; use std::{any::TypeId, ops::DerefMut}; pub fn init(cx: &mut AppContext) { @@ -160,7 +160,7 @@ impl Workspace { pub mod simple_message_notification { use super::Notification; - use gpui2::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext}; + use gpui::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext}; use serde::Deserialize; use std::{borrow::Cow, sync::Arc}; @@ -265,7 +265,7 @@ pub mod simple_message_notification { // "MessageNotification" // } - // fn render(&mut self, cx: &mut gpui2::ViewContext) -> gpui::AnyElement { + // fn render(&mut self, cx: &mut gpui::ViewContext) -> gpui::AnyElement { // let theme = 
theme2::current(cx).clone(); // let theme = &theme.simple_message_notification; diff --git a/crates/workspace2/src/pane.rs b/crates/workspace2/src/pane.rs index b30ec0b7f8..7c27b8158b 100644 --- a/crates/workspace2/src/pane.rs +++ b/crates/workspace2/src/pane.rs @@ -8,7 +8,7 @@ use crate::{ }; use anyhow::Result; use collections::{HashMap, HashSet, VecDeque}; -use gpui2::{ +use gpui::{ AppContext, AsyncWindowContext, Component, Div, EntityId, EventEmitter, Model, PromptLevel, Render, Task, View, ViewContext, VisualContext, WeakView, WindowContext, }; @@ -2907,6 +2907,6 @@ impl Render for DraggedTab { type Element = Div; fn render(&mut self, cx: &mut ViewContext) -> Self::Element { - div().w_8().h_4().bg(gpui2::red()) + div().w_8().h_4().bg(gpui::red()) } } diff --git a/crates/workspace2/src/pane/dragged_item_receiver.rs b/crates/workspace2/src/pane/dragged_item_receiver.rs index 292529e787..d8e967dd75 100644 --- a/crates/workspace2/src/pane/dragged_item_receiver.rs +++ b/crates/workspace2/src/pane/dragged_item_receiver.rs @@ -1,6 +1,6 @@ use super::DraggedItem; use crate::{Pane, SplitDirection, Workspace}; -use gpui2::{ +use gpui::{ color::Color, elements::{Canvas, MouseEventHandler, ParentElement, Stack}, geometry::{rect::RectF, vector::Vector2F}, diff --git a/crates/workspace2/src/pane_group.rs b/crates/workspace2/src/pane_group.rs index e521c51bda..441aef21f5 100644 --- a/crates/workspace2/src/pane_group.rs +++ b/crates/workspace2/src/pane_group.rs @@ -6,9 +6,7 @@ use db2::sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, }; -use gpui2::{ - point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext, -}; +use gpui::{point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext}; use parking_lot::Mutex; use project2::Project; use serde::Deserialize; diff --git a/crates/workspace2/src/persistence.rs b/crates/workspace2/src/persistence.rs index 435518271d..9790495087 100644 --- a/crates/workspace2/src/persistence.rs +++ b/crates/workspace2/src/persistence.rs @@ -6,7 +6,7 @@ use std::path::Path; use anyhow::{anyhow, bail, Context, Result}; use db2::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql}; -use gpui2::WindowBounds; +use gpui::WindowBounds; use util::{unzip_option, ResultExt}; use uuid::Uuid; @@ -549,425 +549,425 @@ impl WorkspaceDb { } } -// todo!() -// #[cfg(test)] -// mod tests { -// use super::*; -// use db::open_test_db; +#[cfg(test)] +mod tests { + use super::*; + use db2::open_test_db; + use gpui; -// #[gpui::test] -// async fn test_next_id_stability() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_next_id_stability() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("test_next_id_stability").await); + let db = WorkspaceDb(open_test_db("test_next_id_stability").await); -// db.write(|conn| { -// conn.migrate( -// "test_table", -// &[sql!( -// CREATE TABLE test_table( -// text TEXT, -// workspace_id INTEGER, -// FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) -// ON DELETE CASCADE -// ) STRICT; -// )], -// ) -// .unwrap(); -// }) -// .await; + db.write(|conn| { + conn.migrate( + "test_table", + &[sql!( + CREATE TABLE test_table( + text TEXT, + workspace_id INTEGER, + FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT; + )], + ) + .unwrap(); + }) + .await; -// let id = db.next_id().await.unwrap(); -// // Assert the empty row got inserted -// assert_eq!( -// Some(id), -// 
db.select_row_bound::(sql!( -// SELECT workspace_id FROM workspaces WHERE workspace_id = ? -// )) -// .unwrap()(id) -// .unwrap() -// ); + let id = db.next_id().await.unwrap(); + // Assert the empty row got inserted + assert_eq!( + Some(id), + db.select_row_bound::(sql!( + SELECT workspace_id FROM workspaces WHERE workspace_id = ? + )) + .unwrap()(id) + .unwrap() + ); -// db.write(move |conn| { -// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) -// .unwrap()(("test-text-1", id)) -// .unwrap() -// }) -// .await; + db.write(move |conn| { + conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) + .unwrap()(("test-text-1", id)) + .unwrap() + }) + .await; -// let test_text_1 = db -// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) -// .unwrap()(1) -// .unwrap() -// .unwrap(); -// assert_eq!(test_text_1, "test-text-1"); -// } + let test_text_1 = db + .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) + .unwrap()(1) + .unwrap() + .unwrap(); + assert_eq!(test_text_1, "test-text-1"); + } -// #[gpui::test] -// async fn test_workspace_id_stability() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_workspace_id_stability() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await); + let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await); -// db.write(|conn| { -// conn.migrate( -// "test_table", -// &[sql!( -// CREATE TABLE test_table( -// text TEXT, -// workspace_id INTEGER, -// FOREIGN KEY(workspace_id) -// REFERENCES workspaces(workspace_id) -// ON DELETE CASCADE -// ) STRICT;)], -// ) -// }) -// .await -// .unwrap(); + db.write(|conn| { + conn.migrate( + "test_table", + &[sql!( + CREATE TABLE test_table( + text TEXT, + workspace_id INTEGER, + FOREIGN KEY(workspace_id) + REFERENCES workspaces(workspace_id) + ON DELETE CASCADE + ) STRICT;)], + ) + }) + .await + .unwrap(); -// let mut workspace_1 = SerializedWorkspace { -// id: 1, -// location: (["/tmp", "/tmp2"]).into(), -// center_group: Default::default(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + let mut workspace_1 = SerializedWorkspace { + id: 1, + location: (["/tmp", "/tmp2"]).into(), + center_group: Default::default(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// let workspace_2 = SerializedWorkspace { -// id: 2, -// location: (["/tmp"]).into(), -// center_group: Default::default(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + let workspace_2 = SerializedWorkspace { + id: 2, + location: (["/tmp"]).into(), + center_group: Default::default(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// db.save_workspace(workspace_1.clone()).await; + db.save_workspace(workspace_1.clone()).await; -// db.write(|conn| { -// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) -// .unwrap()(("test-text-1", 1)) -// .unwrap(); -// }) -// .await; + db.write(|conn| { + conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) + .unwrap()(("test-text-1", 1)) + .unwrap(); + }) + .await; -// db.save_workspace(workspace_2.clone()).await; + db.save_workspace(workspace_2.clone()).await; -// db.write(|conn| { -// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES 
(?, ?))) -// .unwrap()(("test-text-2", 2)) -// .unwrap(); -// }) -// .await; + db.write(|conn| { + conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?))) + .unwrap()(("test-text-2", 2)) + .unwrap(); + }) + .await; -// workspace_1.location = (["/tmp", "/tmp3"]).into(); -// db.save_workspace(workspace_1.clone()).await; -// db.save_workspace(workspace_1).await; -// db.save_workspace(workspace_2).await; + workspace_1.location = (["/tmp", "/tmp3"]).into(); + db.save_workspace(workspace_1.clone()).await; + db.save_workspace(workspace_1).await; + db.save_workspace(workspace_2).await; -// let test_text_2 = db -// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) -// .unwrap()(2) -// .unwrap() -// .unwrap(); -// assert_eq!(test_text_2, "test-text-2"); + let test_text_2 = db + .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) + .unwrap()(2) + .unwrap() + .unwrap(); + assert_eq!(test_text_2, "test-text-2"); -// let test_text_1 = db -// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) -// .unwrap()(1) -// .unwrap() -// .unwrap(); -// assert_eq!(test_text_1, "test-text-1"); -// } + let test_text_1 = db + .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?)) + .unwrap()(1) + .unwrap() + .unwrap(); + assert_eq!(test_text_1, "test-text-1"); + } -// fn group(axis: gpui::Axis, children: Vec) -> SerializedPaneGroup { -// SerializedPaneGroup::Group { -// axis, -// flexes: None, -// children, -// } -// } + fn group(axis: Axis, children: Vec) -> SerializedPaneGroup { + SerializedPaneGroup::Group { + axis, + flexes: None, + children, + } + } -// #[gpui::test] -// async fn test_full_workspace_serialization() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_full_workspace_serialization() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await); + let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await); -// // ----------------- -// // | 1,2 | 5,6 | -// // | - - - | | -// // | 3,4 | | -// // ----------------- -// let center_group = group( -// gpui::Axis::Horizontal, -// vec![ -// group( -// gpui::Axis::Vertical, -// vec![ -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 5, false), -// SerializedItem::new("Terminal", 6, true), -// ], -// false, -// )), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 7, true), -// SerializedItem::new("Terminal", 8, false), -// ], -// false, -// )), -// ], -// ), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 9, false), -// SerializedItem::new("Terminal", 10, true), -// ], -// false, -// )), -// ], -// ); + // ----------------- + // | 1,2 | 5,6 | + // | - - - | | + // | 3,4 | | + // ----------------- + let center_group = group( + Axis::Horizontal, + vec![ + group( + Axis::Vertical, + vec![ + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 5, false), + SerializedItem::new("Terminal", 6, true), + ], + false, + )), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 7, true), + SerializedItem::new("Terminal", 8, false), + ], + false, + )), + ], + ), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 9, false), + SerializedItem::new("Terminal", 10, true), + 
], + false, + )), + ], + ); -// let workspace = SerializedWorkspace { -// id: 5, -// location: (["/tmp", "/tmp2"]).into(), -// center_group, -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + let workspace = SerializedWorkspace { + id: 5, + location: (["/tmp", "/tmp2"]).into(), + center_group, + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// db.save_workspace(workspace.clone()).await; -// let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]); + db.save_workspace(workspace.clone()).await; + let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]); -// assert_eq!(workspace, round_trip_workspace.unwrap()); + assert_eq!(workspace, round_trip_workspace.unwrap()); -// // Test guaranteed duplicate IDs -// db.save_workspace(workspace.clone()).await; -// db.save_workspace(workspace.clone()).await; + // Test guaranteed duplicate IDs + db.save_workspace(workspace.clone()).await; + db.save_workspace(workspace.clone()).await; -// let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]); -// assert_eq!(workspace, round_trip_workspace.unwrap()); -// } + let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]); + assert_eq!(workspace, round_trip_workspace.unwrap()); + } -// #[gpui::test] -// async fn test_workspace_assignment() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_workspace_assignment() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("test_basic_functionality").await); + let db = WorkspaceDb(open_test_db("test_basic_functionality").await); -// let workspace_1 = SerializedWorkspace { -// id: 1, -// location: (["/tmp", "/tmp2"]).into(), -// center_group: Default::default(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + let workspace_1 = SerializedWorkspace { + id: 1, + location: (["/tmp", "/tmp2"]).into(), + center_group: Default::default(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// let mut workspace_2 = SerializedWorkspace { -// id: 2, -// location: (["/tmp"]).into(), -// center_group: Default::default(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + let mut workspace_2 = SerializedWorkspace { + id: 2, + location: (["/tmp"]).into(), + center_group: Default::default(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// db.save_workspace(workspace_1.clone()).await; -// db.save_workspace(workspace_2.clone()).await; + db.save_workspace(workspace_1.clone()).await; + db.save_workspace(workspace_2.clone()).await; -// // Test that paths are treated as a set -// assert_eq!( -// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), -// workspace_1 -// ); -// assert_eq!( -// db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(), -// workspace_1 -// ); + // Test that paths are treated as a set + assert_eq!( + db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), + workspace_1 + ); + assert_eq!( + db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(), + workspace_1 + ); -// // Make sure that other keys work -// assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2); -// assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None); + // Make sure that other keys work + assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2); + 
assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None); -// // Test 'mutate' case of updating a pre-existing id -// workspace_2.location = (["/tmp", "/tmp2"]).into(); + // Test 'mutate' case of updating a pre-existing id + workspace_2.location = (["/tmp", "/tmp2"]).into(); -// db.save_workspace(workspace_2.clone()).await; -// assert_eq!( -// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), -// workspace_2 -// ); + db.save_workspace(workspace_2.clone()).await; + assert_eq!( + db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), + workspace_2 + ); -// // Test other mechanism for mutating -// let mut workspace_3 = SerializedWorkspace { -// id: 3, -// location: (&["/tmp", "/tmp2"]).into(), -// center_group: Default::default(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// }; + // Test other mechanism for mutating + let mut workspace_3 = SerializedWorkspace { + id: 3, + location: (&["/tmp", "/tmp2"]).into(), + center_group: Default::default(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + }; -// db.save_workspace(workspace_3.clone()).await; -// assert_eq!( -// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), -// workspace_3 -// ); + db.save_workspace(workspace_3.clone()).await; + assert_eq!( + db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(), + workspace_3 + ); -// // Make sure that updating paths differently also works -// workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into(); -// db.save_workspace(workspace_3.clone()).await; -// assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None); -// assert_eq!( -// db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"]) -// .unwrap(), -// workspace_3 -// ); -// } + // Make sure that updating paths differently also works + workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into(); + db.save_workspace(workspace_3.clone()).await; + assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None); + assert_eq!( + db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"]) + .unwrap(), + workspace_3 + ); + } -// use crate::persistence::model::SerializedWorkspace; -// use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup}; + use crate::persistence::model::SerializedWorkspace; + use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup}; -// fn default_workspace>( -// workspace_id: &[P], -// center_group: &SerializedPaneGroup, -// ) -> SerializedWorkspace { -// SerializedWorkspace { -// id: 4, -// location: workspace_id.into(), -// center_group: center_group.clone(), -// bounds: Default::default(), -// display: Default::default(), -// docks: Default::default(), -// } -// } + fn default_workspace>( + workspace_id: &[P], + center_group: &SerializedPaneGroup, + ) -> SerializedWorkspace { + SerializedWorkspace { + id: 4, + location: workspace_id.into(), + center_group: center_group.clone(), + bounds: Default::default(), + display: Default::default(), + docks: Default::default(), + } + } -// #[gpui::test] -// async fn test_simple_split() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_simple_split() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("simple_split").await); + let db = WorkspaceDb(open_test_db("simple_split").await); -// // ----------------- -// // | 1,2 | 5,6 | -// // | - - - | | -// // | 3,4 | | -// // ----------------- -// let center_pane = group( -// gpui::Axis::Horizontal, -// vec![ -// group( -// gpui::Axis::Vertical, -// vec![ 
-// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 1, false), -// SerializedItem::new("Terminal", 2, true), -// ], -// false, -// )), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 4, false), -// SerializedItem::new("Terminal", 3, true), -// ], -// true, -// )), -// ], -// ), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 5, true), -// SerializedItem::new("Terminal", 6, false), -// ], -// false, -// )), -// ], -// ); + // ----------------- + // | 1,2 | 5,6 | + // | - - - | | + // | 3,4 | | + // ----------------- + let center_pane = group( + Axis::Horizontal, + vec![ + group( + Axis::Vertical, + vec![ + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 1, false), + SerializedItem::new("Terminal", 2, true), + ], + false, + )), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 4, false), + SerializedItem::new("Terminal", 3, true), + ], + true, + )), + ], + ), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 5, true), + SerializedItem::new("Terminal", 6, false), + ], + false, + )), + ], + ); -// let workspace = default_workspace(&["/tmp"], ¢er_pane); + let workspace = default_workspace(&["/tmp"], ¢er_pane); -// db.save_workspace(workspace.clone()).await; + db.save_workspace(workspace.clone()).await; -// let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap(); + let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap(); -// assert_eq!(workspace.center_group, new_workspace.center_group); -// } + assert_eq!(workspace.center_group, new_workspace.center_group); + } -// #[gpui::test] -// async fn test_cleanup_panes() { -// env_logger::try_init().ok(); + #[gpui::test] + async fn test_cleanup_panes() { + env_logger::try_init().ok(); -// let db = WorkspaceDb(open_test_db("test_cleanup_panes").await); + let db = WorkspaceDb(open_test_db("test_cleanup_panes").await); -// let center_pane = group( -// gpui::Axis::Horizontal, -// vec![ -// group( -// gpui::Axis::Vertical, -// vec![ -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 1, false), -// SerializedItem::new("Terminal", 2, true), -// ], -// false, -// )), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 4, false), -// SerializedItem::new("Terminal", 3, true), -// ], -// true, -// )), -// ], -// ), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 5, false), -// SerializedItem::new("Terminal", 6, true), -// ], -// false, -// )), -// ], -// ); + let center_pane = group( + Axis::Horizontal, + vec![ + group( + Axis::Vertical, + vec![ + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 1, false), + SerializedItem::new("Terminal", 2, true), + ], + false, + )), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 4, false), + SerializedItem::new("Terminal", 3, true), + ], + true, + )), + ], + ), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 5, false), + SerializedItem::new("Terminal", 6, true), + ], + false, + )), + ], + ); -// let id = &["/tmp"]; + let id = &["/tmp"]; -// let mut workspace = default_workspace(id, ¢er_pane); + let mut workspace = default_workspace(id, ¢er_pane); -// 
db.save_workspace(workspace.clone()).await; + db.save_workspace(workspace.clone()).await; -// workspace.center_group = group( -// gpui::Axis::Vertical, -// vec![ -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 1, false), -// SerializedItem::new("Terminal", 2, true), -// ], -// false, -// )), -// SerializedPaneGroup::Pane(SerializedPane::new( -// vec![ -// SerializedItem::new("Terminal", 4, true), -// SerializedItem::new("Terminal", 3, false), -// ], -// true, -// )), -// ], -// ); + workspace.center_group = group( + Axis::Vertical, + vec![ + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 1, false), + SerializedItem::new("Terminal", 2, true), + ], + false, + )), + SerializedPaneGroup::Pane(SerializedPane::new( + vec![ + SerializedItem::new("Terminal", 4, true), + SerializedItem::new("Terminal", 3, false), + ], + true, + )), + ], + ); -// db.save_workspace(workspace.clone()).await; + db.save_workspace(workspace.clone()).await; -// let new_workspace = db.workspace_for_roots(id).unwrap(); + let new_workspace = db.workspace_for_roots(id).unwrap(); -// assert_eq!(workspace.center_group, new_workspace.center_group); -// } -// } + assert_eq!(workspace.center_group, new_workspace.center_group); + } +} diff --git a/crates/workspace2/src/persistence/model.rs b/crates/workspace2/src/persistence/model.rs index de4518f68e..2b8ec94bd4 100644 --- a/crates/workspace2/src/persistence/model.rs +++ b/crates/workspace2/src/persistence/model.rs @@ -7,7 +7,7 @@ use db2::sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, }; -use gpui2::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds}; +use gpui::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds}; use project2::Project; use std::{ path::{Path, PathBuf}, @@ -55,7 +55,7 @@ impl Column for WorkspaceLocation { } } -#[derive(PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone)] pub struct SerializedWorkspace { pub id: WorkspaceId, pub location: WorkspaceLocation, @@ -127,7 +127,7 @@ impl Bind for DockData { } } -#[derive(PartialEq, Clone)] +#[derive(Debug, PartialEq, Clone)] pub enum SerializedPaneGroup { Group { axis: Axis, @@ -286,15 +286,15 @@ pub struct SerializedItem { pub active: bool, } -// impl SerializedItem { -// pub fn new(kind: impl AsRef, item_id: ItemId, active: bool) -> Self { -// Self { -// kind: Arc::from(kind.as_ref()), -// item_id, -// active, -// } -// } -// } +impl SerializedItem { + pub fn new(kind: impl AsRef, item_id: ItemId, active: bool) -> Self { + Self { + kind: Arc::from(kind.as_ref()), + item_id, + active, + } + } +} #[cfg(test)] impl Default for SerializedItem { diff --git a/crates/workspace2/src/searchable.rs b/crates/workspace2/src/searchable.rs index 3935423635..2b870c2944 100644 --- a/crates/workspace2/src/searchable.rs +++ b/crates/workspace2/src/searchable.rs @@ -1,6 +1,6 @@ use std::{any::Any, sync::Arc}; -use gpui2::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext}; +use gpui::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext}; use project2::search::SearchQuery; use crate::{ diff --git a/crates/workspace2/src/status_bar.rs b/crates/workspace2/src/status_bar.rs index c2f78d9ad6..ca4ebcdb13 100644 --- a/crates/workspace2/src/status_bar.rs +++ b/crates/workspace2/src/status_bar.rs @@ -1,7 +1,7 @@ use std::any::TypeId; use crate::{ItemHandle, Pane}; -use gpui2::{ +use gpui::{ div, AnyView, Component, Div, ParentElement, Render, Styled, 
Subscription, View, ViewContext, WindowContext, }; diff --git a/crates/workspace2/src/toolbar.rs b/crates/workspace2/src/toolbar.rs index c3d1e520c7..80503ad7bb 100644 --- a/crates/workspace2/src/toolbar.rs +++ b/crates/workspace2/src/toolbar.rs @@ -1,5 +1,5 @@ use crate::ItemHandle; -use gpui2::{ +use gpui::{ AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext, WindowContext, }; diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index 3d9b86a051..a754daaffa 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -30,7 +30,7 @@ use futures::{ future::try_join_all, Future, FutureExt, StreamExt, }; -use gpui2::{ +use gpui::{ div, point, size, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext, AsyncWindowContext, Bounds, Component, Div, EntityId, EventEmitter, GlobalPixels, Model, ModelContext, ParentElement, Point, Render, Size, StatefulInteractive, Styled, Subscription, @@ -460,7 +460,7 @@ struct Follower { impl AppState { #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut AppContext) -> Arc { - use gpui2::Context; + use gpui::Context; use node_runtime::FakeNodeRuntime; use settings2::SettingsStore; diff --git a/crates/workspace2/src/workspace_settings.rs b/crates/workspace2/src/workspace_settings.rs index 4b93b705a3..c4d1bb41cd 100644 --- a/crates/workspace2/src/workspace_settings.rs +++ b/crates/workspace2/src/workspace_settings.rs @@ -49,7 +49,7 @@ impl Settings for WorkspaceSettings { fn load( default_value: &Self::FileContent, user_values: &[&Self::FileContent], - _: &mut gpui2::AppContext, + _: &mut gpui::AppContext, ) -> anyhow::Result { Self::load_via_json_merge(default_value, user_values) } diff --git a/crates/zed2/src/zed2.rs b/crates/zed2/src/zed2.rs index 4f28536085..713345b2ee 100644 --- a/crates/zed2/src/zed2.rs +++ b/crates/zed2/src/zed2.rs @@ -69,11 +69,10 @@ pub async fn handle_cli_connection( let mut caret_positions = HashMap::default(); let paths = if paths.is_empty() { - todo!() - // workspace::last_opened_workspace_paths() - // .await - // .map(|location| location.paths().to_vec()) - // .unwrap_or_default() + workspace2::last_opened_workspace_paths() + .await + .map(|location| location.paths().to_vec()) + .unwrap_or_default() } else { paths .into_iter() @@ -260,33 +259,33 @@ pub fn initialize_workspace( move |workspace, _, event, cx| { if let workspace2::Event::PaneAdded(pane) = event { pane.update(cx, |pane, cx| { - // todo!() - // pane.toolbar().update(cx, |toolbar, cx| { - // let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace)); - // toolbar.add_item(breadcrumbs, cx); - // let buffer_search_bar = cx.add_view(BufferSearchBar::new); - // toolbar.add_item(buffer_search_bar.clone(), cx); - // let quick_action_bar = cx.add_view(|_| { - // QuickActionBar::new(buffer_search_bar, workspace) - // }); - // toolbar.add_item(quick_action_bar, cx); - // let diagnostic_editor_controls = - // cx.add_view(|_| diagnostics2::ToolbarControls::new()); - // toolbar.add_item(diagnostic_editor_controls, cx); - // let project_search_bar = cx.add_view(|_| ProjectSearchBar::new()); - // toolbar.add_item(project_search_bar, cx); - // let submit_feedback_button = - // cx.add_view(|_| SubmitFeedbackButton::new()); - // toolbar.add_item(submit_feedback_button, cx); - // let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new()); - // toolbar.add_item(feedback_info_text, cx); - // let lsp_log_item = - // cx.add_view(|_| 
language_tools::LspLogToolbarItemView::new()); - // toolbar.add_item(lsp_log_item, cx); - // let syntax_tree_item = cx - // .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new()); - // toolbar.add_item(syntax_tree_item, cx); - // }) + pane.toolbar().update(cx, |toolbar, cx| { + // todo!() + // let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace)); + // toolbar.add_item(breadcrumbs, cx); + // let buffer_search_bar = cx.add_view(BufferSearchBar::new); + // toolbar.add_item(buffer_search_bar.clone(), cx); + // let quick_action_bar = cx.add_view(|_| { + // QuickActionBar::new(buffer_search_bar, workspace) + // }); + // toolbar.add_item(quick_action_bar, cx); + // let diagnostic_editor_controls = + // cx.add_view(|_| diagnostics2::ToolbarControls::new()); + // toolbar.add_item(diagnostic_editor_controls, cx); + // let project_search_bar = cx.add_view(|_| ProjectSearchBar::new()); + // toolbar.add_item(project_search_bar, cx); + // let submit_feedback_button = + // cx.add_view(|_| SubmitFeedbackButton::new()); + // toolbar.add_item(submit_feedback_button, cx); + // let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new()); + // toolbar.add_item(feedback_info_text, cx); + // let lsp_log_item = + // cx.add_view(|_| language_tools::LspLogToolbarItemView::new()); + // toolbar.add_item(lsp_log_item, cx); + // let syntax_tree_item = cx + // .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new()); + // toolbar.add_item(syntax_tree_item, cx); + }) }); } } From f1fc07de946acdd067be53b0eed11443c69c5717 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 3 Nov 2023 12:55:06 +0200 Subject: [PATCH 14/15] Move journal2 to workspace2 --- Cargo.lock | 2 +- crates/journal2/Cargo.toml | 2 +- crates/journal2/src/journal2.rs | 63 +++++++++++----------- crates/zed2/src/main.rs | 89 ++++++++++++++++---------------- crates/zed2/src/only_instance.rs | 7 ++- 5 files changed, 82 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 17d19258e4..ea80c92376 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4232,7 +4232,7 @@ dependencies = [ "settings2", "shellexpand", "util", - "workspace", + "workspace2", ] [[package]] diff --git a/crates/journal2/Cargo.toml b/crates/journal2/Cargo.toml index f43d90fc85..72da3deb69 100644 --- a/crates/journal2/Cargo.toml +++ b/crates/journal2/Cargo.toml @@ -12,7 +12,7 @@ doctest = false editor = { path = "../editor" } gpui = { package = "gpui2", path = "../gpui2" } util = { path = "../util" } -workspace = { path = "../workspace" } +workspace2 = { path = "../workspace2" } settings2 = { path = "../settings2" } anyhow.workspace = true diff --git a/crates/journal2/src/journal2.rs b/crates/journal2/src/journal2.rs index fa6e05cca7..20d520e36e 100644 --- a/crates/journal2/src/journal2.rs +++ b/crates/journal2/src/journal2.rs @@ -9,7 +9,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use workspace::AppState; +use workspace2::AppState; // use zed::AppState; // todo!(); @@ -59,7 +59,7 @@ pub fn init(_: Arc, cx: &mut AppContext) { // cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx)); } -pub fn new_journal_entry(_: Arc, cx: &mut AppContext) { +pub fn new_journal_entry(app_state: Arc, cx: &mut AppContext) { let settings = JournalSettings::get_global(cx); let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) { Some(journal_dir) => journal_dir, @@ -77,7 +77,7 @@ pub fn new_journal_entry(_: Arc, cx: &mut AppContext) { let now = now.time(); let _entry_heading = heading_entry(now, 
&settings.hour_format); - let _create_entry = cx.background_executor().spawn(async move { + let create_entry = cx.background_executor().spawn(async move { std::fs::create_dir_all(month_dir)?; OpenOptions::new() .create(true) @@ -86,37 +86,38 @@ pub fn new_journal_entry(_: Arc, cx: &mut AppContext) { Ok::<_, std::io::Error>((journal_dir, entry_path)) }); - // todo!("workspace") - // cx.spawn(|cx| async move { - // let (journal_dir, entry_path) = create_entry.await?; - // let (workspace, _) = - // cx.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?; + cx.spawn(|mut cx| async move { + let (journal_dir, entry_path) = create_entry.await?; + let (workspace, _) = cx + .update(|cx| workspace2::open_paths(&[journal_dir], &app_state, None, cx))? + .await?; - // let opened = workspace - // .update(&mut cx, |workspace, cx| { - // workspace.open_paths(vec![entry_path], true, cx) - // })? - // .await; + let _opened = workspace + .update(&mut cx, |workspace, cx| { + workspace.open_paths(vec![entry_path], true, cx) + })? + .await; - // if let Some(Some(Ok(item))) = opened.first() { - // if let Some(editor) = item.downcast::().map(|editor| editor.downgrade()) { - // editor.update(&mut cx, |editor, cx| { - // let len = editor.buffer().read(cx).len(cx); - // editor.change_selections(Some(Autoscroll::center()), cx, |s| { - // s.select_ranges([len..len]) - // }); - // if len > 0 { - // editor.insert("\n\n", cx); - // } - // editor.insert(&entry_heading, cx); - // editor.insert("\n\n", cx); - // })?; - // } - // } + // todo!("editor") + // if let Some(Some(Ok(item))) = opened.first() { + // if let Some(editor) = item.downcast::().map(|editor| editor.downgrade()) { + // editor.update(&mut cx, |editor, cx| { + // let len = editor.buffer().read(cx).len(cx); + // editor.change_selections(Some(Autoscroll::center()), cx, |s| { + // s.select_ranges([len..len]) + // }); + // if len > 0 { + // editor.insert("\n\n", cx); + // } + // editor.insert(&entry_heading, cx); + // editor.insert("\n\n", cx); + // })?; + // } + // } - // anyhow::Ok(()) - // }) - // .detach_and_log_err(cx); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); } fn journal_dir(path: &str) -> Option { diff --git a/crates/zed2/src/main.rs b/crates/zed2/src/main.rs index f8b77fe9df..6522d97fdc 100644 --- a/crates/zed2/src/main.rs +++ b/crates/zed2/src/main.rs @@ -12,6 +12,7 @@ use cli::{ CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME, }; use client::UserStore; +use collections::HashMap; use db::kvp::KEY_VALUE_STORE; use fs::RealFs; use futures::{channel::mpsc, SinkExt, StreamExt}; @@ -42,11 +43,13 @@ use std::{ thread, time::{SystemTime, UNIX_EPOCH}, }; +use text::Point; use util::{ async_maybe, channel::{parse_zed_link, ReleaseChannel, RELEASE_CHANNEL}, http::{self, HttpClient}, - paths, ResultExt, + paths::{self, PathLikeWithPosition}, + ResultExt, }; use uuid::Uuid; use workspace2::{AppState, WorkspaceStore}; @@ -228,10 +231,8 @@ fn main() { let mut _triggered_authentication = false; match open_rx.try_next() { - Ok(Some(OpenRequest::Paths { paths: _ })) => { - // todo!("workspace") - // cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx)) - // .detach(); + Ok(Some(OpenRequest::Paths { paths })) => { + workspace2::open_paths(&paths, &app_state, None, cx).detach(); } Ok(Some(OpenRequest::CliConnection { connection })) => { let app_state = app_state.clone(); @@ -263,10 +264,10 @@ fn main() { async move { while let Some(request) = open_rx.next().await { match request { - OpenRequest::Paths { paths: 
_ } => { - // todo!("workspace") - // cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx)) - // .detach(); + OpenRequest::Paths { paths } => { + cx.update(|cx| workspace2::open_paths(&paths, &app_state, None, cx)) + .ok() + .map(|t| t.detach()); } OpenRequest::CliConnection { connection } => { let app_state = app_state.clone(); @@ -781,45 +782,45 @@ async fn handle_cli_connection( ) { if let Some(request) = requests.next().await { match request { - CliRequest::Open { paths: _, wait: _ } => { - // let mut caret_positions = HashMap::new(); + CliRequest::Open { paths, wait } => { + let mut caret_positions = HashMap::default(); - // todo!("workspace") - // let paths = if paths.is_empty() { - // workspace::last_opened_workspace_paths() - // .await - // .map(|location| location.paths().to_vec()) - // .unwrap_or_default() - // } else { - // paths - // .into_iter() - // .filter_map(|path_with_position_string| { - // let path_with_position = PathLikeWithPosition::parse_str( - // &path_with_position_string, - // |path_str| { - // Ok::<_, std::convert::Infallible>( - // Path::new(path_str).to_path_buf(), - // ) - // }, - // ) - // .expect("Infallible"); - // let path = path_with_position.path_like; - // if let Some(row) = path_with_position.row { - // if path.is_file() { - // let row = row.saturating_sub(1); - // let col = - // path_with_position.column.unwrap_or(0).saturating_sub(1); - // caret_positions.insert(path.clone(), Point::new(row, col)); - // } - // } - // Some(path) - // }) - // .collect() - // }; + let paths = if paths.is_empty() { + workspace2::last_opened_workspace_paths() + .await + .map(|location| location.paths().to_vec()) + .unwrap_or_default() + } else { + paths + .into_iter() + .filter_map(|path_with_position_string| { + let path_with_position = PathLikeWithPosition::parse_str( + &path_with_position_string, + |path_str| { + Ok::<_, std::convert::Infallible>( + Path::new(path_str).to_path_buf(), + ) + }, + ) + .expect("Infallible"); + let path = path_with_position.path_like; + if let Some(row) = path_with_position.row { + if path.is_file() { + let row = row.saturating_sub(1); + let col = + path_with_position.column.unwrap_or(0).saturating_sub(1); + caret_positions.insert(path.clone(), Point::new(row, col)); + } + } + Some(path) + }) + .collect() + }; + // todo!("editor") // let mut errored = false; // match cx - // .update(|cx| workspace::open_paths(&paths, &app_state, None, cx)) + // .update(|cx| workspace2::open_paths(&paths, &app_state, None, cx)) // .await // { // Ok((workspace, items)) => { diff --git a/crates/zed2/src/only_instance.rs b/crates/zed2/src/only_instance.rs index b252f72ce5..a8c4b30816 100644 --- a/crates/zed2/src/only_instance.rs +++ b/crates/zed2/src/only_instance.rs @@ -37,10 +37,9 @@ pub enum IsOnlyInstance { } pub fn ensure_only_instance() -> IsOnlyInstance { - // todo!("zed_stateless") - // if *db::ZED_STATELESS { - // return IsOnlyInstance::Yes; - // } + if *db::ZED_STATELESS { + return IsOnlyInstance::Yes; + } if check_got_handshake() { return IsOnlyInstance::No; From 1a0cd3e09b59a454fdc0c6a9a26b6654c6d0aefe Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 3 Nov 2023 13:20:34 +0200 Subject: [PATCH 15/15] Remove and add more todosmerge . 
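
A minimal usage sketch of the `Toast` builder API that the hunks below re-enable,
assuming the `workspace2`/`gpui` crate paths of this series and assuming a
`Workspace::show_toast` method like the one in the pre-gpui2 `workspace` crate
(that method is not shown in this diff):

    use gpui::ViewContext;
    use workspace2::{Toast, Workspace};

    // Build a toast with the re-enabled constructor and click handler, then
    // hand it to the workspace to display. The id (0) only needs to be unique
    // per notification source.
    fn notify_saved(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
        let toast = Toast::new(0, "File saved").on_click("Show log", |_window_cx| {
            // Placeholder handler; a real one would dispatch an action or open
            // a view through the &mut WindowContext it receives.
            log::info!("toast clicked");
        });
        workspace.show_toast(toast, cx); // assumed API, mirroring crates/workspace
    }
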
--- crates/workspace2/src/dock.rs | 209 ++++---- crates/workspace2/src/notifications.rs | 58 +-- crates/workspace2/src/pane.rs | 26 +- crates/workspace2/src/workspace2.rs | 640 ++++++++++++------------- crates/zed2/src/zed2.rs | 2 +- 5 files changed, 468 insertions(+), 467 deletions(-) diff --git a/crates/workspace2/src/dock.rs b/crates/workspace2/src/dock.rs index 9ade6278bb..e6b6c7561d 100644 --- a/crates/workspace2/src/dock.rs +++ b/crates/workspace2/src/dock.rs @@ -226,9 +226,9 @@ impl Dock { // }) } - // pub fn active_panel_index(&self) -> usize { - // self.active_panel_index - // } + pub fn active_panel_index(&self) -> usize { + self.active_panel_index + } pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext) { if open != self.is_open { @@ -241,84 +241,87 @@ impl Dock { } } - // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { - // for entry in &mut self.panel_entries { - // if entry.panel.as_any() == panel { - // if zoomed != entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(zoomed, cx); - // } - // } else if entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(false, cx); - // } - // } - - // cx.notify(); - // } - - // pub fn zoom_out(&mut self, cx: &mut ViewContext) { - // for entry in &mut self.panel_entries { - // if entry.panel.is_zoomed(cx) { - // entry.panel.set_zoomed(false, cx); + // todo!() + // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext) { + // for entry in &mut self.panel_entries { + // if entry.panel.as_any() == panel { + // if zoomed != entry.panel.is_zoomed(cx) { + // entry.panel.set_zoomed(zoomed, cx); // } + // } else if entry.panel.is_zoomed(cx) { + // entry.panel.set_zoomed(false, cx); // } // } - // pub(crate) fn add_panel(&mut self, panel: View, cx: &mut ViewContext) { - // let subscriptions = [ - // cx.observe(&panel, |_, _, cx| cx.notify()), - // cx.subscribe(&panel, |this, panel, event, cx| { - // if T::should_activate_on_event(event) { - // if let Some(ix) = this - // .panel_entries - // .iter() - // .position(|entry| entry.panel.id() == panel.id()) - // { - // this.set_open(true, cx); - // this.activate_panel(ix, cx); - // cx.focus(&panel); - // } - // } else if T::should_close_on_event(event) - // && this.visible_panel().map_or(false, |p| p.id() == panel.id()) - // { - // this.set_open(false, cx); - // } - // }), - // ]; + // cx.notify(); + // } - // let dock_view_id = cx.view_id(); - // self.panel_entries.push(PanelEntry { - // panel: Arc::new(panel), - // // todo!() - // // context_menu: cx.add_view(|cx| { - // // let mut menu = ContextMenu::new(dock_view_id, cx); - // // menu.set_position_mode(OverlayPositionMode::Local); - // // menu - // // }), - // _subscriptions: subscriptions, - // }); - // cx.notify() - // } + pub fn zoom_out(&mut self, cx: &mut ViewContext) { + for entry in &mut self.panel_entries { + if entry.panel.is_zoomed(cx) { + entry.panel.set_zoomed(false, cx); + } + } + } - // pub fn remove_panel(&mut self, panel: &View, cx: &mut ViewContext) { - // if let Some(panel_ix) = self - // .panel_entries - // .iter() - // .position(|entry| entry.panel.id() == panel.id()) - // { - // if panel_ix == self.active_panel_index { - // self.active_panel_index = 0; - // self.set_open(false, cx); - // } else if panel_ix < self.active_panel_index { - // self.active_panel_index -= 1; - // } - // self.panel_entries.remove(panel_ix); - // cx.notify(); - // } - // } + pub(crate) fn add_panel(&mut self, panel: View, cx: &mut ViewContext) { + let 
subscriptions = [ + cx.observe(&panel, |_, _, cx| cx.notify()), + cx.subscribe(&panel, |this, panel, event, cx| { + if T::should_activate_on_event(event) { + if let Some(ix) = this + .panel_entries + .iter() + .position(|entry| entry.panel.id() == panel.id()) + { + this.set_open(true, cx); + this.activate_panel(ix, cx); + // todo!() + // cx.focus(&panel); + } + } else if T::should_close_on_event(event) + && this.visible_panel().map_or(false, |p| p.id() == panel.id()) + { + this.set_open(false, cx); + } + }), + ]; - // pub fn panels_len(&self) -> usize { - // self.panel_entries.len() - // } + // todo!() + // let dock_view_id = cx.view_id(); + self.panel_entries.push(PanelEntry { + panel: Arc::new(panel), + // todo!() + // context_menu: cx.add_view(|cx| { + // let mut menu = ContextMenu::new(dock_view_id, cx); + // menu.set_position_mode(OverlayPositionMode::Local); + // menu + // }), + _subscriptions: subscriptions, + }); + cx.notify() + } + + pub fn remove_panel(&mut self, panel: &View, cx: &mut ViewContext) { + if let Some(panel_ix) = self + .panel_entries + .iter() + .position(|entry| entry.panel.id() == panel.id()) + { + if panel_ix == self.active_panel_index { + self.active_panel_index = 0; + self.set_open(false, cx); + } else if panel_ix < self.active_panel_index { + self.active_panel_index -= 1; + } + self.panel_entries.remove(panel_ix); + cx.notify(); + } + } + + pub fn panels_len(&self) -> usize { + self.panel_entries.len() + } pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext) { if panel_ix != self.active_panel_index { @@ -352,38 +355,38 @@ impl Dock { } } - // pub fn zoomed_panel(&self, cx: &WindowContext) -> Option> { - // let entry = self.visible_entry()?; - // if entry.panel.is_zoomed(cx) { - // Some(entry.panel.clone()) - // } else { - // None - // } - // } + pub fn zoomed_panel(&self, cx: &WindowContext) -> Option> { + let entry = self.visible_entry()?; + if entry.panel.is_zoomed(cx) { + Some(entry.panel.clone()) + } else { + None + } + } - // pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option { - // self.panel_entries - // .iter() - // .find(|entry| entry.panel.id() == panel.id()) - // .map(|entry| entry.panel.size(cx)) - // } + pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option { + self.panel_entries + .iter() + .find(|entry| entry.panel.id() == panel.id()) + .map(|entry| entry.panel.size(cx)) + } - // pub fn active_panel_size(&self, cx: &WindowContext) -> Option { - // if self.is_open { - // self.panel_entries - // .get(self.active_panel_index) - // .map(|entry| entry.panel.size(cx)) - // } else { - // None - // } - // } + pub fn active_panel_size(&self, cx: &WindowContext) -> Option { + if self.is_open { + self.panel_entries + .get(self.active_panel_index) + .map(|entry| entry.panel.size(cx)) + } else { + None + } + } - // pub fn resize_active_panel(&mut self, size: Option, cx: &mut ViewContext) { - // if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) { - // entry.panel.set_size(size, cx); - // cx.notify(); - // } - // } + pub fn resize_active_panel(&mut self, size: Option, cx: &mut ViewContext) { + if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) { + entry.panel.set_size(size, cx); + cx.notify(); + } + } // pub fn render_placeholder(&self, cx: &WindowContext) -> AnyElement { // todo!() diff --git a/crates/workspace2/src/notifications.rs b/crates/workspace2/src/notifications.rs index 0e0b291926..5dd5b2c7ae 100644 --- 
a/crates/workspace2/src/notifications.rs +++ b/crates/workspace2/src/notifications.rs @@ -220,36 +220,36 @@ pub mod simple_message_notification { } } + pub fn new_element( + message: fn(TextStyle, &AppContext) -> AnyElement, + ) -> MessageNotification { + Self { + message: NotificationMessage::Element(message), + on_click: None, + click_message: None, + } + } + + pub fn with_click_message(mut self, message: S) -> Self + where + S: Into>, + { + self.click_message = Some(message.into()); + self + } + + pub fn on_click(mut self, on_click: F) -> Self + where + F: 'static + Send + Sync + Fn(&mut ViewContext), + { + self.on_click = Some(Arc::new(on_click)); + self + } + // todo!() - // pub fn new_element( - // message: fn(TextStyle, &AppContext) -> AnyElement, - // ) -> MessageNotification { - // Self { - // message: NotificationMessage::Element(message), - // on_click: None, - // click_message: None, - // } - // } - - // pub fn with_click_message(mut self, message: S) -> Self - // where - // S: Into>, - // { - // self.click_message = Some(message.into()); - // self - // } - - // pub fn on_click(mut self, on_click: F) -> Self - // where - // F: 'static + Fn(&mut ViewContext), - // { - // self.on_click = Some(Arc::new(on_click)); - // self - // } - - // pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext) { - // cx.emit(MessageNotificationEvent::Dismiss); - // } + // pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext) { + // cx.emit(MessageNotificationEvent::Dismiss); + // } } impl Render for MessageNotification { diff --git a/crates/workspace2/src/pane.rs b/crates/workspace2/src/pane.rs index 7c27b8158b..16dbfda361 100644 --- a/crates/workspace2/src/pane.rs +++ b/crates/workspace2/src/pane.rs @@ -416,17 +416,17 @@ impl Pane { } } - // pub(crate) fn workspace(&self) -> &WeakView { - // &self.workspace - // } + pub(crate) fn workspace(&self) -> &WeakView { + &self.workspace + } pub fn has_focus(&self) -> bool { self.has_focus } - // pub fn active_item_index(&self) -> usize { - // self.active_item_index - // } + pub fn active_item_index(&self) -> usize { + self.active_item_index + } // pub fn on_can_drop(&mut self, can_drop: F) // where @@ -1865,14 +1865,14 @@ impl Pane { // .into_any() // } - // pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext) { - // self.zoomed = zoomed; - // cx.notify(); - // } + pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext) { + self.zoomed = zoomed; + cx.notify(); + } - // pub fn is_zoomed(&self) -> bool { - // self.zoomed - // } + pub fn is_zoomed(&self) -> bool { + self.zoomed + } } // impl Entity for Pane { diff --git a/crates/workspace2/src/workspace2.rs b/crates/workspace2/src/workspace2.rs index a754daaffa..bb9cb7e527 100644 --- a/crates/workspace2/src/workspace2.rs +++ b/crates/workspace2/src/workspace2.rs @@ -8,6 +8,7 @@ pub mod pane; pub mod pane_group; mod persistence; pub mod searchable; +// todo!() // pub mod shared_screen; mod status_bar; mod toolbar; @@ -23,7 +24,7 @@ use client2::{ proto::{self, PeerId}, Client, TypedEnvelope, UserStore, }; -use collections::{HashMap, HashSet}; +use collections::{hash_map, HashMap, HashSet}; use dock::{Dock, DockPosition, PanelButtons}; use futures::{ channel::{mpsc, oneshot}, @@ -38,6 +39,7 @@ use gpui::{ WindowOptions, }; use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem}; +use itertools::Itertools; use language2::LanguageRegistry; use lazy_static::lazy_static; use node_runtime::NodeRuntime; @@ 
-174,42 +176,42 @@ pub struct Toast { on_click: Option<(Cow<'static, str>, Arc)>, } -// impl Toast { -// pub fn new>>(id: usize, msg: I) -> Self { -// Toast { -// id, -// msg: msg.into(), -// on_click: None, -// } -// } +impl Toast { + pub fn new>>(id: usize, msg: I) -> Self { + Toast { + id, + msg: msg.into(), + on_click: None, + } + } -// pub fn on_click(mut self, message: M, on_click: F) -> Self -// where -// M: Into>, -// F: Fn(&mut WindowContext) + 'static, -// { -// self.on_click = Some((message.into(), Arc::new(on_click))); -// self -// } -// } + pub fn on_click(mut self, message: M, on_click: F) -> Self + where + M: Into>, + F: Fn(&mut WindowContext) + 'static, + { + self.on_click = Some((message.into(), Arc::new(on_click))); + self + } +} -// impl PartialEq for Toast { -// fn eq(&self, other: &Self) -> bool { -// self.id == other.id -// && self.msg == other.msg -// && self.on_click.is_some() == other.on_click.is_some() -// } -// } +impl PartialEq for Toast { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + && self.msg == other.msg + && self.on_click.is_some() == other.on_click.is_some() + } +} -// impl Clone for Toast { -// fn clone(&self) -> Self { -// Toast { -// id: self.id, -// msg: self.msg.to_owned(), -// on_click: self.on_click.clone(), -// } -// } -// } +impl Clone for Toast { + fn clone(&self) -> Self { + Toast { + id: self.id, + msg: self.msg.to_owned(), + on_click: self.on_click.clone(), + } + } +} // #[derive(Clone, Deserialize, PartialEq)] // pub struct OpenTerminal { @@ -476,8 +478,7 @@ impl AppState { let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx)); let workspace_store = cx.build_model(|cx| WorkspaceStore::new(client.clone(), cx)); - // todo!() - // theme::init((), cx); + theme2::init(cx); client2::init(&client, cx); crate::init_settings(cx); @@ -549,7 +550,7 @@ pub struct Workspace { weak_self: WeakView, // modal: Option, zoomed: Option, - // zoomed_position: Option, + zoomed_position: Option, center: PaneGroup, left_dock: View, bottom_dock: View, @@ -626,7 +627,7 @@ impl Workspace { } project2::Event::Closed => { - // cx.remove_window(); + cx.remove_window(); } project2::Event::DeletedEntry(entry_id) => { @@ -768,7 +769,7 @@ impl Workspace { weak_self: weak_handle.clone(), // modal: None, zoomed: None, - // zoomed_position: None, + zoomed_position: None, center: PaneGroup::new(center_pane.clone()), panes: vec![center_pane.clone()], panes_by_item: Default::default(), @@ -1059,183 +1060,185 @@ impl Workspace { &self.project } - // pub fn recent_navigation_history( - // &self, - // limit: Option, - // cx: &AppContext, - // ) -> Vec<(ProjectPath, Option)> { - // let mut abs_paths_opened: HashMap> = HashMap::default(); - // let mut history: HashMap, usize)> = HashMap::default(); - // for pane in &self.panes { - // let pane = pane.read(cx); - // pane.nav_history() - // .for_each_entry(cx, |entry, (project_path, fs_path)| { - // if let Some(fs_path) = &fs_path { - // abs_paths_opened - // .entry(fs_path.clone()) - // .or_default() - // .insert(project_path.clone()); - // } - // let timestamp = entry.timestamp; - // match history.entry(project_path) { - // hash_map::Entry::Occupied(mut entry) => { - // let (_, old_timestamp) = entry.get(); - // if ×tamp > old_timestamp { - // entry.insert((fs_path, timestamp)); - // } - // } - // hash_map::Entry::Vacant(entry) => { - // entry.insert((fs_path, timestamp)); - // } - // } - // }); - // } + pub fn recent_navigation_history( + &self, + limit: Option, + cx: &AppContext, + ) -> 
Vec<(ProjectPath, Option)> { + let mut abs_paths_opened: HashMap> = HashMap::default(); + let mut history: HashMap, usize)> = HashMap::default(); + for pane in &self.panes { + let pane = pane.read(cx); + pane.nav_history() + .for_each_entry(cx, |entry, (project_path, fs_path)| { + if let Some(fs_path) = &fs_path { + abs_paths_opened + .entry(fs_path.clone()) + .or_default() + .insert(project_path.clone()); + } + let timestamp = entry.timestamp; + match history.entry(project_path) { + hash_map::Entry::Occupied(mut entry) => { + let (_, old_timestamp) = entry.get(); + if ×tamp > old_timestamp { + entry.insert((fs_path, timestamp)); + } + } + hash_map::Entry::Vacant(entry) => { + entry.insert((fs_path, timestamp)); + } + } + }); + } - // history - // .into_iter() - // .sorted_by_key(|(_, (_, timestamp))| *timestamp) - // .map(|(project_path, (fs_path, _))| (project_path, fs_path)) - // .rev() - // .filter(|(history_path, abs_path)| { - // let latest_project_path_opened = abs_path - // .as_ref() - // .and_then(|abs_path| abs_paths_opened.get(abs_path)) - // .and_then(|project_paths| { - // project_paths - // .iter() - // .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id)) - // }); + history + .into_iter() + .sorted_by_key(|(_, (_, timestamp))| *timestamp) + .map(|(project_path, (fs_path, _))| (project_path, fs_path)) + .rev() + .filter(|(history_path, abs_path)| { + let latest_project_path_opened = abs_path + .as_ref() + .and_then(|abs_path| abs_paths_opened.get(abs_path)) + .and_then(|project_paths| { + project_paths + .iter() + .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id)) + }); - // match latest_project_path_opened { - // Some(latest_project_path_opened) => latest_project_path_opened == history_path, - // None => true, - // } - // }) - // .take(limit.unwrap_or(usize::MAX)) - // .collect() - // } + match latest_project_path_opened { + Some(latest_project_path_opened) => latest_project_path_opened == history_path, + None => true, + } + }) + .take(limit.unwrap_or(usize::MAX)) + .collect() + } - // fn navigate_history( - // &mut self, - // pane: WeakView, - // mode: NavigationMode, - // cx: &mut ViewContext, - // ) -> Task> { - // let to_load = if let Some(pane) = pane.upgrade(cx) { - // cx.focus(&pane); + fn navigate_history( + &mut self, + pane: WeakView, + mode: NavigationMode, + cx: &mut ViewContext, + ) -> Task> { + let to_load = if let Some(pane) = pane.upgrade() { + // todo!("focus") + // cx.focus(&pane); - // pane.update(cx, |pane, cx| { - // loop { - // // Retrieve the weak item handle from the history. - // let entry = pane.nav_history_mut().pop(mode, cx)?; + pane.update(cx, |pane, cx| { + loop { + // Retrieve the weak item handle from the history. + let entry = pane.nav_history_mut().pop(mode, cx)?; - // // If the item is still present in this pane, then activate it. - // if let Some(index) = entry - // .item - // .upgrade(cx) - // .and_then(|v| pane.index_for_item(v.as_ref())) - // { - // let prev_active_item_index = pane.active_item_index(); - // pane.nav_history_mut().set_mode(mode); - // pane.activate_item(index, true, true, cx); - // pane.nav_history_mut().set_mode(NavigationMode::Normal); + // If the item is still present in this pane, then activate it. 
+ if let Some(index) = entry + .item + .upgrade() + .and_then(|v| pane.index_for_item(v.as_ref())) + { + let prev_active_item_index = pane.active_item_index(); + pane.nav_history_mut().set_mode(mode); + pane.activate_item(index, true, true, cx); + pane.nav_history_mut().set_mode(NavigationMode::Normal); - // let mut navigated = prev_active_item_index != pane.active_item_index(); - // if let Some(data) = entry.data { - // navigated |= pane.active_item()?.navigate(data, cx); - // } + let mut navigated = prev_active_item_index != pane.active_item_index(); + if let Some(data) = entry.data { + navigated |= pane.active_item()?.navigate(data, cx); + } - // if navigated { - // break None; - // } - // } - // // If the item is no longer present in this pane, then retrieve its - // // project path in order to reopen it. - // else { - // break pane - // .nav_history() - // .path_for_item(entry.item.id()) - // .map(|(project_path, _)| (project_path, entry)); - // } - // } - // }) - // } else { - // None - // }; + if navigated { + break None; + } + } + // If the item is no longer present in this pane, then retrieve its + // project path in order to reopen it. + else { + break pane + .nav_history() + .path_for_item(entry.item.id()) + .map(|(project_path, _)| (project_path, entry)); + } + } + }) + } else { + None + }; - // if let Some((project_path, entry)) = to_load { - // // If the item was no longer present, then load it again from its previous path. - // let task = self.load_path(project_path, cx); - // cx.spawn(|workspace, mut cx| async move { - // let task = task.await; - // let mut navigated = false; - // if let Some((project_entry_id, build_item)) = task.log_err() { - // let prev_active_item_id = pane.update(&mut cx, |pane, _| { - // pane.nav_history_mut().set_mode(mode); - // pane.active_item().map(|p| p.id()) - // })?; + if let Some((project_path, entry)) = to_load { + // If the item was no longer present, then load it again from its previous path. + let task = self.load_path(project_path, cx); + cx.spawn(|workspace, mut cx| async move { + let task = task.await; + let mut navigated = false; + if let Some((project_entry_id, build_item)) = task.log_err() { + let prev_active_item_id = pane.update(&mut cx, |pane, _| { + pane.nav_history_mut().set_mode(mode); + pane.active_item().map(|p| p.id()) + })?; - // pane.update(&mut cx, |pane, cx| { - // let item = pane.open_item(project_entry_id, true, cx, build_item); - // navigated |= Some(item.id()) != prev_active_item_id; - // pane.nav_history_mut().set_mode(NavigationMode::Normal); - // if let Some(data) = entry.data { - // navigated |= item.navigate(data, cx); - // } - // })?; - // } + pane.update(&mut cx, |pane, cx| { + let item = pane.open_item(project_entry_id, true, cx, build_item); + navigated |= Some(item.id()) != prev_active_item_id; + pane.nav_history_mut().set_mode(NavigationMode::Normal); + if let Some(data) = entry.data { + navigated |= item.navigate(data, cx); + } + })?; + } - // if !navigated { - // workspace - // .update(&mut cx, |workspace, cx| { - // Self::navigate_history(workspace, pane, mode, cx) - // })? - // .await?; - // } + if !navigated { + workspace + .update(&mut cx, |workspace, cx| { + Self::navigate_history(workspace, pane, mode, cx) + })? 
+ .await?; + } - // Ok(()) - // }) - // } else { - // Task::ready(Ok(())) - // } - // } + Ok(()) + }) + } else { + Task::ready(Ok(())) + } + } - // pub fn go_back( - // &mut self, - // pane: WeakView, - // cx: &mut ViewContext, - // ) -> Task> { - // self.navigate_history(pane, NavigationMode::GoingBack, cx) - // } + pub fn go_back( + &mut self, + pane: WeakView, + cx: &mut ViewContext, + ) -> Task> { + self.navigate_history(pane, NavigationMode::GoingBack, cx) + } - // pub fn go_forward( - // &mut self, - // pane: WeakView, - // cx: &mut ViewContext, - // ) -> Task> { - // self.navigate_history(pane, NavigationMode::GoingForward, cx) - // } + pub fn go_forward( + &mut self, + pane: WeakView, + cx: &mut ViewContext, + ) -> Task> { + self.navigate_history(pane, NavigationMode::GoingForward, cx) + } - // pub fn reopen_closed_item(&mut self, cx: &mut ViewContext) -> Task> { - // self.navigate_history( - // self.active_pane().downgrade(), - // NavigationMode::ReopeningClosedItem, - // cx, - // ) - // } + pub fn reopen_closed_item(&mut self, cx: &mut ViewContext) -> Task> { + self.navigate_history( + self.active_pane().downgrade(), + NavigationMode::ReopeningClosedItem, + cx, + ) + } - // pub fn client(&self) -> &Client { - // &self.app_state.client - // } + pub fn client(&self) -> &Client { + &self.app_state.client + } - // pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext) { - // self.titlebar_item = Some(item); - // cx.notify(); - // } + // todo!() + // pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext) { + // self.titlebar_item = Some(item); + // cx.notify(); + // } - // pub fn titlebar_item(&self) -> Option { - // self.titlebar_item.clone() - // } + // pub fn titlebar_item(&self) -> Option { + // self.titlebar_item.clone() + // } // /// Call the given callback with a workspace whose project is local. 
// /// @@ -1261,32 +1264,29 @@ impl Workspace { // } // } - // pub fn worktrees<'a>( - // &self, - // cx: &'a AppContext, - // ) -> impl 'a + Iterator> { - // self.project.read(cx).worktrees(cx) - // } + pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator> { + self.project.read(cx).worktrees() + } - // pub fn visible_worktrees<'a>( - // &self, - // cx: &'a AppContext, - // ) -> impl 'a + Iterator> { - // self.project.read(cx).visible_worktrees(cx) - // } + pub fn visible_worktrees<'a>( + &self, + cx: &'a AppContext, + ) -> impl 'a + Iterator> { + self.project.read(cx).visible_worktrees(cx) + } - // pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future + 'static { - // let futures = self - // .worktrees(cx) - // .filter_map(|worktree| worktree.read(cx).as_local()) - // .map(|worktree| worktree.scan_complete()) - // .collect::>(); - // async move { - // for future in futures { - // future.await; - // } - // } - // } + pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future + 'static { + let futures = self + .worktrees(cx) + .filter_map(|worktree| worktree.read(cx).as_local()) + .map(|worktree| worktree.scan_complete()) + .collect::>(); + async move { + for future in futures { + future.await; + } + } + } // pub fn close_global(_: &CloseWindow, cx: &mut AppContext) { // cx.spawn(|mut cx| async move { @@ -1699,31 +1699,31 @@ impl Workspace { self.active_pane().read(cx).active_item() } - // fn active_project_path(&self, cx: &ViewContext) -> Option { - // self.active_item(cx).and_then(|item| item.project_path(cx)) - // } + fn active_project_path(&self, cx: &ViewContext) -> Option { + self.active_item(cx).and_then(|item| item.project_path(cx)) + } - // pub fn save_active_item( - // &mut self, - // save_intent: SaveIntent, - // cx: &mut ViewContext, - // ) -> Task> { - // let project = self.project.clone(); - // let pane = self.active_pane(); - // let item_ix = pane.read(cx).active_item_index(); - // let item = pane.read(cx).active_item(); - // let pane = pane.downgrade(); + pub fn save_active_item( + &mut self, + save_intent: SaveIntent, + cx: &mut ViewContext, + ) -> Task> { + let project = self.project.clone(); + let pane = self.active_pane(); + let item_ix = pane.read(cx).active_item_index(); + let item = pane.read(cx).active_item(); + let pane = pane.downgrade(); - // cx.spawn(|_, mut cx| async move { - // if let Some(item) = item { - // Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx) - // .await - // .map(|_| ()) - // } else { - // Ok(()) - // } - // }) - // } + cx.spawn(|_, mut cx| async move { + if let Some(item) = item { + Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx) + .await + .map(|_| ()) + } else { + Ok(()) + } + }) + } // pub fn close_inactive_items_and_panes( // &mut self, @@ -1825,19 +1825,20 @@ impl Workspace { // self.serialize_workspace(cx); // } - // pub fn close_all_docks(&mut self, cx: &mut ViewContext) { - // let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock]; + pub fn close_all_docks(&mut self, cx: &mut ViewContext) { + let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock]; - // for dock in docks { - // dock.update(cx, |dock, cx| { - // dock.set_open(false, cx); - // }); - // } + for dock in docks { + dock.update(cx, |dock, cx| { + dock.set_open(false, cx); + }); + } - // cx.focus_self(); - // cx.notify(); - // self.serialize_workspace(cx); - // } + // todo!("focus") + // cx.focus_self(); + cx.notify(); + self.serialize_workspace(cx); + } 
// /// Transfer focus to the panel of the given type. // pub fn focus_panel(&mut self, cx: &mut ViewContext) -> Option> { @@ -1904,19 +1905,19 @@ impl Workspace { // None // } - // fn zoom_out(&mut self, cx: &mut ViewContext) { - // for pane in &self.panes { - // pane.update(cx, |pane, cx| pane.set_zoomed(false, cx)); - // } + fn zoom_out(&mut self, cx: &mut ViewContext) { + for pane in &self.panes { + pane.update(cx, |pane, cx| pane.set_zoomed(false, cx)); + } - // self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx)); - // self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx)); - // self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx)); - // self.zoomed = None; - // self.zoomed_position = None; + self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx)); + self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx)); + self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx)); + self.zoomed = None; + self.zoomed_position = None; - // cx.notify(); - // } + cx.notify(); + } // #[cfg(any(test, feature = "test-support"))] // pub fn zoomed_view(&self, cx: &AppContext) -> Option { @@ -1962,22 +1963,21 @@ impl Workspace { // cx.notify(); // } - fn add_pane(&mut self, _cx: &mut ViewContext) -> View { - todo!() - // let pane = cx.build_view(|cx| { - // Pane::new( - // self.weak_handle(), - // self.project.clone(), - // self.pane_history_timestamp.clone(), - // cx, - // ) - // }); - // cx.subscribe(&pane, Self::handle_pane_event).detach(); - // self.panes.push(pane.clone()); + fn add_pane(&mut self, cx: &mut ViewContext) -> View { + let pane = cx.build_view(|cx| { + Pane::new( + self.weak_handle(), + self.project.clone(), + self.pane_history_timestamp.clone(), + cx, + ) + }); + cx.subscribe(&pane, Self::handle_pane_event).detach(); + self.panes.push(pane.clone()); // todo!() // cx.focus(&pane); - // cx.emit(Event::PaneAdded(pane.clone())); - // pane + cx.emit(Event::PaneAdded(pane.clone())); + pane } // pub fn add_item_to_center( @@ -3122,6 +3122,7 @@ impl Workspace { None } + // todo!() // fn shared_screen_for_peer( // &self, // peer_id: PeerId, @@ -3498,6 +3499,7 @@ impl Workspace { }) } + // todo!() // #[cfg(any(test, feature = "test-support"))] // pub fn test_new(project: ModelHandle, cx: &mut ViewContext) -> Self { // use node_runtime::FakeNodeRuntime; @@ -3658,6 +3660,7 @@ fn open_items( }) } +// todo!() // fn notify_of_new_dock(workspace: &WeakView, cx: &mut AsyncAppContext) { // const NEW_PANEL_BLOG_POST: &str = "https://zed.dev/blog/new-panel-system"; // const NEW_DOCK_HINT_KEY: &str = "show_new_dock_key"; @@ -3738,23 +3741,22 @@ fn open_items( // }) // .ok(); -fn notify_if_database_failed(_workspace: WindowHandle, _cx: &mut AsyncAppContext) { +fn notify_if_database_failed(workspace: WindowHandle, cx: &mut AsyncAppContext) { const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml"; - // todo!() - // workspace - // .update(cx, |workspace, cx| { - // if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) { - // workspace.show_notification_once(0, cx, |cx| { - // cx.build_view(|_| { - // MessageNotification::new("Failed to load the database file.") - // .with_click_message("Click to let us know about this error") - // .on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL)) - // }) - // }); - // } - // }) - // .log_err(); + workspace + .update(cx, |workspace, cx| { + if (*db2::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) { + workspace.show_notification_once(0, cx, 
|cx| {
+                    cx.build_view(|_| {
+                        MessageNotification::new("Failed to load the database file.")
+                            .with_click_message("Click to let us know about this error")
+                            .on_click(|cx| cx.open_url(REPORT_ISSUE_URL))
+                    })
+                });
+            }
+        })
+        .log_err();
 }
 
 impl EventEmitter for Workspace {
@@ -4176,36 +4178,32 @@ impl WorkspaceStore {
     }
 
     async fn handle_update_followers(
-        _this: Model<Self>,
-        _envelope: TypedEnvelope<proto::UpdateFollowers>,
+        this: Model<Self>,
+        envelope: TypedEnvelope<proto::UpdateFollowers>,
         _: Arc<Client>,
-        mut _cx: AsyncWindowContext,
+        mut cx: AsyncWindowContext,
     ) -> Result<()> {
-        // let leader_id = envelope.original_sender_id()?;
-        // let update = envelope.payload;
+        let leader_id = envelope.original_sender_id()?;
+        let update = envelope.payload;
 
-        // this.update(&mut cx, |this, cx| {
-        //     for workspace in &this.workspaces {
-        //         let Some(workspace) = workspace.upgrade() else {
-        //             continue;
-        //         };
-        //         workspace.update(cx, |workspace, cx| {
-        //             let project_id = workspace.project.read(cx).remote_id();
-        //             if update.project_id != project_id && update.project_id.is_some() {
-        //                 return;
-        //             }
-        //             workspace.handle_update_followers(leader_id, update.clone(), cx);
-        //         });
-        //     }
-        //     Ok(())
-        // })?
-        todo!()
+        this.update(&mut cx, |this, cx| {
+            for workspace in &this.workspaces {
+                workspace.update(cx, |workspace, cx| {
+                    let project_id = workspace.project.read(cx).remote_id();
+                    if update.project_id != project_id && update.project_id.is_some() {
+                        return;
+                    }
+                    workspace.handle_update_followers(leader_id, update.clone(), cx);
+                })?;
+            }
+            Ok(())
+        })?
     }
 }
 
-// impl Entity for WorkspaceStore {
-//     type Event = ();
-// }
+impl EventEmitter for WorkspaceStore {
+    type Event = ();
+}
 
 impl ViewId {
     pub(crate) fn from_proto(message: proto::ViewId) -> Result<Self> {
diff --git a/crates/zed2/src/zed2.rs b/crates/zed2/src/zed2.rs
index 713345b2ee..a33498538e 100644
--- a/crates/zed2/src/zed2.rs
+++ b/crates/zed2/src/zed2.rs
@@ -114,7 +114,7 @@ pub async fn handle_cli_connection(
                         match item {
                             Some(Ok(mut item)) => {
                                 if let Some(point) = caret_positions.remove(path) {
-                                    todo!()
+                                    todo!("editor")
                                     // if let Some(active_editor) = item.downcast::<Editor>() {
                                     //     active_editor
                                     //         .downgrade()
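
The dock.rs hunk restored earlier in this patch keeps active_panel_index valid inside Dock::remove_panel: removing the active panel resets the index to zero and closes the dock, while removing a panel that sits before the active one shifts the index down by one. A minimal standalone sketch of just that bookkeeping, using a plain Vec of names in place of the real PanelEntry list and gpui2 types:

struct Dock {
    panels: Vec<String>,
    active_panel_index: usize,
    is_open: bool,
}

impl Dock {
    // Mirrors the index bookkeeping in the restored Dock::remove_panel.
    fn remove_panel(&mut self, name: &str) {
        if let Some(panel_ix) = self.panels.iter().position(|p| p.as_str() == name) {
            if panel_ix == self.active_panel_index {
                // Removing the active panel: fall back to the first panel and close the dock.
                self.active_panel_index = 0;
                self.is_open = false;
            } else if panel_ix < self.active_panel_index {
                // Panels after the removed one shift left, so the active index follows them.
                self.active_panel_index -= 1;
            }
            self.panels.remove(panel_ix);
        }
    }
}

fn main() {
    let mut dock = Dock {
        panels: vec!["project".to_string(), "terminal".to_string(), "assistant".to_string()],
        active_panel_index: 2,
        is_open: true,
    };
    dock.remove_panel("project");
    // The active panel ("assistant") is still selected, now at index 1.
    assert_eq!(dock.active_panel_index, 1);
    assert!(dock.is_open);
}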
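
The Toast impls brought back in workspace2.rs are written by hand because the struct carries an optional click callback: PartialEq can only compare whether a callback is present, and Clone simply shares the Arc'd closure. A standalone sketch of that pattern with plain std types; the String fields and the Fn() + Send + Sync signature here are simplifications, not the real Cow and WindowContext based API:

use std::sync::Arc;

struct Toast {
    id: usize,
    msg: String,
    on_click: Option<(String, Arc<dyn Fn() + Send + Sync>)>,
}

impl Toast {
    fn new(id: usize, msg: impl Into<String>) -> Self {
        Toast { id, msg: msg.into(), on_click: None }
    }

    // Builder-style setter, as in the restored Toast::on_click.
    fn on_click(mut self, label: impl Into<String>, f: impl Fn() + Send + Sync + 'static) -> Self {
        let callback: Arc<dyn Fn() + Send + Sync> = Arc::new(f);
        self.on_click = Some((label.into(), callback));
        self
    }
}

// Closures have no useful notion of equality, so only the presence of a
// callback is compared, which is the same compromise the patch makes.
impl PartialEq for Toast {
    fn eq(&self, other: &Self) -> bool {
        self.id == other.id
            && self.msg == other.msg
            && self.on_click.is_some() == other.on_click.is_some()
    }
}

// Cloning shares the same Arc'd callback rather than duplicating the closure.
impl Clone for Toast {
    fn clone(&self) -> Self {
        Toast {
            id: self.id,
            msg: self.msg.clone(),
            on_click: self.on_click.clone(),
        }
    }
}

fn main() {
    let a = Toast::new(1, "Saved file").on_click("Undo", || println!("undo"));
    let b = a.clone();
    assert!(a == b);
}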
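
recent_navigation_history, also restored above, boils down to a small aggregation: walk every pane's history, remember only the newest timestamp per project path, then emit paths most-recent-first up to an optional limit. A standalone sketch of just that aggregation, with String standing in for ProjectPath and no panes or gpui2 involved; it also omits the abs-path de-duplication the real method performs:

use std::collections::HashMap;

// entries: (path, timestamp) pairs in the order they were visited.
fn recent_history(entries: &[(String, usize)], limit: Option<usize>) -> Vec<String> {
    // Keep only the newest timestamp seen for each path.
    let mut latest: HashMap<String, usize> = HashMap::new();
    for (path, timestamp) in entries {
        let newest = latest.entry(path.clone()).or_insert(*timestamp);
        if *timestamp > *newest {
            *newest = *timestamp;
        }
    }

    // Most recently visited first, truncated to the requested limit.
    let mut ordered: Vec<(String, usize)> = latest.into_iter().collect();
    ordered.sort_by_key(|(_, timestamp)| std::cmp::Reverse(*timestamp));
    ordered
        .into_iter()
        .map(|(path, _)| path)
        .take(limit.unwrap_or(usize::MAX))
        .collect()
}

fn main() {
    let entries = vec![
        ("src/main.rs".to_string(), 3),
        ("Cargo.toml".to_string(), 1),
        ("src/main.rs".to_string(), 7),
        ("src/lib.rs".to_string(), 5),
    ];
    // Prints ["src/main.rs", "src/lib.rs"]: newest first, capped at two entries.
    println!("{:?}", recent_history(&entries, Some(2)));
}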