Merge branch 'main' of github.com:zed-industries/zed into model_cleanup

KCaverly 2023-10-30 11:07:29 -04:00
commit 142b94bdd4
43 changed files with 752 additions and 424 deletions

Cargo.lock generated

@@ -310,6 +310,7 @@ dependencies = [
 "language",
 "log",
 "menu",
+"multi_buffer",
 "ordered-float 2.10.0",
 "parking_lot 0.11.2",
 "project",
@@ -1468,7 +1469,7 @@ dependencies = [
 [[package]]
 name = "collab"
-version = "0.25.0"
+version = "0.27.0"
 dependencies = [
 "anyhow",
 "async-trait",
@@ -1713,6 +1714,7 @@ dependencies = [
 "log",
 "lsp",
 "node_runtime",
+"parking_lot 0.11.2",
 "rpc",
 "serde",
 "serde_derive",
@@ -2410,6 +2412,7 @@ dependencies = [
 "lazy_static",
 "log",
 "lsp",
+"multi_buffer",
 "ordered-float 2.10.0",
 "parking_lot 0.11.2",
 "postage",
@@ -4600,6 +4603,55 @@ version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389"
+[[package]]
+name = "multi_buffer"
+version = "0.1.0"
+dependencies = [
+"aho-corasick",
+"anyhow",
+"client",
+"clock",
+"collections",
+"context_menu",
+"convert_case 0.6.0",
+"copilot",
+"ctor",
+"env_logger 0.9.3",
+"futures 0.3.28",
+"git",
+"gpui",
+"indoc",
+"itertools 0.10.5",
+"language",
+"lazy_static",
+"log",
+"lsp",
+"ordered-float 2.10.0",
+"parking_lot 0.11.2",
+"postage",
+"project",
+"pulldown-cmark",
+"rand 0.8.5",
+"rich_text",
+"schemars",
+"serde",
+"serde_derive",
+"settings",
+"smallvec",
+"smol",
+"snippet",
+"sum_tree",
+"text",
+"theme",
+"tree-sitter",
+"tree-sitter-html",
+"tree-sitter-rust",
+"tree-sitter-typescript",
+"unindent",
+"util",
+"workspace",
+]
 [[package]]
 name = "multimap"
 version = "0.8.3"
@@ -5562,6 +5614,7 @@ dependencies = [
 "log",
 "lsp",
 "node_runtime",
+"parking_lot 0.11.2",
 "serde",
 "serde_derive",
 "serde_json",
@@ -10094,7 +10147,7 @@ dependencies = [
 [[package]]
 name = "zed"
-version = "0.110.0"
+version = "0.111.0"
 dependencies = [
 "activity_indicator",
 "ai",


@@ -46,6 +46,7 @@ members = [
 "crates/lsp",
 "crates/media",
 "crates/menu",
+"crates/multi_buffer",
 "crates/node_runtime",
 "crates/notifications",
 "crates/outline",


@@ -17,6 +17,7 @@ fs = { path = "../fs" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
 menu = { path = "../menu" }
+multi_buffer = { path = "../multi_buffer" }
 search = { path = "../search" }
 settings = { path = "../settings" }
 theme = { path = "../theme" }


@@ -292,7 +292,7 @@ impl AssistantPanel {
 project: &ModelHandle<Project>,
 ) {
 let selection = editor.read(cx).selections.newest_anchor().clone();
-if selection.start.excerpt_id() != selection.end.excerpt_id() {
+if selection.start.excerpt_id != selection.end.excerpt_id {
 return;
 }
 let snapshot = editor.read(cx).buffer().read(cx).snapshot(cx);


@@ -1,10 +1,11 @@
 use crate::streaming_diff::{Hunk, StreamingDiff};
 use ai::completion::{CompletionProvider, CompletionRequest};
 use anyhow::Result;
-use editor::{multi_buffer, Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
+use editor::{Anchor, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint};
 use futures::{channel::mpsc, SinkExt, Stream, StreamExt};
 use gpui::{Entity, ModelContext, ModelHandle, Task};
 use language::{Rope, TransactionId};
+use multi_buffer;
 use std::{cmp, future, ops::Range, sync::Arc};
 pub enum Event {


@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
 default-run = "collab"
 edition = "2021"
 name = "collab"
-version = "0.25.0"
+version = "0.27.0"
 publish = false
 [[bin]]


@@ -942,7 +942,7 @@ async fn create_room(
 let live_kit_room = live_kit_room.clone();
 let live_kit = session.live_kit_client.as_ref();
-util::async_iife!({
+util::async_maybe!({
 let live_kit = live_kit?;
 let token = live_kit


@@ -46,7 +46,7 @@ use serde_derive::{Deserialize, Serialize};
 use settings::SettingsStore;
 use std::{borrow::Cow, hash::Hash, mem, sync::Arc};
 use theme::{components::ComponentExt, IconButton, Interactive};
-use util::{iife, ResultExt, TryFutureExt};
+use util::{maybe, ResultExt, TryFutureExt};
 use workspace::{
 dock::{DockPosition, Panel},
 item::ItemHandle,
@@ -1461,7 +1461,7 @@ impl CollabPanel {
 let text = match section {
 Section::ActiveCall => {
-let channel_name = iife!({
+let channel_name = maybe!({
 let channel_id = ActiveCall::global(cx).read(cx).channel_id(cx)?;
 let channel = self.channel_store.read(cx).channel_for_id(channel_id)?;
@@ -1941,7 +1941,7 @@ impl CollabPanel {
 let disclosed =
 has_children.then(|| !self.collapsed_channels.binary_search(&channel.id).is_ok());
-let is_active = iife!({
+let is_active = maybe!({
 let call_channel = ActiveCall::global(cx)
 .read(cx)
 .room()?
@@ -2791,7 +2791,7 @@ impl CollabPanel {
 }
 }
 ListEntry::Channel { channel, .. } => {
-let is_active = iife!({
+let is_active = maybe!({
 let call_channel = ActiveCall::global(cx)
 .read(cx)
 .room()?


@@ -36,6 +36,7 @@ serde.workspace = true
 serde_derive.workspace = true
 smol.workspace = true
 futures.workspace = true
+parking_lot.workspace = true
 [dev-dependencies]
 clock = { path = "../clock" }


@@ -16,6 +16,7 @@ use language::{
 };
 use lsp::{LanguageServer, LanguageServerBinary, LanguageServerId};
 use node_runtime::NodeRuntime;
+use parking_lot::Mutex;
 use request::StatusNotification;
 use settings::SettingsStore;
 use smol::{fs, io::BufReader, stream::StreamExt};
@@ -387,8 +388,15 @@ impl Copilot {
 path: node_path,
 arguments,
 };
-let server =
-LanguageServer::new(new_server_id, binary, Path::new("/"), None, cx.clone())?;
+let server = LanguageServer::new(
+Arc::new(Mutex::new(None)),
+new_server_id,
+binary,
+Path::new("/"),
+None,
+cx.clone(),
+)?;
 server
 .on_notification::<StatusNotification, _>(


@@ -20,7 +20,7 @@ use std::future::Future;
 use std::path::{Path, PathBuf};
 use std::sync::atomic::{AtomicBool, Ordering};
 use util::channel::ReleaseChannel;
-use util::{async_iife, ResultExt};
+use util::{async_maybe, ResultExt};
 const CONNECTION_INITIALIZE_QUERY: &'static str = sql!(
 PRAGMA foreign_keys=TRUE;
@@ -57,7 +57,7 @@ pub async fn open_db<M: Migrator + 'static>(
 let release_channel_name = release_channel.dev_name();
 let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));
-let connection = async_iife!({
+let connection = async_maybe!({
 smol::fs::create_dir_all(&main_db_dir)
 .await
 .context("Could not create db directory")


@@ -14,6 +14,7 @@ test-support = [
 "text/test-support",
 "language/test-support",
 "gpui/test-support",
+"multi_buffer/test-support",
 "project/test-support",
 "util/test-support",
 "workspace/test-support",
@@ -34,6 +35,7 @@ git = { path = "../git" }
 gpui = { path = "../gpui" }
 language = { path = "../language" }
 lsp = { path = "../lsp" }
+multi_buffer = { path = "../multi_buffer" }
 project = { path = "../project" }
 rpc = { path = "../rpc" }
 rich_text = { path = "../rich_text" }


@@ -993,8 +993,8 @@ mod tests {
 use super::*;
 use crate::display_map::inlay_map::InlayMap;
 use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
-use crate::multi_buffer::MultiBuffer;
 use gpui::{elements::Empty, Element};
+use multi_buffer::MultiBuffer;
 use rand::prelude::*;
 use settings::SettingsStore;
 use std::env;


@@ -91,7 +91,7 @@ impl<'a> FoldMapWriter<'a> {
 // For now, ignore any ranges that span an excerpt boundary.
 let fold = Fold(buffer.anchor_after(range.start)..buffer.anchor_before(range.end));
-if fold.0.start.excerpt_id() != fold.0.end.excerpt_id() {
+if fold.0.start.excerpt_id != fold.0.end.excerpt_id {
 continue;
 }


@@ -1,10 +1,8 @@
-use crate::{
-multi_buffer::{MultiBufferChunks, MultiBufferRows},
-Anchor, InlayId, MultiBufferSnapshot, ToOffset,
-};
+use crate::{Anchor, InlayId, MultiBufferSnapshot, ToOffset};
 use collections::{BTreeMap, BTreeSet};
 use gpui::fonts::HighlightStyle;
 use language::{Chunk, Edit, Point, TextSummary};
+use multi_buffer::{MultiBufferChunks, MultiBufferRows};
 use std::{
 any::TypeId,
 cmp,


@@ -11,7 +11,6 @@ pub mod items;
 mod link_go_to_definition;
 mod mouse_context_menu;
 pub mod movement;
-pub mod multi_buffer;
 mod persistence;
 pub mod scroll;
 pub mod selections_collection;
@@ -968,7 +967,6 @@
 self.selected_item -= 1;
 } else {
 self.selected_item = self.matches.len() - 1;
-self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 }
 self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 self.attempt_resolve_selected_completion_documentation(project, cx);
@@ -1539,7 +1537,6 @@
 self.selected_item -= 1;
 } else {
 self.selected_item = self.actions.len() - 1;
-self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 }
 self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 cx.notify();
@@ -1548,11 +1545,10 @@
 fn select_next(&mut self, cx: &mut ViewContext<Editor>) {
 if self.selected_item + 1 < self.actions.len() {
 self.selected_item += 1;
-self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 } else {
 self.selected_item = 0;
-self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 }
+self.list.scroll_to(ScrollTarget::Show(self.selected_item));
 cx.notify();
 }
@@ -7213,6 +7209,7 @@
 && entry.diagnostic.severity <= DiagnosticSeverity::WARNING
 && !entry.range.is_empty()
 && Some(entry.range.end) != active_primary_range.as_ref().map(|r| *r.end())
+&& !entry.range.contains(&search_start)
 {
 Some((entry.range, entry.diagnostic.group_id))
 } else {
@@ -7319,11 +7316,11 @@
 let display_point = initial_point.to_display_point(snapshot);
 let mut hunks = hunks
 .map(|hunk| diff_hunk_to_display(hunk, &snapshot))
-.skip_while(|hunk| {
+.filter(|hunk| {
 if is_wrapped {
-false
+true
 } else {
-hunk.contains_display_row(display_point.row())
+!hunk.contains_display_row(display_point.row())
 }
 })
 .dedup();
@@ -7715,8 +7712,8 @@
 let mut buffer_highlights = this
 .document_highlights_for_position(selection.head(), &buffer)
 .filter(|highlight| {
-highlight.start.excerpt_id() == selection.head().excerpt_id()
-&& highlight.end.excerpt_id() == selection.head().excerpt_id()
+highlight.start.excerpt_id == selection.head().excerpt_id
+&& highlight.end.excerpt_id == selection.head().excerpt_id
 });
 buffer_highlights
 .next()
@@ -8957,6 +8954,16 @@
 telemetry.report_clickhouse_event(event, telemetry_settings);
 }
+#[cfg(any(test, feature = "test-support"))]
+fn report_editor_event(
+&self,
+_operation: &'static str,
+_file_extension: Option<String>,
+_cx: &AppContext,
+) {
+}
+#[cfg(not(any(test, feature = "test-support")))]
 fn report_editor_event(
 &self,
 operation: &'static str,


@ -6717,6 +6717,102 @@ fn test_combine_syntax_and_fuzzy_match_highlights() {
); );
} }
#[gpui::test]
async fn go_to_prev_overlapping_diagnostic(
deterministic: Arc<Deterministic>,
cx: &mut gpui::TestAppContext,
) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let project = cx.update_editor(|editor, _| editor.project.clone().unwrap());
cx.set_state(indoc! {"
ˇfn func(abc def: i32) -> u32 {
}
"});
cx.update(|cx| {
project.update(cx, |project, cx| {
project
.update_diagnostics(
LanguageServerId(0),
lsp::PublishDiagnosticsParams {
uri: lsp::Url::from_file_path("/root/file").unwrap(),
version: None,
diagnostics: vec![
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 11),
lsp::Position::new(0, 12),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 12),
lsp::Position::new(0, 15),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
lsp::Diagnostic {
range: lsp::Range::new(
lsp::Position::new(0, 25),
lsp::Position::new(0, 28),
),
severity: Some(lsp::DiagnosticSeverity::ERROR),
..Default::default()
},
],
},
&[],
cx,
)
.unwrap()
});
});
deterministic.run_until_parked();
cx.update_editor(|editor, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc def: i32) -> ˇu32 {
}
"});
cx.update_editor(|editor, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc ˇdef: i32) -> u32 {
}
"});
cx.update_editor(|editor, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abcˇ def: i32) -> u32 {
}
"});
cx.update_editor(|editor, cx| {
editor.go_to_prev_diagnostic(&GoToPrevDiagnostic, cx);
});
cx.assert_editor_state(indoc! {"
fn func(abc def: i32) -> ˇu32 {
}
"});
}
#[gpui::test] #[gpui::test]
async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppContext) { async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -6799,6 +6895,46 @@ async fn go_to_hunk(deterministic: Arc<Deterministic>, cx: &mut gpui::TestAppCon
.unindent(), .unindent(),
); );
cx.update_editor(|editor, cx| {
editor.go_to_prev_hunk(&GoToPrevHunk, cx);
});
cx.assert_editor_state(
&r#"
use some::modified;
ˇ
fn main() {
println!("hello there");
println!("around the");
println!("world");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| {
for _ in 0..3 {
editor.go_to_prev_hunk(&GoToPrevHunk, cx);
}
});
cx.assert_editor_state(
&r#"
use some::modified;
fn main() {
ˇ println!("hello there");
println!("around the");
println!("world");
}
"#
.unindent(),
);
cx.update_editor(|editor, cx| { cx.update_editor(|editor, cx| {
editor.fold(&Fold, cx); editor.fold(&Fold, cx);


@@ -36,7 +36,7 @@ impl DisplayDiffHunk {
 DisplayDiffHunk::Unfolded {
 display_row_range, ..
-} => display_row_range.start..=display_row_range.end - 1,
+} => display_row_range.start..=display_row_range.end,
 };
 range.contains(&display_row)
@@ -77,8 +77,8 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
 } else {
 let start = hunk_start_point.to_display_point(snapshot).row();
-let hunk_end_row_inclusive = hunk.buffer_range.end.max(hunk.buffer_range.start);
-let hunk_end_point = Point::new(hunk_end_row_inclusive, 0);
+let hunk_end_row = hunk.buffer_range.end.max(hunk.buffer_range.start);
+let hunk_end_point = Point::new(hunk_end_row, 0);
 let end = hunk_end_point.to_display_point(snapshot).row();
 DisplayDiffHunk::Unfolded {
@@ -87,3 +87,196 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
 }
 }
 }
#[cfg(any(test, feature = "test_support"))]
mod tests {
use crate::editor_tests::init_test;
use crate::Point;
use gpui::TestAppContext;
use multi_buffer::{ExcerptRange, MultiBuffer};
use project::{FakeFs, Project};
use unindent::Unindent;
#[gpui::test]
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
use git::diff::DiffHunkStatus;
init_test(cx, |_| {});
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, [], cx).await;
// buffer has two modified hunks with two rows each
let buffer_1 = project
.update(cx, |project, cx| {
project.create_buffer(
"
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent()
.as_str(),
None,
cx,
)
})
.unwrap();
buffer_1.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
1.zero
1.one
1.two
1.three
1.four
1.five
1.six
"
.unindent(),
),
cx,
);
});
// buffer has a deletion hunk and an insertion hunk
let buffer_2 = project
.update(cx, |project, cx| {
project.create_buffer(
"
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent()
.as_str(),
None,
cx,
)
})
.unwrap();
buffer_2.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
2.zero
2.one
2.one-and-a-half
2.two
2.three
2.four
2.six
"
.unindent(),
),
cx,
);
});
cx.foreground().run_until_parked();
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
buffer_1.clone(),
[
// excerpt ends in the middle of a modified hunk
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt begins in the middle of a modified hunk
ExcerptRange {
context: Point::new(5, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer.push_excerpts(
buffer_2.clone(),
[
// excerpt ends at a deletion
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt starts at a deletion
ExcerptRange {
context: Point::new(2, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains a deletion hunk
ExcerptRange {
context: Point::new(1, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains an insertion hunk
ExcerptRange {
context: Point::new(4, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer
});
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
assert_eq!(
snapshot.text(),
"
1.zero
1.ONE
1.FIVE
1.six
2.zero
2.one
2.two
2.one
2.two
2.four
2.five
2.six"
.unindent()
);
let expected = [
(DiffHunkStatus::Modified, 1..2),
(DiffHunkStatus::Modified, 2..3),
//TODO: Define better when and where removed hunks show up at range extremities
(DiffHunkStatus::Removed, 6..6),
(DiffHunkStatus::Removed, 8..8),
(DiffHunkStatus::Added, 10..11),
];
assert_eq!(
snapshot
.git_diff_hunks_in_range(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
&expected,
);
assert_eq!(
snapshot
.git_diff_hunks_in_range_rev(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
expected
.iter()
.rev()
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}
}


@@ -8,6 +8,7 @@ use crate::{
 use gpui::{ModelHandle, ViewContext};
+use project::Project;
 use util::test::{marked_text_offsets, marked_text_ranges};
 #[cfg(test)]
@@ -63,9 +64,20 @@ pub fn assert_text_with_selections(
 assert_eq!(editor.selections.ranges(cx), text_ranges);
 }
+// RA thinks this is dead code even though it is used in a whole lot of tests
+#[allow(dead_code)]
+#[cfg(any(test, feature = "test-support"))]
 pub(crate) fn build_editor(
 buffer: ModelHandle<MultiBuffer>,
 cx: &mut ViewContext<Editor>,
 ) -> Editor {
 Editor::new(EditorMode::Full, buffer, None, None, cx)
 }
+pub(crate) fn build_editor_with_project(
+project: ModelHandle<Project>,
+buffer: ModelHandle<MultiBuffer>,
+cx: &mut ViewContext<Editor>,
+) -> Editor {
+Editor::new(EditorMode::Full, buffer, Some(project), None, cx)
+}


@@ -6,18 +6,18 @@ use std::{
 use anyhow::Result;
+use crate::{Editor, ToPoint};
 use collections::HashSet;
 use futures::Future;
 use gpui::{json, ViewContext, ViewHandle};
 use indoc::indoc;
 use language::{point_to_lsp, FakeLspAdapter, Language, LanguageConfig, LanguageQueries};
 use lsp::{notification, request};
+use multi_buffer::ToPointUtf16;
 use project::Project;
 use smol::stream::StreamExt;
 use workspace::{AppState, Workspace, WorkspaceHandle};
-use crate::{multi_buffer::ToPointUtf16, Editor, ToPoint};
 use super::editor_test_context::EditorTestContext;
 pub struct EditorLspTestContext<'a> {


@ -18,7 +18,7 @@ use util::{
test::{generate_marked_text, marked_text_ranges}, test::{generate_marked_text, marked_text_ranges},
}; };
use super::build_editor; use super::build_editor_with_project;
pub struct EditorTestContext<'a> { pub struct EditorTestContext<'a> {
pub cx: &'a mut gpui::TestAppContext, pub cx: &'a mut gpui::TestAppContext,
@ -29,13 +29,24 @@ pub struct EditorTestContext<'a> {
impl<'a> EditorTestContext<'a> { impl<'a> EditorTestContext<'a> {
pub async fn new(cx: &'a mut gpui::TestAppContext) -> EditorTestContext<'a> { pub async fn new(cx: &'a mut gpui::TestAppContext) -> EditorTestContext<'a> {
let fs = FakeFs::new(cx.background()); let fs = FakeFs::new(cx.background());
let project = Project::test(fs, [], cx).await; // fs.insert_file("/file", "".to_owned()).await;
fs.insert_tree(
"/root",
gpui::serde_json::json!({
"file": "",
}),
)
.await;
let project = Project::test(fs, ["/root".as_ref()], cx).await;
let buffer = project let buffer = project
.update(cx, |project, cx| project.create_buffer("", None, cx)) .update(cx, |project, cx| {
project.open_local_buffer("/root/file", cx)
})
.await
.unwrap(); .unwrap();
let window = cx.add_window(|cx| { let window = cx.add_window(|cx| {
cx.focus_self(); cx.focus_self();
build_editor(MultiBuffer::build_from_buffer(buffer, cx), cx) build_editor_with_project(project, MultiBuffer::build_from_buffer(buffer, cx), cx)
}); });
let editor = window.root(cx); let editor = window.root(cx);
Self { Self {


@ -38,7 +38,7 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
str, str,
sync::{ sync::{
atomic::{AtomicUsize, Ordering::SeqCst}, atomic::{AtomicU64, AtomicUsize, Ordering::SeqCst},
Arc, Arc,
}, },
}; };
@ -115,6 +115,7 @@ pub struct CachedLspAdapter {
pub disk_based_diagnostics_progress_token: Option<String>, pub disk_based_diagnostics_progress_token: Option<String>,
pub language_ids: HashMap<String, String>, pub language_ids: HashMap<String, String>,
pub adapter: Arc<dyn LspAdapter>, pub adapter: Arc<dyn LspAdapter>,
pub reinstall_attempt_count: AtomicU64,
} }
impl CachedLspAdapter { impl CachedLspAdapter {
@ -133,6 +134,7 @@ impl CachedLspAdapter {
disk_based_diagnostics_progress_token, disk_based_diagnostics_progress_token,
language_ids, language_ids,
adapter, adapter,
reinstall_attempt_count: AtomicU64::new(0),
}) })
} }
@ -645,7 +647,7 @@ struct LanguageRegistryState {
pub struct PendingLanguageServer { pub struct PendingLanguageServer {
pub server_id: LanguageServerId, pub server_id: LanguageServerId,
pub task: Task<Result<Option<lsp::LanguageServer>>>, pub task: Task<Result<lsp::LanguageServer>>,
pub container_dir: Option<Arc<Path>>, pub container_dir: Option<Arc<Path>>,
} }
@ -884,6 +886,7 @@ impl LanguageRegistry {
pub fn create_pending_language_server( pub fn create_pending_language_server(
self: &Arc<Self>, self: &Arc<Self>,
stderr_capture: Arc<Mutex<Option<String>>>,
language: Arc<Language>, language: Arc<Language>,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>, root_path: Arc<Path>,
@ -923,7 +926,7 @@ impl LanguageRegistry {
}) })
.detach(); .detach();
Ok(Some(server)) Ok(server)
}); });
return Some(PendingLanguageServer { return Some(PendingLanguageServer {
@ -971,24 +974,23 @@ impl LanguageRegistry {
.clone(); .clone();
drop(lock); drop(lock);
let binary = match entry.clone().await.log_err() { let binary = match entry.clone().await {
Some(binary) => binary, Ok(binary) => binary,
None => return Ok(None), Err(err) => anyhow::bail!("{err}"),
}; };
if let Some(task) = adapter.will_start_server(&delegate, &mut cx) { if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
if task.await.log_err().is_none() { task.await?;
return Ok(None);
}
} }
Ok(Some(lsp::LanguageServer::new( lsp::LanguageServer::new(
stderr_capture,
server_id, server_id,
binary, binary,
&root_path, &root_path,
adapter.code_action_kinds(), adapter.code_action_kinds(),
cx, cx,
)?)) )
}) })
}; };


@ -136,6 +136,7 @@ struct Error {
impl LanguageServer { impl LanguageServer {
pub fn new( pub fn new(
stderr_capture: Arc<Mutex<Option<String>>>,
server_id: LanguageServerId, server_id: LanguageServerId,
binary: LanguageServerBinary, binary: LanguageServerBinary,
root_path: &Path, root_path: &Path,
@ -165,6 +166,7 @@ impl LanguageServer {
stdin, stdin,
stdout, stdout,
Some(stderr), Some(stderr),
stderr_capture,
Some(server), Some(server),
root_path, root_path,
code_action_kinds, code_action_kinds,
@ -197,6 +199,7 @@ impl LanguageServer {
stdin: Stdin, stdin: Stdin,
stdout: Stdout, stdout: Stdout,
stderr: Option<Stderr>, stderr: Option<Stderr>,
stderr_capture: Arc<Mutex<Option<String>>>,
server: Option<Child>, server: Option<Child>,
root_path: &Path, root_path: &Path,
code_action_kinds: Option<Vec<CodeActionKind>>, code_action_kinds: Option<Vec<CodeActionKind>>,
@ -218,20 +221,23 @@ impl LanguageServer {
let io_handlers = Arc::new(Mutex::new(HashMap::default())); let io_handlers = Arc::new(Mutex::new(HashMap::default()));
let stdout_input_task = cx.spawn(|cx| { let stdout_input_task = cx.spawn(|cx| {
{ Self::handle_input(
Self::handle_input( stdout,
stdout, on_unhandled_notification.clone(),
on_unhandled_notification.clone(), notification_handlers.clone(),
notification_handlers.clone(), response_handlers.clone(),
response_handlers.clone(), io_handlers.clone(),
io_handlers.clone(), cx,
cx, )
)
}
.log_err() .log_err()
}); });
let stderr_input_task = stderr let stderr_input_task = stderr
.map(|stderr| cx.spawn(|_| Self::handle_stderr(stderr, io_handlers.clone()).log_err())) .map(|stderr| {
cx.spawn(|_| {
Self::handle_stderr(stderr, io_handlers.clone(), stderr_capture.clone())
.log_err()
})
})
.unwrap_or_else(|| Task::Ready(Some(None))); .unwrap_or_else(|| Task::Ready(Some(None)));
let input_task = cx.spawn(|_| async move { let input_task = cx.spawn(|_| async move {
let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task); let (stdout, stderr) = futures::join!(stdout_input_task, stderr_input_task);
@ -353,12 +359,14 @@ impl LanguageServer {
async fn handle_stderr<Stderr>( async fn handle_stderr<Stderr>(
stderr: Stderr, stderr: Stderr,
io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>, io_handlers: Arc<Mutex<HashMap<usize, IoHandler>>>,
stderr_capture: Arc<Mutex<Option<String>>>,
) -> anyhow::Result<()> ) -> anyhow::Result<()>
where where
Stderr: AsyncRead + Unpin + Send + 'static, Stderr: AsyncRead + Unpin + Send + 'static,
{ {
let mut stderr = BufReader::new(stderr); let mut stderr = BufReader::new(stderr);
let mut buffer = Vec::new(); let mut buffer = Vec::new();
loop { loop {
buffer.clear(); buffer.clear();
stderr.read_until(b'\n', &mut buffer).await?; stderr.read_until(b'\n', &mut buffer).await?;
@ -367,6 +375,10 @@ impl LanguageServer {
for handler in io_handlers.lock().values_mut() { for handler in io_handlers.lock().values_mut() {
handler(IoKind::StdErr, message); handler(IoKind::StdErr, message);
} }
if let Some(stderr) = stderr_capture.lock().as_mut() {
stderr.push_str(message);
}
} }
// Don't starve the main thread when receiving lots of messages at once. // Don't starve the main thread when receiving lots of messages at once.
@ -938,6 +950,7 @@ impl LanguageServer {
stdin_writer, stdin_writer,
stdout_reader, stdout_reader,
None::<async_pipe::PipeReader>, None::<async_pipe::PipeReader>,
Arc::new(Mutex::new(None)),
None, None,
Path::new("/"), Path::new("/"),
None, None,
@ -950,6 +963,7 @@ impl LanguageServer {
stdout_writer, stdout_writer,
stdin_reader, stdin_reader,
None::<async_pipe::PipeReader>, None::<async_pipe::PipeReader>,
Arc::new(Mutex::new(None)),
None, None,
Path::new("/"), Path::new("/"),
None, None,


@ -0,0 +1,80 @@
[package]
name = "multi_buffer"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/multi_buffer.rs"
doctest = false
[features]
test-support = [
"copilot/test-support",
"text/test-support",
"language/test-support",
"gpui/test-support",
"util/test-support",
"tree-sitter-rust",
"tree-sitter-typescript"
]
[dependencies]
client = { path = "../client" }
clock = { path = "../clock" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
git = { path = "../git" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
rich_text = { path = "../rich_text" }
settings = { path = "../settings" }
snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
text = { path = "../text" }
theme = { path = "../theme" }
util = { path = "../util" }
aho-corasick = "1.1"
anyhow.workspace = true
convert_case = "0.6.0"
futures.workspace = true
indoc = "1.0.4"
itertools = "0.10"
lazy_static.workspace = true
log.workspace = true
ordered-float.workspace = true
parking_lot.workspace = true
postage.workspace = true
pulldown-cmark = { version = "0.9.2", default-features = false }
rand.workspace = true
schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
smallvec.workspace = true
smol.workspace = true
tree-sitter-rust = { workspace = true, optional = true }
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies]
copilot = { path = "../copilot", features = ["test-support"] }
text = { path = "../text", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
rand.workspace = true
unindent.workspace = true
tree-sitter.workspace = true
tree-sitter-rust.workspace = true
tree-sitter-html.workspace = true
tree-sitter-typescript.workspace = true


@@ -8,9 +8,9 @@ use sum_tree::Bias;
 #[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
 pub struct Anchor {
-pub(crate) buffer_id: Option<u64>,
-pub(crate) excerpt_id: ExcerptId,
-pub(crate) text_anchor: text::Anchor,
+pub buffer_id: Option<u64>,
+pub excerpt_id: ExcerptId,
+pub text_anchor: text::Anchor,
 }
 impl Anchor {
@@ -30,10 +30,6 @@ impl Anchor {
 }
 }
-pub fn excerpt_id(&self) -> ExcerptId {
-self.excerpt_id
-}
 pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
 let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
 if excerpt_id_cmp.is_eq() {


@ -303,7 +303,7 @@ impl MultiBuffer {
self.snapshot.borrow().clone() self.snapshot.borrow().clone()
} }
pub(crate) fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> { pub fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> {
self.sync(cx); self.sync(cx);
self.snapshot.borrow() self.snapshot.borrow()
} }
@ -589,7 +589,7 @@ impl MultiBuffer {
self.start_transaction_at(Instant::now(), cx) self.start_transaction_at(Instant::now(), cx)
} }
pub(crate) fn start_transaction_at( pub fn start_transaction_at(
&mut self, &mut self,
now: Instant, now: Instant,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
@ -608,7 +608,7 @@ impl MultiBuffer {
self.end_transaction_at(Instant::now(), cx) self.end_transaction_at(Instant::now(), cx)
} }
pub(crate) fn end_transaction_at( pub fn end_transaction_at(
&mut self, &mut self,
now: Instant, now: Instant,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
@ -1508,7 +1508,7 @@ impl MultiBuffer {
"untitled".into() "untitled".into()
} }
#[cfg(test)] #[cfg(any(test, feature = "test-support"))]
pub fn is_parsing(&self, cx: &AppContext) -> bool { pub fn is_parsing(&self, cx: &AppContext) -> bool {
self.as_singleton().unwrap().read(cx).is_parsing() self.as_singleton().unwrap().read(cx).is_parsing()
} }
@ -3198,7 +3198,7 @@ impl MultiBufferSnapshot {
theme: Option<&SyntaxTheme>, theme: Option<&SyntaxTheme>,
) -> Option<(u64, Vec<OutlineItem<Anchor>>)> { ) -> Option<(u64, Vec<OutlineItem<Anchor>>)> {
let anchor = self.anchor_before(offset); let anchor = self.anchor_before(offset);
let excerpt_id = anchor.excerpt_id(); let excerpt_id = anchor.excerpt_id;
let excerpt = self.excerpt(excerpt_id)?; let excerpt = self.excerpt(excerpt_id)?;
Some(( Some((
excerpt.buffer_id, excerpt.buffer_id,
@ -4129,17 +4129,13 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use crate::editor_tests::init_test;
use super::*; use super::*;
use futures::StreamExt; use futures::StreamExt;
use gpui::{AppContext, TestAppContext}; use gpui::{AppContext, TestAppContext};
use language::{Buffer, Rope}; use language::{Buffer, Rope};
use project::{FakeFs, Project};
use rand::prelude::*; use rand::prelude::*;
use settings::SettingsStore; use settings::SettingsStore;
use std::{env, rc::Rc}; use std::{env, rc::Rc};
use unindent::Unindent;
use util::test::sample_text; use util::test::sample_text;
#[gpui::test] #[gpui::test]
@ -4838,190 +4834,6 @@ mod tests {
); );
} }
#[gpui::test]
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
use git::diff::DiffHunkStatus;
init_test(cx, |_| {});
let fs = FakeFs::new(cx.background());
let project = Project::test(fs, [], cx).await;
// buffer has two modified hunks with two rows each
let buffer_1 = project
.update(cx, |project, cx| {
project.create_buffer(
"
1.zero
1.ONE
1.TWO
1.three
1.FOUR
1.FIVE
1.six
"
.unindent()
.as_str(),
None,
cx,
)
})
.unwrap();
buffer_1.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
1.zero
1.one
1.two
1.three
1.four
1.five
1.six
"
.unindent(),
),
cx,
);
});
// buffer has a deletion hunk and an insertion hunk
let buffer_2 = project
.update(cx, |project, cx| {
project.create_buffer(
"
2.zero
2.one
2.two
2.three
2.four
2.five
2.six
"
.unindent()
.as_str(),
None,
cx,
)
})
.unwrap();
buffer_2.update(cx, |buffer, cx| {
buffer.set_diff_base(
Some(
"
2.zero
2.one
2.one-and-a-half
2.two
2.three
2.four
2.six
"
.unindent(),
),
cx,
);
});
cx.foreground().run_until_parked();
let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(
buffer_1.clone(),
[
// excerpt ends in the middle of a modified hunk
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt begins in the middle of a modified hunk
ExcerptRange {
context: Point::new(5, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer.push_excerpts(
buffer_2.clone(),
[
// excerpt ends at a deletion
ExcerptRange {
context: Point::new(0, 0)..Point::new(1, 5),
primary: Default::default(),
},
// excerpt starts at a deletion
ExcerptRange {
context: Point::new(2, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains a deletion hunk
ExcerptRange {
context: Point::new(1, 0)..Point::new(2, 5),
primary: Default::default(),
},
// excerpt fully contains an insertion hunk
ExcerptRange {
context: Point::new(4, 0)..Point::new(6, 5),
primary: Default::default(),
},
],
cx,
);
multibuffer
});
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
assert_eq!(
snapshot.text(),
"
1.zero
1.ONE
1.FIVE
1.six
2.zero
2.one
2.two
2.one
2.two
2.four
2.five
2.six"
.unindent()
);
let expected = [
(DiffHunkStatus::Modified, 1..2),
(DiffHunkStatus::Modified, 2..3),
//TODO: Define better when and where removed hunks show up at range extremities
(DiffHunkStatus::Removed, 6..6),
(DiffHunkStatus::Removed, 8..8),
(DiffHunkStatus::Added, 10..11),
];
assert_eq!(
snapshot
.git_diff_hunks_in_range(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
&expected,
);
assert_eq!(
snapshot
.git_diff_hunks_in_range_rev(0..12)
.map(|hunk| (hunk.status(), hunk.buffer_range))
.collect::<Vec<_>>(),
expected
.iter()
.rev()
.cloned()
.collect::<Vec<_>>()
.as_slice(),
);
}
#[gpui::test(iterations = 100)] #[gpui::test(iterations = 100)]
fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) { fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) {
let operations = env::var("OPERATIONS") let operations = env::var("OPERATIONS")


@@ -27,6 +27,7 @@ serde_derive.workspace = true
 serde_json.workspace = true
 anyhow.workspace = true
 futures.workspace = true
+parking_lot.workspace = true
 [dev-dependencies]
 language = { path = "../language", features = ["test-support"] }


@ -67,91 +67,39 @@ impl Prettier {
starting_path: Option<LocateStart>, starting_path: Option<LocateStart>,
fs: Arc<dyn Fs>, fs: Arc<dyn Fs>,
) -> anyhow::Result<PathBuf> { ) -> anyhow::Result<PathBuf> {
fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
path_component.as_os_str().to_string_lossy() == "node_modules"
}
let paths_to_check = match starting_path.as_ref() { let paths_to_check = match starting_path.as_ref() {
Some(starting_path) => { Some(starting_path) => {
let worktree_root = starting_path let worktree_root = starting_path
.worktree_root_path .worktree_root_path
.components() .components()
.into_iter() .into_iter()
.take_while(|path_component| { .take_while(|path_component| !is_node_modules(path_component))
path_component.as_os_str().to_string_lossy() != "node_modules"
})
.collect::<PathBuf>(); .collect::<PathBuf>();
if worktree_root != starting_path.worktree_root_path.as_ref() { if worktree_root != starting_path.worktree_root_path.as_ref() {
vec![worktree_root] vec![worktree_root]
} else { } else {
let (worktree_root_metadata, start_path_metadata) = if starting_path if starting_path.starting_path.as_ref() == Path::new("") {
.starting_path worktree_root
.as_ref() .parent()
== Path::new("") .map(|path| vec![path.to_path_buf()])
{ .unwrap_or_default()
let worktree_root_data =
fs.metadata(&worktree_root).await.with_context(|| {
format!(
"FS metadata fetch for worktree root path {worktree_root:?}",
)
})?;
(worktree_root_data.unwrap_or_else(|| {
panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
}), None)
} else { } else {
let full_starting_path = worktree_root.join(&starting_path.starting_path); let file_to_format = starting_path.starting_path.as_ref();
let (worktree_root_data, start_path_data) = futures::try_join!( let mut paths_to_check = VecDeque::new();
fs.metadata(&worktree_root), let mut current_path = worktree_root;
fs.metadata(&full_starting_path), for path_component in file_to_format.components().into_iter() {
) let new_path = current_path.join(path_component);
.with_context(|| { let old_path = std::mem::replace(&mut current_path, new_path);
format!("FS metadata fetch for starting path {full_starting_path:?}",) paths_to_check.push_front(old_path);
})?; if is_node_modules(&path_component) {
( break;
worktree_root_data.unwrap_or_else(|| {
panic!("cannot query prettier for non existing worktree root at {worktree_root_data:?}")
}),
start_path_data,
)
};
match start_path_metadata {
Some(start_path_metadata) => {
anyhow::ensure!(worktree_root_metadata.is_dir,
"For non-empty start path, worktree root {starting_path:?} should be a directory");
anyhow::ensure!(
!start_path_metadata.is_dir,
"For non-empty start path, it should not be a directory {starting_path:?}"
);
anyhow::ensure!(
!start_path_metadata.is_symlink,
"For non-empty start path, it should not be a symlink {starting_path:?}"
);
let file_to_format = starting_path.starting_path.as_ref();
let mut paths_to_check = VecDeque::from(vec![worktree_root.clone()]);
let mut current_path = worktree_root;
for path_component in file_to_format.components().into_iter() {
current_path = current_path.join(path_component);
paths_to_check.push_front(current_path.clone());
if path_component.as_os_str().to_string_lossy() == "node_modules" {
break;
}
} }
paths_to_check.pop_front(); // last one is the file itself or node_modules, skip it
Vec::from(paths_to_check)
}
None => {
anyhow::ensure!(
!worktree_root_metadata.is_dir,
"For empty start path, worktree root should not be a directory {starting_path:?}"
);
anyhow::ensure!(
!worktree_root_metadata.is_symlink,
"For empty start path, worktree root should not be a symlink {starting_path:?}"
);
worktree_root
.parent()
.map(|path| vec![path.to_path_buf()])
.unwrap_or_default()
} }
Vec::from(paths_to_check)
} }
} }
} }
@ -210,6 +158,7 @@ impl Prettier {
.spawn(async move { node.binary_path().await }) .spawn(async move { node.binary_path().await })
.await?; .await?;
let server = LanguageServer::new( let server = LanguageServer::new(
Arc::new(parking_lot::Mutex::new(None)),
server_id, server_id,
LanguageServerBinary { LanguageServerBinary {
path: node_path, path: node_path,


@ -52,6 +52,7 @@ use lsp::{
}; };
use lsp_command::*; use lsp_command::*;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch; use postage::watch;
use prettier::{LocateStart, Prettier}; use prettier::{LocateStart, Prettier};
use project_settings::{LspSettings, ProjectSettings}; use project_settings::{LspSettings, ProjectSettings};
@ -90,6 +91,8 @@ pub use fs::*;
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX; pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::*; pub use worktree::*;
const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
pub trait Item { pub trait Item {
fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>; fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>; fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
@ -2721,12 +2724,18 @@ impl Project {
language: Arc<Language>, language: Arc<Language>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) { ) {
if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
return;
}
let key = (worktree_id, adapter.name.clone()); let key = (worktree_id, adapter.name.clone());
if self.language_server_ids.contains_key(&key) { if self.language_server_ids.contains_key(&key) {
return; return;
} }
let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
let pending_server = match self.languages.create_pending_language_server( let pending_server = match self.languages.create_pending_language_server(
stderr_capture.clone(),
language.clone(), language.clone(),
adapter.clone(), adapter.clone(),
worktree_path, worktree_path,
@ -2763,29 +2772,41 @@ impl Project {
.await; .await;
match result { match result {
Ok(server) => server, Ok(server) => {
stderr_capture.lock().take();
Some(server)
}
Err(err) => { Err(err) => {
log::error!("failed to start language server {:?}: {}", server_name, err); log::error!("failed to start language server {server_name:?}: {err}");
log::error!("server stderr: {:?}", stderr_capture.lock().take());
if let Some(this) = this.upgrade(&cx) { let this = this.upgrade(&cx)?;
if let Some(container_dir) = container_dir { let container_dir = container_dir?;
let installation_test_binary = adapter
.installation_test_binary(container_dir.to_path_buf())
.await;
this.update(&mut cx, |_, cx| { let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
Self::check_errored_server( if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
language, let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
adapter, log::error!(
server_id, "Hit {max} max reinstallation attempts for {server_name:?}"
installation_test_binary, );
cx, return None;
)
});
}
} }
let installation_test_binary = adapter
.installation_test_binary(container_dir.to_path_buf())
.await;
this.update(&mut cx, |_, cx| {
Self::check_errored_server(
language,
adapter,
server_id,
installation_test_binary,
cx,
)
});
None None
} }
} }
@ -2862,20 +2883,17 @@ impl Project {
server_id: LanguageServerId, server_id: LanguageServerId,
key: (WorktreeId, LanguageServerName), key: (WorktreeId, LanguageServerName),
cx: &mut AsyncAppContext, cx: &mut AsyncAppContext,
) -> Result<Option<Arc<LanguageServer>>> { ) -> Result<Arc<LanguageServer>> {
let setup = Self::setup_pending_language_server( let language_server = Self::setup_pending_language_server(
this, this,
override_initialization_options, override_initialization_options,
pending_server, pending_server,
adapter.clone(), adapter.clone(),
server_id, server_id,
cx, cx,
); )
.await?;
let language_server = match setup.await? {
Some(language_server) => language_server,
None => return Ok(None),
};
let this = match this.upgrade(cx) { let this = match this.upgrade(cx) {
Some(this) => this, Some(this) => this,
None => return Err(anyhow!("failed to upgrade project handle")), None => return Err(anyhow!("failed to upgrade project handle")),
@ -2892,7 +2910,7 @@ impl Project {
) )
})?; })?;
Ok(Some(language_server)) Ok(language_server)
} }
async fn setup_pending_language_server( async fn setup_pending_language_server(
@ -2902,12 +2920,9 @@ impl Project {
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
server_id: LanguageServerId, server_id: LanguageServerId,
cx: &mut AsyncAppContext, cx: &mut AsyncAppContext,
) -> Result<Option<Arc<LanguageServer>>> { ) -> Result<Arc<LanguageServer>> {
let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await; let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
let language_server = match pending_server.task.await? { let language_server = pending_server.task.await?;
Some(server) => server,
None => return Ok(None),
};
language_server language_server
.on_notification::<lsp::notification::PublishDiagnostics, _>({ .on_notification::<lsp::notification::PublishDiagnostics, _>({
@ -2978,6 +2993,7 @@ impl Project {
}, },
) )
.detach(); .detach();
language_server language_server
.on_request::<lsp::request::RegisterCapability, _, _>({ .on_request::<lsp::request::RegisterCapability, _, _>({
move |params, mut cx| async move { move |params, mut cx| async move {
@ -3043,6 +3059,7 @@ impl Project {
} }
}) })
.detach(); .detach();
let mut initialization_options = adapter.adapter.initialization_options().await; let mut initialization_options = adapter.adapter.initialization_options().await;
match (&mut initialization_options, override_options) { match (&mut initialization_options, override_options) {
(Some(initialization_options), Some(override_options)) => { (Some(initialization_options), Some(override_options)) => {
@ -3062,7 +3079,7 @@ impl Project {
) )
.ok(); .ok();
Ok(Some(language_server)) Ok(language_server)
} }
fn insert_newly_running_language_server( fn insert_newly_running_language_server(


@@ -4,7 +4,7 @@ use collections::HashMap;
 use gpui::{AppContext, AssetSource};
 use serde_derive::Deserialize;
-use util::{iife, paths::PathExt};
+use util::{maybe, paths::PathExt};
 #[derive(Deserialize, Debug)]
 struct TypeConfig {
@@ -42,12 +42,12 @@ impl FileAssociations {
 }
 pub fn get_icon(path: &Path, cx: &AppContext) -> Arc<str> {
-iife!({
+maybe!({
 let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
 // FIXME: Associate a type with the languages and have the file's langauge
 // override these associations
-iife!({
+maybe!({
 let suffix = path.icon_suffix()?;
 this.suffixes
@@ -61,7 +61,7 @@ impl FileAssociations {
 }
 pub fn get_folder_icon(expanded: bool, cx: &AppContext) -> Arc<str> {
-iife!({
+maybe!({
 let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
 let key = if expanded {
@@ -78,7 +78,7 @@ impl FileAssociations {
 }
 pub fn get_chevron_icon(expanded: bool, cx: &AppContext) -> Arc<str> {
-iife!({
+maybe!({
 let this = cx.has_global::<Self>().then(|| cx.global::<Self>())?;
 let key = if expanded {


@@ -9,4 +9,4 @@ pub use notification::*;
 pub use peer::*;
 mod macros;
-pub const PROTOCOL_VERSION: u32 = 65;
+pub const PROTOCOL_VERSION: u32 = 66;


@@ -1,5 +1,5 @@
 use crate::http::HttpClient;
-use anyhow::{anyhow, Context, Result};
+use anyhow::{anyhow, bail, Context, Result};
 use futures::AsyncReadExt;
 use serde::Deserialize;
 use std::sync::Arc;
@@ -46,6 +46,14 @@ pub async fn latest_github_release(
 .await
 .context("error reading latest release")?;
+if response.status().is_client_error() {
+let text = String::from_utf8_lossy(body.as_slice());
+bail!(
+"status error {}, response: {text:?}",
+response.status().as_u16()
+);
+}
 let releases = match serde_json::from_slice::<Vec<GithubRelease>>(body.as_slice()) {
 Ok(releases) => releases,


@@ -349,19 +349,19 @@ pub fn unzip_option<T, U>(option: Option<(T, U)>) -> (Option<T>, Option<U>) {
 }
 }
-/// Immediately invoked function expression. Good for using the ? operator
+/// Evaluates to an immediately invoked function expression. Good for using the ? operator
 /// in functions which do not return an Option or Result
 #[macro_export]
-macro_rules! iife {
+macro_rules! maybe {
 ($block:block) => {
 (|| $block)()
 };
 }
-/// Async Immediately invoked function expression. Good for using the ? operator
-/// in functions which do not return an Option or Result. Async version of above
+/// Evaluates to an immediately invoked function expression. Good for using the ? operator
+/// in functions which do not return an Option or Result, but async.
 #[macro_export]
-macro_rules! async_iife {
+macro_rules! async_maybe {
 ($block:block) => {
 (|| async move { $block })()
 };
@@ -434,7 +434,7 @@ mod tests {
 None
 }
-let foo = iife!({
+let foo = maybe!({
 option_returning_function()?;
 Some(())
 });
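For reference, the renamed `maybe!` macro is just an immediately invoked closure, which is why `?` can short-circuit inside a function that returns `()`. Below is a minimal standalone sketch of that pattern, using the same macro body as the diff above; the `print_first_char` helper is hypothetical and exists only for illustration, it is not part of this commit.

    // Same shape as the `maybe!` macro above: wrap a block in a closure and call it.
    macro_rules! maybe {
        ($block:block) => {
            (|| $block)()
        };
    }

    // `?` works inside the closure because the closure returns an Option,
    // even though this function itself returns ().
    fn print_first_char(input: Option<&str>) {
        let first: Option<char> = maybe!({
            let s = input?;
            s.chars().next()
        });
        println!("first char: {:?}", first);
    }

    fn main() {
        print_first_char(Some("zed")); // first char: Some('z')
        print_first_char(None); // first char: None
    }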


@ -16,7 +16,7 @@ actions!(branches, [OpenRecent]);
pub fn init(cx: &mut AppContext) { pub fn init(cx: &mut AppContext) {
Picker::<BranchListDelegate>::init(cx); Picker::<BranchListDelegate>::init(cx);
cx.add_async_action(toggle); cx.add_action(toggle);
} }
pub type BranchList = Picker<BranchListDelegate>; pub type BranchList = Picker<BranchListDelegate>;
@ -24,30 +24,29 @@ pub fn build_branch_list(
workspace: ViewHandle<Workspace>, workspace: ViewHandle<Workspace>,
cx: &mut ViewContext<BranchList>, cx: &mut ViewContext<BranchList>,
) -> Result<BranchList> { ) -> Result<BranchList> {
Ok(Picker::new(BranchListDelegate::new(workspace, 29, cx)?, cx) let delegate = workspace.read_with(cx, |workspace, cx| {
.with_theme(|theme| theme.picker.clone())) BranchListDelegate::new(workspace, cx.handle(), 29, cx)
})?;
Ok(Picker::new(delegate, cx).with_theme(|theme| theme.picker.clone()))
} }
fn toggle( fn toggle(
_: &mut Workspace, workspace: &mut Workspace,
_: &OpenRecent, _: &OpenRecent,
cx: &mut ViewContext<Workspace>, cx: &mut ViewContext<Workspace>,
) -> Option<Task<Result<()>>> { ) -> Result<()> {
Some(cx.spawn(|workspace, mut cx| async move { // Modal branch picker has a longer trailoff than a popover one.
workspace.update(&mut cx, |workspace, cx| { let delegate = BranchListDelegate::new(workspace, cx.handle(), 70, cx)?;
// Modal branch picker has a longer trailoff than a popover one. workspace.toggle_modal(cx, |_, cx| {
let delegate = BranchListDelegate::new(cx.handle(), 70, cx)?; cx.add_view(|cx| {
workspace.toggle_modal(cx, |_, cx| { Picker::new(delegate, cx)
cx.add_view(|cx| { .with_theme(|theme| theme.picker.clone())
Picker::new(delegate, cx) .with_max_size(800., 1200.)
.with_theme(|theme| theme.picker.clone()) })
.with_max_size(800., 1200.) });
})
}); Ok(())
Ok::<_, anyhow::Error>(())
})??;
Ok(())
}))
} }
pub struct BranchListDelegate { pub struct BranchListDelegate {
@@ -62,15 +61,16 @@ pub struct BranchListDelegate {
impl BranchListDelegate { impl BranchListDelegate {
fn new( fn new(
workspace: ViewHandle<Workspace>, workspace: &Workspace,
handle: ViewHandle<Workspace>,
branch_name_trailoff_after: usize, branch_name_trailoff_after: usize,
cx: &AppContext, cx: &AppContext,
) -> Result<Self> { ) -> Result<Self> {
let project = workspace.read(cx).project().read(&cx); let project = workspace.project().read(&cx);
let Some(worktree) = project.visible_worktrees(cx).next() else { let Some(worktree) = project.visible_worktrees(cx).next() else {
bail!("Cannot update branch list as there are no visible worktrees") bail!("Cannot update branch list as there are no visible worktrees")
}; };
let mut cwd = worktree.read(cx).abs_path().to_path_buf(); let mut cwd = worktree.read(cx).abs_path().to_path_buf();
cwd.push(".git"); cwd.push(".git");
let Some(repo) = project.fs().open_repo(&cwd) else { let Some(repo) = project.fs().open_repo(&cwd) else {
@@ -79,13 +79,14 @@ impl BranchListDelegate {
let all_branches = repo.lock().branches()?; let all_branches = repo.lock().branches()?;
Ok(Self { Ok(Self {
matches: vec![], matches: vec![],
workspace, workspace: handle,
all_branches, all_branches,
selected_index: 0, selected_index: 0,
last_query: Default::default(), last_query: Default::default(),
branch_name_trailoff_after, branch_name_trailoff_after,
}) })
} }
fn display_error_toast(&self, message: String, cx: &mut ViewContext<BranchList>) { fn display_error_toast(&self, message: String, cx: &mut ViewContext<BranchList>) {
const GIT_CHECKOUT_FAILURE_ID: usize = 2048; const GIT_CHECKOUT_FAILURE_ID: usize = 2048;
self.workspace.update(cx, |model, ctx| { self.workspace.update(cx, |model, ctx| {

View file

@@ -1,7 +1,10 @@
use editor::scroll::VERTICAL_SCROLL_MARGIN; use editor::scroll::VERTICAL_SCROLL_MARGIN;
use indoc::indoc; use indoc::indoc;
use settings::SettingsStore; use settings::SettingsStore;
use std::ops::{Deref, DerefMut}; use std::{
ops::{Deref, DerefMut},
panic, thread,
};
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use gpui::{geometry::vector::vec2f, ContextHandle}; use gpui::{geometry::vector::vec2f, ContextHandle};
@@ -59,12 +62,22 @@ pub struct NeovimBackedTestContext<'a> {
impl<'a> NeovimBackedTestContext<'a> { impl<'a> NeovimBackedTestContext<'a> {
pub async fn new(cx: &'a mut gpui::TestAppContext) -> NeovimBackedTestContext<'a> { pub async fn new(cx: &'a mut gpui::TestAppContext) -> NeovimBackedTestContext<'a> {
let function_name = cx.function_name.clone(); // rust stores the name of the test on the current thread.
let cx = VimTestContext::new(cx, true).await; // We use this to automatically name a file that will store
// the neovim connection's requests/responses so that we can
// run without neovim on CI.
let thread = thread::current();
let test_name = thread
.name()
.expect("thread is not named")
.split(":")
.last()
.unwrap()
.to_string();
Self { Self {
cx, cx: VimTestContext::new(cx, true).await,
exemptions: Default::default(), exemptions: Default::default(),
neovim: NeovimConnection::new(function_name).await, neovim: NeovimConnection::new(test_name).await,
last_set_state: None, last_set_state: None,
recent_keystrokes: Default::default(), recent_keystrokes: Default::default(),
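
The new comments describe the trick: under the default parallel test harness, Rust names each test thread after the test path, so the last `:`-separated segment can name the recording file. A standalone sketch of that extraction, mirroring the hunk above but without the gpui test types:

use std::thread;

// Sketch of deriving the test name from the current thread's name. Assumes
// the default multi-threaded test harness, which names each test thread after
// the test path (e.g. "module::test_name").
fn current_test_name() -> String {
    let thread = thread::current();
    thread
        .name()
        .expect("thread is not named")
        .split(':')
        .last()
        .unwrap()
        .to_string()
}

#[test]
fn derives_name_from_test_thread() {
    // Holds under the default harness, which runs each test on its own named thread.
    assert_eq!(current_test_name(), "derives_name_from_test_thread");
}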

View file

@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor." description = "The fast, collaborative code editor."
edition = "2021" edition = "2021"
name = "zed" name = "zed"
version = "0.110.0" version = "0.111.0"
publish = false publish = false
[lib] [lib]

View file

@@ -2,8 +2,6 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"> <plist version="1.0">
<dict> <dict>
<key>com.apple.developer.associated-domains</key>
<array><string>applinks:zed.dev</string></array>
<key>com.apple.security.automation.apple-events</key> <key>com.apple.security.automation.apple-events</key>
<true/> <true/>
<key>com.apple.security.cs.allow-jit</key> <key>com.apple.security.cs.allow-jit</key>
@@ -12,8 +10,14 @@
<true/> <true/>
<key>com.apple.security.device.camera</key> <key>com.apple.security.device.camera</key>
<true/> <true/>
<key>com.apple.security.keychain-access-groups</key> <key>com.apple.security.personal-information.addressbook</key>
<array><string>MQ55VZLNZQ.dev.zed.Shared</string></array> <true/>
<key>com.apple.security.personal-information.calendars</key>
<true/>
<key>com.apple.security.personal-information.location</key>
<true/>
<key>com.apple.security.personal-information.photos-library</key>
<true/>
<!-- <key>com.apple.security.cs.disable-library-validation</key> <!-- <key>com.apple.security.cs.disable-library-validation</key>
<true/> --> <true/> -->
</dict> </dict>

View file

@@ -19,7 +19,7 @@ use std::{
}, },
}; };
use util::{ use util::{
async_iife, async_maybe,
fs::remove_matching, fs::remove_matching,
github::{latest_github_release, GitHubLspBinaryVersion}, github::{latest_github_release, GitHubLspBinaryVersion},
ResultExt, ResultExt,
@@ -421,7 +421,7 @@ impl LspAdapter for NextLspAdapter {
} }
async fn get_cached_server_binary_next(container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn get_cached_server_binary_next(container_dir: PathBuf) -> Option<LanguageServerBinary> {
async_iife!({ async_maybe!({
let mut last_binary_path = None; let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {
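
The adapters in this and the following hunk wrap their cache lookups in the renamed async_maybe! macro. A small sketch of that macro in isolation; the definition is copied from the util hunk earlier, while `first_line`, its input, and the `futures` executor are illustrative choices:

use futures::executor::block_on;

// Sketch only: the block becomes the body of an immediately invoked
// `async move` closure, so `?` works inside it even though the surrounding
// function does not return Option or Result directly.
macro_rules! async_maybe {
    ($block:block) => {
        (|| async move { $block })()
    };
}

async fn first_line(source: Option<&str>) -> Option<String> {
    async_maybe!({
        let text = source?;
        Some(text.lines().next()?.to_string())
    })
    .await
}

fn main() {
    println!("{:?}", block_on(first_line(Some("cached binary\nrest"))));
}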

View file

@@ -8,7 +8,7 @@ use lsp::LanguageServerBinary;
use smol::fs; use smol::fs;
use std::{any::Any, env::consts, path::PathBuf}; use std::{any::Any, env::consts, path::PathBuf};
use util::{ use util::{
async_iife, async_maybe,
github::{latest_github_release, GitHubLspBinaryVersion}, github::{latest_github_release, GitHubLspBinaryVersion},
ResultExt, ResultExt,
}; };
@@ -106,7 +106,7 @@ impl super::LspAdapter for LuaLspAdapter {
} }
async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
async_iife!({ async_maybe!({
let mut last_binary_path = None; let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await { while let Some(entry) = entries.next().await {

View file

@@ -1,4 +1,4 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, ensure, Result};
use async_trait::async_trait; use async_trait::async_trait;
use futures::StreamExt; use futures::StreamExt;
pub use language::*; pub use language::*;
@@ -98,7 +98,10 @@ impl super::LspAdapter for VueLspAdapter {
) )
.await?; .await?;
} }
assert!(fs::metadata(&server_path).await.is_ok()); ensure!(
fs::metadata(&server_path).await.is_ok(),
"@vue/language-server package installation failed"
);
if fs::metadata(&ts_path).await.is_err() { if fs::metadata(&ts_path).await.is_err() {
self.node self.node
.npm_install_packages( .npm_install_packages(
@@ -108,7 +111,10 @@ impl super::LspAdapter for VueLspAdapter {
.await?; .await?;
} }
assert!(fs::metadata(&ts_path).await.is_ok()); ensure!(
fs::metadata(&ts_path).await.is_ok(),
"typescript for Vue package installation failed"
);
*self.typescript_install_path.lock() = Some(ts_path); *self.typescript_install_path.lock() = Some(ts_path);
Ok(LanguageServerBinary { Ok(LanguageServerBinary {
path: self.node.binary_path().await?, path: self.node.binary_path().await?,
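
Replacing assert! with anyhow's ensure! turns a failed post-install check into an error the caller can surface instead of a process panic. A minimal sketch of the pattern; the message mirrors the hunk above, while the path and the synchronous existence check are placeholders for the awaited fs::metadata call:

use anyhow::{ensure, Result};
use std::path::Path;

// Sketch of the assert! -> ensure! change: the failed check becomes a
// reportable error rather than a panic.
fn verify_server_install(server_path: &Path) -> Result<()> {
    ensure!(
        server_path.exists(),
        "@vue/language-server package installation failed"
    );
    Ok(())
}

fn main() {
    if let Err(error) = verify_server_install(Path::new("/nonexistent/vue-language-server")) {
        eprintln!("{error}");
    }
}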

View file

@@ -34,7 +34,7 @@ use std::{
Arc, Weak, Arc, Weak,
}, },
thread, thread,
time::{Duration, SystemTime, UNIX_EPOCH}, time::{SystemTime, UNIX_EPOCH},
}; };
use util::{ use util::{
channel::{parse_zed_link, ReleaseChannel}, channel::{parse_zed_link, ReleaseChannel},
@@ -684,7 +684,7 @@ fn load_embedded_fonts(app: &App) {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
async fn watch_themes(fs: Arc<dyn Fs>, mut cx: AsyncAppContext) -> Option<()> { async fn watch_themes(fs: Arc<dyn Fs>, mut cx: AsyncAppContext) -> Option<()> {
let mut events = fs let mut events = fs
.watch("styles/src".as_ref(), Duration::from_millis(100)) .watch("styles/src".as_ref(), std::time::Duration::from_millis(100))
.await; .await;
while (events.next().await).is_some() { while (events.next().await).is_some() {
let output = Command::new("npm") let output = Command::new("npm")
@@ -710,7 +710,7 @@ async fn watch_languages(fs: Arc<dyn Fs>, languages: Arc<LanguageRegistry>) -> O
let mut events = fs let mut events = fs
.watch( .watch(
"crates/zed/src/languages".as_ref(), "crates/zed/src/languages".as_ref(),
Duration::from_millis(100), std::time::Duration::from_millis(100),
) )
.await; .await;
while (events.next().await).is_some() { while (events.next().await).is_some() {
@@ -725,7 +725,7 @@ fn watch_file_types(fs: Arc<dyn Fs>, cx: &mut AppContext) {
let mut events = fs let mut events = fs
.watch( .watch(
"assets/icons/file_icons/file_types.json".as_ref(), "assets/icons/file_icons/file_types.json".as_ref(),
Duration::from_millis(100), std::time::Duration::from_millis(100),
) )
.await; .await;
while (events.next().await).is_some() { while (events.next().await).is_some() {

View file

@@ -147,8 +147,9 @@ if [[ -n $MACOS_CERTIFICATE && -n $MACOS_CERTIFICATE_PASSWORD && -n $APPLE_NOTAR
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_CERTIFICATE_PASSWORD" zed.keychain security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_CERTIFICATE_PASSWORD" zed.keychain
# sequence of codesign commands modeled after this example: https://developer.apple.com/forums/thread/701514 # sequence of codesign commands modeled after this example: https://developer.apple.com/forums/thread/701514
/usr/bin/codesign --force --timestamp --sign "Zed Industries, Inc." "${app_path}/Contents/Frameworks/WebRTC.framework" -v /usr/bin/codesign --deep --force --timestamp --sign "Zed Industries, Inc." "${app_path}/Contents/Frameworks/WebRTC.framework" -v
/usr/bin/codesign --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v /usr/bin/codesign --deep --force --timestamp --options runtime --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/cli" -v
/usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}/Contents/MacOS/zed" -v
/usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}" -v /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "Zed Industries, Inc." "${app_path}" -v
security default-keychain -s login.keychain security default-keychain -s login.keychain