Merge branch 'main' into fix-broken-lsp-installations

Julia 2023-06-20 17:04:58 -04:00
commit f91e95f24a
54 changed files with 1827 additions and 595 deletions

.gitignore (vendored): 1 change

@@ -18,4 +18,5 @@ DerivedData/
 .swiftpm/config/registries.json
 .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
 .netrc
+.swiftpm
 **/*.db

Cargo.lock (generated): 44 changes

@ -114,6 +114,7 @@ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
"settings", "settings",
"smol",
"theme", "theme",
"tiktoken-rs", "tiktoken-rs",
"util", "util",
@ -593,7 +594,7 @@ dependencies = [
"http", "http",
"http-body", "http-body",
"hyper", "hyper",
"itoa", "itoa 1.0.6",
"matchit", "matchit",
"memchr", "memchr",
"mime", "mime",
@ -3011,7 +3012,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
dependencies = [ dependencies = [
"bytes 1.4.0", "bytes 1.4.0",
"fnv", "fnv",
"itoa", "itoa 1.0.6",
] ]
[[package]] [[package]]
@ -3070,7 +3071,7 @@ dependencies = [
"http-body", "http-body",
"httparse", "httparse",
"httpdate", "httpdate",
"itoa", "itoa 1.0.6",
"pin-project-lite 0.2.9", "pin-project-lite 0.2.9",
"socket2", "socket2",
"tokio", "tokio",
@ -3336,6 +3337,12 @@ dependencies = [
"either", "either",
] ]
[[package]]
name = "itoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.6" version = "1.0.6"
@ -3396,12 +3403,6 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "json_comments"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5"
[[package]] [[package]]
name = "jwt" name = "jwt"
version = "0.16.0" version = "0.16.0"
@ -5667,7 +5668,7 @@ dependencies = [
"bitflags", "bitflags",
"errno 0.2.8", "errno 0.2.8",
"io-lifetimes 0.5.3", "io-lifetimes 0.5.3",
"itoa", "itoa 1.0.6",
"libc", "libc",
"linux-raw-sys 0.0.42", "linux-raw-sys 0.0.42",
"once_cell", "once_cell",
@ -6099,7 +6100,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [ dependencies = [
"indexmap", "indexmap",
"itoa", "itoa 1.0.6",
"ryu",
"serde",
]
[[package]]
name = "serde_json_lenient"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d7b9ce5b0a63c6269b9623ed828b39259545a6ec0d8a35d6135ad6af6232add"
dependencies = [
"indexmap",
"itoa 0.4.8",
"ryu", "ryu",
"serde", "serde",
] ]
@ -6122,7 +6135,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [ dependencies = [
"form_urlencoded", "form_urlencoded",
"itoa", "itoa 1.0.6",
"ryu", "ryu",
"serde", "serde",
] ]
@ -6148,7 +6161,7 @@ dependencies = [
"fs", "fs",
"futures 0.3.28", "futures 0.3.28",
"gpui", "gpui",
"json_comments", "indoc",
"lazy_static", "lazy_static",
"postage", "postage",
"pretty_assertions", "pretty_assertions",
@ -6157,6 +6170,7 @@ dependencies = [
"serde", "serde",
"serde_derive", "serde_derive",
"serde_json", "serde_json",
"serde_json_lenient",
"smallvec", "smallvec",
"sqlez", "sqlez",
"staff_mode", "staff_mode",
@ -6507,7 +6521,7 @@ dependencies = [
"hkdf", "hkdf",
"hmac 0.12.1", "hmac 0.12.1",
"indexmap", "indexmap",
"itoa", "itoa 1.0.6",
"libc", "libc",
"libsqlite3-sys", "libsqlite3-sys",
"log", "log",
@ -6993,7 +7007,7 @@ version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc"
dependencies = [ dependencies = [
"itoa", "itoa 1.0.6",
"serde", "serde",
"time-core", "time-core",
"time-macros", "time-macros",


@@ -55,7 +55,40 @@
     "context": "Pane",
     "bindings": {
       "alt-cmd-/": "search::ToggleRegex",
-      "ctrl-0": "project_panel::ToggleFocus"
+      "ctrl-0": "project_panel::ToggleFocus",
+      "cmd-1": [
+        "pane::ActivateItem",
+        0
+      ],
+      "cmd-2": [
+        "pane::ActivateItem",
+        1
+      ],
+      "cmd-3": [
+        "pane::ActivateItem",
+        2
+      ],
+      "cmd-4": [
+        "pane::ActivateItem",
+        3
+      ],
+      "cmd-5": [
+        "pane::ActivateItem",
+        4
+      ],
+      "cmd-6": [
+        "pane::ActivateItem",
+        5
+      ],
+      "cmd-7": [
+        "pane::ActivateItem",
+        6
+      ],
+      "cmd-8": [
+        "pane::ActivateItem",
+        7
+      ],
+      "cmd-9": "pane::ActivateLastItem"
     }
   },
   {


@@ -200,7 +200,9 @@
     "context": "AssistantEditor > Editor",
     "bindings": {
       "cmd-enter": "assistant::Assist",
-      "cmd->": "assistant::QuoteSelection"
+      "cmd->": "assistant::QuoteSelection",
+      "shift-enter": "assistant::Split",
+      "ctrl-r": "assistant::CycleMessageRole"
     }
   },
   {


@@ -28,6 +28,7 @@ isahc.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+smol.workspace = true
 tiktoken-rs = "0.4"
 
 [dev-dependencies]


@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
 use std::fmt::{self, Display};
 
 // Data types for chat completion requests
-#[derive(Serialize)]
+#[derive(Debug, Serialize)]
 struct OpenAIRequest {
     model: String,
     messages: Vec<RequestMessage>,

File diff suppressed because it is too large


@@ -243,7 +243,7 @@ impl BlockMap {
             // Preserve any old transforms that precede this edit.
             let old_start = WrapRow(edit.old.start);
             let new_start = WrapRow(edit.new.start);
-            new_transforms.push_tree(cursor.slice(&old_start, Bias::Left, &()), &());
+            new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
             if let Some(transform) = cursor.item() {
                 if transform.is_isomorphic() && old_start == cursor.end(&()) {
                     new_transforms.push(transform.clone(), &());
@@ -425,7 +425,7 @@ impl BlockMap {
             push_isomorphic(&mut new_transforms, extent_after_edit);
         }
 
-        new_transforms.push_tree(cursor.suffix(&()), &());
+        new_transforms.append(cursor.suffix(&()), &());
         debug_assert_eq!(
             new_transforms.summary().input_rows,
             wrap_snapshot.max_point().row() + 1
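The push_tree to append rename in this hunk (and in the FoldMapWriter, FoldMap, WrapSnapshot, MultiBuffer, List, ListState, and SyntaxSnapshot hunks that follow) is mechanical: each call site still rebuilds a tree by slicing off the prefix that precedes an edit, pushing the replacement entries, and appending the untouched suffix. A rough, purely illustrative analogue of that rebuild pattern on a plain Vec, under assumed toy names that do not come from this commit:

// Toy illustration of the copy-prefix / splice / copy-suffix rebuild that the
// edited call sites perform with tree cursors; a Vec stands in for the tree.
fn splice(old: &[i32], edit_range: std::ops::Range<usize>, replacement: &[i32]) -> Vec<i32> {
    let mut new = Vec::new();
    new.extend_from_slice(&old[..edit_range.start]); // like cursor.slice(&edit.start, ..)
    new.extend_from_slice(replacement);              // like push(...) for each new item
    new.extend_from_slice(&old[edit_range.end..]);   // like append(cursor.suffix(..), ..)
    new
}

fn main() {
    let old = [1, 2, 3, 4, 5];
    assert_eq!(splice(&old, 1..3, &[20, 30, 40]), vec![1, 20, 30, 40, 4, 5]);
}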


@@ -115,10 +115,10 @@ impl<'a> FoldMapWriter<'a> {
             let mut new_tree = SumTree::new();
             let mut cursor = self.0.folds.cursor::<Fold>();
             for fold in folds {
-                new_tree.push_tree(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
+                new_tree.append(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
                 new_tree.push(fold, &buffer);
             }
-            new_tree.push_tree(cursor.suffix(&buffer), &buffer);
+            new_tree.append(cursor.suffix(&buffer), &buffer);
             new_tree
         };
 
@@ -165,10 +165,10 @@ impl<'a> FoldMapWriter<'a> {
             let mut cursor = self.0.folds.cursor::<usize>();
             let mut folds = SumTree::new();
             for fold_ix in fold_ixs_to_delete {
-                folds.push_tree(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
+                folds.append(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
                 cursor.next(&buffer);
             }
-            folds.push_tree(cursor.suffix(&buffer), &buffer);
+            folds.append(cursor.suffix(&buffer), &buffer);
             folds
         };
 
@@ -302,7 +302,7 @@ impl FoldMap {
         cursor.seek(&0, Bias::Right, &());
 
         while let Some(mut edit) = buffer_edits_iter.next() {
-            new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &());
+            new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &());
 
             edit.new.start -= edit.old.start - cursor.start();
             edit.old.start = *cursor.start();
@@ -412,7 +412,7 @@ impl FoldMap {
                 }
             }
 
-            new_transforms.push_tree(cursor.suffix(&()), &());
+            new_transforms.append(cursor.suffix(&()), &());
             if new_transforms.is_empty() {
                 let text_summary = new_buffer.text_summary();
                 new_transforms.push(


@@ -353,7 +353,7 @@ impl WrapSnapshot {
                     }
                     old_cursor.next(&());
 
-                    new_transforms.push_tree(
+                    new_transforms.append(
                         old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
                         &(),
                     );
@@ -366,7 +366,7 @@ impl WrapSnapshot {
                        new_transforms.push_or_extend(Transform::isomorphic(summary));
                    }
                    old_cursor.next(&());
-                   new_transforms.push_tree(old_cursor.suffix(&()), &());
+                   new_transforms.append(old_cursor.suffix(&()), &());
                }
            }
        }
@@ -500,7 +500,7 @@ impl WrapSnapshot {
                        new_transforms.push_or_extend(Transform::isomorphic(summary));
                    }
                    old_cursor.next(&());
-                   new_transforms.push_tree(
+                   new_transforms.append(
                        old_cursor.slice(
                            &TabPoint::new(next_edit.old_rows.start, 0),
                            Bias::Right,
@@ -517,7 +517,7 @@ impl WrapSnapshot {
                        new_transforms.push_or_extend(Transform::isomorphic(summary));
                    }
                    old_cursor.next(&());
-                   new_transforms.push_tree(old_cursor.suffix(&()), &());
+                   new_transforms.append(old_cursor.suffix(&()), &());
                }
            }
        }


@@ -1010,7 +1010,7 @@ impl MultiBuffer {
 
         let suffix = cursor.suffix(&());
         let changed_trailing_excerpt = suffix.is_empty();
-        new_excerpts.push_tree(suffix, &());
+        new_excerpts.append(suffix, &());
         drop(cursor);
         snapshot.excerpts = new_excerpts;
         snapshot.excerpt_ids = new_excerpt_ids;
@@ -1193,7 +1193,7 @@ impl MultiBuffer {
         while let Some(excerpt_id) = excerpt_ids.next() {
             // Seek to the next excerpt to remove, preserving any preceding excerpts.
             let locator = snapshot.excerpt_locator_for_id(excerpt_id);
-            new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
+            new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
 
             if let Some(mut excerpt) = cursor.item() {
                 if excerpt.id != excerpt_id {
@@ -1245,7 +1245,7 @@ impl MultiBuffer {
         }
 
         let suffix = cursor.suffix(&());
         let changed_trailing_excerpt = suffix.is_empty();
-        new_excerpts.push_tree(suffix, &());
+        new_excerpts.append(suffix, &());
         drop(cursor);
         snapshot.excerpts = new_excerpts;
@@ -1509,7 +1509,7 @@ impl MultiBuffer {
         let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
 
         for (locator, buffer, buffer_edited) in excerpts_to_edit {
-            new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
+            new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
             let old_excerpt = cursor.item().unwrap();
             let buffer = buffer.read(cx);
             let buffer_id = buffer.remote_id();
@@ -1549,7 +1549,7 @@ impl MultiBuffer {
             new_excerpts.push(new_excerpt, &());
             cursor.next(&());
         }
 
-        new_excerpts.push_tree(cursor.suffix(&()), &());
+        new_excerpts.append(cursor.suffix(&()), &());
         drop(cursor);
         snapshot.excerpts = new_excerpts;


@@ -445,7 +445,7 @@ type WindowBoundsCallback = Box<dyn FnMut(WindowBounds, Uuid, &mut WindowContext
 type KeystrokeCallback =
     Box<dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut WindowContext) -> bool>;
 type ActiveLabeledTasksCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
-type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
+type DeserializeActionCallback = fn(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>;
 type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
 
 pub struct AppContext {
@@ -624,14 +624,14 @@ impl AppContext {
     pub fn deserialize_action(
         &self,
         name: &str,
-        argument: Option<&str>,
+        argument: Option<serde_json::Value>,
     ) -> Result<Box<dyn Action>> {
         let callback = self
             .action_deserializers
             .get(name)
             .ok_or_else(|| anyhow!("unknown action {}", name))?
             .1;
-        callback(argument.unwrap_or("{}"))
+        callback(argument.unwrap_or_else(|| serde_json::Value::Object(Default::default())))
             .with_context(|| format!("invalid data for action {}", name))
     }
 
@@ -5573,7 +5573,7 @@ mod tests {
         let action1 = cx
             .deserialize_action(
                 "test::something::ComplexAction",
-                Some(r#"{"arg": "a", "count": 5}"#),
+                Some(serde_json::from_str(r#"{"arg": "a", "count": 5}"#).unwrap()),
             )
             .unwrap();
         let action2 = cx


@@ -11,7 +11,7 @@ pub trait Action: 'static {
     fn qualified_name() -> &'static str
     where
         Self: Sized;
-    fn from_json_str(json: &str) -> anyhow::Result<Box<dyn Action>>
+    fn from_json_str(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>
     where
         Self: Sized;
 }
@@ -38,7 +38,7 @@ macro_rules! actions {
         $crate::__impl_action! {
             $namespace,
             $name,
-            fn from_json_str(_: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
+            fn from_json_str(_: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
                 Ok(Box::new(Self))
             }
         }
@@ -58,8 +58,8 @@ macro_rules! impl_actions {
         $crate::__impl_action! {
             $namespace,
             $name,
-            fn from_json_str(json: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
-                Ok(Box::new($crate::serde_json::from_str::<Self>(json)?))
+            fn from_json_str(json: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
+                Ok(Box::new($crate::serde_json::from_value::<Self>(json)?))
             }
         }
     )*
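The AppContext and Action changes above move action deserialization from raw JSON strings to pre-parsed serde_json::Value: keymap arguments are parsed once up front, impl_actions! switches from serde_json::from_str to serde_json::from_value, and a missing argument becomes an empty JSON object rather than the literal "{}" string. A small standalone sketch of that difference using only serde and serde_json; ExampleAction is a hypothetical type, not one of gpui's actions:

use serde::Deserialize;
use serde_json::Value;

// Hypothetical action with one optional field; stands in for a gpui action.
#[derive(Debug, Deserialize)]
struct ExampleAction {
    #[serde(default)]
    count: usize,
}

fn main() -> serde_json::Result<()> {
    // Keymap supplied an argument: it arrives as an already-parsed Value.
    let with_arg: ExampleAction = serde_json::from_value(serde_json::json!({ "count": 5 }))?;

    // No argument in the keymap: the caller substitutes an empty object, mirroring
    // `argument.unwrap_or_else(|| serde_json::Value::Object(Default::default()))`.
    let without_arg: ExampleAction = serde_json::from_value(Value::Object(Default::default()))?;

    // Old shape for comparison: every deserializer re-parsed a &str itself.
    let from_str: ExampleAction = serde_json::from_str(r#"{"count": 5}"#)?;

    println!("{with_arg:?} {without_arg:?} {from_str:?}");
    Ok(())
}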


@@ -394,7 +394,7 @@ impl<'a> WindowContext<'a> {
             .iter()
             .filter_map(move |(name, (type_id, deserialize))| {
                 if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() {
-                    let action = deserialize("{}").ok()?;
+                    let action = deserialize(serde_json::Value::Object(Default::default())).ok()?;
                     let bindings = self
                         .keystroke_matcher
                         .bindings_for_action_type(*type_id)


@@ -211,7 +211,7 @@ impl<V: View> Element<V> for List<V> {
         let mut cursor = old_items.cursor::<Count>();
 
         if state.rendered_range.start < new_rendered_range.start {
-            new_items.push_tree(
+            new_items.append(
                 cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
                 &(),
             );
@@ -221,7 +221,7 @@ impl<V: View> Element<V> for List<V> {
                 cursor.next(&());
             }
         }
-        new_items.push_tree(
+        new_items.append(
             cursor.slice(&Count(new_rendered_range.start), Bias::Right, &()),
             &(),
         );
@@ -230,7 +230,7 @@ impl<V: View> Element<V> for List<V> {
         cursor.seek(&Count(new_rendered_range.end), Bias::Right, &());
 
         if new_rendered_range.end < state.rendered_range.start {
-            new_items.push_tree(
+            new_items.append(
                 cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
                 &(),
             );
@@ -240,7 +240,7 @@ impl<V: View> Element<V> for List<V> {
                 cursor.next(&());
             }
 
-            new_items.push_tree(cursor.suffix(&()), &());
+            new_items.append(cursor.suffix(&()), &());
 
             state.items = new_items;
             state.rendered_range = new_rendered_range;
@@ -413,7 +413,7 @@ impl<V: View> ListState<V> {
         old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
         new_heights.extend((0..count).map(|_| ListItem::Unrendered), &());
-        new_heights.push_tree(old_heights.suffix(&()), &());
+        new_heights.append(old_heights.suffix(&()), &());
         drop(old_heights);
 
         state.items = new_heights;
     }


@@ -786,7 +786,7 @@ impl platform::Platform for MacPlatform {
 
     fn set_cursor_style(&self, style: CursorStyle) {
         unsafe {
-            let cursor: id = match style {
+            let new_cursor: id = match style {
                 CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor],
                 CursorStyle::ResizeLeftRight => {
                     msg_send![class!(NSCursor), resizeLeftRightCursor]
@@ -795,7 +795,11 @@ impl platform::Platform for MacPlatform {
                 CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
                 CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor],
             };
-            let _: () = msg_send![cursor, set];
+
+            let old_cursor: id = msg_send![class!(NSCursor), currentCursor];
+            if new_cursor != old_cursor {
+                let _: () = msg_send![new_cursor, set];
+            }
         }
     }
 


@ -17,7 +17,7 @@ use futures::{
future::{BoxFuture, Shared}, future::{BoxFuture, Shared},
FutureExt, TryFutureExt as _, FutureExt, TryFutureExt as _,
}; };
use gpui::{executor::Background, AppContext, Task}; use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
use highlight_map::HighlightMap; use highlight_map::HighlightMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinaries, LanguageServerBinary}; use lsp::{CodeActionKind, LanguageServerBinaries, LanguageServerBinary};
@ -118,27 +118,46 @@ impl CachedLspAdapter {
pub async fn fetch_latest_server_version( pub async fn fetch_latest_server_version(
&self, &self,
http: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
self.adapter.fetch_latest_server_version(http).await self.adapter.fetch_latest_server_version(delegate).await
}
pub fn will_fetch_server(
&self,
delegate: &Arc<dyn LspAdapterDelegate>,
cx: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
self.adapter.will_fetch_server(delegate, cx)
}
pub fn will_start_server(
&self,
delegate: &Arc<dyn LspAdapterDelegate>,
cx: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
self.adapter.will_start_server(delegate, cx)
} }
pub async fn fetch_server_binary( pub async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
self.adapter self.adapter
.fetch_server_binary(version, http, container_dir) .fetch_server_binary(version, container_dir, delegate)
.await .await
} }
pub async fn cached_server_binary( pub async fn cached_server_binary(
&self, &self,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> { ) -> Option<LanguageServerBinary> {
self.adapter.cached_server_binary(container_dir).await self.adapter
.cached_server_binary(container_dir, delegate)
.await
} }
pub async fn installation_test_binary( pub async fn installation_test_binary(
@ -187,23 +206,48 @@ impl CachedLspAdapter {
} }
} }
pub trait LspAdapterDelegate: Send + Sync {
fn show_notification(&self, message: &str, cx: &mut AppContext);
fn http_client(&self) -> Arc<dyn HttpClient>;
}
#[async_trait] #[async_trait]
pub trait LspAdapter: 'static + Send + Sync { pub trait LspAdapter: 'static + Send + Sync {
async fn name(&self) -> LanguageServerName; async fn name(&self) -> LanguageServerName;
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
http: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>>; ) -> Result<Box<dyn 'static + Send + Any>>;
fn will_fetch_server(
&self,
_: &Arc<dyn LspAdapterDelegate>,
_: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
None
}
fn will_start_server(
&self,
_: &Arc<dyn LspAdapterDelegate>,
_: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
None
}
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary>; ) -> Result<LanguageServerBinary>;
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary>; async fn cached_server_binary(
&self,
container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary>;
async fn installation_test_binary( async fn installation_test_binary(
&self, &self,
@ -523,7 +567,7 @@ pub struct LanguageRegistry {
lsp_binary_paths: Mutex< lsp_binary_paths: Mutex<
HashMap< HashMap<
LanguageServerName, LanguageServerName,
Shared<BoxFuture<'static, Result<LanguageServerBinaries, Arc<anyhow::Error>>>>, Shared<Task<Result<LanguageServerBinaries, Arc<anyhow::Error>>>>,
>, >,
>, >,
executor: Option<Arc<Background>>, executor: Option<Arc<Background>>,
@ -821,7 +865,7 @@ impl LanguageRegistry {
language: Arc<Language>, language: Arc<Language>,
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>, root_path: Arc<Path>,
http_client: Arc<dyn HttpClient>, delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut AppContext, cx: &mut AppContext,
) -> Option<PendingLanguageServer> { ) -> Option<PendingLanguageServer> {
let server_id = self.state.write().next_language_server_id(); let server_id = self.state.write().next_language_server_id();
@ -873,7 +917,6 @@ impl LanguageRegistry {
.log_err()?; .log_err()?;
let this = self.clone(); let this = self.clone();
let language = language.clone(); let language = language.clone();
let http_client = http_client.clone();
let container_dir: Arc<Path> = Arc::from(download_dir.join(adapter.name.0.as_ref())); let container_dir: Arc<Path> = Arc::from(download_dir.join(adapter.name.0.as_ref()));
let root_path = root_path.clone(); let root_path = root_path.clone();
let adapter = adapter.clone(); let adapter = adapter.clone();
@ -882,28 +925,35 @@ impl LanguageRegistry {
let task = { let task = {
let container_dir = container_dir.clone(); let container_dir = container_dir.clone();
cx.spawn(|cx| async move { cx.spawn(|mut cx| async move {
login_shell_env_loaded.await; login_shell_env_loaded.await;
let mut lock = this.lsp_binary_paths.lock(); let mut lock = this.lsp_binary_paths.lock();
let entry = lock let entry = lock
.entry(adapter.name.clone()) .entry(adapter.name.clone())
.or_insert_with(|| { .or_insert_with(|| {
get_binaries( cx.spawn(|cx| {
adapter.clone(), get_binaries(
language.clone(), adapter.clone(),
http_client, language.clone(),
container_dir, delegate.clone(),
lsp_binary_statuses, container_dir,
) lsp_binary_statuses,
.map_err(Arc::new) cx,
.boxed() )
.map_err(Arc::new)
})
.shared() .shared()
}) })
.clone(); .clone();
drop(lock); drop(lock);
let binaries = entry.clone().map_err(|e| anyhow!(e)).await?; let binaries = entry.clone().map_err(|e| anyhow!(e)).await?;
if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
task.await?;
}
println!("starting server"); println!("starting server");
let server = lsp::LanguageServer::new( let server = lsp::LanguageServer::new(
server_id, server_id,
@ -1001,9 +1051,10 @@ impl Default for LanguageRegistry {
async fn get_binaries( async fn get_binaries(
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
language: Arc<Language>, language: Arc<Language>,
http_client: Arc<dyn HttpClient>, delegate: Arc<dyn LspAdapterDelegate>,
container_dir: Arc<Path>, container_dir: Arc<Path>,
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>, statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
mut cx: AsyncAppContext,
) -> Result<LanguageServerBinaries> { ) -> Result<LanguageServerBinaries> {
if !container_dir.exists() { if !container_dir.exists() {
smol::fs::create_dir_all(&container_dir) smol::fs::create_dir_all(&container_dir)
@ -1011,11 +1062,15 @@ async fn get_binaries(
.context("failed to create container directory")?; .context("failed to create container directory")?;
} }
if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) {
task.await?;
}
println!("fetching binary"); println!("fetching binary");
let binary = fetch_latest_binary( let binary = fetch_latest_binary(
adapter.clone(), adapter.clone(),
language.clone(), language.clone(),
http_client, delegate.as_ref(),
&container_dir, &container_dir,
statuses.clone(), statuses.clone(),
) )
@ -1023,7 +1078,7 @@ async fn get_binaries(
if let Err(error) = binary.as_ref() { if let Err(error) = binary.as_ref() {
if let Some(binary) = adapter if let Some(binary) = adapter
.cached_server_binary(container_dir.to_path_buf()) .cached_server_binary(container_dir.to_path_buf(), delegate.as_ref())
.await .await
{ {
statuses statuses
@ -1054,7 +1109,7 @@ async fn get_binaries(
async fn fetch_latest_binary( async fn fetch_latest_binary(
adapter: Arc<CachedLspAdapter>, adapter: Arc<CachedLspAdapter>,
language: Arc<Language>, language: Arc<Language>,
http_client: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
container_dir: &Path, container_dir: &Path,
lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>, lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
) -> Result<LanguageServerBinaries> { ) -> Result<LanguageServerBinaries> {
@ -1066,15 +1121,13 @@ async fn fetch_latest_binary(
)) ))
.await?; .await?;
let version_info = adapter let version_info = adapter.fetch_latest_server_version(delegate).await?;
.fetch_latest_server_version(http_client.clone())
.await?;
lsp_binary_statuses_tx lsp_binary_statuses_tx
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading)) .broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
.await?; .await?;
let binary = adapter let binary = adapter
.fetch_server_binary(version_info, http_client, container_dir.to_path_buf()) .fetch_server_binary(version_info, container_dir.to_path_buf(), delegate)
.await?; .await?;
let installation_test_binary = adapter let installation_test_binary = adapter
.installation_test_binary(container_dir.to_path_buf()) .installation_test_binary(container_dir.to_path_buf())
@ -1605,7 +1658,7 @@ impl LspAdapter for Arc<FakeLspAdapter> {
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
_: Arc<dyn HttpClient>, _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
unreachable!(); unreachable!();
} }
@ -1613,13 +1666,17 @@ impl LspAdapter for Arc<FakeLspAdapter> {
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
_: Box<dyn 'static + Send + Any>, _: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
_: PathBuf, _: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
unreachable!(); unreachable!();
} }
async fn cached_server_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
&self,
_: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
unreachable!(); unreachable!();
} }


@@ -288,7 +288,7 @@ impl SyntaxSnapshot {
             };
             if target.cmp(&cursor.start(), text).is_gt() {
                 let slice = cursor.slice(&target, Bias::Left, text);
-                layers.push_tree(slice, text);
+                layers.append(slice, text);
             }
         }
         // If this layer follows all of the edits, then preserve it and any
@@ -303,7 +303,7 @@ impl SyntaxSnapshot {
                     Bias::Left,
                     text,
                 );
-                layers.push_tree(slice, text);
+                layers.append(slice, text);
                 continue;
             };
 
@@ -369,7 +369,7 @@ impl SyntaxSnapshot {
             cursor.next(text);
         }
 
-        layers.push_tree(cursor.suffix(&text), &text);
+        layers.append(cursor.suffix(&text), &text);
         drop(cursor);
         self.layers = layers;
     }
@@ -478,7 +478,7 @@ impl SyntaxSnapshot {
             if bounded_position.cmp(&cursor.start(), &text).is_gt() {
                 let slice = cursor.slice(&bounded_position, Bias::Left, text);
                 if !slice.is_empty() {
-                    layers.push_tree(slice, &text);
+                    layers.append(slice, &text);
                     if changed_regions.prune(cursor.end(text), text) {
                         done = false;
                     }


@ -6,17 +6,23 @@ import ScreenCaptureKit
class LKRoomDelegate: RoomDelegate { class LKRoomDelegate: RoomDelegate {
var data: UnsafeRawPointer var data: UnsafeRawPointer
var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
init( init(
data: UnsafeRawPointer, data: UnsafeRawPointer,
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void, onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void) onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void)
{ {
self.data = data self.data = data
self.onDidDisconnect = onDidDisconnect self.onDidDisconnect = onDidDisconnect
self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack
self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack
self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
} }
@ -30,12 +36,16 @@ class LKRoomDelegate: RoomDelegate {
func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) { func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
if track.kind == .video { if track.kind == .video {
self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque()) self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
} else if track.kind == .audio {
self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
} }
} }
func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) { func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
if track.kind == .video { if track.kind == .video {
self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString) self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
} else if track.kind == .audio {
self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString)
} }
} }
} }
@ -77,12 +87,16 @@ class LKVideoRenderer: NSObject, VideoRenderer {
public func LKRoomDelegateCreate( public func LKRoomDelegateCreate(
data: UnsafeRawPointer, data: UnsafeRawPointer,
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void, onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
) -> UnsafeMutableRawPointer { ) -> UnsafeMutableRawPointer {
let delegate = LKRoomDelegate( let delegate = LKRoomDelegate(
data: data, data: data,
onDidDisconnect: onDidDisconnect, onDidDisconnect: onDidDisconnect,
onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack,
onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack,
onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack, onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack
) )
@ -123,6 +137,18 @@ public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPoin
} }
} }
@_cdecl("LKRoomPublishAudioTrack")
public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
let track = Unmanaged<LocalAudioTrack>.fromOpaque(track).takeUnretainedValue()
room.localParticipant?.publishAudioTrack(track: track).then { publication in
callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
}.catch { error in
callback(callback_data, nil, error.localizedDescription as CFString)
}
}
@_cdecl("LKRoomUnpublishTrack") @_cdecl("LKRoomUnpublishTrack")
public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) { public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue() let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
@ -130,6 +156,20 @@ public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawP
let _ = room.localParticipant?.unpublish(publication: publication) let _ = room.localParticipant?.unpublish(publication: publication)
} }
@_cdecl("LKRoomAudioTracksForRemoteParticipant")
public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
for (_, participant) in room.remoteParticipants {
if participant.identity == participantId as String {
return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray?
}
}
return nil;
}
@_cdecl("LKRoomVideoTracksForRemoteParticipant") @_cdecl("LKRoomVideoTracksForRemoteParticipant")
public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? { public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue() let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
@ -143,6 +183,17 @@ public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, partic
return nil; return nil;
} }
@_cdecl("LKLocalAudioTrackCreateTrack")
public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer {
let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions(
echoCancellation: true,
noiseSuppression: true
))
return Unmanaged.passRetained(track).toOpaque()
}
@_cdecl("LKCreateScreenShareTrackForDisplay") @_cdecl("LKCreateScreenShareTrackForDisplay")
public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer { public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue() let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
@ -150,6 +201,19 @@ public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer)
return Unmanaged.passRetained(track).toOpaque() return Unmanaged.passRetained(track).toOpaque()
} }
@_cdecl("LKRemoteAudioTrackStart")
public func LKRemoteAudioTrackStart(track: UnsafeRawPointer, onStart: @escaping @convention(c) (UnsafeRawPointer, Bool) -> Void, callbackData: UnsafeRawPointer) {
let track = Unmanaged<Track>.fromOpaque(track).takeUnretainedValue() as! RemoteAudioTrack
track.start().then { success in
onStart(callbackData, success)
}
.catch { _ in
onStart(callbackData, false)
}
}
@_cdecl("LKVideoRendererCreate") @_cdecl("LKVideoRendererCreate")
public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer { public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque() Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
@ -169,6 +233,12 @@ public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString {
return track.sid! as CFString return track.sid! as CFString
} }
@_cdecl("LKRemoteAudioTrackGetSid")
public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString {
let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
return track.sid! as CFString
}
@_cdecl("LKDisplaySources") @_cdecl("LKDisplaySources")
public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) { public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in


@ -1,6 +1,10 @@
use std::time::Duration;
use futures::StreamExt; use futures::StreamExt;
use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem}; use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem};
use live_kit_client::{LocalVideoTrack, RemoteVideoTrackUpdate, Room}; use live_kit_client::{
LocalAudioTrack, LocalVideoTrack, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate, Room,
};
use live_kit_server::token::{self, VideoGrant}; use live_kit_server::token::{self, VideoGrant};
use log::LevelFilter; use log::LevelFilter;
use simplelog::SimpleLogger; use simplelog::SimpleLogger;
@ -11,6 +15,12 @@ fn main() {
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger"); SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
gpui::App::new(()).unwrap().run(|cx| { gpui::App::new(()).unwrap().run(|cx| {
#[cfg(any(test, feature = "test-support"))]
println!("USING TEST LIVEKIT");
#[cfg(not(any(test, feature = "test-support")))]
println!("USING REAL LIVEKIT");
cx.platform().activate(true); cx.platform().activate(true);
cx.add_global_action(quit); cx.add_global_action(quit);
@ -49,16 +59,14 @@ fn main() {
let room_b = Room::new(); let room_b = Room::new();
room_b.connect(&live_kit_url, &user2_token).await.unwrap(); room_b.connect(&live_kit_url, &user2_token).await.unwrap();
let mut track_changes = room_b.remote_video_track_updates(); let mut audio_track_updates = room_b.remote_audio_track_updates();
let audio_track = LocalAudioTrack::create();
let audio_track_publication = room_a.publish_audio_track(&audio_track).await.unwrap();
let displays = room_a.display_sources().await.unwrap(); if let RemoteAudioTrackUpdate::Subscribed(track) =
let display = displays.into_iter().next().unwrap(); audio_track_updates.next().await.unwrap()
{
let track_a = LocalVideoTrack::screen_share_for_display(&display); let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
let track_a_publication = room_a.publish_video_track(&track_a).await.unwrap();
if let RemoteVideoTrackUpdate::Subscribed(track) = track_changes.next().await.unwrap() {
let remote_tracks = room_b.remote_video_tracks("test-participant-1");
assert_eq!(remote_tracks.len(), 1); assert_eq!(remote_tracks.len(), 1);
assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1"); assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
assert_eq!(track.publisher_id(), "test-participant-1"); assert_eq!(track.publisher_id(), "test-participant-1");
@ -66,18 +74,60 @@ fn main() {
panic!("unexpected message"); panic!("unexpected message");
} }
let remote_track = room_b println!("Pausing for 5 seconds to test audio, make some noise!");
let timer = cx.background().timer(Duration::from_secs(5));
timer.await;
let remote_audio_track = room_b
.remote_audio_tracks("test-participant-1")
.pop()
.unwrap();
room_a.unpublish_track(audio_track_publication);
if let RemoteAudioTrackUpdate::Unsubscribed {
publisher_id,
track_id,
} = audio_track_updates.next().await.unwrap()
{
assert_eq!(publisher_id, "test-participant-1");
assert_eq!(remote_audio_track.sid(), track_id);
assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0);
} else {
panic!("unexpected message");
}
let mut video_track_updates = room_b.remote_video_track_updates();
let displays = room_a.display_sources().await.unwrap();
let display = displays.into_iter().next().unwrap();
let local_video_track = LocalVideoTrack::screen_share_for_display(&display);
let local_video_track_publication = room_a
.publish_video_track(&local_video_track)
.await
.unwrap();
if let RemoteVideoTrackUpdate::Subscribed(track) =
video_track_updates.next().await.unwrap()
{
let remote_video_tracks = room_b.remote_video_tracks("test-participant-1");
assert_eq!(remote_video_tracks.len(), 1);
assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1");
assert_eq!(track.publisher_id(), "test-participant-1");
} else {
panic!("unexpected message");
}
let remote_video_track = room_b
.remote_video_tracks("test-participant-1") .remote_video_tracks("test-participant-1")
.pop() .pop()
.unwrap(); .unwrap();
room_a.unpublish_track(track_a_publication); room_a.unpublish_track(local_video_track_publication);
if let RemoteVideoTrackUpdate::Unsubscribed { if let RemoteVideoTrackUpdate::Unsubscribed {
publisher_id, publisher_id,
track_id, track_id,
} = track_changes.next().await.unwrap() } = video_track_updates.next().await.unwrap()
{ {
assert_eq!(publisher_id, "test-participant-1"); assert_eq!(publisher_id, "test-participant-1");
assert_eq!(remote_track.sid(), track_id); assert_eq!(remote_video_track.sid(), track_id);
assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0); assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
} else { } else {
panic!("unexpected message"); panic!("unexpected message");


@@ -4,7 +4,7 @@ pub mod prod;
 pub use prod::*;
 
 #[cfg(any(test, feature = "test-support"))]
-mod test;
+pub mod test;
 
 #[cfg(any(test, feature = "test-support"))]
 pub use test::*;


@ -21,6 +21,17 @@ extern "C" {
fn LKRoomDelegateCreate( fn LKRoomDelegateCreate(
callback_data: *mut c_void, callback_data: *mut c_void,
on_did_disconnect: extern "C" fn(callback_data: *mut c_void), on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
on_did_subscribe_to_remote_audio_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
remote_track: *const c_void,
),
on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
),
on_did_subscribe_to_remote_video_track: extern "C" fn( on_did_subscribe_to_remote_video_track: extern "C" fn(
callback_data: *mut c_void, callback_data: *mut c_void,
publisher_id: CFStringRef, publisher_id: CFStringRef,
@ -49,7 +60,18 @@ extern "C" {
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef), callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
callback_data: *mut c_void, callback_data: *mut c_void,
); );
fn LKRoomPublishAudioTrack(
room: *const c_void,
track: *const c_void,
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
callback_data: *mut c_void,
);
fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void); fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void);
fn LKRoomAudioTracksForRemoteParticipant(
room: *const c_void,
participant_id: CFStringRef,
) -> CFArrayRef;
fn LKRoomVideoTracksForRemoteParticipant( fn LKRoomVideoTracksForRemoteParticipant(
room: *const c_void, room: *const c_void,
participant_id: CFStringRef, participant_id: CFStringRef,
@ -61,6 +83,13 @@ extern "C" {
on_drop: extern "C" fn(callback_data: *mut c_void), on_drop: extern "C" fn(callback_data: *mut c_void),
) -> *const c_void; ) -> *const c_void;
fn LKRemoteAudioTrackGetSid(track: *const c_void) -> CFStringRef;
// fn LKRemoteAudioTrackStart(
// track: *const c_void,
// callback: extern "C" fn(*mut c_void, bool),
// callback_data: *mut c_void
// );
fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void); fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void);
fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef; fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef;
@ -73,6 +102,7 @@ extern "C" {
), ),
); );
fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void; fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void;
fn LKLocalAudioTrackCreateTrack() -> *const c_void;
} }
pub type Sid = String; pub type Sid = String;
@ -89,6 +119,7 @@ pub struct Room {
watch::Sender<ConnectionState>, watch::Sender<ConnectionState>,
watch::Receiver<ConnectionState>, watch::Receiver<ConnectionState>,
)>, )>,
remote_audio_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteAudioTrackUpdate>>>,
remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>, remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>,
_delegate: RoomDelegate, _delegate: RoomDelegate,
} }
@ -100,6 +131,7 @@ impl Room {
Self { Self {
native_room: unsafe { LKRoomCreate(delegate.native_delegate) }, native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)), connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
remote_audio_track_subscribers: Default::default(),
remote_video_track_subscribers: Default::default(), remote_video_track_subscribers: Default::default(),
_delegate: delegate, _delegate: delegate,
} }
@ -191,6 +223,32 @@ impl Room {
async { rx.await.unwrap().context("error publishing video track") } async { rx.await.unwrap().context("error publishing video track") }
} }
pub fn publish_audio_track(
self: &Arc<Self>,
track: &LocalAudioTrack,
) -> impl Future<Output = Result<LocalTrackPublication>> {
let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
extern "C" fn callback(tx: *mut c_void, publication: *mut c_void, error: CFStringRef) {
let tx =
unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
if error.is_null() {
let _ = tx.send(Ok(LocalTrackPublication(publication)));
} else {
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
let _ = tx.send(Err(anyhow!(error)));
}
}
unsafe {
LKRoomPublishAudioTrack(
self.native_room,
track.0,
callback,
Box::into_raw(Box::new(tx)) as *mut c_void,
);
}
async { rx.await.unwrap().context("error publishing video track") }
}
pub fn unpublish_track(&self, publication: LocalTrackPublication) { pub fn unpublish_track(&self, publication: LocalTrackPublication) {
unsafe { unsafe {
LKRoomUnpublishTrack(self.native_room, publication.0); LKRoomUnpublishTrack(self.native_room, publication.0);
@ -226,12 +284,65 @@ impl Room {
} }
} }
pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
unsafe {
let tracks = LKRoomAudioTracksForRemoteParticipant(
self.native_room,
CFString::new(participant_id).as_concrete_TypeRef(),
);
if tracks.is_null() {
Vec::new()
} else {
let tracks = CFArray::wrap_under_get_rule(tracks);
tracks
.into_iter()
.map(|native_track| {
let native_track = *native_track;
let id =
CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
.to_string();
Arc::new(RemoteAudioTrack::new(
native_track,
id,
participant_id.into(),
))
})
.collect()
}
}
}
pub fn remote_audio_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteAudioTrackUpdate> {
let (tx, rx) = mpsc::unbounded();
self.remote_audio_track_subscribers.lock().push(tx);
rx
}
pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> { pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> {
let (tx, rx) = mpsc::unbounded(); let (tx, rx) = mpsc::unbounded();
self.remote_video_track_subscribers.lock().push(tx); self.remote_video_track_subscribers.lock().push(tx);
rx rx
} }
fn did_subscribe_to_remote_audio_track(&self, track: RemoteAudioTrack) {
let track = Arc::new(track);
self.remote_audio_track_subscribers.lock().retain(|tx| {
tx.unbounded_send(RemoteAudioTrackUpdate::Subscribed(track.clone()))
.is_ok()
});
}
fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
self.remote_audio_track_subscribers.lock().retain(|tx| {
tx.unbounded_send(RemoteAudioTrackUpdate::Unsubscribed {
publisher_id: publisher_id.clone(),
track_id: track_id.clone(),
})
.is_ok()
});
}
fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) { fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
let track = Arc::new(track); let track = Arc::new(track);
self.remote_video_track_subscribers.lock().retain(|tx| { self.remote_video_track_subscribers.lock().retain(|tx| {
@ -294,6 +405,8 @@ impl RoomDelegate {
LKRoomDelegateCreate( LKRoomDelegateCreate(
weak_room as *mut c_void, weak_room as *mut c_void,
Self::on_did_disconnect, Self::on_did_disconnect,
Self::on_did_subscribe_to_remote_audio_track,
Self::on_did_unsubscribe_from_remote_audio_track,
Self::on_did_subscribe_to_remote_video_track, Self::on_did_subscribe_to_remote_video_track,
Self::on_did_unsubscribe_from_remote_video_track, Self::on_did_unsubscribe_from_remote_video_track,
) )
@ -312,6 +425,36 @@ impl RoomDelegate {
let _ = Weak::into_raw(room); let _ = Weak::into_raw(room);
} }
extern "C" fn on_did_subscribe_to_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
track: *const c_void,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
let track = RemoteAudioTrack::new(track, track_id, publisher_id);
if let Some(room) = room.upgrade() {
room.did_subscribe_to_remote_audio_track(track);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_unsubscribe_from_remote_audio_track(
room: *mut c_void,
publisher_id: CFStringRef,
track_id: CFStringRef,
) {
let room = unsafe { Weak::from_raw(room as *mut Room) };
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
if let Some(room) = room.upgrade() {
room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
}
let _ = Weak::into_raw(room);
}
extern "C" fn on_did_subscribe_to_remote_video_track( extern "C" fn on_did_subscribe_to_remote_video_track(
room: *mut c_void, room: *mut c_void,
publisher_id: CFStringRef, publisher_id: CFStringRef,
@ -352,6 +495,20 @@ impl Drop for RoomDelegate {
} }
} }
pub struct LocalAudioTrack(*const c_void);
impl LocalAudioTrack {
pub fn create() -> Self {
Self(unsafe { LKLocalAudioTrackCreateTrack() })
}
}
impl Drop for LocalAudioTrack {
fn drop(&mut self) {
unsafe { CFRelease(self.0) }
}
}
pub struct LocalVideoTrack(*const c_void); pub struct LocalVideoTrack(*const c_void);
impl LocalVideoTrack { impl LocalVideoTrack {
@ -374,6 +531,34 @@ impl Drop for LocalTrackPublication {
} }
} }
#[derive(Debug)]
pub struct RemoteAudioTrack {
_native_track: *const c_void,
sid: Sid,
publisher_id: String,
}
impl RemoteAudioTrack {
fn new(native_track: *const c_void, sid: Sid, publisher_id: String) -> Self {
unsafe {
CFRetain(native_track);
}
Self {
_native_track: native_track,
sid,
publisher_id,
}
}
pub fn sid(&self) -> &str {
&self.sid
}
pub fn publisher_id(&self) -> &str {
&self.publisher_id
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct RemoteVideoTrack { pub struct RemoteVideoTrack {
native_track: *const c_void, native_track: *const c_void,
@ -453,6 +638,11 @@ pub enum RemoteVideoTrackUpdate {
Unsubscribed { publisher_id: Sid, track_id: Sid }, Unsubscribed { publisher_id: Sid, track_id: Sid },
} }
pub enum RemoteAudioTrackUpdate {
Subscribed(Arc<RemoteAudioTrack>),
Unsubscribed { publisher_id: Sid, track_id: Sid },
}
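A minimal sketch (not part of this diff) of how a caller might drain the receiver returned by remote_audio_track_updates; it assumes an Arc<Room> from this crate and futures::StreamExt in scope, since mpsc::UnboundedReceiver implements Stream.

// Sketch only: hypothetical consumer of the new audio-track update stream.
use futures::StreamExt;

async fn log_audio_track_updates(room: std::sync::Arc<Room>) {
    let mut updates = room.remote_audio_track_updates();
    while let Some(update) = updates.next().await {
        match update {
            RemoteAudioTrackUpdate::Subscribed(track) => {
                log::info!("subscribed to {} from {}", track.sid(), track.publisher_id());
            }
            RemoteAudioTrackUpdate::Unsubscribed { publisher_id, track_id } => {
                log::info!("unsubscribed from {} ({})", track_id, publisher_id);
            }
        }
    }
}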
pub struct MacOSDisplay(*const c_void); pub struct MacOSDisplay(*const c_void);
impl MacOSDisplay { impl MacOSDisplay {
View file
@ -67,7 +67,7 @@ impl TestServer {
} }
} }
async fn create_room(&self, room: String) -> Result<()> { pub async fn create_room(&self, room: String) -> Result<()> {
self.background.simulate_random_delay().await; self.background.simulate_random_delay().await;
let mut server_rooms = self.rooms.lock(); let mut server_rooms = self.rooms.lock();
if server_rooms.contains_key(&room) { if server_rooms.contains_key(&room) {
@ -104,7 +104,7 @@ impl TestServer {
room_name room_name
)) ))
} else { } else {
for track in &room.tracks { for track in &room.video_tracks {
client_room client_room
.0 .0
.lock() .lock()
@ -182,7 +182,7 @@ impl TestServer {
frames_rx: local_track.frames_rx.clone(), frames_rx: local_track.frames_rx.clone(),
}); });
room.tracks.push(track.clone()); room.video_tracks.push(track.clone());
for (id, client_room) in &room.client_rooms { for (id, client_room) in &room.client_rooms {
if *id != identity { if *id != identity {
@ -199,6 +199,43 @@ impl TestServer {
Ok(()) Ok(())
} }
async fn publish_audio_track(
&self,
token: String,
_local_track: &LocalAudioTrack,
) -> Result<()> {
self.background.simulate_random_delay().await;
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
let identity = claims.sub.unwrap().to_string();
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
let track = Arc::new(RemoteAudioTrack {
sid: nanoid::nanoid!(17),
publisher_id: identity.clone(),
});
room.audio_tracks.push(track.clone());
for (id, client_room) in &room.client_rooms {
if *id != identity {
let _ = client_room
.0
.lock()
.audio_track_updates
.0
.try_broadcast(RemoteAudioTrackUpdate::Subscribed(track.clone()))
.unwrap();
}
}
Ok(())
}
fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> { fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
let claims = live_kit_server::token::validate(&token, &self.secret_key)?; let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
let room_name = claims.video.room.unwrap(); let room_name = claims.video.room.unwrap();
@ -207,14 +244,26 @@ impl TestServer {
let room = server_rooms let room = server_rooms
.get_mut(&*room_name) .get_mut(&*room_name)
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?; .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
Ok(room.tracks.clone()) Ok(room.video_tracks.clone())
}
fn audio_tracks(&self, token: String) -> Result<Vec<Arc<RemoteAudioTrack>>> {
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
let mut server_rooms = self.rooms.lock();
let room = server_rooms
.get_mut(&*room_name)
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
Ok(room.audio_tracks.clone())
} }
} }
#[derive(Default)] #[derive(Default)]
struct TestServerRoom { struct TestServerRoom {
client_rooms: HashMap<Sid, Arc<Room>>, client_rooms: HashMap<Sid, Arc<Room>>,
tracks: Vec<Arc<RemoteVideoTrack>>, video_tracks: Vec<Arc<RemoteVideoTrack>>,
audio_tracks: Vec<Arc<RemoteAudioTrack>>,
} }
impl TestServerRoom {} impl TestServerRoom {}
@ -266,6 +315,10 @@ struct RoomState {
watch::Receiver<ConnectionState>, watch::Receiver<ConnectionState>,
), ),
display_sources: Vec<MacOSDisplay>, display_sources: Vec<MacOSDisplay>,
audio_track_updates: (
async_broadcast::Sender<RemoteAudioTrackUpdate>,
async_broadcast::Receiver<RemoteAudioTrackUpdate>,
),
video_track_updates: ( video_track_updates: (
async_broadcast::Sender<RemoteVideoTrackUpdate>, async_broadcast::Sender<RemoteVideoTrackUpdate>,
async_broadcast::Receiver<RemoteVideoTrackUpdate>, async_broadcast::Receiver<RemoteVideoTrackUpdate>,
@ -286,6 +339,7 @@ impl Room {
connection: watch::channel_with(ConnectionState::Disconnected), connection: watch::channel_with(ConnectionState::Disconnected),
display_sources: Default::default(), display_sources: Default::default(),
video_track_updates: async_broadcast::broadcast(128), video_track_updates: async_broadcast::broadcast(128),
audio_track_updates: async_broadcast::broadcast(128),
}))) })))
} }
@ -327,8 +381,34 @@ impl Room {
Ok(LocalTrackPublication) Ok(LocalTrackPublication)
} }
} }
pub fn publish_audio_track(
self: &Arc<Self>,
track: &LocalAudioTrack,
) -> impl Future<Output = Result<LocalTrackPublication>> {
let this = self.clone();
let track = track.clone();
async move {
this.test_server()
.publish_audio_track(this.token(), &track)
.await?;
Ok(LocalTrackPublication)
}
}
pub fn unpublish_track(&self, _: LocalTrackPublication) {} pub fn unpublish_track(&self, _publication: LocalTrackPublication) {}
pub fn remote_audio_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
if !self.is_connected() {
return Vec::new();
}
self.test_server()
.audio_tracks(self.token())
.unwrap()
.into_iter()
.filter(|track| track.publisher_id() == publisher_id)
.collect()
}
pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> { pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
if !self.is_connected() { if !self.is_connected() {
@ -343,6 +423,10 @@ impl Room {
.collect() .collect()
} }
pub fn remote_audio_track_updates(&self) -> impl Stream<Item = RemoteAudioTrackUpdate> {
self.0.lock().audio_track_updates.1.clone()
}
pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> { pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> {
self.0.lock().video_track_updates.1.clone() self.0.lock().video_track_updates.1.clone()
} }
@ -404,6 +488,15 @@ impl LocalVideoTrack {
} }
} }
#[derive(Clone)]
pub struct LocalAudioTrack;
impl LocalAudioTrack {
pub fn create() -> Self {
Self
}
}
pub struct RemoteVideoTrack { pub struct RemoteVideoTrack {
sid: Sid, sid: Sid,
publisher_id: Sid, publisher_id: Sid,
@ -424,12 +517,33 @@ impl RemoteVideoTrack {
} }
} }
pub struct RemoteAudioTrack {
sid: Sid,
publisher_id: Sid,
}
impl RemoteAudioTrack {
pub fn sid(&self) -> &str {
&self.sid
}
pub fn publisher_id(&self) -> &str {
&self.publisher_id
}
}
#[derive(Clone)] #[derive(Clone)]
pub enum RemoteVideoTrackUpdate { pub enum RemoteVideoTrackUpdate {
Subscribed(Arc<RemoteVideoTrack>), Subscribed(Arc<RemoteVideoTrack>),
Unsubscribed { publisher_id: Sid, track_id: Sid }, Unsubscribed { publisher_id: Sid, track_id: Sid },
} }
#[derive(Clone)]
pub enum RemoteAudioTrackUpdate {
Subscribed(Arc<RemoteAudioTrack>),
Unsubscribed { publisher_id: Sid, track_id: Sid },
}
#[derive(Clone)] #[derive(Clone)]
pub struct MacOSDisplay { pub struct MacOSDisplay {
frames: ( frames: (
View file
@ -117,14 +117,14 @@ struct Notification<'a, T> {
params: T, params: T,
} }
#[derive(Deserialize)] #[derive(Debug, Clone, Deserialize)]
struct AnyNotification<'a> { struct AnyNotification<'a> {
#[serde(default)] #[serde(default)]
id: Option<usize>, id: Option<usize>,
#[serde(borrow)] #[serde(borrow)]
method: &'a str, method: &'a str,
#[serde(borrow)] #[serde(borrow, default)]
params: &'a RawValue, params: Option<&'a RawValue>,
} }
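With params now declared as Option<&RawValue> (plus #[serde(default)]), notifications that omit params entirely, such as exit, deserialize instead of erroring out. A rough sketch, usable only inside this module since AnyNotification is private:

// A params-less notification now parses, with `params` defaulting to None.
let message = r#"{"jsonrpc":"2.0","method":"exit"}"#;
let notification: AnyNotification = serde_json::from_str(message).unwrap();
assert_eq!(notification.method, "exit");
assert!(notification.params.is_none());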
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -171,9 +171,12 @@ impl LanguageServer {
"unhandled notification {}:\n{}", "unhandled notification {}:\n{}",
notification.method, notification.method,
serde_json::to_string_pretty( serde_json::to_string_pretty(
&Value::from_str(notification.params.get()).unwrap() &notification
.params
.and_then(|params| Value::from_str(params.get()).ok())
.unwrap_or(Value::Null)
) )
.unwrap() .unwrap(),
); );
}, },
); );
@ -313,7 +316,11 @@ impl LanguageServer {
if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) { if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
if let Some(handler) = notification_handlers.lock().get_mut(msg.method) { if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
handler(msg.id, msg.params.get(), cx.clone()); handler(
msg.id,
&msg.params.map(|params| params.get()).unwrap_or("null"),
cx.clone(),
);
} else { } else {
on_unhandled_notification(msg); on_unhandled_notification(msg);
} }
@ -864,7 +871,13 @@ impl LanguageServer {
cx, cx,
move |msg| { move |msg| {
notifications_tx notifications_tx
.try_send((msg.method.to_string(), msg.params.get().to_string())) .try_send((
msg.method.to_string(),
msg.params
.map(|raw_value| raw_value.get())
.unwrap_or("null")
.to_string(),
))
.ok(); .ok();
}, },
)), )),
View file
@ -38,9 +38,9 @@ use language::{
}, },
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel, range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
Unclipped, ToPointUtf16, Transaction, Unclipped,
}; };
use log::error; use log::error;
use lsp::{ use lsp::{
@ -76,8 +76,8 @@ use std::{
}; };
use terminals::Terminals; use terminals::Terminals;
use util::{ use util::{
debug_panic, defer, merge_json_value_into, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, debug_panic, defer, http::HttpClient, merge_json_value_into,
ResultExt, TryFutureExt as _, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
}; };
pub use fs::*; pub use fs::*;
@ -254,6 +254,7 @@ pub enum Event {
LanguageServerAdded(LanguageServerId), LanguageServerAdded(LanguageServerId),
LanguageServerRemoved(LanguageServerId), LanguageServerRemoved(LanguageServerId),
LanguageServerLog(LanguageServerId, String), LanguageServerLog(LanguageServerId, String),
Notification(String),
ActiveEntryChanged(Option<ProjectEntryId>), ActiveEntryChanged(Option<ProjectEntryId>),
WorktreeAdded, WorktreeAdded,
WorktreeRemoved(WorktreeId), WorktreeRemoved(WorktreeId),
@ -444,6 +445,11 @@ pub enum FormatTrigger {
Manual, Manual,
} }
struct ProjectLspAdapterDelegate {
project: ModelHandle<Project>,
http_client: Arc<dyn HttpClient>,
}
impl FormatTrigger { impl FormatTrigger {
fn from_proto(value: i32) -> FormatTrigger { fn from_proto(value: i32) -> FormatTrigger {
match value { match value {
@ -2427,7 +2433,7 @@ impl Project {
language.clone(), language.clone(),
adapter.clone(), adapter.clone(),
worktree_path, worktree_path,
self.client.http_client(), ProjectLspAdapterDelegate::new(self, cx),
cx, cx,
) { ) {
Some(pending_server) => pending_server, Some(pending_server) => pending_server,
@ -7481,6 +7487,26 @@ impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
} }
} }
impl ProjectLspAdapterDelegate {
fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
Arc::new(Self {
project: cx.handle(),
http_client: project.client.http_client(),
})
}
}
impl LspAdapterDelegate for ProjectLspAdapterDelegate {
fn show_notification(&self, message: &str, cx: &mut AppContext) {
self.project
.update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
}
fn http_client(&self) -> Arc<dyn HttpClient> {
self.http_client.clone()
}
}
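The LspAdapterDelegate trait itself lives in the language crate and is not part of this diff; judging from the call sites here and in the language adapters further down, it presumably looks roughly like the following (an inference, not the actual definition):

// Inferred shape only; the real trait in the language crate may carry more methods.
// Assumes gpui::AppContext, std::sync::Arc, and util::http::HttpClient are in scope.
pub trait LspAdapterDelegate: Send + Sync {
    fn show_notification(&self, message: &str, cx: &mut AppContext);
    fn http_client(&self) -> Arc<dyn HttpClient>;
}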
fn split_operations( fn split_operations(
mut operations: Vec<proto::Operation>, mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> { ) -> impl Iterator<Item = Vec<proto::Operation>> {
View file
@ -1470,7 +1470,7 @@ impl Snapshot {
break; break;
} }
} }
new_entries_by_path.push_tree(cursor.suffix(&()), &()); new_entries_by_path.append(cursor.suffix(&()), &());
new_entries_by_path new_entries_by_path
}; };
@ -2259,7 +2259,7 @@ impl BackgroundScannerState {
let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>(); let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
new_entries.push_tree(cursor.suffix(&()), &()); new_entries.append(cursor.suffix(&()), &());
} }
self.snapshot.entries_by_path = new_entries; self.snapshot.entries_by_path = new_entries;
View file
@ -53,7 +53,7 @@ impl Rope {
} }
} }
self.chunks.push_tree(chunks.suffix(&()), &()); self.chunks.append(chunks.suffix(&()), &());
self.check_invariants(); self.check_invariants();
} }
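The remaining hunks in this commit rename SumTree::push_tree to append throughout. A tiny usage sketch mirroring the updated sum_tree test further down; it relies on the test-only Item impl for u8:

// Sketch of the renamed API; `u8: Item` holds only in the sum_tree test module.
let mut left = SumTree::new();
left.extend(0..20u8, &());
let mut right = SumTree::new();
right.extend(50..100u8, &());
left.append(right, &());
assert_eq!(left.items(&()), (0..20).chain(50..100).collect::<Vec<u8>>());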
View file
@ -21,7 +21,7 @@ util = { path = "../util" }
anyhow.workspace = true anyhow.workspace = true
futures.workspace = true futures.workspace = true
json_comments = "0.2" serde_json_lenient = {version = "0.1", features = ["preserve_order", "raw_value"]}
lazy_static.workspace = true lazy_static.workspace = true
postage.workspace = true postage.workspace = true
rust-embed.workspace = true rust-embed.workspace = true
@ -37,6 +37,6 @@ tree-sitter-json = "*"
[dev-dependencies] [dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] } fs = { path = "../fs", features = ["test-support"] }
indoc.workspace = true
pretty_assertions = "1.3.0" pretty_assertions = "1.3.0"
unindent.workspace = true unindent.workspace = true
View file
@ -1,5 +1,5 @@
use crate::{settings_store::parse_json_with_comments, SettingsAssets}; use crate::{settings_store::parse_json_with_comments, SettingsAssets};
use anyhow::{Context, Result}; use anyhow::{anyhow, Context, Result};
use collections::BTreeMap; use collections::BTreeMap;
use gpui::{keymap_matcher::Binding, AppContext}; use gpui::{keymap_matcher::Binding, AppContext};
use schemars::{ use schemars::{
@ -8,7 +8,7 @@ use schemars::{
JsonSchema, JsonSchema,
}; };
use serde::Deserialize; use serde::Deserialize;
use serde_json::{value::RawValue, Value}; use serde_json::Value;
use util::{asset_str, ResultExt}; use util::{asset_str, ResultExt};
#[derive(Deserialize, Default, Clone, JsonSchema)] #[derive(Deserialize, Default, Clone, JsonSchema)]
@ -24,7 +24,7 @@ pub struct KeymapBlock {
#[derive(Deserialize, Default, Clone)] #[derive(Deserialize, Default, Clone)]
#[serde(transparent)] #[serde(transparent)]
pub struct KeymapAction(Box<RawValue>); pub struct KeymapAction(Value);
impl JsonSchema for KeymapAction { impl JsonSchema for KeymapAction {
fn schema_name() -> String { fn schema_name() -> String {
@ -37,11 +37,12 @@ impl JsonSchema for KeymapAction {
} }
#[derive(Deserialize)] #[derive(Deserialize)]
struct ActionWithData(Box<str>, Box<RawValue>); struct ActionWithData(Box<str>, Value);
impl KeymapFile { impl KeymapFile {
pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> { pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> {
let content = asset_str::<SettingsAssets>(asset_path); let content = asset_str::<SettingsAssets>(asset_path);
Self::parse(content.as_ref())?.add_to_cx(cx) Self::parse(content.as_ref())?.add_to_cx(cx)
} }
@ -54,18 +55,27 @@ impl KeymapFile {
let bindings = bindings let bindings = bindings
.into_iter() .into_iter()
.filter_map(|(keystroke, action)| { .filter_map(|(keystroke, action)| {
let action = action.0.get(); let action = action.0;
// This is a workaround for a limitation in serde: serde-rs/json#497 // This is a workaround for a limitation in serde: serde-rs/json#497
// We want to deserialize the action data as a `RawValue` so that we can // We want to deserialize the action data as a `RawValue` so that we can
// deserialize the action itself dynamically directly from the JSON // deserialize the action itself dynamically directly from the JSON
// string. But `RawValue` currently does not work inside of an untagged enum. // string. But `RawValue` currently does not work inside of an untagged enum.
if action.starts_with('[') { if let Value::Array(items) = action {
let ActionWithData(name, data) = serde_json::from_str(action).log_err()?; let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else {
cx.deserialize_action(&name, Some(data.get())) return Some(Err(anyhow!("Expected array of length 2")));
};
let serde_json::Value::String(name) = name else {
return Some(Err(anyhow!("Expected first item in array to be a string.")))
};
cx.deserialize_action(
&name,
Some(data),
)
} else if let Value::String(name) = action {
cx.deserialize_action(&name, None)
} else { } else {
let name = serde_json::from_str(action).log_err()?; return Some(Err(anyhow!("Expected two-element array, got {:?}", action)));
cx.deserialize_action(name, None)
} }
.with_context(|| { .with_context(|| {
format!( format!(
@ -118,3 +128,24 @@ impl KeymapFile {
serde_json::to_value(root_schema).unwrap() serde_json::to_value(root_schema).unwrap()
} }
} }
#[cfg(test)]
mod tests {
use crate::KeymapFile;
#[test]
fn can_deserialize_keymap_with_trailing_comma() {
let json = indoc::indoc! {"[
// Standard macOS bindings
{
\"bindings\": {
\"up\": \"menu::SelectPrev\",
},
},
]
"
};
KeymapFile::parse(json).unwrap();
}
}
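For context, the rewritten parser above accepts two encodings per binding: a bare action name, or a two-element [name, data] array. A hedged example (the action names are illustrative, and the trailing comma is fine because parsing goes through the lenient JSON path):

// Illustrative only; parse() does not resolve action names, so any strings work here.
let json = indoc::indoc! {r#"[
    {
        "bindings": {
            "up": "menu::SelectPrev",
            "cmd-1": ["workspace::ActivatePane", 0],
        }
    }
]"#};
KeymapFile::parse(json).unwrap();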
View file
@ -834,11 +834,8 @@ fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len:
} }
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> { pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
Ok(serde_json::from_reader( Ok(serde_json_lenient::from_str(content)?)
json_comments::CommentSettings::c_style().strip_comments(content.as_bytes()),
)?)
} }
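A small sketch of what switching to serde_json_lenient buys: C-style comments and trailing commas both parse, whereas the old json_comments pass only stripped comments and still rejected trailing commas.

// Both the comment and the trailing comma are accepted.
let settings: serde_json::Value = parse_json_with_comments(
    r#"{
        // user settings
        "theme": "one-dark",
    }"#,
)
.unwrap();
assert_eq!(settings["theme"], "one-dark");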
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
View file
@ -669,7 +669,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for () {
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> { impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
fn begin_leaf(&mut self) {} fn begin_leaf(&mut self) {}
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) { fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
self.tree.push_tree( self.tree.append(
SumTree(Arc::new(Node::Leaf { SumTree(Arc::new(Node::Leaf {
summary: mem::take(&mut self.leaf_summary), summary: mem::take(&mut self.leaf_summary),
items: mem::take(&mut self.leaf_items), items: mem::take(&mut self.leaf_items),
@ -689,7 +689,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
_: &T::Summary, _: &T::Summary,
cx: &<T::Summary as Summary>::Context, cx: &<T::Summary as Summary>::Context,
) { ) {
self.tree.push_tree(tree.clone(), cx); self.tree.append(tree.clone(), cx);
} }
} }
View file
@ -268,7 +268,7 @@ impl<T: Item> SumTree<T> {
for item in iter { for item in iter {
if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE { if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE {
self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx); self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
} }
if leaf.is_none() { if leaf.is_none() {
@ -295,13 +295,13 @@ impl<T: Item> SumTree<T> {
} }
if leaf.is_some() { if leaf.is_some() {
self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx); self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
} }
} }
pub fn push(&mut self, item: T, cx: &<T::Summary as Summary>::Context) { pub fn push(&mut self, item: T, cx: &<T::Summary as Summary>::Context) {
let summary = item.summary(); let summary = item.summary();
self.push_tree( self.append(
SumTree(Arc::new(Node::Leaf { SumTree(Arc::new(Node::Leaf {
summary: summary.clone(), summary: summary.clone(),
items: ArrayVec::from_iter(Some(item)), items: ArrayVec::from_iter(Some(item)),
@ -311,11 +311,11 @@ impl<T: Item> SumTree<T> {
); );
} }
pub fn push_tree(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) { pub fn append(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) {
if !other.0.is_leaf() || !other.0.items().is_empty() { if !other.0.is_leaf() || !other.0.items().is_empty() {
if self.0.height() < other.0.height() { if self.0.height() < other.0.height() {
for tree in other.0.child_trees() { for tree in other.0.child_trees() {
self.push_tree(tree.clone(), cx); self.append(tree.clone(), cx);
} }
} else if let Some(split_tree) = self.push_tree_recursive(other, cx) { } else if let Some(split_tree) = self.push_tree_recursive(other, cx) {
*self = Self::from_child_trees(self.clone(), split_tree, cx); *self = Self::from_child_trees(self.clone(), split_tree, cx);
@ -512,7 +512,7 @@ impl<T: KeyedItem> SumTree<T> {
} }
} }
new_tree.push(item, cx); new_tree.push(item, cx);
new_tree.push_tree(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(cx), cx);
new_tree new_tree
}; };
replaced replaced
@ -529,7 +529,7 @@ impl<T: KeyedItem> SumTree<T> {
cursor.next(cx); cursor.next(cx);
} }
} }
new_tree.push_tree(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(cx), cx);
new_tree new_tree
}; };
removed removed
@ -563,7 +563,7 @@ impl<T: KeyedItem> SumTree<T> {
{ {
new_tree.extend(buffered_items.drain(..), cx); new_tree.extend(buffered_items.drain(..), cx);
let slice = cursor.slice(&new_key, Bias::Left, cx); let slice = cursor.slice(&new_key, Bias::Left, cx);
new_tree.push_tree(slice, cx); new_tree.append(slice, cx);
old_item = cursor.item(); old_item = cursor.item();
} }
@ -583,7 +583,7 @@ impl<T: KeyedItem> SumTree<T> {
} }
new_tree.extend(buffered_items, cx); new_tree.extend(buffered_items, cx);
new_tree.push_tree(cursor.suffix(cx), cx); new_tree.append(cursor.suffix(cx), cx);
new_tree new_tree
}; };
@ -719,7 +719,7 @@ mod tests {
let mut tree2 = SumTree::new(); let mut tree2 = SumTree::new();
tree2.extend(50..100, &()); tree2.extend(50..100, &());
tree1.push_tree(tree2, &()); tree1.append(tree2, &());
assert_eq!( assert_eq!(
tree1.items(&()), tree1.items(&()),
(0..20).chain(50..100).collect::<Vec<u8>>() (0..20).chain(50..100).collect::<Vec<u8>>()
@ -766,7 +766,7 @@ mod tests {
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
new_tree.extend(new_items, &()); new_tree.extend(new_items, &());
cursor.seek(&Count(splice_end), Bias::Right, &()); cursor.seek(&Count(splice_end), Bias::Right, &());
new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &()); new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &());
new_tree new_tree
}; };
View file
@ -67,7 +67,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
removed = Some(cursor.item().unwrap().value.clone()); removed = Some(cursor.item().unwrap().value.clone());
cursor.next(&()); cursor.next(&());
} }
new_tree.push_tree(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(&()), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
removed removed
@ -79,7 +79,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>(); let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let mut new_tree = cursor.slice(&start, Bias::Left, &()); let mut new_tree = cursor.slice(&start, Bias::Left, &());
cursor.seek(&end, Bias::Left, &()); cursor.seek(&end, Bias::Left, &());
new_tree.push_tree(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(&()), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
} }
@ -117,7 +117,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
new_tree.push(updated, &()); new_tree.push(updated, &());
cursor.next(&()); cursor.next(&());
} }
new_tree.push_tree(cursor.suffix(&()), &()); new_tree.append(cursor.suffix(&()), &());
drop(cursor); drop(cursor);
self.0 = new_tree; self.0 = new_tree;
result result
View file
@ -600,7 +600,7 @@ impl Buffer {
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>(); let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
let mut new_fragments = let mut new_fragments =
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None); old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
new_ropes.push_tree(new_fragments.summary().text); new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().visible; let mut fragment_start = old_fragments.start().visible;
for (range, new_text) in edits { for (range, new_text) in edits {
@ -625,8 +625,8 @@ impl Buffer {
} }
let slice = old_fragments.slice(&range.start, Bias::Right, &None); let slice = old_fragments.slice(&range.start, Bias::Right, &None);
new_ropes.push_tree(slice.summary().text); new_ropes.append(slice.summary().text);
new_fragments.push_tree(slice, &None); new_fragments.append(slice, &None);
fragment_start = old_fragments.start().visible; fragment_start = old_fragments.start().visible;
} }
@ -728,8 +728,8 @@ impl Buffer {
} }
let suffix = old_fragments.suffix(&None); let suffix = old_fragments.suffix(&None);
new_ropes.push_tree(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.push_tree(suffix, &None); new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish(); let (visible_text, deleted_text) = new_ropes.finish();
drop(old_fragments); drop(old_fragments);
@ -828,7 +828,7 @@ impl Buffer {
Bias::Left, Bias::Left,
&cx, &cx,
); );
new_ropes.push_tree(new_fragments.summary().text); new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().0.full_offset(); let mut fragment_start = old_fragments.start().0.full_offset();
for (range, new_text) in edits { for (range, new_text) in edits {
@ -854,8 +854,8 @@ impl Buffer {
let slice = let slice =
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
new_ropes.push_tree(slice.summary().text); new_ropes.append(slice.summary().text);
new_fragments.push_tree(slice, &None); new_fragments.append(slice, &None);
fragment_start = old_fragments.start().0.full_offset(); fragment_start = old_fragments.start().0.full_offset();
} }
@ -986,8 +986,8 @@ impl Buffer {
} }
let suffix = old_fragments.suffix(&cx); let suffix = old_fragments.suffix(&cx);
new_ropes.push_tree(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.push_tree(suffix, &None); new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish(); let (visible_text, deleted_text) = new_ropes.finish();
drop(old_fragments); drop(old_fragments);
@ -1056,8 +1056,8 @@ impl Buffer {
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) { for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None); let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
new_ropes.push_tree(preceding_fragments.summary().text); new_ropes.append(preceding_fragments.summary().text);
new_fragments.push_tree(preceding_fragments, &None); new_fragments.append(preceding_fragments, &None);
if let Some(fragment) = old_fragments.item() { if let Some(fragment) = old_fragments.item() {
let mut fragment = fragment.clone(); let mut fragment = fragment.clone();
@ -1087,8 +1087,8 @@ impl Buffer {
} }
let suffix = old_fragments.suffix(&None); let suffix = old_fragments.suffix(&None);
new_ropes.push_tree(suffix.summary().text); new_ropes.append(suffix.summary().text);
new_fragments.push_tree(suffix, &None); new_fragments.append(suffix, &None);
drop(old_fragments); drop(old_fragments);
let (visible_text, deleted_text) = new_ropes.finish(); let (visible_text, deleted_text) = new_ropes.finish();
@ -2070,7 +2070,7 @@ impl<'a> RopeBuilder<'a> {
} }
} }
fn push_tree(&mut self, len: FragmentTextSummary) { fn append(&mut self, len: FragmentTextSummary) {
self.push(len.visible, true, true); self.push(len.visible, true, true);
self.push(len.deleted, false, false); self.push(len.deleted, false, false);
} }
View file
@ -162,6 +162,12 @@ define_connection! {
ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT; ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT; ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
),
// Add panel zoom persistence
sql!(
ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
)]; )];
} }
@ -196,10 +202,13 @@ impl WorkspaceDb {
display, display,
left_dock_visible, left_dock_visible,
left_dock_active_panel, left_dock_active_panel,
left_dock_zoom,
right_dock_visible, right_dock_visible,
right_dock_active_panel, right_dock_active_panel,
right_dock_zoom,
bottom_dock_visible, bottom_dock_visible,
bottom_dock_active_panel bottom_dock_active_panel,
bottom_dock_zoom
FROM workspaces FROM workspaces
WHERE workspace_location = ? WHERE workspace_location = ?
}) })
@ -244,22 +253,28 @@ impl WorkspaceDb {
workspace_location, workspace_location,
left_dock_visible, left_dock_visible,
left_dock_active_panel, left_dock_active_panel,
left_dock_zoom,
right_dock_visible, right_dock_visible,
right_dock_active_panel, right_dock_active_panel,
right_dock_zoom,
bottom_dock_visible, bottom_dock_visible,
bottom_dock_active_panel, bottom_dock_active_panel,
bottom_dock_zoom,
timestamp timestamp
) )
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, CURRENT_TIMESTAMP) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
ON CONFLICT DO ON CONFLICT DO
UPDATE SET UPDATE SET
workspace_location = ?2, workspace_location = ?2,
left_dock_visible = ?3, left_dock_visible = ?3,
left_dock_active_panel = ?4, left_dock_active_panel = ?4,
right_dock_visible = ?5, left_dock_zoom = ?5,
right_dock_active_panel = ?6, right_dock_visible = ?6,
bottom_dock_visible = ?7, right_dock_active_panel = ?7,
bottom_dock_active_panel = ?8, right_dock_zoom = ?8,
bottom_dock_visible = ?9,
bottom_dock_active_panel = ?10,
bottom_dock_zoom = ?11,
timestamp = CURRENT_TIMESTAMP timestamp = CURRENT_TIMESTAMP
))?((workspace.id, &workspace.location, workspace.docks)) ))?((workspace.id, &workspace.location, workspace.docks))
.context("Updating workspace")?; .context("Updating workspace")?;
View file
@ -100,16 +100,19 @@ impl Bind for DockStructure {
pub struct DockData { pub struct DockData {
pub(crate) visible: bool, pub(crate) visible: bool,
pub(crate) active_panel: Option<String>, pub(crate) active_panel: Option<String>,
pub(crate) zoom: bool,
} }
impl Column for DockData { impl Column for DockData {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (visible, next_index) = Option::<bool>::column(statement, start_index)?; let (visible, next_index) = Option::<bool>::column(statement, start_index)?;
let (active_panel, next_index) = Option::<String>::column(statement, next_index)?; let (active_panel, next_index) = Option::<String>::column(statement, next_index)?;
let (zoom, next_index) = Option::<bool>::column(statement, next_index)?;
Ok(( Ok((
DockData { DockData {
visible: visible.unwrap_or(false), visible: visible.unwrap_or(false),
active_panel, active_panel,
zoom: zoom.unwrap_or(false),
}, },
next_index, next_index,
)) ))
@ -119,7 +122,8 @@ impl Column for DockData {
impl Bind for DockData { impl Bind for DockData {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> { fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
let next_index = statement.bind(&self.visible, start_index)?; let next_index = statement.bind(&self.visible, start_index)?;
statement.bind(&self.active_panel, next_index) let next_index = statement.bind(&self.active_panel, next_index)?;
statement.bind(&self.zoom, next_index)
} }
} }
View file
@ -553,6 +553,10 @@ impl Workspace {
} }
} }
project::Event::Notification(message) => this.show_notification(0, cx, |cx| {
cx.add_view(|_| MessageNotification::new(message.clone()))
}),
_ => {} _ => {}
} }
cx.notify() cx.notify()
@ -1599,9 +1603,7 @@ impl Workspace {
focus_center = true; focus_center = true;
} }
} else { } else {
if active_panel.is_zoomed(cx) { cx.focus(active_panel.as_any());
cx.focus(active_panel.as_any());
}
reveal_dock = true; reveal_dock = true;
} }
} }
@ -2850,7 +2852,7 @@ impl Workspace {
cx.notify(); cx.notify();
} }
fn serialize_workspace(&self, cx: &AppContext) { fn serialize_workspace(&self, cx: &ViewContext<Self>) {
fn serialize_pane_handle( fn serialize_pane_handle(
pane_handle: &ViewHandle<Pane>, pane_handle: &ViewHandle<Pane>,
cx: &AppContext, cx: &AppContext,
@ -2893,7 +2895,7 @@ impl Workspace {
} }
} }
fn build_serialized_docks(this: &Workspace, cx: &AppContext) -> DockStructure { fn build_serialized_docks(this: &Workspace, cx: &ViewContext<Workspace>) -> DockStructure {
let left_dock = this.left_dock.read(cx); let left_dock = this.left_dock.read(cx);
let left_visible = left_dock.is_open(); let left_visible = left_dock.is_open();
let left_active_panel = left_dock.visible_panel().and_then(|panel| { let left_active_panel = left_dock.visible_panel().and_then(|panel| {
@ -2902,6 +2904,10 @@ impl Workspace {
.to_string(), .to_string(),
) )
}); });
let left_dock_zoom = left_dock
.visible_panel()
.map(|panel| panel.is_zoomed(cx))
.unwrap_or(false);
let right_dock = this.right_dock.read(cx); let right_dock = this.right_dock.read(cx);
let right_visible = right_dock.is_open(); let right_visible = right_dock.is_open();
@ -2911,6 +2917,10 @@ impl Workspace {
.to_string(), .to_string(),
) )
}); });
let right_dock_zoom = right_dock
.visible_panel()
.map(|panel| panel.is_zoomed(cx))
.unwrap_or(false);
let bottom_dock = this.bottom_dock.read(cx); let bottom_dock = this.bottom_dock.read(cx);
let bottom_visible = bottom_dock.is_open(); let bottom_visible = bottom_dock.is_open();
@ -2920,19 +2930,26 @@ impl Workspace {
.to_string(), .to_string(),
) )
}); });
let bottom_dock_zoom = bottom_dock
.visible_panel()
.map(|panel| panel.is_zoomed(cx))
.unwrap_or(false);
DockStructure { DockStructure {
left: DockData { left: DockData {
visible: left_visible, visible: left_visible,
active_panel: left_active_panel, active_panel: left_active_panel,
zoom: left_dock_zoom,
}, },
right: DockData { right: DockData {
visible: right_visible, visible: right_visible,
active_panel: right_active_panel, active_panel: right_active_panel,
zoom: right_dock_zoom,
}, },
bottom: DockData { bottom: DockData {
visible: bottom_visible, visible: bottom_visible,
active_panel: bottom_active_panel, active_panel: bottom_active_panel,
zoom: bottom_dock_zoom,
}, },
} }
} }
@ -3045,14 +3062,31 @@ impl Workspace {
dock.activate_panel(ix, cx); dock.activate_panel(ix, cx);
} }
} }
dock.active_panel()
.map(|panel| {
panel.set_zoomed(docks.left.zoom, cx)
});
if docks.left.visible && docks.left.zoom {
cx.focus_self()
}
}); });
// TODO: The bug may be that setting the zoom or active panel here undoes the bottom dock's zoom.
workspace.right_dock.update(cx, |dock, cx| { workspace.right_dock.update(cx, |dock, cx| {
dock.set_open(docks.right.visible, cx); dock.set_open(docks.right.visible, cx);
if let Some(active_panel) = docks.right.active_panel { if let Some(active_panel) = docks.right.active_panel {
if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) { if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) {
dock.activate_panel(ix, cx); dock.activate_panel(ix, cx);
} }
} }
dock.active_panel()
.map(|panel| {
panel.set_zoomed(docks.right.zoom, cx)
});
if docks.right.visible && docks.right.zoom {
cx.focus_self()
}
}); });
workspace.bottom_dock.update(cx, |dock, cx| { workspace.bottom_dock.update(cx, |dock, cx| {
dock.set_open(docks.bottom.visible, cx); dock.set_open(docks.bottom.visible, cx);
@ -3061,8 +3095,18 @@ impl Workspace {
dock.activate_panel(ix, cx); dock.activate_panel(ix, cx);
} }
} }
dock.active_panel()
.map(|panel| {
panel.set_zoomed(docks.bottom.zoom, cx)
});
if docks.bottom.visible && docks.bottom.zoom {
cx.focus_self()
}
}); });
cx.notify(); cx.notify();
})?; })?;
@ -4425,7 +4469,7 @@ mod tests {
workspace.read_with(cx, |workspace, cx| { workspace.read_with(cx, |workspace, cx| {
assert!(workspace.right_dock().read(cx).is_open()); assert!(workspace.right_dock().read(cx).is_open());
assert!(!panel.is_zoomed(cx)); assert!(!panel.is_zoomed(cx));
assert!(!panel.has_focus(cx)); assert!(panel.has_focus(cx));
}); });
// Focus and zoom panel // Focus and zoom panel
@ -4500,7 +4544,7 @@ mod tests {
workspace.read_with(cx, |workspace, cx| { workspace.read_with(cx, |workspace, cx| {
let pane = pane.read(cx); let pane = pane.read(cx);
assert!(!pane.is_zoomed()); assert!(!pane.is_zoomed());
assert!(pane.has_focus()); assert!(!pane.has_focus());
assert!(workspace.right_dock().read(cx).is_open()); assert!(workspace.right_dock().read(cx).is_open());
assert!(workspace.zoomed.is_none()); assert!(workspace.zoomed.is_none());
}); });
View file
@ -5,12 +5,11 @@ pub use language::*;
use lsp::LanguageServerBinary; use lsp::LanguageServerBinary;
use smol::fs::{self, File}; use smol::fs::{self, File};
use std::{any::Any, path::PathBuf, sync::Arc}; use std::{any::Any, path::PathBuf, sync::Arc};
use util::fs::remove_matching; use util::{
use util::github::latest_github_release; fs::remove_matching,
use util::http::HttpClient; github::{latest_github_release, GitHubLspBinaryVersion},
use util::ResultExt; ResultExt,
};
use util::github::GitHubLspBinaryVersion;
pub struct CLspAdapter; pub struct CLspAdapter;
@ -22,9 +21,9 @@ impl super::LspAdapter for CLspAdapter {
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
http: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
let release = latest_github_release("clangd/clangd", false, http).await?; let release = latest_github_release("clangd/clangd", false, delegate.http_client()).await?;
let asset_name = format!("clangd-mac-{}.zip", release.name); let asset_name = format!("clangd-mac-{}.zip", release.name);
let asset = release let asset = release
.assets .assets
@ -41,8 +40,8 @@ impl super::LspAdapter for CLspAdapter {
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap(); let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name)); let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
@ -50,7 +49,8 @@ impl super::LspAdapter for CLspAdapter {
let binary_path = version_dir.join("bin/clangd"); let binary_path = version_dir.join("bin/clangd");
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
let mut response = http let mut response = delegate
.http_client()
.get(&version.url, Default::default(), true) .get(&version.url, Default::default(), true)
.await .await
.context("error downloading release")?; .context("error downloading release")?;
@ -82,39 +82,19 @@ impl super::LspAdapter for CLspAdapter {
}) })
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
(|| async move { &self,
let mut last_clangd_dir = None; container_dir: PathBuf,
let mut entries = fs::read_dir(&container_dir).await?; _: &dyn LspAdapterDelegate,
while let Some(entry) = entries.next().await { ) -> Option<LanguageServerBinary> {
let entry = entry?; get_cached_server_binary(container_dir).await
if entry.file_type().await?.is_dir() {
last_clangd_dir = Some(entry.path());
}
}
let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let clangd_bin = clangd_dir.join("bin/clangd");
if clangd_bin.exists() {
Ok(LanguageServerBinary {
path: clangd_bin,
arguments: vec![],
})
} else {
Err(anyhow!(
"missing clangd binary in directory {:?}",
clangd_dir
))
}
})()
.await
.log_err()
} }
async fn installation_test_binary( async fn installation_test_binary(
&self, &self,
container_dir: PathBuf, container_dir: PathBuf,
) -> Option<LanguageServerBinary> { ) -> Option<LanguageServerBinary> {
self.cached_server_binary(container_dir) get_cached_server_binary(container_dir)
.await .await
.map(|mut binary| { .map(|mut binary| {
binary.arguments = vec!["--help".into()]; binary.arguments = vec!["--help".into()];
@ -259,6 +239,34 @@ impl super::LspAdapter for CLspAdapter {
} }
} }
async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_clangd_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
while let Some(entry) = entries.next().await {
let entry = entry?;
if entry.file_type().await?.is_dir() {
last_clangd_dir = Some(entry.path());
}
}
let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
let clangd_bin = clangd_dir.join("bin/clangd");
if clangd_bin.exists() {
Ok(LanguageServerBinary {
path: clangd_bin,
arguments: vec![],
})
} else {
Err(anyhow!(
"missing clangd binary in directory {:?}",
clangd_dir
))
}
})()
.await
.log_err()
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use gpui::TestAppContext; use gpui::TestAppContext;
View file
@ -1,16 +1,23 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait; use async_trait::async_trait;
use futures::StreamExt; use futures::StreamExt;
use gpui::{AsyncAppContext, Task};
pub use language::*; pub use language::*;
use lsp::{CompletionItemKind, LanguageServerBinary, SymbolKind}; use lsp::{CompletionItemKind, LanguageServerBinary, SymbolKind};
use smol::fs::{self, File}; use smol::fs::{self, File};
use std::{any::Any, path::PathBuf, sync::Arc}; use std::{
use util::fs::remove_matching; any::Any,
use util::github::latest_github_release; path::PathBuf,
use util::http::HttpClient; sync::{
use util::ResultExt; atomic::{AtomicBool, Ordering::SeqCst},
Arc,
use util::github::GitHubLspBinaryVersion; },
};
use util::{
fs::remove_matching,
github::{latest_github_release, GitHubLspBinaryVersion},
ResultExt,
};
pub struct ElixirLspAdapter; pub struct ElixirLspAdapter;
@ -20,11 +27,43 @@ impl LspAdapter for ElixirLspAdapter {
LanguageServerName("elixir-ls".into()) LanguageServerName("elixir-ls".into())
} }
fn will_start_server(
&self,
delegate: &Arc<dyn LspAdapterDelegate>,
cx: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
const NOTIFICATION_MESSAGE: &str = "Could not run the elixir language server, `elixir-ls`, because `elixir` was not found.";
let delegate = delegate.clone();
Some(cx.spawn(|mut cx| async move {
let elixir_output = smol::process::Command::new("elixir")
.args(["--version"])
.output()
.await;
if elixir_output.is_err() {
if DID_SHOW_NOTIFICATION
.compare_exchange(false, true, SeqCst, SeqCst)
.is_ok()
{
cx.update(|cx| {
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
})
}
return Err(anyhow!("cannot run elixir-ls"));
}
Ok(())
}))
}
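This adapter and the Go adapter further down share the same once-only guard, so the missing-runtime notification is surfaced at most once per process. The pattern in isolation, as a sketch:

use std::sync::atomic::{AtomicBool, Ordering::SeqCst};

static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);

// compare_exchange flips the flag for exactly one caller, so `show` runs once.
fn notify_once(show: impl FnOnce()) {
    if DID_SHOW_NOTIFICATION
        .compare_exchange(false, true, SeqCst, SeqCst)
        .is_ok()
    {
        show();
    }
}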
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
http: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
let release = latest_github_release("elixir-lsp/elixir-ls", false, http).await?; let release =
latest_github_release("elixir-lsp/elixir-ls", false, delegate.http_client()).await?;
let asset_name = "elixir-ls.zip"; let asset_name = "elixir-ls.zip";
let asset = release let asset = release
.assets .assets
@ -41,8 +80,8 @@ impl LspAdapter for ElixirLspAdapter {
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
http: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap(); let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name)); let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name));
@ -50,7 +89,8 @@ impl LspAdapter for ElixirLspAdapter {
let binary_path = version_dir.join("language_server.sh"); let binary_path = version_dir.join("language_server.sh");
if fs::metadata(&binary_path).await.is_err() { if fs::metadata(&binary_path).await.is_err() {
let mut response = http let mut response = delegate
.http_client()
.get(&version.url, Default::default(), true) .get(&version.url, Default::default(), true)
.await .await
.context("error downloading release")?; .context("error downloading release")?;
@ -88,7 +128,11 @@ impl LspAdapter for ElixirLspAdapter {
}) })
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
(|| async move { (|| async move {
let mut last = None; let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
View file
@ -36,8 +36,6 @@
(char) @constant (char) @constant
(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
(escape_sequence) @string.escape (escape_sequence) @string.escape
[ [
@ -146,3 +144,10 @@
"<<" "<<"
">>" ">>"
] @punctuation.bracket ] @punctuation.bracket
(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
((sigil
(sigil_name) @_sigil_name
(quoted_content) @embedded)
(#eq? @_sigil_name "H"))
View file
@ -1,17 +1,24 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait; use async_trait::async_trait;
use futures::StreamExt; use futures::StreamExt;
use gpui::{AsyncAppContext, Task};
pub use language::*; pub use language::*;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use lsp::LanguageServerBinary; use lsp::LanguageServerBinary;
use regex::Regex; use regex::Regex;
use smol::{fs, process}; use smol::{fs, process};
use std::ffi::{OsStr, OsString}; use std::{
use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc}; any::Any,
use util::fs::remove_matching; ffi::{OsStr, OsString},
use util::github::latest_github_release; ops::Range,
use util::http::HttpClient; path::PathBuf,
use util::ResultExt; str,
sync::{
atomic::{AtomicBool, Ordering::SeqCst},
Arc,
},
};
use util::{fs::remove_matching, github::latest_github_release, ResultExt};
fn server_binary_arguments() -> Vec<OsString> { fn server_binary_arguments() -> Vec<OsString> {
vec!["-mode=stdio".into()] vec!["-mode=stdio".into()]
@ -32,9 +39,9 @@ impl super::LspAdapter for GoLspAdapter {
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
http: Arc<dyn HttpClient>, delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
let release = latest_github_release("golang/tools", false, http).await?; let release = latest_github_release("golang/tools", false, delegate.http_client()).await?;
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string); let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
if version.is_none() { if version.is_none() {
log::warn!( log::warn!(
@ -45,11 +52,39 @@ impl super::LspAdapter for GoLspAdapter {
Ok(Box::new(version) as Box<_>) Ok(Box::new(version) as Box<_>)
} }
fn will_fetch_server(
&self,
delegate: &Arc<dyn LspAdapterDelegate>,
cx: &mut AsyncAppContext,
) -> Option<Task<Result<()>>> {
static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
const NOTIFICATION_MESSAGE: &str =
"Could not install the Go language server `gopls`, because `go` was not found.";
let delegate = delegate.clone();
Some(cx.spawn(|mut cx| async move {
let install_output = process::Command::new("go").args(["version"]).output().await;
if install_output.is_err() {
if DID_SHOW_NOTIFICATION
.compare_exchange(false, true, SeqCst, SeqCst)
.is_ok()
{
cx.update(|cx| {
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
})
}
return Err(anyhow!("cannot install gopls"));
}
Ok(())
}))
}
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<Option<String>>().unwrap(); let version = version.downcast::<Option<String>>().unwrap();
let this = *self; let this = *self;
@ -69,7 +104,10 @@ impl super::LspAdapter for GoLspAdapter {
}); });
} }
} }
} else if let Some(path) = this.cached_server_binary(container_dir.clone()).await { } else if let Some(path) = this
.cached_server_binary(container_dir.clone(), delegate)
.await
{
return Ok(path); return Ok(path);
} }
@ -106,7 +144,11 @@ impl super::LspAdapter for GoLspAdapter {
}) })
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
(|| async move { (|| async move {
let mut last_binary_path = None; let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
View file
@ -1,17 +1,11 @@
; HEEx delimiters ; HEEx delimiters
[ [
"%>"
"--%>" "--%>"
"-->" "-->"
"/>" "/>"
"<!" "<!"
"<!--" "<!--"
"<" "<"
"<%!--"
"<%"
"<%#"
"<%%="
"<%="
"</" "</"
"</:" "</:"
"<:" "<:"
@ -20,6 +14,15 @@
"}" "}"
] @punctuation.bracket ] @punctuation.bracket
[
"<%!--"
"<%"
"<%#"
"<%%="
"<%="
"%>"
] @keyword
; HEEx operators are highlighted as such ; HEEx operators are highlighted as such
"=" @operator "=" @operator
View file
@ -1,11 +1,13 @@
((directive (partial_expression_value) @content) (
(#set! language "elixir") (directive
(#set! include-children) [
(#set! combined)) (partial_expression_value)
(expression_value)
; Regular expression_values do not need to be combined (ending_expression_value)
((directive (expression_value) @content) ] @content)
(#set! language "elixir")) (#set! language "elixir")
(#set! combined)
)
; expressions live within HTML tags, and do not need to be combined ; expressions live within HTML tags, and do not need to be combined
; <link href={ Routes.static_path(..) } /> ; <link href={ Routes.static_path(..) } />
View file
@ -1,15 +1,17 @@
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use async_trait::async_trait; use async_trait::async_trait;
use futures::StreamExt; use futures::StreamExt;
use language::{LanguageServerName, LspAdapter}; use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary; use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use serde_json::json; use serde_json::json;
use smol::fs; use smol::fs;
use std::ffi::OsString; use std::{
use std::path::Path; any::Any,
use std::{any::Any, path::PathBuf, sync::Arc}; ffi::OsString,
use util::http::HttpClient; path::{Path, PathBuf},
sync::Arc,
};
use util::ResultExt; use util::ResultExt;
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> { fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@ -37,7 +39,7 @@ impl LspAdapter for HtmlLspAdapter {
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
_: Arc<dyn HttpClient>, _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> { ) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new( Ok(Box::new(
self.node self.node
@ -49,8 +51,8 @@ impl LspAdapter for HtmlLspAdapter {
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap(); let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH); let server_path = container_dir.join(Self::SERVER_PATH);
@ -70,7 +72,11 @@ impl LspAdapter for HtmlLspAdapter {
}) })
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
(|| async move { (|| async move {
let mut last_version_dir = None; let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;
View file
@ -3,7 +3,7 @@ use async_trait::async_trait;
use collections::HashMap; use collections::HashMap;
use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext; use gpui::AppContext;
use language::{LanguageRegistry, LanguageServerName, LspAdapter}; use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary; use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime; use node_runtime::NodeRuntime;
use serde_json::json; use serde_json::json;
@ -17,7 +17,6 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
use util::http::HttpClient;
use util::{paths, ResultExt}; use util::{paths, ResultExt};
const SERVER_PATH: &'static str = const SERVER_PATH: &'static str =
@ -46,7 +45,7 @@ impl LspAdapter for JsonLspAdapter {
async fn fetch_latest_server_version( async fn fetch_latest_server_version(
&self, &self,
_: Arc<dyn HttpClient>, _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> { ) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new( Ok(Box::new(
self.node self.node
@ -58,8 +57,8 @@ impl LspAdapter for JsonLspAdapter {
async fn fetch_server_binary( async fn fetch_server_binary(
&self, &self,
version: Box<dyn 'static + Send + Any>, version: Box<dyn 'static + Send + Any>,
_: Arc<dyn HttpClient>,
container_dir: PathBuf, container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> { ) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap(); let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(SERVER_PATH); let server_path = container_dir.join(SERVER_PATH);
@ -79,7 +78,11 @@ impl LspAdapter for JsonLspAdapter {
}) })
} }
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> { async fn cached_server_binary(
&self,
container_dir: PathBuf,
_: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
(|| async move { (|| async move {
let mut last_version_dir = None; let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?; let mut entries = fs::read_dir(&container_dir).await?;

View file

@@ -3,11 +3,10 @@ use async_trait::async_trait;
 use collections::HashMap;
 use futures::lock::Mutex;
 use gpui::executor::Background;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
 use lsp::LanguageServerBinary;
 use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
 use std::{any::Any, path::PathBuf, sync::Arc};
-use util::http::HttpClient;
 use util::ResultExt;

 #[allow(dead_code)]
@@ -73,7 +72,7 @@ impl LspAdapter for PluginLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        _: Arc<dyn HttpClient>,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Send + Any>> {
         let runtime = self.runtime.clone();
         let function = self.fetch_latest_server_version;
@@ -93,8 +92,8 @@ impl LspAdapter for PluginLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        _: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = *version.downcast::<String>().unwrap();
         let runtime = self.runtime.clone();
@@ -111,7 +110,11 @@ impl LspAdapter for PluginLspAdapter {
             .await
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         let runtime = self.runtime.clone();
         let function = self.cached_server_binary;

View file

@@ -3,13 +3,15 @@ use async_compression::futures::bufread::GzipDecoder;
 use async_tar::Archive;
 use async_trait::async_trait;
 use futures::{io::BufReader, StreamExt};
-use language::LanguageServerName;
+use language::{LanguageServerName, LspAdapterDelegate};
 use lsp::LanguageServerBinary;
 use smol::fs;
-use std::{any::Any, env::consts, ffi::OsString, path::PathBuf, sync::Arc};
-use util::{async_iife, github::latest_github_release, http::HttpClient, ResultExt};
-
-use util::github::GitHubLspBinaryVersion;
+use std::{any::Any, env::consts, ffi::OsString, path::PathBuf};
+use util::{
+    async_iife,
+    github::{latest_github_release, GitHubLspBinaryVersion},
+    ResultExt,
+};

 #[derive(Copy, Clone)]
 pub struct LuaLspAdapter;
@@ -29,9 +31,11 @@ impl super::LspAdapter for LuaLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        http: Arc<dyn HttpClient>,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Send + Any>> {
-        let release = latest_github_release("LuaLS/lua-language-server", false, http).await?;
+        let release =
+            latest_github_release("LuaLS/lua-language-server", false, delegate.http_client())
+                .await?;
         let version = release.name.clone();
         let platform = match consts::ARCH {
             "x86_64" => "x64",
@@ -54,15 +58,16 @@ impl super::LspAdapter for LuaLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        http: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
         let binary_path = container_dir.join("bin/lua-language-server");

         if fs::metadata(&binary_path).await.is_err() {
-            let mut response = http
+            let mut response = delegate
+                .http_client()
                 .get(&version.url, Default::default(), true)
                 .await
                 .map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -82,7 +87,11 @@ impl super::LspAdapter for LuaLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         async_iife!({
             let mut last_binary_path = None;
             let mut entries = fs::read_dir(&container_dir).await?;
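
The GitHub-backed adapters (Lua here, rust-analyzer and ESLint further down) all follow the same pattern once the delegate is threaded through: ask the delegate for the shared HTTP client and hand it to `latest_github_release`. A hedged sketch of that call shape, reusing only names visible in this diff; the helper name is hypothetical, and the real Lua adapter picks a platform-specific asset rather than the tarball:

    // Hypothetical helper (`lookup_latest_release` is not in the diff)
    // illustrating the delegate-based fetch pattern shown above.
    use language::LspAdapterDelegate;
    use util::github::{latest_github_release, GitHubLspBinaryVersion};

    async fn lookup_latest_release(
        delegate: &dyn LspAdapterDelegate,
    ) -> anyhow::Result<GitHubLspBinaryVersion> {
        // `false` selects stable releases; the ESLint adapter below passes `true`
        // to pick up prereleases instead.
        let release =
            latest_github_release("LuaLS/lua-language-server", false, delegate.http_client())
                .await?;
        Ok(GitHubLspBinaryVersion {
            name: release.name.clone(),
            url: release.tarball_url,
        })
    }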

View file

@@ -1,7 +1,7 @@
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use futures::StreamExt;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
 use lsp::LanguageServerBinary;
 use node_runtime::NodeRuntime;
 use smol::fs;
@@ -11,7 +11,6 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use util::http::HttpClient;
 use util::ResultExt;

 fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -38,7 +37,7 @@ impl LspAdapter for PythonLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        _: Arc<dyn HttpClient>,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Any + Send>> {
         Ok(Box::new(self.node.npm_package_latest_version("pyright").await?) as Box<_>)
     }
@@ -46,8 +45,8 @@ impl LspAdapter for PythonLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        _: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<String>().unwrap();
         let server_path = container_dir.join(Self::SERVER_PATH);
@@ -64,7 +63,11 @@ impl LspAdapter for PythonLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         (|| async move {
             let mut last_version_dir = None;
             let mut entries = fs::read_dir(&container_dir).await?;

View file

@@ -1,9 +1,8 @@
 use anyhow::{anyhow, Result};
 use async_trait::async_trait;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
 use lsp::LanguageServerBinary;
 use std::{any::Any, path::PathBuf, sync::Arc};
-use util::http::HttpClient;

 pub struct RubyLanguageServer;
@@ -15,7 +14,7 @@ impl LspAdapter for RubyLanguageServer {
     async fn fetch_latest_server_version(
         &self,
-        _: Arc<dyn HttpClient>,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Any + Send>> {
         Ok(Box::new(()))
     }
@@ -23,13 +22,17 @@ impl LspAdapter for RubyLanguageServer {
     async fn fetch_server_binary(
         &self,
         _version: Box<dyn 'static + Send + Any>,
-        _: Arc<dyn HttpClient>,
         _container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         Err(anyhow!("solargraph must be installed manually"))
     }

-    async fn cached_server_binary(&self, _container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        _: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         Some(LanguageServerBinary {
             path: "solargraph".into(),
             arguments: vec!["stdio".into()],

View file

@@ -8,10 +8,11 @@ use lsp::LanguageServerBinary;
 use regex::Regex;
 use smol::fs::{self, File};
 use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
-use util::fs::remove_matching;
-use util::github::{latest_github_release, GitHubLspBinaryVersion};
-use util::http::HttpClient;
-use util::ResultExt;
+use util::{
+    fs::remove_matching,
+    github::{latest_github_release, GitHubLspBinaryVersion},
+    ResultExt,
+};

 pub struct RustLspAdapter;
@@ -23,9 +24,11 @@ impl LspAdapter for RustLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        http: Arc<dyn HttpClient>,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Send + Any>> {
-        let release = latest_github_release("rust-analyzer/rust-analyzer", false, http).await?;
+        let release =
+            latest_github_release("rust-analyzer/rust-analyzer", false, delegate.http_client())
+                .await?;
         let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
         let asset = release
             .assets
@@ -41,14 +44,15 @@ impl LspAdapter for RustLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        http: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
         let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));

         if fs::metadata(&destination_path).await.is_err() {
-            let mut response = http
+            let mut response = delegate
+                .http_client()
                 .get(&version.url, Default::default(), true)
                 .await
                 .map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -70,7 +74,11 @@ impl LspAdapter for RustLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         (|| async move {
             let mut last = None;
             let mut entries = fs::read_dir(&container_dir).await?;

View file

@@ -4,7 +4,7 @@ use async_tar::Archive;
 use async_trait::async_trait;
 use futures::{future::BoxFuture, FutureExt};
 use gpui::AppContext;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
 use lsp::{CodeActionKind, LanguageServerBinary};
 use node_runtime::NodeRuntime;
 use serde_json::{json, Value};
@@ -16,7 +16,7 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use util::{fs::remove_matching, github::latest_github_release, http::HttpClient};
+use util::{fs::remove_matching, github::latest_github_release};
 use util::{github::GitHubLspBinaryVersion, ResultExt};

 fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -58,7 +58,7 @@ impl LspAdapter for TypeScriptLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        _: Arc<dyn HttpClient>,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Send + Any>> {
         Ok(Box::new(TypeScriptVersions {
             typescript_version: self.node.npm_package_latest_version("typescript").await?,
@@ -72,8 +72,8 @@ impl LspAdapter for TypeScriptLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        _: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<TypeScriptVersions>().unwrap();
         let server_path = container_dir.join(Self::NEW_SERVER_PATH);
@@ -99,7 +99,11 @@ impl LspAdapter for TypeScriptLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         (|| async move {
             let old_server_path = container_dir.join(Self::OLD_SERVER_PATH);
             let new_server_path = container_dir.join(Self::NEW_SERVER_PATH);
@@ -204,12 +208,13 @@ impl LspAdapter for EsLintLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        http: Arc<dyn HttpClient>,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Send + Any>> {
         // At the time of writing the latest vscode-eslint release was released in 2020 and requires
         // special custom LSP protocol extensions be handled to fully initialize. Download the latest
         // prerelease instead to sidestep this issue
-        let release = latest_github_release("microsoft/vscode-eslint", true, http).await?;
+        let release =
+            latest_github_release("microsoft/vscode-eslint", true, delegate.http_client()).await?;
         Ok(Box::new(GitHubLspBinaryVersion {
             name: release.name,
             url: release.tarball_url,
@@ -219,8 +224,8 @@ impl LspAdapter for EsLintLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        http: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        delegate: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
         let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name));
@@ -229,7 +234,8 @@ impl LspAdapter for EsLintLspAdapter {
         if fs::metadata(&server_path).await.is_err() {
             remove_matching(&container_dir, |entry| entry != destination_path).await;

-            let mut response = http
+            let mut response = delegate
+                .http_client()
                 .get(&version.url, Default::default(), true)
                 .await
                 .map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -257,7 +263,11 @@ impl LspAdapter for EsLintLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         (|| async move {
             // This is unfortunate but we don't know what the version is to build a path directly
             let mut dir = fs::read_dir(&container_dir).await?;

View file

@@ -2,7 +2,9 @@ use anyhow::{anyhow, Result};
 use async_trait::async_trait;
 use futures::{future::BoxFuture, FutureExt, StreamExt};
 use gpui::AppContext;
-use language::{language_settings::all_language_settings, LanguageServerName, LspAdapter};
+use language::{
+    language_settings::all_language_settings, LanguageServerName, LspAdapter, LspAdapterDelegate,
+};
 use lsp::LanguageServerBinary;
 use node_runtime::NodeRuntime;
 use serde_json::Value;
@@ -14,7 +16,6 @@ use std::{
     path::{Path, PathBuf},
     sync::Arc,
 };
-use util::http::HttpClient;
 use util::ResultExt;

 fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -41,7 +42,7 @@ impl LspAdapter for YamlLspAdapter {
     async fn fetch_latest_server_version(
         &self,
-        _: Arc<dyn HttpClient>,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<Box<dyn 'static + Any + Send>> {
         Ok(Box::new(
             self.node
@@ -53,8 +54,8 @@ impl LspAdapter for YamlLspAdapter {
     async fn fetch_server_binary(
         &self,
         version: Box<dyn 'static + Send + Any>,
-        _: Arc<dyn HttpClient>,
         container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
         let version = version.downcast::<String>().unwrap();
         let server_path = container_dir.join(Self::SERVER_PATH);
@@ -71,7 +72,11 @@ impl LspAdapter for YamlLspAdapter {
         })
     }

-    async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+    async fn cached_server_binary(
+        &self,
+        container_dir: PathBuf,
+        _: &dyn LspAdapterDelegate,
+    ) -> Option<LanguageServerBinary> {
         (|| async move {
             let mut last_version_dir = None;
             let mut entries = fs::read_dir(&container_dir).await?;

View file

@@ -31,7 +31,6 @@ use std::{
     ffi::OsStr,
     fs::OpenOptions,
     io::Write as _,
-    ops::Not,
     os::unix::prelude::OsStrExt,
     panic,
     path::{Path, PathBuf},
@@ -373,7 +372,6 @@ struct Panic {
     os_version: Option<String>,
     architecture: String,
     panicked_on: u128,
-    identifying_backtrace: Option<Vec<String>>,
 }

 #[derive(Serialize)]
@@ -401,61 +399,18 @@ fn init_panic_hook(app: &App) {
             .unwrap_or_else(|| "Box<Any>".to_string());

         let backtrace = Backtrace::new();
-        let backtrace = backtrace
+        let mut backtrace = backtrace
             .frames()
             .iter()
-            .filter_map(|frame| {
-                let symbol = frame.symbols().first()?;
-                let path = symbol.filename()?;
-                Some((path, symbol.lineno(), format!("{:#}", symbol.name()?)))
-            })
+            .filter_map(|frame| Some(format!("{:#}", frame.symbols().first()?.name()?)))
             .collect::<Vec<_>>();

-        let this_file_path = Path::new(file!());
-
-        // Find the first frame in the backtrace for this panic hook itself. Exclude
-        // that frame and all frames before it.
-        let mut start_frame_ix = 0;
-        let mut codebase_root_path = None;
-        for (ix, (path, _, _)) in backtrace.iter().enumerate() {
-            if path.ends_with(this_file_path) {
-                start_frame_ix = ix + 1;
-                codebase_root_path = path.ancestors().nth(this_file_path.components().count());
-                break;
-            }
-        }
-
-        // Exclude any subsequent frames inside of rust's panic handling system.
-        while let Some((path, _, _)) = backtrace.get(start_frame_ix) {
-            if path.starts_with("/rustc") {
-                start_frame_ix += 1;
-            } else {
-                break;
-            }
-        }
-
-        // Build two backtraces:
-        // * one for display, which includes symbol names for all frames, and files
-        //   and line numbers for symbols in this codebase
-        // * one for identification and de-duplication, which only includes symbol
-        //   names for symbols in this codebase.
-        let mut display_backtrace = Vec::new();
-        let mut identifying_backtrace = Vec::new();
-        for (path, line, symbol) in &backtrace[start_frame_ix..] {
-            display_backtrace.push(symbol.clone());
-            if let Some(codebase_root_path) = &codebase_root_path {
-                if let Ok(suffix) = path.strip_prefix(&codebase_root_path) {
-                    identifying_backtrace.push(symbol.clone());
-                    let display_path = suffix.to_string_lossy();
-                    if let Some(line) = line {
-                        display_backtrace.push(format!(" {display_path}:{line}"));
-                    } else {
-                        display_backtrace.push(format!(" {display_path}"));
-                    }
-                }
-            }
-        }
+        // Strip out leading stack frames for rust panic-handling.
+        if let Some(ix) = backtrace
+            .iter()
+            .position(|name| name == "rust_begin_unwind")
+        {
+            backtrace.drain(0..=ix);
+        }

         let panic_data = Panic {
@@ -477,29 +432,27 @@ fn init_panic_hook(app: &App) {
                 .duration_since(UNIX_EPOCH)
                 .unwrap()
                 .as_millis(),
-            backtrace: display_backtrace,
-            identifying_backtrace: identifying_backtrace
-                .is_empty()
-                .not()
-                .then_some(identifying_backtrace),
+            backtrace,
         };

-        if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
-            if is_pty {
+        if is_pty {
+            if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
                 eprintln!("{}", panic_data_json);
                 return;
             }
-
-            let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
-            let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
-            let panic_file = std::fs::OpenOptions::new()
-                .append(true)
-                .create(true)
-                .open(&panic_file_path)
-                .log_err();
-            if let Some(mut panic_file) = panic_file {
-                write!(&mut panic_file, "{}", panic_data_json).log_err();
-                panic_file.flush().log_err();
+        } else {
+            if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
+                let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
+                let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
+                let panic_file = std::fs::OpenOptions::new()
+                    .append(true)
+                    .create(true)
+                    .open(&panic_file_path)
+                    .log_err();
+                if let Some(mut panic_file) = panic_file {
+                    writeln!(&mut panic_file, "{}", panic_data_json).log_err();
+                    panic_file.flush().log_err();
+                }
             }
         }
     }));
@@ -531,23 +484,45 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
             }

             if telemetry_settings.diagnostics {
-                let panic_data_text = smol::fs::read_to_string(&child_path)
+                let panic_file_content = smol::fs::read_to_string(&child_path)
                     .await
                     .context("error reading panic file")?;
-                let body = serde_json::to_string(&PanicRequest {
-                    panic: serde_json::from_str(&panic_data_text)?,
-                    token: ZED_SECRET_CLIENT_TOKEN.into(),
-                })
-                .unwrap();
+                let panic = serde_json::from_str(&panic_file_content)
+                    .ok()
+                    .or_else(|| {
+                        panic_file_content
+                            .lines()
+                            .next()
+                            .and_then(|line| serde_json::from_str(line).ok())
+                    })
+                    .unwrap_or_else(|| {
+                        log::error!(
+                            "failed to deserialize panic file {:?}",
+                            panic_file_content
+                        );
+                        None
+                    });

-                let request = Request::post(&panic_report_url)
-                    .redirect_policy(isahc::config::RedirectPolicy::Follow)
-                    .header("Content-Type", "application/json")
-                    .body(body.into())?;
-                let response = http.send(request).await.context("error sending panic")?;
-                if !response.status().is_success() {
-                    log::error!("Error uploading panic to server: {}", response.status());
+                if let Some(panic) = panic {
+                    let body = serde_json::to_string(&PanicRequest {
+                        panic,
+                        token: ZED_SECRET_CLIENT_TOKEN.into(),
+                    })
+                    .unwrap();
+
+                    let request = Request::post(&panic_report_url)
+                        .redirect_policy(isahc::config::RedirectPolicy::Follow)
+                        .header("Content-Type", "application/json")
+                        .body(body.into())?;
+                    let response =
+                        http.send(request).await.context("error sending panic")?;
+                    if !response.status().is_success() {
+                        log::error!(
+                            "Error uploading panic to server: {}",
+                            response.status()
+                        );
+                    }
                 }
             }
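
The reason for the two-stage parse in the hunk above: the panic file is opened in append mode, older builds wrote one pretty-printed JSON document into it, and this change switches to one compact JSON object per line via `writeln!`, so the uploader first tries the whole file and then falls back to the first line. A small self-contained illustration of that fallback; the function name and generic signature are hypothetical, not part of the diff:

    // Try the whole file first (legacy pretty-printed format), then the first
    // line (newline-delimited format written by `writeln!`).
    fn parse_panic_report<T: serde::de::DeserializeOwned>(contents: &str) -> Option<T> {
        serde_json::from_str(contents).ok().or_else(|| {
            contents
                .lines()
                .next()
                .and_then(|line| serde_json::from_str(line).ok())
        })
    }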

View file

@@ -384,6 +384,8 @@ pub fn initialize_workspace(
             workspace.toggle_dock(project_panel_position, cx);
         }

+        cx.focus_self();
+
         workspace.add_panel(terminal_panel, cx);
         if let Some(assistant_panel) = assistant_panel {
             workspace.add_panel(assistant_panel, cx);