Merge remote-tracking branch 'origin/main' into save-conversations
This commit is contained in:
commit
c83a918a7e
63 changed files with 2035 additions and 559 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -18,4 +18,5 @@ DerivedData/
|
||||||
.swiftpm/config/registries.json
|
.swiftpm/config/registries.json
|
||||||
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
|
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
|
||||||
.netrc
|
.netrc
|
||||||
|
.swiftpm
|
||||||
**/*.db
|
**/*.db
|
||||||
|
|
44
Cargo.lock
generated
44
Cargo.lock
generated
|
@ -114,6 +114,7 @@ dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
"settings",
|
"settings",
|
||||||
|
"smol",
|
||||||
"theme",
|
"theme",
|
||||||
"tiktoken-rs",
|
"tiktoken-rs",
|
||||||
"util",
|
"util",
|
||||||
|
@ -593,7 +594,7 @@ dependencies = [
|
||||||
"http",
|
"http",
|
||||||
"http-body",
|
"http-body",
|
||||||
"hyper",
|
"hyper",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"matchit",
|
"matchit",
|
||||||
"memchr",
|
"memchr",
|
||||||
"mime",
|
"mime",
|
||||||
|
@ -3011,7 +3012,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bytes 1.4.0",
|
"bytes 1.4.0",
|
||||||
"fnv",
|
"fnv",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -3070,7 +3071,7 @@ dependencies = [
|
||||||
"http-body",
|
"http-body",
|
||||||
"httparse",
|
"httparse",
|
||||||
"httpdate",
|
"httpdate",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"pin-project-lite 0.2.9",
|
"pin-project-lite 0.2.9",
|
||||||
"socket2",
|
"socket2",
|
||||||
"tokio",
|
"tokio",
|
||||||
|
@ -3336,6 +3337,12 @@ dependencies = [
|
||||||
"either",
|
"either",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "itoa"
|
||||||
|
version = "0.4.8"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "itoa"
|
name = "itoa"
|
||||||
version = "1.0.6"
|
version = "1.0.6"
|
||||||
|
@ -3396,12 +3403,6 @@ dependencies = [
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "json_comments"
|
|
||||||
version = "0.2.1"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "jwt"
|
name = "jwt"
|
||||||
version = "0.16.0"
|
version = "0.16.0"
|
||||||
|
@ -5667,7 +5668,7 @@ dependencies = [
|
||||||
"bitflags",
|
"bitflags",
|
||||||
"errno 0.2.8",
|
"errno 0.2.8",
|
||||||
"io-lifetimes 0.5.3",
|
"io-lifetimes 0.5.3",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"libc",
|
"libc",
|
||||||
"linux-raw-sys 0.0.42",
|
"linux-raw-sys 0.0.42",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
|
@ -6099,7 +6100,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
|
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"indexmap",
|
"indexmap",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
|
"ryu",
|
||||||
|
"serde",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "serde_json_lenient"
|
||||||
|
version = "0.1.4"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "7d7b9ce5b0a63c6269b9623ed828b39259545a6ec0d8a35d6135ad6af6232add"
|
||||||
|
dependencies = [
|
||||||
|
"indexmap",
|
||||||
|
"itoa 0.4.8",
|
||||||
"ryu",
|
"ryu",
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
@ -6122,7 +6135,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
|
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"form_urlencoded",
|
"form_urlencoded",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"ryu",
|
"ryu",
|
||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
@ -6148,7 +6161,7 @@ dependencies = [
|
||||||
"fs",
|
"fs",
|
||||||
"futures 0.3.28",
|
"futures 0.3.28",
|
||||||
"gpui",
|
"gpui",
|
||||||
"json_comments",
|
"indoc",
|
||||||
"lazy_static",
|
"lazy_static",
|
||||||
"postage",
|
"postage",
|
||||||
"pretty_assertions",
|
"pretty_assertions",
|
||||||
|
@ -6157,6 +6170,7 @@ dependencies = [
|
||||||
"serde",
|
"serde",
|
||||||
"serde_derive",
|
"serde_derive",
|
||||||
"serde_json",
|
"serde_json",
|
||||||
|
"serde_json_lenient",
|
||||||
"smallvec",
|
"smallvec",
|
||||||
"sqlez",
|
"sqlez",
|
||||||
"staff_mode",
|
"staff_mode",
|
||||||
|
@ -6507,7 +6521,7 @@ dependencies = [
|
||||||
"hkdf",
|
"hkdf",
|
||||||
"hmac 0.12.1",
|
"hmac 0.12.1",
|
||||||
"indexmap",
|
"indexmap",
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"libc",
|
"libc",
|
||||||
"libsqlite3-sys",
|
"libsqlite3-sys",
|
||||||
"log",
|
"log",
|
||||||
|
@ -6993,7 +7007,7 @@ version = "0.3.21"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc"
|
checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"itoa",
|
"itoa 1.0.6",
|
||||||
"serde",
|
"serde",
|
||||||
"time-core",
|
"time-core",
|
||||||
"time-macros",
|
"time-macros",
|
||||||
|
|
|
@ -55,7 +55,40 @@
|
||||||
"context": "Pane",
|
"context": "Pane",
|
||||||
"bindings": {
|
"bindings": {
|
||||||
"alt-cmd-/": "search::ToggleRegex",
|
"alt-cmd-/": "search::ToggleRegex",
|
||||||
"ctrl-0": "project_panel::ToggleFocus"
|
"ctrl-0": "project_panel::ToggleFocus",
|
||||||
|
"cmd-1": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
0
|
||||||
|
],
|
||||||
|
"cmd-2": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
1
|
||||||
|
],
|
||||||
|
"cmd-3": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
2
|
||||||
|
],
|
||||||
|
"cmd-4": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"cmd-5": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
4
|
||||||
|
],
|
||||||
|
"cmd-6": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
5
|
||||||
|
],
|
||||||
|
"cmd-7": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
6
|
||||||
|
],
|
||||||
|
"cmd-8": [
|
||||||
|
"pane::ActivateItem",
|
||||||
|
7
|
||||||
|
],
|
||||||
|
"cmd-9": "pane::ActivateLastItem"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|
|
@ -28,6 +28,7 @@ isahc.workspace = true
|
||||||
schemars.workspace = true
|
schemars.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
serde_json.workspace = true
|
serde_json.workspace = true
|
||||||
|
smol.workspace = true
|
||||||
tiktoken-rs = "0.4"
|
tiktoken-rs = "0.4"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
|
|
|
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
|
||||||
use std::fmt::{self, Display};
|
use std::fmt::{self, Display};
|
||||||
|
|
||||||
// Data types for chat completion requests
|
// Data types for chat completion requests
|
||||||
#[derive(Serialize)]
|
#[derive(Debug, Serialize)]
|
||||||
struct OpenAIRequest {
|
struct OpenAIRequest {
|
||||||
model: String,
|
model: String,
|
||||||
messages: Vec<RequestMessage>,
|
messages: Vec<RequestMessage>,
|
||||||
|
|
|
@ -8,7 +8,7 @@ use collections::{HashMap, HashSet};
|
||||||
use editor::{
|
use editor::{
|
||||||
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint},
|
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint},
|
||||||
scroll::autoscroll::{Autoscroll, AutoscrollStrategy},
|
scroll::autoscroll::{Autoscroll, AutoscrollStrategy},
|
||||||
Anchor, Editor,
|
Anchor, Editor, ToOffset,
|
||||||
};
|
};
|
||||||
use fs::Fs;
|
use fs::Fs;
|
||||||
use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||||
|
@ -483,7 +483,7 @@ impl Assistant {
|
||||||
language_registry: Arc<LanguageRegistry>,
|
language_registry: Arc<LanguageRegistry>,
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let model = "gpt-3.5-turbo";
|
let model = "gpt-3.5-turbo-0613";
|
||||||
let markdown = language_registry.language_for_name("Markdown");
|
let markdown = language_registry.language_for_name("Markdown");
|
||||||
let buffer = cx.add_model(|cx| {
|
let buffer = cx.add_model(|cx| {
|
||||||
let mut buffer = Buffer::new(0, "", cx);
|
let mut buffer = Buffer::new(0, "", cx);
|
||||||
|
@ -528,7 +528,7 @@ impl Assistant {
|
||||||
MessageMetadata {
|
MessageMetadata {
|
||||||
role: Role::User,
|
role: Role::User,
|
||||||
sent_at: Local::now(),
|
sent_at: Local::now(),
|
||||||
error: None,
|
status: MessageStatus::Done,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -553,7 +553,7 @@ impl Assistant {
|
||||||
|
|
||||||
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
|
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
|
||||||
let messages = self
|
let messages = self
|
||||||
.open_ai_request_messages(cx)
|
.messages(cx)
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|message| {
|
.filter_map(|message| {
|
||||||
Some(tiktoken_rs::ChatCompletionRequestMessage {
|
Some(tiktoken_rs::ChatCompletionRequestMessage {
|
||||||
|
@ -562,7 +562,7 @@ impl Assistant {
|
||||||
Role::Assistant => "assistant".into(),
|
Role::Assistant => "assistant".into(),
|
||||||
Role::System => "system".into(),
|
Role::System => "system".into(),
|
||||||
},
|
},
|
||||||
content: message.content,
|
content: self.buffer.read(cx).text_for_range(message.range).collect(),
|
||||||
name: None,
|
name: None,
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
@ -599,97 +599,169 @@ impl Assistant {
|
||||||
cx.notify();
|
cx.notify();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<(MessageAnchor, MessageAnchor)> {
|
fn assist(
|
||||||
let request = OpenAIRequest {
|
&mut self,
|
||||||
model: self.model.clone(),
|
selected_messages: HashSet<MessageId>,
|
||||||
messages: self.open_ai_request_messages(cx),
|
cx: &mut ModelContext<Self>,
|
||||||
stream: true,
|
) -> Vec<MessageAnchor> {
|
||||||
};
|
let mut user_messages = Vec::new();
|
||||||
|
let mut tasks = Vec::new();
|
||||||
|
for selected_message_id in selected_messages {
|
||||||
|
let selected_message_role =
|
||||||
|
if let Some(metadata) = self.messages_metadata.get(&selected_message_id) {
|
||||||
|
metadata.role
|
||||||
|
} else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
let api_key = self.api_key.borrow().clone()?;
|
if selected_message_role == Role::Assistant {
|
||||||
let stream = stream_completion(api_key, cx.background().clone(), request);
|
if let Some(user_message) = self.insert_message_after(
|
||||||
let assistant_message =
|
selected_message_id,
|
||||||
self.insert_message_after(self.message_anchors.last()?.id, Role::Assistant, cx)?;
|
Role::User,
|
||||||
let user_message = self.insert_message_after(assistant_message.id, Role::User, cx)?;
|
MessageStatus::Done,
|
||||||
let task = cx.spawn_weak({
|
cx,
|
||||||
|this, mut cx| async move {
|
) {
|
||||||
let assistant_message_id = assistant_message.id;
|
user_messages.push(user_message);
|
||||||
let stream_completion = async {
|
} else {
|
||||||
let mut messages = stream.await?;
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let request = OpenAIRequest {
|
||||||
|
model: self.model.clone(),
|
||||||
|
messages: self
|
||||||
|
.messages(cx)
|
||||||
|
.filter(|message| matches!(message.status, MessageStatus::Done))
|
||||||
|
.flat_map(|message| {
|
||||||
|
let mut system_message = None;
|
||||||
|
if message.id == selected_message_id {
|
||||||
|
system_message = Some(RequestMessage {
|
||||||
|
role: Role::System,
|
||||||
|
content: concat!(
|
||||||
|
"Treat the following messages as additional knowledge you have learned about, ",
|
||||||
|
"but act as if they were not part of this conversation. That is, treat them ",
|
||||||
|
"as if the user didn't see them and couldn't possibly inquire about them."
|
||||||
|
).into()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(message.to_open_ai_message(self.buffer.read(cx))).into_iter().chain(system_message)
|
||||||
|
})
|
||||||
|
.chain(Some(RequestMessage {
|
||||||
|
role: Role::System,
|
||||||
|
content: format!(
|
||||||
|
"Direct your reply to message with id {}. Do not include a [Message X] header.",
|
||||||
|
selected_message_id.0
|
||||||
|
),
|
||||||
|
}))
|
||||||
|
.collect(),
|
||||||
|
stream: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
let Some(api_key) = self.api_key.borrow().clone() else { continue };
|
||||||
|
let stream = stream_completion(api_key, cx.background().clone(), request);
|
||||||
|
let assistant_message = self
|
||||||
|
.insert_message_after(
|
||||||
|
selected_message_id,
|
||||||
|
Role::Assistant,
|
||||||
|
MessageStatus::Pending,
|
||||||
|
cx,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
tasks.push(cx.spawn_weak({
|
||||||
|
|this, mut cx| async move {
|
||||||
|
let assistant_message_id = assistant_message.id;
|
||||||
|
let stream_completion = async {
|
||||||
|
let mut messages = stream.await?;
|
||||||
|
|
||||||
|
while let Some(message) = messages.next().await {
|
||||||
|
let mut message = message?;
|
||||||
|
if let Some(choice) = message.choices.pop() {
|
||||||
|
this.upgrade(&cx)
|
||||||
|
.ok_or_else(|| anyhow!("assistant was dropped"))?
|
||||||
|
.update(&mut cx, |this, cx| {
|
||||||
|
let text: Arc<str> = choice.delta.content?.into();
|
||||||
|
let message_ix = this.message_anchors.iter().position(
|
||||||
|
|message| message.id == assistant_message_id,
|
||||||
|
)?;
|
||||||
|
this.buffer.update(cx, |buffer, cx| {
|
||||||
|
let offset = this.message_anchors[message_ix + 1..]
|
||||||
|
.iter()
|
||||||
|
.find(|message| message.start.is_valid(buffer))
|
||||||
|
.map_or(buffer.len(), |message| {
|
||||||
|
message
|
||||||
|
.start
|
||||||
|
.to_offset(buffer)
|
||||||
|
.saturating_sub(1)
|
||||||
|
});
|
||||||
|
buffer.edit([(offset..offset, text)], None, cx);
|
||||||
|
});
|
||||||
|
cx.emit(AssistantEvent::StreamedCompletion);
|
||||||
|
|
||||||
|
Some(())
|
||||||
|
});
|
||||||
|
}
|
||||||
|
smol::future::yield_now().await;
|
||||||
|
}
|
||||||
|
|
||||||
while let Some(message) = messages.next().await {
|
|
||||||
let mut message = message?;
|
|
||||||
if let Some(choice) = message.choices.pop() {
|
|
||||||
this.upgrade(&cx)
|
this.upgrade(&cx)
|
||||||
.ok_or_else(|| anyhow!("assistant was dropped"))?
|
.ok_or_else(|| anyhow!("assistant was dropped"))?
|
||||||
.update(&mut cx, |this, cx| {
|
.update(&mut cx, |this, cx| {
|
||||||
let text: Arc<str> = choice.delta.content?.into();
|
this.pending_completions.retain(|completion| {
|
||||||
let message_ix = this
|
completion.id != this.completion_count
|
||||||
.message_anchors
|
|
||||||
.iter()
|
|
||||||
.position(|message| message.id == assistant_message_id)?;
|
|
||||||
this.buffer.update(cx, |buffer, cx| {
|
|
||||||
let offset = if message_ix + 1 == this.message_anchors.len()
|
|
||||||
{
|
|
||||||
buffer.len()
|
|
||||||
} else {
|
|
||||||
this.message_anchors[message_ix + 1]
|
|
||||||
.start
|
|
||||||
.to_offset(buffer)
|
|
||||||
.saturating_sub(1)
|
|
||||||
};
|
|
||||||
buffer.edit([(offset..offset, text)], None, cx);
|
|
||||||
});
|
});
|
||||||
cx.emit(AssistantEvent::StreamedCompletion);
|
this.summarize(cx);
|
||||||
|
|
||||||
Some(())
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
anyhow::Ok(())
|
||||||
|
};
|
||||||
|
|
||||||
|
let result = stream_completion.await;
|
||||||
|
if let Some(this) = this.upgrade(&cx) {
|
||||||
|
this.update(&mut cx, |this, cx| {
|
||||||
|
if let Some(metadata) =
|
||||||
|
this.messages_metadata.get_mut(&assistant_message.id)
|
||||||
|
{
|
||||||
|
match result {
|
||||||
|
Ok(_) => {
|
||||||
|
metadata.status = MessageStatus::Done;
|
||||||
|
}
|
||||||
|
Err(error) => {
|
||||||
|
metadata.status = MessageStatus::Error(
|
||||||
|
error.to_string().trim().into(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cx.notify();
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}));
|
||||||
this.upgrade(&cx)
|
|
||||||
.ok_or_else(|| anyhow!("assistant was dropped"))?
|
|
||||||
.update(&mut cx, |this, cx| {
|
|
||||||
this.pending_completions
|
|
||||||
.retain(|completion| completion.id != this.completion_count);
|
|
||||||
this.summarize(cx);
|
|
||||||
});
|
|
||||||
|
|
||||||
anyhow::Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = stream_completion.await;
|
|
||||||
if let Some(this) = this.upgrade(&cx) {
|
|
||||||
this.update(&mut cx, |this, cx| {
|
|
||||||
if let Err(error) = result {
|
|
||||||
if let Some(metadata) =
|
|
||||||
this.messages_metadata.get_mut(&assistant_message.id)
|
|
||||||
{
|
|
||||||
metadata.error = Some(error.to_string().trim().into());
|
|
||||||
cx.notify();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
|
||||||
self.pending_completions.push(PendingCompletion {
|
if !tasks.is_empty() {
|
||||||
id: post_inc(&mut self.completion_count),
|
self.pending_completions.push(PendingCompletion {
|
||||||
_task: task,
|
id: post_inc(&mut self.completion_count),
|
||||||
});
|
_tasks: tasks,
|
||||||
Some((assistant_message, user_message))
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
user_messages
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cancel_last_assist(&mut self) -> bool {
|
fn cancel_last_assist(&mut self) -> bool {
|
||||||
self.pending_completions.pop().is_some()
|
self.pending_completions.pop().is_some()
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cycle_message_role(&mut self, id: MessageId, cx: &mut ModelContext<Self>) {
|
fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
|
||||||
if let Some(metadata) = self.messages_metadata.get_mut(&id) {
|
for id in ids {
|
||||||
metadata.role.cycle();
|
if let Some(metadata) = self.messages_metadata.get_mut(&id) {
|
||||||
cx.emit(AssistantEvent::MessagesEdited);
|
metadata.role.cycle();
|
||||||
cx.notify();
|
cx.emit(AssistantEvent::MessagesEdited);
|
||||||
|
cx.notify();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -697,6 +769,7 @@ impl Assistant {
|
||||||
&mut self,
|
&mut self,
|
||||||
message_id: MessageId,
|
message_id: MessageId,
|
||||||
role: Role,
|
role: Role,
|
||||||
|
status: MessageStatus,
|
||||||
cx: &mut ModelContext<Self>,
|
cx: &mut ModelContext<Self>,
|
||||||
) -> Option<MessageAnchor> {
|
) -> Option<MessageAnchor> {
|
||||||
if let Some(prev_message_ix) = self
|
if let Some(prev_message_ix) = self
|
||||||
|
@ -723,7 +796,7 @@ impl Assistant {
|
||||||
MessageMetadata {
|
MessageMetadata {
|
||||||
role,
|
role,
|
||||||
sent_at: Local::now(),
|
sent_at: Local::now(),
|
||||||
error: None,
|
status,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
cx.emit(AssistantEvent::MessagesEdited);
|
cx.emit(AssistantEvent::MessagesEdited);
|
||||||
|
@ -782,7 +855,7 @@ impl Assistant {
|
||||||
MessageMetadata {
|
MessageMetadata {
|
||||||
role,
|
role,
|
||||||
sent_at: Local::now(),
|
sent_at: Local::now(),
|
||||||
error: None,
|
status: MessageStatus::Done,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -824,7 +897,7 @@ impl Assistant {
|
||||||
MessageMetadata {
|
MessageMetadata {
|
||||||
role,
|
role,
|
||||||
sent_at: Local::now(),
|
sent_at: Local::now(),
|
||||||
error: None,
|
status: MessageStatus::Done,
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
(Some(selection), Some(suffix))
|
(Some(selection), Some(suffix))
|
||||||
|
@ -843,16 +916,19 @@ impl Assistant {
|
||||||
if self.message_anchors.len() >= 2 && self.summary.is_none() {
|
if self.message_anchors.len() >= 2 && self.summary.is_none() {
|
||||||
let api_key = self.api_key.borrow().clone();
|
let api_key = self.api_key.borrow().clone();
|
||||||
if let Some(api_key) = api_key {
|
if let Some(api_key) = api_key {
|
||||||
let mut messages = self.open_ai_request_messages(cx);
|
let messages = self
|
||||||
messages.truncate(2);
|
.messages(cx)
|
||||||
messages.push(RequestMessage {
|
.take(2)
|
||||||
role: Role::User,
|
.map(|message| message.to_open_ai_message(self.buffer.read(cx)))
|
||||||
content: "Summarize the conversation into a short title without punctuation"
|
.chain(Some(RequestMessage {
|
||||||
.into(),
|
role: Role::User,
|
||||||
});
|
content:
|
||||||
|
"Summarize the conversation into a short title without punctuation"
|
||||||
|
.into(),
|
||||||
|
}));
|
||||||
let request = OpenAIRequest {
|
let request = OpenAIRequest {
|
||||||
model: self.model.clone(),
|
model: self.model.clone(),
|
||||||
messages,
|
messages: messages.collect(),
|
||||||
stream: true,
|
stream: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -880,24 +956,39 @@ impl Assistant {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn open_ai_request_messages(&self, cx: &AppContext) -> Vec<RequestMessage> {
|
fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
|
||||||
let buffer = self.buffer.read(cx);
|
self.messages_for_offsets([offset], cx).pop()
|
||||||
self.messages(cx)
|
|
||||||
.map(|message| RequestMessage {
|
|
||||||
role: message.role,
|
|
||||||
content: buffer.text_for_range(message.range).collect(),
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn message_for_offset<'a>(&'a self, offset: usize, cx: &'a AppContext) -> Option<Message> {
|
fn messages_for_offsets(
|
||||||
|
&self,
|
||||||
|
offsets: impl IntoIterator<Item = usize>,
|
||||||
|
cx: &AppContext,
|
||||||
|
) -> Vec<Message> {
|
||||||
|
let mut result = Vec::new();
|
||||||
|
|
||||||
|
let buffer_len = self.buffer.read(cx).len();
|
||||||
let mut messages = self.messages(cx).peekable();
|
let mut messages = self.messages(cx).peekable();
|
||||||
while let Some(message) = messages.next() {
|
let mut offsets = offsets.into_iter().peekable();
|
||||||
if message.range.contains(&offset) || messages.peek().is_none() {
|
while let Some(offset) = offsets.next() {
|
||||||
return Some(message);
|
// Skip messages that start after the offset.
|
||||||
|
while messages.peek().map_or(false, |message| {
|
||||||
|
message.range.end < offset || (message.range.end == offset && offset < buffer_len)
|
||||||
|
}) {
|
||||||
|
messages.next();
|
||||||
}
|
}
|
||||||
|
let Some(message) = messages.peek() else { continue };
|
||||||
|
|
||||||
|
// Skip offsets that are in the same message.
|
||||||
|
while offsets.peek().map_or(false, |offset| {
|
||||||
|
message.range.contains(offset) || message.range.end == buffer_len
|
||||||
|
}) {
|
||||||
|
offsets.next();
|
||||||
|
}
|
||||||
|
|
||||||
|
result.push(message.clone());
|
||||||
}
|
}
|
||||||
None
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
|
fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
|
||||||
|
@ -926,7 +1017,7 @@ impl Assistant {
|
||||||
anchor: message_anchor.start,
|
anchor: message_anchor.start,
|
||||||
role: metadata.role,
|
role: metadata.role,
|
||||||
sent_at: metadata.sent_at,
|
sent_at: metadata.sent_at,
|
||||||
error: metadata.error.clone(),
|
status: metadata.status.clone(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
None
|
None
|
||||||
|
@ -963,7 +1054,7 @@ impl Assistant {
|
||||||
|
|
||||||
struct PendingCompletion {
|
struct PendingCompletion {
|
||||||
id: usize,
|
id: usize,
|
||||||
_task: Task<()>,
|
_tasks: Vec<Task<()>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
enum AssistantEditorEvent {
|
enum AssistantEditorEvent {
|
||||||
|
@ -1019,20 +1110,31 @@ impl AssistantEditor {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
|
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
|
||||||
let user_message = self.assistant.update(cx, |assistant, cx| {
|
let cursors = self.cursors(cx);
|
||||||
let (_, user_message) = assistant.assist(cx)?;
|
|
||||||
Some(user_message)
|
|
||||||
});
|
|
||||||
|
|
||||||
if let Some(user_message) = user_message {
|
let user_messages = self.assistant.update(cx, |assistant, cx| {
|
||||||
let cursor = user_message
|
let selected_messages = assistant
|
||||||
.start
|
.messages_for_offsets(cursors, cx)
|
||||||
.to_offset(&self.assistant.read(cx).buffer.read(cx));
|
.into_iter()
|
||||||
|
.map(|message| message.id)
|
||||||
|
.collect();
|
||||||
|
assistant.assist(selected_messages, cx)
|
||||||
|
});
|
||||||
|
let new_selections = user_messages
|
||||||
|
.iter()
|
||||||
|
.map(|message| {
|
||||||
|
let cursor = message
|
||||||
|
.start
|
||||||
|
.to_offset(self.assistant.read(cx).buffer.read(cx));
|
||||||
|
cursor..cursor
|
||||||
|
})
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
if !new_selections.is_empty() {
|
||||||
self.editor.update(cx, |editor, cx| {
|
self.editor.update(cx, |editor, cx| {
|
||||||
editor.change_selections(
|
editor.change_selections(
|
||||||
Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
|
Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
|
||||||
cx,
|
cx,
|
||||||
|selections| selections.select_ranges([cursor..cursor]),
|
|selections| selections.select_ranges(new_selections),
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -1048,14 +1150,25 @@ impl AssistantEditor {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext<Self>) {
|
fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext<Self>) {
|
||||||
let cursor_offset = self.editor.read(cx).selections.newest(cx).head();
|
let cursors = self.cursors(cx);
|
||||||
self.assistant.update(cx, |assistant, cx| {
|
self.assistant.update(cx, |assistant, cx| {
|
||||||
if let Some(message) = assistant.message_for_offset(cursor_offset, cx) {
|
let messages = assistant
|
||||||
assistant.cycle_message_role(message.id, cx);
|
.messages_for_offsets(cursors, cx)
|
||||||
}
|
.into_iter()
|
||||||
|
.map(|message| message.id)
|
||||||
|
.collect();
|
||||||
|
assistant.cycle_message_roles(messages, cx)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn cursors(&self, cx: &AppContext) -> Vec<usize> {
|
||||||
|
let selections = self.editor.read(cx).selections.all::<usize>(cx);
|
||||||
|
selections
|
||||||
|
.into_iter()
|
||||||
|
.map(|selection| selection.head())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
fn handle_assistant_event(
|
fn handle_assistant_event(
|
||||||
&mut self,
|
&mut self,
|
||||||
_: ModelHandle<Assistant>,
|
_: ModelHandle<Assistant>,
|
||||||
|
@ -1184,7 +1297,10 @@ impl AssistantEditor {
|
||||||
let assistant = assistant.clone();
|
let assistant = assistant.clone();
|
||||||
move |_, _, cx| {
|
move |_, _, cx| {
|
||||||
assistant.update(cx, |assistant, cx| {
|
assistant.update(cx, |assistant, cx| {
|
||||||
assistant.cycle_message_role(message_id, cx)
|
assistant.cycle_message_roles(
|
||||||
|
HashSet::from_iter(Some(message_id)),
|
||||||
|
cx,
|
||||||
|
)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -1200,22 +1316,28 @@ impl AssistantEditor {
|
||||||
.with_style(style.sent_at.container)
|
.with_style(style.sent_at.container)
|
||||||
.aligned(),
|
.aligned(),
|
||||||
)
|
)
|
||||||
.with_children(message.error.as_ref().map(|error| {
|
.with_children(
|
||||||
Svg::new("icons/circle_x_mark_12.svg")
|
if let MessageStatus::Error(error) = &message.status {
|
||||||
.with_color(style.error_icon.color)
|
Some(
|
||||||
.constrained()
|
Svg::new("icons/circle_x_mark_12.svg")
|
||||||
.with_width(style.error_icon.width)
|
.with_color(style.error_icon.color)
|
||||||
.contained()
|
.constrained()
|
||||||
.with_style(style.error_icon.container)
|
.with_width(style.error_icon.width)
|
||||||
.with_tooltip::<ErrorTooltip>(
|
.contained()
|
||||||
message_id.0,
|
.with_style(style.error_icon.container)
|
||||||
error.to_string(),
|
.with_tooltip::<ErrorTooltip>(
|
||||||
None,
|
message_id.0,
|
||||||
theme.tooltip.clone(),
|
error.to_string(),
|
||||||
cx,
|
None,
|
||||||
|
theme.tooltip.clone(),
|
||||||
|
cx,
|
||||||
|
)
|
||||||
|
.aligned(),
|
||||||
)
|
)
|
||||||
.aligned()
|
} else {
|
||||||
}))
|
None
|
||||||
|
},
|
||||||
|
)
|
||||||
.aligned()
|
.aligned()
|
||||||
.left()
|
.left()
|
||||||
.contained()
|
.contained()
|
||||||
|
@ -1334,8 +1456,14 @@ impl AssistantEditor {
|
||||||
|
|
||||||
fn split(&mut self, _: &Split, cx: &mut ViewContext<Self>) {
|
fn split(&mut self, _: &Split, cx: &mut ViewContext<Self>) {
|
||||||
self.assistant.update(cx, |assistant, cx| {
|
self.assistant.update(cx, |assistant, cx| {
|
||||||
let range = self.editor.read(cx).selections.newest::<usize>(cx).range();
|
let selections = self.editor.read(cx).selections.disjoint_anchors();
|
||||||
assistant.split_message(range, cx);
|
for selection in selections.into_iter() {
|
||||||
|
let buffer = self.editor.read(cx).buffer().read(cx).snapshot(cx);
|
||||||
|
let range = selection
|
||||||
|
.map(|endpoint| endpoint.to_offset(&buffer))
|
||||||
|
.range();
|
||||||
|
assistant.split_message(range, cx);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1348,8 +1476,8 @@ impl AssistantEditor {
|
||||||
fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
|
fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
|
||||||
self.assistant.update(cx, |assistant, cx| {
|
self.assistant.update(cx, |assistant, cx| {
|
||||||
let new_model = match assistant.model.as_str() {
|
let new_model = match assistant.model.as_str() {
|
||||||
"gpt-4" => "gpt-3.5-turbo",
|
"gpt-4-0613" => "gpt-3.5-turbo-0613",
|
||||||
_ => "gpt-4",
|
_ => "gpt-4-0613",
|
||||||
};
|
};
|
||||||
assistant.set_model(new_model.into(), cx);
|
assistant.set_model(new_model.into(), cx);
|
||||||
});
|
});
|
||||||
|
@ -1463,7 +1591,14 @@ struct MessageAnchor {
|
||||||
struct MessageMetadata {
|
struct MessageMetadata {
|
||||||
role: Role,
|
role: Role,
|
||||||
sent_at: DateTime<Local>,
|
sent_at: DateTime<Local>,
|
||||||
error: Option<Arc<str>>,
|
status: MessageStatus,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
enum MessageStatus {
|
||||||
|
Pending,
|
||||||
|
Done,
|
||||||
|
Error(Arc<str>),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
|
@ -1474,7 +1609,18 @@ pub struct Message {
|
||||||
anchor: language::Anchor,
|
anchor: language::Anchor,
|
||||||
role: Role,
|
role: Role,
|
||||||
sent_at: DateTime<Local>,
|
sent_at: DateTime<Local>,
|
||||||
error: Option<Arc<str>>,
|
status: MessageStatus,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Message {
|
||||||
|
fn to_open_ai_message(&self, buffer: &Buffer) -> RequestMessage {
|
||||||
|
let mut content = format!("[Message {}]\n", self.id.0).to_string();
|
||||||
|
content.extend(buffer.text_for_range(self.range.clone()));
|
||||||
|
RequestMessage {
|
||||||
|
role: self.role,
|
||||||
|
content,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn stream_completion(
|
async fn stream_completion(
|
||||||
|
@ -1582,7 +1728,7 @@ mod tests {
|
||||||
|
|
||||||
let message_2 = assistant.update(cx, |assistant, cx| {
|
let message_2 = assistant.update(cx, |assistant, cx| {
|
||||||
assistant
|
assistant
|
||||||
.insert_message_after(message_1.id, Role::Assistant, cx)
|
.insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -1606,7 +1752,7 @@ mod tests {
|
||||||
|
|
||||||
let message_3 = assistant.update(cx, |assistant, cx| {
|
let message_3 = assistant.update(cx, |assistant, cx| {
|
||||||
assistant
|
assistant
|
||||||
.insert_message_after(message_2.id, Role::User, cx)
|
.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -1620,7 +1766,7 @@ mod tests {
|
||||||
|
|
||||||
let message_4 = assistant.update(cx, |assistant, cx| {
|
let message_4 = assistant.update(cx, |assistant, cx| {
|
||||||
assistant
|
assistant
|
||||||
.insert_message_after(message_2.id, Role::User, cx)
|
.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -1681,7 +1827,7 @@ mod tests {
|
||||||
// Ensure we can still insert after a merged message.
|
// Ensure we can still insert after a merged message.
|
||||||
let message_5 = assistant.update(cx, |assistant, cx| {
|
let message_5 = assistant.update(cx, |assistant, cx| {
|
||||||
assistant
|
assistant
|
||||||
.insert_message_after(message_1.id, Role::System, cx)
|
.insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
});
|
});
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
|
@ -1787,6 +1933,66 @@ mod tests {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[gpui::test]
|
||||||
|
fn test_messages_for_offsets(cx: &mut AppContext) {
|
||||||
|
let registry = Arc::new(LanguageRegistry::test());
|
||||||
|
let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx));
|
||||||
|
let buffer = assistant.read(cx).buffer.clone();
|
||||||
|
|
||||||
|
let message_1 = assistant.read(cx).message_anchors[0].clone();
|
||||||
|
assert_eq!(
|
||||||
|
messages(&assistant, cx),
|
||||||
|
vec![(message_1.id, Role::User, 0..0)]
|
||||||
|
);
|
||||||
|
|
||||||
|
buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx));
|
||||||
|
let message_2 = assistant
|
||||||
|
.update(cx, |assistant, cx| {
|
||||||
|
assistant.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx)
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx));
|
||||||
|
|
||||||
|
let message_3 = assistant
|
||||||
|
.update(cx, |assistant, cx| {
|
||||||
|
assistant.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
|
||||||
|
})
|
||||||
|
.unwrap();
|
||||||
|
buffer.update(cx, |buffer, cx| buffer.edit([(8..8, "ccc")], None, cx));
|
||||||
|
|
||||||
|
assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc");
|
||||||
|
assert_eq!(
|
||||||
|
messages(&assistant, cx),
|
||||||
|
vec![
|
||||||
|
(message_1.id, Role::User, 0..4),
|
||||||
|
(message_2.id, Role::User, 4..8),
|
||||||
|
(message_3.id, Role::User, 8..11)
|
||||||
|
]
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
message_ids_for_offsets(&assistant, &[0, 4, 9], cx),
|
||||||
|
[message_1.id, message_2.id, message_3.id]
|
||||||
|
);
|
||||||
|
assert_eq!(
|
||||||
|
message_ids_for_offsets(&assistant, &[0, 1, 11], cx),
|
||||||
|
[message_1.id, message_3.id]
|
||||||
|
);
|
||||||
|
|
||||||
|
fn message_ids_for_offsets(
|
||||||
|
assistant: &ModelHandle<Assistant>,
|
||||||
|
offsets: &[usize],
|
||||||
|
cx: &AppContext,
|
||||||
|
) -> Vec<MessageId> {
|
||||||
|
assistant
|
||||||
|
.read(cx)
|
||||||
|
.messages_for_offsets(offsets.iter().copied(), cx)
|
||||||
|
.into_iter()
|
||||||
|
.map(|message| message.id)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn messages(
|
fn messages(
|
||||||
assistant: &ModelHandle<Assistant>,
|
assistant: &ModelHandle<Assistant>,
|
||||||
cx: &AppContext,
|
cx: &AppContext,
|
||||||
|
|
|
@ -39,7 +39,12 @@ use std::{
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
use unindent::Unindent as _;
|
use unindent::Unindent as _;
|
||||||
use workspace::{item::ItemHandle as _, shared_screen::SharedScreen, SplitDirection, Workspace};
|
use workspace::{
|
||||||
|
dock::{test::TestPanel, DockPosition},
|
||||||
|
item::{test::TestItem, ItemHandle as _},
|
||||||
|
shared_screen::SharedScreen,
|
||||||
|
SplitDirection, Workspace,
|
||||||
|
};
|
||||||
|
|
||||||
#[ctor::ctor]
|
#[ctor::ctor]
|
||||||
fn init_logger() {
|
fn init_logger() {
|
||||||
|
@ -6847,12 +6852,43 @@ async fn test_basic_following(
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
||||||
// Client B activates an external window again, and the previously-opened screen-sharing item
|
// Client B activates a panel, and the previously-opened screen-sharing item gets activated.
|
||||||
// gets activated.
|
let panel = cx_b.add_view(workspace_b.window_id(), |_| {
|
||||||
active_call_b
|
TestPanel::new(DockPosition::Left)
|
||||||
.update(cx_b, |call, cx| call.set_location(None, cx))
|
});
|
||||||
.await
|
workspace_b.update(cx_b, |workspace, cx| {
|
||||||
.unwrap();
|
workspace.add_panel(panel, cx);
|
||||||
|
workspace.toggle_panel_focus::<TestPanel>(cx);
|
||||||
|
});
|
||||||
|
deterministic.run_until_parked();
|
||||||
|
assert_eq!(
|
||||||
|
workspace_a.read_with(cx_a, |workspace, cx| workspace
|
||||||
|
.active_item(cx)
|
||||||
|
.unwrap()
|
||||||
|
.id()),
|
||||||
|
shared_screen.id()
|
||||||
|
);
|
||||||
|
|
||||||
|
// Toggling the focus back to the pane causes client A to return to the multibuffer.
|
||||||
|
workspace_b.update(cx_b, |workspace, cx| {
|
||||||
|
workspace.toggle_panel_focus::<TestPanel>(cx);
|
||||||
|
});
|
||||||
|
deterministic.run_until_parked();
|
||||||
|
workspace_a.read_with(cx_a, |workspace, cx| {
|
||||||
|
assert_eq!(
|
||||||
|
workspace.active_item(cx).unwrap().id(),
|
||||||
|
multibuffer_editor_a.id()
|
||||||
|
)
|
||||||
|
});
|
||||||
|
|
||||||
|
// Client B activates an item that doesn't implement following,
|
||||||
|
// so the previously-opened screen-sharing item gets activated.
|
||||||
|
let unfollowable_item = cx_b.add_view(workspace_b.window_id(), |_| TestItem::new());
|
||||||
|
workspace_b.update(cx_b, |workspace, cx| {
|
||||||
|
workspace.active_pane().update(cx, |pane, cx| {
|
||||||
|
pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
|
||||||
|
})
|
||||||
|
});
|
||||||
deterministic.run_until_parked();
|
deterministic.run_until_parked();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
workspace_a.read_with(cx_a, |workspace, cx| workspace
|
workspace_a.read_with(cx_a, |workspace, cx| workspace
|
||||||
|
|
|
@ -243,7 +243,7 @@ impl BlockMap {
|
||||||
// Preserve any old transforms that precede this edit.
|
// Preserve any old transforms that precede this edit.
|
||||||
let old_start = WrapRow(edit.old.start);
|
let old_start = WrapRow(edit.old.start);
|
||||||
let new_start = WrapRow(edit.new.start);
|
let new_start = WrapRow(edit.new.start);
|
||||||
new_transforms.push_tree(cursor.slice(&old_start, Bias::Left, &()), &());
|
new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
|
||||||
if let Some(transform) = cursor.item() {
|
if let Some(transform) = cursor.item() {
|
||||||
if transform.is_isomorphic() && old_start == cursor.end(&()) {
|
if transform.is_isomorphic() && old_start == cursor.end(&()) {
|
||||||
new_transforms.push(transform.clone(), &());
|
new_transforms.push(transform.clone(), &());
|
||||||
|
@ -425,7 +425,7 @@ impl BlockMap {
|
||||||
push_isomorphic(&mut new_transforms, extent_after_edit);
|
push_isomorphic(&mut new_transforms, extent_after_edit);
|
||||||
}
|
}
|
||||||
|
|
||||||
new_transforms.push_tree(cursor.suffix(&()), &());
|
new_transforms.append(cursor.suffix(&()), &());
|
||||||
debug_assert_eq!(
|
debug_assert_eq!(
|
||||||
new_transforms.summary().input_rows,
|
new_transforms.summary().input_rows,
|
||||||
wrap_snapshot.max_point().row() + 1
|
wrap_snapshot.max_point().row() + 1
|
||||||
|
|
|
@ -115,10 +115,10 @@ impl<'a> FoldMapWriter<'a> {
|
||||||
let mut new_tree = SumTree::new();
|
let mut new_tree = SumTree::new();
|
||||||
let mut cursor = self.0.folds.cursor::<Fold>();
|
let mut cursor = self.0.folds.cursor::<Fold>();
|
||||||
for fold in folds {
|
for fold in folds {
|
||||||
new_tree.push_tree(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
|
new_tree.append(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
|
||||||
new_tree.push(fold, &buffer);
|
new_tree.push(fold, &buffer);
|
||||||
}
|
}
|
||||||
new_tree.push_tree(cursor.suffix(&buffer), &buffer);
|
new_tree.append(cursor.suffix(&buffer), &buffer);
|
||||||
new_tree
|
new_tree
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -165,10 +165,10 @@ impl<'a> FoldMapWriter<'a> {
|
||||||
let mut cursor = self.0.folds.cursor::<usize>();
|
let mut cursor = self.0.folds.cursor::<usize>();
|
||||||
let mut folds = SumTree::new();
|
let mut folds = SumTree::new();
|
||||||
for fold_ix in fold_ixs_to_delete {
|
for fold_ix in fold_ixs_to_delete {
|
||||||
folds.push_tree(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
|
folds.append(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
|
||||||
cursor.next(&buffer);
|
cursor.next(&buffer);
|
||||||
}
|
}
|
||||||
folds.push_tree(cursor.suffix(&buffer), &buffer);
|
folds.append(cursor.suffix(&buffer), &buffer);
|
||||||
folds
|
folds
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -302,7 +302,7 @@ impl FoldMap {
|
||||||
cursor.seek(&0, Bias::Right, &());
|
cursor.seek(&0, Bias::Right, &());
|
||||||
|
|
||||||
while let Some(mut edit) = buffer_edits_iter.next() {
|
while let Some(mut edit) = buffer_edits_iter.next() {
|
||||||
new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &());
|
new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &());
|
||||||
edit.new.start -= edit.old.start - cursor.start();
|
edit.new.start -= edit.old.start - cursor.start();
|
||||||
edit.old.start = *cursor.start();
|
edit.old.start = *cursor.start();
|
||||||
|
|
||||||
|
@ -412,7 +412,7 @@ impl FoldMap {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
new_transforms.push_tree(cursor.suffix(&()), &());
|
new_transforms.append(cursor.suffix(&()), &());
|
||||||
if new_transforms.is_empty() {
|
if new_transforms.is_empty() {
|
||||||
let text_summary = new_buffer.text_summary();
|
let text_summary = new_buffer.text_summary();
|
||||||
new_transforms.push(
|
new_transforms.push(
|
||||||
|
|
|
@ -353,7 +353,7 @@ impl WrapSnapshot {
|
||||||
}
|
}
|
||||||
|
|
||||||
old_cursor.next(&());
|
old_cursor.next(&());
|
||||||
new_transforms.push_tree(
|
new_transforms.append(
|
||||||
old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
|
old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
|
||||||
&(),
|
&(),
|
||||||
);
|
);
|
||||||
|
@ -366,7 +366,7 @@ impl WrapSnapshot {
|
||||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||||
}
|
}
|
||||||
old_cursor.next(&());
|
old_cursor.next(&());
|
||||||
new_transforms.push_tree(old_cursor.suffix(&()), &());
|
new_transforms.append(old_cursor.suffix(&()), &());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -500,7 +500,7 @@ impl WrapSnapshot {
|
||||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||||
}
|
}
|
||||||
old_cursor.next(&());
|
old_cursor.next(&());
|
||||||
new_transforms.push_tree(
|
new_transforms.append(
|
||||||
old_cursor.slice(
|
old_cursor.slice(
|
||||||
&TabPoint::new(next_edit.old_rows.start, 0),
|
&TabPoint::new(next_edit.old_rows.start, 0),
|
||||||
Bias::Right,
|
Bias::Right,
|
||||||
|
@ -517,7 +517,7 @@ impl WrapSnapshot {
|
||||||
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
new_transforms.push_or_extend(Transform::isomorphic(summary));
|
||||||
}
|
}
|
||||||
old_cursor.next(&());
|
old_cursor.next(&());
|
||||||
new_transforms.push_tree(old_cursor.suffix(&()), &());
|
new_transforms.append(old_cursor.suffix(&()), &());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1010,7 +1010,7 @@ impl MultiBuffer {
|
||||||
|
|
||||||
let suffix = cursor.suffix(&());
|
let suffix = cursor.suffix(&());
|
||||||
let changed_trailing_excerpt = suffix.is_empty();
|
let changed_trailing_excerpt = suffix.is_empty();
|
||||||
new_excerpts.push_tree(suffix, &());
|
new_excerpts.append(suffix, &());
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
snapshot.excerpts = new_excerpts;
|
snapshot.excerpts = new_excerpts;
|
||||||
snapshot.excerpt_ids = new_excerpt_ids;
|
snapshot.excerpt_ids = new_excerpt_ids;
|
||||||
|
@ -1193,7 +1193,7 @@ impl MultiBuffer {
|
||||||
while let Some(excerpt_id) = excerpt_ids.next() {
|
while let Some(excerpt_id) = excerpt_ids.next() {
|
||||||
// Seek to the next excerpt to remove, preserving any preceding excerpts.
|
// Seek to the next excerpt to remove, preserving any preceding excerpts.
|
||||||
let locator = snapshot.excerpt_locator_for_id(excerpt_id);
|
let locator = snapshot.excerpt_locator_for_id(excerpt_id);
|
||||||
new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
|
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
|
||||||
|
|
||||||
if let Some(mut excerpt) = cursor.item() {
|
if let Some(mut excerpt) = cursor.item() {
|
||||||
if excerpt.id != excerpt_id {
|
if excerpt.id != excerpt_id {
|
||||||
|
@ -1245,7 +1245,7 @@ impl MultiBuffer {
|
||||||
}
|
}
|
||||||
let suffix = cursor.suffix(&());
|
let suffix = cursor.suffix(&());
|
||||||
let changed_trailing_excerpt = suffix.is_empty();
|
let changed_trailing_excerpt = suffix.is_empty();
|
||||||
new_excerpts.push_tree(suffix, &());
|
new_excerpts.append(suffix, &());
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
snapshot.excerpts = new_excerpts;
|
snapshot.excerpts = new_excerpts;
|
||||||
|
|
||||||
|
@ -1509,7 +1509,7 @@ impl MultiBuffer {
|
||||||
let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
|
let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
|
||||||
|
|
||||||
for (locator, buffer, buffer_edited) in excerpts_to_edit {
|
for (locator, buffer, buffer_edited) in excerpts_to_edit {
|
||||||
new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
|
new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
|
||||||
let old_excerpt = cursor.item().unwrap();
|
let old_excerpt = cursor.item().unwrap();
|
||||||
let buffer = buffer.read(cx);
|
let buffer = buffer.read(cx);
|
||||||
let buffer_id = buffer.remote_id();
|
let buffer_id = buffer.remote_id();
|
||||||
|
@ -1549,7 +1549,7 @@ impl MultiBuffer {
|
||||||
new_excerpts.push(new_excerpt, &());
|
new_excerpts.push(new_excerpt, &());
|
||||||
cursor.next(&());
|
cursor.next(&());
|
||||||
}
|
}
|
||||||
new_excerpts.push_tree(cursor.suffix(&()), &());
|
new_excerpts.append(cursor.suffix(&()), &());
|
||||||
|
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
snapshot.excerpts = new_excerpts;
|
snapshot.excerpts = new_excerpts;
|
||||||
|
|
|
@ -445,7 +445,7 @@ type WindowBoundsCallback = Box<dyn FnMut(WindowBounds, Uuid, &mut WindowContext
|
||||||
type KeystrokeCallback =
|
type KeystrokeCallback =
|
||||||
Box<dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut WindowContext) -> bool>;
|
Box<dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut WindowContext) -> bool>;
|
||||||
type ActiveLabeledTasksCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
|
type ActiveLabeledTasksCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
|
||||||
type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
|
type DeserializeActionCallback = fn(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>;
|
||||||
type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
|
type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
|
||||||
|
|
||||||
pub struct AppContext {
|
pub struct AppContext {
|
||||||
|
@ -624,14 +624,14 @@ impl AppContext {
|
||||||
pub fn deserialize_action(
|
pub fn deserialize_action(
|
||||||
&self,
|
&self,
|
||||||
name: &str,
|
name: &str,
|
||||||
argument: Option<&str>,
|
argument: Option<serde_json::Value>,
|
||||||
) -> Result<Box<dyn Action>> {
|
) -> Result<Box<dyn Action>> {
|
||||||
let callback = self
|
let callback = self
|
||||||
.action_deserializers
|
.action_deserializers
|
||||||
.get(name)
|
.get(name)
|
||||||
.ok_or_else(|| anyhow!("unknown action {}", name))?
|
.ok_or_else(|| anyhow!("unknown action {}", name))?
|
||||||
.1;
|
.1;
|
||||||
callback(argument.unwrap_or("{}"))
|
callback(argument.unwrap_or_else(|| serde_json::Value::Object(Default::default())))
|
||||||
.with_context(|| format!("invalid data for action {}", name))
|
.with_context(|| format!("invalid data for action {}", name))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -5573,7 +5573,7 @@ mod tests {
|
||||||
let action1 = cx
|
let action1 = cx
|
||||||
.deserialize_action(
|
.deserialize_action(
|
||||||
"test::something::ComplexAction",
|
"test::something::ComplexAction",
|
||||||
Some(r#"{"arg": "a", "count": 5}"#),
|
Some(serde_json::from_str(r#"{"arg": "a", "count": 5}"#).unwrap()),
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
let action2 = cx
|
let action2 = cx
|
||||||
|
|
|
@ -11,7 +11,7 @@ pub trait Action: 'static {
|
||||||
fn qualified_name() -> &'static str
|
fn qualified_name() -> &'static str
|
||||||
where
|
where
|
||||||
Self: Sized;
|
Self: Sized;
|
||||||
fn from_json_str(json: &str) -> anyhow::Result<Box<dyn Action>>
|
fn from_json_str(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>
|
||||||
where
|
where
|
||||||
Self: Sized;
|
Self: Sized;
|
||||||
}
|
}
|
||||||
|
@ -38,7 +38,7 @@ macro_rules! actions {
|
||||||
$crate::__impl_action! {
|
$crate::__impl_action! {
|
||||||
$namespace,
|
$namespace,
|
||||||
$name,
|
$name,
|
||||||
fn from_json_str(_: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
|
fn from_json_str(_: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
|
||||||
Ok(Box::new(Self))
|
Ok(Box::new(Self))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -58,8 +58,8 @@ macro_rules! impl_actions {
|
||||||
$crate::__impl_action! {
|
$crate::__impl_action! {
|
||||||
$namespace,
|
$namespace,
|
||||||
$name,
|
$name,
|
||||||
fn from_json_str(json: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
|
fn from_json_str(json: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
|
||||||
Ok(Box::new($crate::serde_json::from_str::<Self>(json)?))
|
Ok(Box::new($crate::serde_json::from_value::<Self>(json)?))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
)*
|
)*
|
||||||
|
|
|
@ -394,7 +394,7 @@ impl<'a> WindowContext<'a> {
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(move |(name, (type_id, deserialize))| {
|
.filter_map(move |(name, (type_id, deserialize))| {
|
||||||
if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() {
|
if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() {
|
||||||
let action = deserialize("{}").ok()?;
|
let action = deserialize(serde_json::Value::Object(Default::default())).ok()?;
|
||||||
let bindings = self
|
let bindings = self
|
||||||
.keystroke_matcher
|
.keystroke_matcher
|
||||||
.bindings_for_action_type(*type_id)
|
.bindings_for_action_type(*type_id)
|
||||||
|
|
|
@ -211,7 +211,7 @@ impl<V: View> Element<V> for List<V> {
|
||||||
let mut cursor = old_items.cursor::<Count>();
|
let mut cursor = old_items.cursor::<Count>();
|
||||||
|
|
||||||
if state.rendered_range.start < new_rendered_range.start {
|
if state.rendered_range.start < new_rendered_range.start {
|
||||||
new_items.push_tree(
|
new_items.append(
|
||||||
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
|
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
|
||||||
&(),
|
&(),
|
||||||
);
|
);
|
||||||
|
@ -221,7 +221,7 @@ impl<V: View> Element<V> for List<V> {
|
||||||
cursor.next(&());
|
cursor.next(&());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
new_items.push_tree(
|
new_items.append(
|
||||||
cursor.slice(&Count(new_rendered_range.start), Bias::Right, &()),
|
cursor.slice(&Count(new_rendered_range.start), Bias::Right, &()),
|
||||||
&(),
|
&(),
|
||||||
);
|
);
|
||||||
|
@ -230,7 +230,7 @@ impl<V: View> Element<V> for List<V> {
|
||||||
cursor.seek(&Count(new_rendered_range.end), Bias::Right, &());
|
cursor.seek(&Count(new_rendered_range.end), Bias::Right, &());
|
||||||
|
|
||||||
if new_rendered_range.end < state.rendered_range.start {
|
if new_rendered_range.end < state.rendered_range.start {
|
||||||
new_items.push_tree(
|
new_items.append(
|
||||||
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
|
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
|
||||||
&(),
|
&(),
|
||||||
);
|
);
|
||||||
|
@ -240,7 +240,7 @@ impl<V: View> Element<V> for List<V> {
|
||||||
cursor.next(&());
|
cursor.next(&());
|
||||||
}
|
}
|
||||||
|
|
||||||
new_items.push_tree(cursor.suffix(&()), &());
|
new_items.append(cursor.suffix(&()), &());
|
||||||
|
|
||||||
state.items = new_items;
|
state.items = new_items;
|
||||||
state.rendered_range = new_rendered_range;
|
state.rendered_range = new_rendered_range;
|
||||||
|
@ -413,7 +413,7 @@ impl<V: View> ListState<V> {
|
||||||
old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
|
old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
|
||||||
|
|
||||||
new_heights.extend((0..count).map(|_| ListItem::Unrendered), &());
|
new_heights.extend((0..count).map(|_| ListItem::Unrendered), &());
|
||||||
new_heights.push_tree(old_heights.suffix(&()), &());
|
new_heights.append(old_heights.suffix(&()), &());
|
||||||
drop(old_heights);
|
drop(old_heights);
|
||||||
state.items = new_heights;
|
state.items = new_heights;
|
||||||
}
|
}
|
||||||
|
|
|
@ -786,7 +786,7 @@ impl platform::Platform for MacPlatform {
|
||||||
|
|
||||||
fn set_cursor_style(&self, style: CursorStyle) {
|
fn set_cursor_style(&self, style: CursorStyle) {
|
||||||
unsafe {
|
unsafe {
|
||||||
let cursor: id = match style {
|
let new_cursor: id = match style {
|
||||||
CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor],
|
CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor],
|
||||||
CursorStyle::ResizeLeftRight => {
|
CursorStyle::ResizeLeftRight => {
|
||||||
msg_send![class!(NSCursor), resizeLeftRightCursor]
|
msg_send![class!(NSCursor), resizeLeftRightCursor]
|
||||||
|
@ -795,7 +795,11 @@ impl platform::Platform for MacPlatform {
|
||||||
CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
|
CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
|
||||||
CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor],
|
CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor],
|
||||||
};
|
};
|
||||||
let _: () = msg_send![cursor, set];
|
|
||||||
|
let old_cursor: id = msg_send![class!(NSCursor), currentCursor];
|
||||||
|
if new_cursor != old_cursor {
|
||||||
|
let _: () = msg_send![new_cursor, set];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,7 @@ use futures::{
|
||||||
future::{BoxFuture, Shared},
|
future::{BoxFuture, Shared},
|
||||||
FutureExt, TryFutureExt as _,
|
FutureExt, TryFutureExt as _,
|
||||||
};
|
};
|
||||||
use gpui::{executor::Background, AppContext, Task};
|
use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
|
||||||
use highlight_map::HighlightMap;
|
use highlight_map::HighlightMap;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use lsp::CodeActionKind;
|
use lsp::CodeActionKind;
|
||||||
|
@ -125,27 +125,46 @@ impl CachedLspAdapter {
|
||||||
|
|
||||||
pub async fn fetch_latest_server_version(
|
pub async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
self.adapter.fetch_latest_server_version(http).await
|
self.adapter.fetch_latest_server_version(delegate).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn will_fetch_server(
|
||||||
|
&self,
|
||||||
|
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
cx: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
self.adapter.will_fetch_server(delegate, cx)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn will_start_server(
|
||||||
|
&self,
|
||||||
|
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
cx: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
self.adapter.will_start_server(delegate, cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn fetch_server_binary(
|
pub async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
self.adapter
|
self.adapter
|
||||||
.fetch_server_binary(version, http, container_dir)
|
.fetch_server_binary(version, container_dir, delegate)
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn cached_server_binary(
|
pub async fn cached_server_binary(
|
||||||
&self,
|
&self,
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Option<LanguageServerBinary> {
|
) -> Option<LanguageServerBinary> {
|
||||||
self.adapter.cached_server_binary(container_dir).await
|
self.adapter
|
||||||
|
.cached_server_binary(container_dir, delegate)
|
||||||
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||||
|
@ -187,23 +206,48 @@ impl CachedLspAdapter {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub trait LspAdapterDelegate: Send + Sync {
|
||||||
|
fn show_notification(&self, message: &str, cx: &mut AppContext);
|
||||||
|
fn http_client(&self) -> Arc<dyn HttpClient>;
|
||||||
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
pub trait LspAdapter: 'static + Send + Sync {
|
pub trait LspAdapter: 'static + Send + Sync {
|
||||||
async fn name(&self) -> LanguageServerName;
|
async fn name(&self) -> LanguageServerName;
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>>;
|
) -> Result<Box<dyn 'static + Send + Any>>;
|
||||||
|
|
||||||
|
fn will_fetch_server(
|
||||||
|
&self,
|
||||||
|
_: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
_: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn will_start_server(
|
||||||
|
&self,
|
||||||
|
_: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
_: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary>;
|
) -> Result<LanguageServerBinary>;
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary>;
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary>;
|
||||||
|
|
||||||
async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
||||||
|
|
||||||
|
@ -513,10 +557,7 @@ pub struct LanguageRegistry {
|
||||||
login_shell_env_loaded: Shared<Task<()>>,
|
login_shell_env_loaded: Shared<Task<()>>,
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
lsp_binary_paths: Mutex<
|
lsp_binary_paths: Mutex<
|
||||||
HashMap<
|
HashMap<LanguageServerName, Shared<Task<Result<LanguageServerBinary, Arc<anyhow::Error>>>>>,
|
||||||
LanguageServerName,
|
|
||||||
Shared<BoxFuture<'static, Result<LanguageServerBinary, Arc<anyhow::Error>>>>,
|
|
||||||
>,
|
|
||||||
>,
|
>,
|
||||||
executor: Option<Arc<Background>>,
|
executor: Option<Arc<Background>>,
|
||||||
}
|
}
|
||||||
|
@ -812,7 +853,7 @@ impl LanguageRegistry {
|
||||||
language: Arc<Language>,
|
language: Arc<Language>,
|
||||||
adapter: Arc<CachedLspAdapter>,
|
adapter: Arc<CachedLspAdapter>,
|
||||||
root_path: Arc<Path>,
|
root_path: Arc<Path>,
|
||||||
http_client: Arc<dyn HttpClient>,
|
delegate: Arc<dyn LspAdapterDelegate>,
|
||||||
cx: &mut AppContext,
|
cx: &mut AppContext,
|
||||||
) -> Option<PendingLanguageServer> {
|
) -> Option<PendingLanguageServer> {
|
||||||
let server_id = self.state.write().next_language_server_id();
|
let server_id = self.state.write().next_language_server_id();
|
||||||
|
@ -860,35 +901,40 @@ impl LanguageRegistry {
|
||||||
.log_err()?;
|
.log_err()?;
|
||||||
let this = self.clone();
|
let this = self.clone();
|
||||||
let language = language.clone();
|
let language = language.clone();
|
||||||
let http_client = http_client.clone();
|
|
||||||
let download_dir = download_dir.clone();
|
let download_dir = download_dir.clone();
|
||||||
let root_path = root_path.clone();
|
let root_path = root_path.clone();
|
||||||
let adapter = adapter.clone();
|
let adapter = adapter.clone();
|
||||||
let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
|
let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
|
||||||
let login_shell_env_loaded = self.login_shell_env_loaded.clone();
|
let login_shell_env_loaded = self.login_shell_env_loaded.clone();
|
||||||
|
|
||||||
let task = cx.spawn(|cx| async move {
|
let task = cx.spawn(|mut cx| async move {
|
||||||
login_shell_env_loaded.await;
|
login_shell_env_loaded.await;
|
||||||
|
|
||||||
let mut lock = this.lsp_binary_paths.lock();
|
let entry = this
|
||||||
let entry = lock
|
.lsp_binary_paths
|
||||||
|
.lock()
|
||||||
.entry(adapter.name.clone())
|
.entry(adapter.name.clone())
|
||||||
.or_insert_with(|| {
|
.or_insert_with(|| {
|
||||||
get_binary(
|
cx.spawn(|cx| {
|
||||||
adapter.clone(),
|
get_binary(
|
||||||
language.clone(),
|
adapter.clone(),
|
||||||
http_client,
|
language.clone(),
|
||||||
download_dir,
|
delegate.clone(),
|
||||||
lsp_binary_statuses,
|
download_dir,
|
||||||
)
|
lsp_binary_statuses,
|
||||||
.map_err(Arc::new)
|
cx,
|
||||||
.boxed()
|
)
|
||||||
|
.map_err(Arc::new)
|
||||||
|
})
|
||||||
.shared()
|
.shared()
|
||||||
})
|
})
|
||||||
.clone();
|
.clone();
|
||||||
drop(lock);
|
|
||||||
let binary = entry.clone().map_err(|e| anyhow!(e)).await?;
|
let binary = entry.clone().map_err(|e| anyhow!(e)).await?;
|
||||||
|
|
||||||
|
if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
|
||||||
|
task.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let server = lsp::LanguageServer::new(
|
let server = lsp::LanguageServer::new(
|
||||||
server_id,
|
server_id,
|
||||||
&binary.path,
|
&binary.path,
|
||||||
|
@ -958,9 +1004,10 @@ impl Default for LanguageRegistry {
|
||||||
async fn get_binary(
|
async fn get_binary(
|
||||||
adapter: Arc<CachedLspAdapter>,
|
adapter: Arc<CachedLspAdapter>,
|
||||||
language: Arc<Language>,
|
language: Arc<Language>,
|
||||||
http_client: Arc<dyn HttpClient>,
|
delegate: Arc<dyn LspAdapterDelegate>,
|
||||||
download_dir: Arc<Path>,
|
download_dir: Arc<Path>,
|
||||||
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
||||||
|
mut cx: AsyncAppContext,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let container_dir = download_dir.join(adapter.name.0.as_ref());
|
let container_dir = download_dir.join(adapter.name.0.as_ref());
|
||||||
if !container_dir.exists() {
|
if !container_dir.exists() {
|
||||||
|
@ -969,17 +1016,24 @@ async fn get_binary(
|
||||||
.context("failed to create container directory")?;
|
.context("failed to create container directory")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) {
|
||||||
|
task.await?;
|
||||||
|
}
|
||||||
|
|
||||||
let binary = fetch_latest_binary(
|
let binary = fetch_latest_binary(
|
||||||
adapter.clone(),
|
adapter.clone(),
|
||||||
language.clone(),
|
language.clone(),
|
||||||
http_client,
|
delegate.as_ref(),
|
||||||
&container_dir,
|
&container_dir,
|
||||||
statuses.clone(),
|
statuses.clone(),
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
if let Err(error) = binary.as_ref() {
|
if let Err(error) = binary.as_ref() {
|
||||||
if let Some(cached) = adapter.cached_server_binary(container_dir).await {
|
if let Some(cached) = adapter
|
||||||
|
.cached_server_binary(container_dir, delegate.as_ref())
|
||||||
|
.await
|
||||||
|
{
|
||||||
statuses
|
statuses
|
||||||
.broadcast((language.clone(), LanguageServerBinaryStatus::Cached))
|
.broadcast((language.clone(), LanguageServerBinaryStatus::Cached))
|
||||||
.await?;
|
.await?;
|
||||||
|
@ -1001,7 +1055,7 @@ async fn get_binary(
|
||||||
async fn fetch_latest_binary(
|
async fn fetch_latest_binary(
|
||||||
adapter: Arc<CachedLspAdapter>,
|
adapter: Arc<CachedLspAdapter>,
|
||||||
language: Arc<Language>,
|
language: Arc<Language>,
|
||||||
http_client: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
container_dir: &Path,
|
container_dir: &Path,
|
||||||
lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
|
@ -1012,14 +1066,12 @@ async fn fetch_latest_binary(
|
||||||
LanguageServerBinaryStatus::CheckingForUpdate,
|
LanguageServerBinaryStatus::CheckingForUpdate,
|
||||||
))
|
))
|
||||||
.await?;
|
.await?;
|
||||||
let version_info = adapter
|
let version_info = adapter.fetch_latest_server_version(delegate).await?;
|
||||||
.fetch_latest_server_version(http_client.clone())
|
|
||||||
.await?;
|
|
||||||
lsp_binary_statuses_tx
|
lsp_binary_statuses_tx
|
||||||
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
|
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
|
||||||
.await?;
|
.await?;
|
||||||
let binary = adapter
|
let binary = adapter
|
||||||
.fetch_server_binary(version_info, http_client, container_dir.to_path_buf())
|
.fetch_server_binary(version_info, container_dir.to_path_buf(), delegate)
|
||||||
.await?;
|
.await?;
|
||||||
lsp_binary_statuses_tx
|
lsp_binary_statuses_tx
|
||||||
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded))
|
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded))
|
||||||
|
@ -1543,7 +1595,7 @@ impl LspAdapter for Arc<FakeLspAdapter> {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
|
@ -1551,13 +1603,17 @@ impl LspAdapter for Arc<FakeLspAdapter> {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
_: Box<dyn 'static + Send + Any>,
|
_: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
_: PathBuf,
|
_: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
_: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
unreachable!();
|
unreachable!();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -288,7 +288,7 @@ impl SyntaxSnapshot {
|
||||||
};
|
};
|
||||||
if target.cmp(&cursor.start(), text).is_gt() {
|
if target.cmp(&cursor.start(), text).is_gt() {
|
||||||
let slice = cursor.slice(&target, Bias::Left, text);
|
let slice = cursor.slice(&target, Bias::Left, text);
|
||||||
layers.push_tree(slice, text);
|
layers.append(slice, text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// If this layer follows all of the edits, then preserve it and any
|
// If this layer follows all of the edits, then preserve it and any
|
||||||
|
@ -303,7 +303,7 @@ impl SyntaxSnapshot {
|
||||||
Bias::Left,
|
Bias::Left,
|
||||||
text,
|
text,
|
||||||
);
|
);
|
||||||
layers.push_tree(slice, text);
|
layers.append(slice, text);
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -369,7 +369,7 @@ impl SyntaxSnapshot {
|
||||||
cursor.next(text);
|
cursor.next(text);
|
||||||
}
|
}
|
||||||
|
|
||||||
layers.push_tree(cursor.suffix(&text), &text);
|
layers.append(cursor.suffix(&text), &text);
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
self.layers = layers;
|
self.layers = layers;
|
||||||
}
|
}
|
||||||
|
@ -478,7 +478,7 @@ impl SyntaxSnapshot {
|
||||||
if bounded_position.cmp(&cursor.start(), &text).is_gt() {
|
if bounded_position.cmp(&cursor.start(), &text).is_gt() {
|
||||||
let slice = cursor.slice(&bounded_position, Bias::Left, text);
|
let slice = cursor.slice(&bounded_position, Bias::Left, text);
|
||||||
if !slice.is_empty() {
|
if !slice.is_empty() {
|
||||||
layers.push_tree(slice, &text);
|
layers.append(slice, &text);
|
||||||
if changed_regions.prune(cursor.end(text), text) {
|
if changed_regions.prune(cursor.end(text), text) {
|
||||||
done = false;
|
done = false;
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,17 +6,23 @@ import ScreenCaptureKit
|
||||||
class LKRoomDelegate: RoomDelegate {
|
class LKRoomDelegate: RoomDelegate {
|
||||||
var data: UnsafeRawPointer
|
var data: UnsafeRawPointer
|
||||||
var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
|
var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
|
||||||
|
var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
|
||||||
|
var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
|
||||||
var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
|
var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
|
||||||
var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
|
var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
|
||||||
|
|
||||||
init(
|
init(
|
||||||
data: UnsafeRawPointer,
|
data: UnsafeRawPointer,
|
||||||
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
|
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
|
||||||
|
onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
||||||
|
onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
|
||||||
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
||||||
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void)
|
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void)
|
||||||
{
|
{
|
||||||
self.data = data
|
self.data = data
|
||||||
self.onDidDisconnect = onDidDisconnect
|
self.onDidDisconnect = onDidDisconnect
|
||||||
|
self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack
|
||||||
|
self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack
|
||||||
self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
|
self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
|
||||||
self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
|
self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
|
||||||
}
|
}
|
||||||
|
@ -30,12 +36,16 @@ class LKRoomDelegate: RoomDelegate {
|
||||||
func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
|
func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
|
||||||
if track.kind == .video {
|
if track.kind == .video {
|
||||||
self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
|
self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
|
||||||
|
} else if track.kind == .audio {
|
||||||
|
self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
|
func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
|
||||||
if track.kind == .video {
|
if track.kind == .video {
|
||||||
self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
|
self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
|
||||||
|
} else if track.kind == .audio {
|
||||||
|
self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -77,12 +87,16 @@ class LKVideoRenderer: NSObject, VideoRenderer {
|
||||||
public func LKRoomDelegateCreate(
|
public func LKRoomDelegateCreate(
|
||||||
data: UnsafeRawPointer,
|
data: UnsafeRawPointer,
|
||||||
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
|
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
|
||||||
|
onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
||||||
|
onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
|
||||||
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
|
||||||
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
|
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
|
||||||
) -> UnsafeMutableRawPointer {
|
) -> UnsafeMutableRawPointer {
|
||||||
let delegate = LKRoomDelegate(
|
let delegate = LKRoomDelegate(
|
||||||
data: data,
|
data: data,
|
||||||
onDidDisconnect: onDidDisconnect,
|
onDidDisconnect: onDidDisconnect,
|
||||||
|
onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack,
|
||||||
|
onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack,
|
||||||
onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
|
onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
|
||||||
onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack
|
onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack
|
||||||
)
|
)
|
||||||
|
@ -123,6 +137,18 @@ public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPoin
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@_cdecl("LKRoomPublishAudioTrack")
|
||||||
|
public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
|
||||||
|
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
||||||
|
let track = Unmanaged<LocalAudioTrack>.fromOpaque(track).takeUnretainedValue()
|
||||||
|
room.localParticipant?.publishAudioTrack(track: track).then { publication in
|
||||||
|
callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
|
||||||
|
}.catch { error in
|
||||||
|
callback(callback_data, nil, error.localizedDescription as CFString)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@_cdecl("LKRoomUnpublishTrack")
|
@_cdecl("LKRoomUnpublishTrack")
|
||||||
public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
|
public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
|
||||||
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
||||||
|
@ -130,6 +156,20 @@ public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawP
|
||||||
let _ = room.localParticipant?.unpublish(publication: publication)
|
let _ = room.localParticipant?.unpublish(publication: publication)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@_cdecl("LKRoomAudioTracksForRemoteParticipant")
|
||||||
|
public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
|
||||||
|
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
||||||
|
|
||||||
|
for (_, participant) in room.remoteParticipants {
|
||||||
|
if participant.identity == participantId as String {
|
||||||
|
return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@_cdecl("LKRoomVideoTracksForRemoteParticipant")
|
@_cdecl("LKRoomVideoTracksForRemoteParticipant")
|
||||||
public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
|
public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
|
||||||
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
|
||||||
|
@ -143,6 +183,17 @@ public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, partic
|
||||||
return nil;
|
return nil;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@_cdecl("LKLocalAudioTrackCreateTrack")
|
||||||
|
public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer {
|
||||||
|
let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions(
|
||||||
|
echoCancellation: true,
|
||||||
|
noiseSuppression: true
|
||||||
|
))
|
||||||
|
|
||||||
|
return Unmanaged.passRetained(track).toOpaque()
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@_cdecl("LKCreateScreenShareTrackForDisplay")
|
@_cdecl("LKCreateScreenShareTrackForDisplay")
|
||||||
public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
|
public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
|
||||||
let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
|
let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
|
||||||
|
@ -150,6 +201,19 @@ public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer)
|
||||||
return Unmanaged.passRetained(track).toOpaque()
|
return Unmanaged.passRetained(track).toOpaque()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@_cdecl("LKRemoteAudioTrackStart")
|
||||||
|
public func LKRemoteAudioTrackStart(track: UnsafeRawPointer, onStart: @escaping @convention(c) (UnsafeRawPointer, Bool) -> Void, callbackData: UnsafeRawPointer) {
|
||||||
|
let track = Unmanaged<Track>.fromOpaque(track).takeUnretainedValue() as! RemoteAudioTrack
|
||||||
|
|
||||||
|
track.start().then { success in
|
||||||
|
onStart(callbackData, success)
|
||||||
|
}
|
||||||
|
.catch { _ in
|
||||||
|
onStart(callbackData, false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@_cdecl("LKVideoRendererCreate")
|
@_cdecl("LKVideoRendererCreate")
|
||||||
public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
|
public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
|
||||||
Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
|
Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
|
||||||
|
@ -169,6 +233,12 @@ public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString {
|
||||||
return track.sid! as CFString
|
return track.sid! as CFString
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@_cdecl("LKRemoteAudioTrackGetSid")
|
||||||
|
public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString {
|
||||||
|
let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
|
||||||
|
return track.sid! as CFString
|
||||||
|
}
|
||||||
|
|
||||||
@_cdecl("LKDisplaySources")
|
@_cdecl("LKDisplaySources")
|
||||||
public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
|
public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
|
||||||
MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in
|
MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in
|
||||||
|
|
|
@ -1,6 +1,10 @@
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem};
|
use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem};
|
||||||
use live_kit_client::{LocalVideoTrack, RemoteVideoTrackUpdate, Room};
|
use live_kit_client::{
|
||||||
|
LocalAudioTrack, LocalVideoTrack, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate, Room,
|
||||||
|
};
|
||||||
use live_kit_server::token::{self, VideoGrant};
|
use live_kit_server::token::{self, VideoGrant};
|
||||||
use log::LevelFilter;
|
use log::LevelFilter;
|
||||||
use simplelog::SimpleLogger;
|
use simplelog::SimpleLogger;
|
||||||
|
@ -11,6 +15,12 @@ fn main() {
|
||||||
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
|
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
|
||||||
|
|
||||||
gpui::App::new(()).unwrap().run(|cx| {
|
gpui::App::new(()).unwrap().run(|cx| {
|
||||||
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
|
println!("USING TEST LIVEKIT");
|
||||||
|
|
||||||
|
#[cfg(not(any(test, feature = "test-support")))]
|
||||||
|
println!("USING REAL LIVEKIT");
|
||||||
|
|
||||||
cx.platform().activate(true);
|
cx.platform().activate(true);
|
||||||
cx.add_global_action(quit);
|
cx.add_global_action(quit);
|
||||||
|
|
||||||
|
@ -49,16 +59,14 @@ fn main() {
|
||||||
let room_b = Room::new();
|
let room_b = Room::new();
|
||||||
room_b.connect(&live_kit_url, &user2_token).await.unwrap();
|
room_b.connect(&live_kit_url, &user2_token).await.unwrap();
|
||||||
|
|
||||||
let mut track_changes = room_b.remote_video_track_updates();
|
let mut audio_track_updates = room_b.remote_audio_track_updates();
|
||||||
|
let audio_track = LocalAudioTrack::create();
|
||||||
|
let audio_track_publication = room_a.publish_audio_track(&audio_track).await.unwrap();
|
||||||
|
|
||||||
let displays = room_a.display_sources().await.unwrap();
|
if let RemoteAudioTrackUpdate::Subscribed(track) =
|
||||||
let display = displays.into_iter().next().unwrap();
|
audio_track_updates.next().await.unwrap()
|
||||||
|
{
|
||||||
let track_a = LocalVideoTrack::screen_share_for_display(&display);
|
let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
|
||||||
let track_a_publication = room_a.publish_video_track(&track_a).await.unwrap();
|
|
||||||
|
|
||||||
if let RemoteVideoTrackUpdate::Subscribed(track) = track_changes.next().await.unwrap() {
|
|
||||||
let remote_tracks = room_b.remote_video_tracks("test-participant-1");
|
|
||||||
assert_eq!(remote_tracks.len(), 1);
|
assert_eq!(remote_tracks.len(), 1);
|
||||||
assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
|
assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
|
||||||
assert_eq!(track.publisher_id(), "test-participant-1");
|
assert_eq!(track.publisher_id(), "test-participant-1");
|
||||||
|
@ -66,18 +74,60 @@ fn main() {
|
||||||
panic!("unexpected message");
|
panic!("unexpected message");
|
||||||
}
|
}
|
||||||
|
|
||||||
let remote_track = room_b
|
println!("Pausing for 5 seconds to test audio, make some noise!");
|
||||||
|
let timer = cx.background().timer(Duration::from_secs(5));
|
||||||
|
timer.await;
|
||||||
|
|
||||||
|
let remote_audio_track = room_b
|
||||||
|
.remote_audio_tracks("test-participant-1")
|
||||||
|
.pop()
|
||||||
|
.unwrap();
|
||||||
|
room_a.unpublish_track(audio_track_publication);
|
||||||
|
if let RemoteAudioTrackUpdate::Unsubscribed {
|
||||||
|
publisher_id,
|
||||||
|
track_id,
|
||||||
|
} = audio_track_updates.next().await.unwrap()
|
||||||
|
{
|
||||||
|
assert_eq!(publisher_id, "test-participant-1");
|
||||||
|
assert_eq!(remote_audio_track.sid(), track_id);
|
||||||
|
assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0);
|
||||||
|
} else {
|
||||||
|
panic!("unexpected message");
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut video_track_updates = room_b.remote_video_track_updates();
|
||||||
|
let displays = room_a.display_sources().await.unwrap();
|
||||||
|
let display = displays.into_iter().next().unwrap();
|
||||||
|
|
||||||
|
let local_video_track = LocalVideoTrack::screen_share_for_display(&display);
|
||||||
|
let local_video_track_publication = room_a
|
||||||
|
.publish_video_track(&local_video_track)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if let RemoteVideoTrackUpdate::Subscribed(track) =
|
||||||
|
video_track_updates.next().await.unwrap()
|
||||||
|
{
|
||||||
|
let remote_video_tracks = room_b.remote_video_tracks("test-participant-1");
|
||||||
|
assert_eq!(remote_video_tracks.len(), 1);
|
||||||
|
assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1");
|
||||||
|
assert_eq!(track.publisher_id(), "test-participant-1");
|
||||||
|
} else {
|
||||||
|
panic!("unexpected message");
|
||||||
|
}
|
||||||
|
|
||||||
|
let remote_video_track = room_b
|
||||||
.remote_video_tracks("test-participant-1")
|
.remote_video_tracks("test-participant-1")
|
||||||
.pop()
|
.pop()
|
||||||
.unwrap();
|
.unwrap();
|
||||||
room_a.unpublish_track(track_a_publication);
|
room_a.unpublish_track(local_video_track_publication);
|
||||||
if let RemoteVideoTrackUpdate::Unsubscribed {
|
if let RemoteVideoTrackUpdate::Unsubscribed {
|
||||||
publisher_id,
|
publisher_id,
|
||||||
track_id,
|
track_id,
|
||||||
} = track_changes.next().await.unwrap()
|
} = video_track_updates.next().await.unwrap()
|
||||||
{
|
{
|
||||||
assert_eq!(publisher_id, "test-participant-1");
|
assert_eq!(publisher_id, "test-participant-1");
|
||||||
assert_eq!(remote_track.sid(), track_id);
|
assert_eq!(remote_video_track.sid(), track_id);
|
||||||
assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
|
assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
|
||||||
} else {
|
} else {
|
||||||
panic!("unexpected message");
|
panic!("unexpected message");
|
||||||
|
|
|
@ -4,7 +4,7 @@ pub mod prod;
|
||||||
pub use prod::*;
|
pub use prod::*;
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
mod test;
|
pub mod test;
|
||||||
|
|
||||||
#[cfg(any(test, feature = "test-support"))]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
pub use test::*;
|
pub use test::*;
|
||||||
|
|
|
@ -21,6 +21,17 @@ extern "C" {
|
||||||
fn LKRoomDelegateCreate(
|
fn LKRoomDelegateCreate(
|
||||||
callback_data: *mut c_void,
|
callback_data: *mut c_void,
|
||||||
on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
|
on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
|
||||||
|
on_did_subscribe_to_remote_audio_track: extern "C" fn(
|
||||||
|
callback_data: *mut c_void,
|
||||||
|
publisher_id: CFStringRef,
|
||||||
|
track_id: CFStringRef,
|
||||||
|
remote_track: *const c_void,
|
||||||
|
),
|
||||||
|
on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
|
||||||
|
callback_data: *mut c_void,
|
||||||
|
publisher_id: CFStringRef,
|
||||||
|
track_id: CFStringRef,
|
||||||
|
),
|
||||||
on_did_subscribe_to_remote_video_track: extern "C" fn(
|
on_did_subscribe_to_remote_video_track: extern "C" fn(
|
||||||
callback_data: *mut c_void,
|
callback_data: *mut c_void,
|
||||||
publisher_id: CFStringRef,
|
publisher_id: CFStringRef,
|
||||||
|
@ -49,7 +60,18 @@ extern "C" {
|
||||||
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
|
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
|
||||||
callback_data: *mut c_void,
|
callback_data: *mut c_void,
|
||||||
);
|
);
|
||||||
|
fn LKRoomPublishAudioTrack(
|
||||||
|
room: *const c_void,
|
||||||
|
track: *const c_void,
|
||||||
|
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
|
||||||
|
callback_data: *mut c_void,
|
||||||
|
);
|
||||||
fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void);
|
fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void);
|
||||||
|
fn LKRoomAudioTracksForRemoteParticipant(
|
||||||
|
room: *const c_void,
|
||||||
|
participant_id: CFStringRef,
|
||||||
|
) -> CFArrayRef;
|
||||||
|
|
||||||
fn LKRoomVideoTracksForRemoteParticipant(
|
fn LKRoomVideoTracksForRemoteParticipant(
|
||||||
room: *const c_void,
|
room: *const c_void,
|
||||||
participant_id: CFStringRef,
|
participant_id: CFStringRef,
|
||||||
|
@ -61,6 +83,13 @@ extern "C" {
|
||||||
on_drop: extern "C" fn(callback_data: *mut c_void),
|
on_drop: extern "C" fn(callback_data: *mut c_void),
|
||||||
) -> *const c_void;
|
) -> *const c_void;
|
||||||
|
|
||||||
|
fn LKRemoteAudioTrackGetSid(track: *const c_void) -> CFStringRef;
|
||||||
|
// fn LKRemoteAudioTrackStart(
|
||||||
|
// track: *const c_void,
|
||||||
|
// callback: extern "C" fn(*mut c_void, bool),
|
||||||
|
// callback_data: *mut c_void
|
||||||
|
// );
|
||||||
|
|
||||||
fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void);
|
fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void);
|
||||||
fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef;
|
fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef;
|
||||||
|
|
||||||
|
@ -73,6 +102,7 @@ extern "C" {
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void;
|
fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void;
|
||||||
|
fn LKLocalAudioTrackCreateTrack() -> *const c_void;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type Sid = String;
|
pub type Sid = String;
|
||||||
|
@ -89,6 +119,7 @@ pub struct Room {
|
||||||
watch::Sender<ConnectionState>,
|
watch::Sender<ConnectionState>,
|
||||||
watch::Receiver<ConnectionState>,
|
watch::Receiver<ConnectionState>,
|
||||||
)>,
|
)>,
|
||||||
|
remote_audio_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteAudioTrackUpdate>>>,
|
||||||
remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>,
|
remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>,
|
||||||
_delegate: RoomDelegate,
|
_delegate: RoomDelegate,
|
||||||
}
|
}
|
||||||
|
@ -100,6 +131,7 @@ impl Room {
|
||||||
Self {
|
Self {
|
||||||
native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
|
native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
|
||||||
connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
|
connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
|
||||||
|
remote_audio_track_subscribers: Default::default(),
|
||||||
remote_video_track_subscribers: Default::default(),
|
remote_video_track_subscribers: Default::default(),
|
||||||
_delegate: delegate,
|
_delegate: delegate,
|
||||||
}
|
}
|
||||||
|
@ -191,6 +223,32 @@ impl Room {
|
||||||
async { rx.await.unwrap().context("error publishing video track") }
|
async { rx.await.unwrap().context("error publishing video track") }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn publish_audio_track(
|
||||||
|
self: &Arc<Self>,
|
||||||
|
track: &LocalAudioTrack,
|
||||||
|
) -> impl Future<Output = Result<LocalTrackPublication>> {
|
||||||
|
let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
|
||||||
|
extern "C" fn callback(tx: *mut c_void, publication: *mut c_void, error: CFStringRef) {
|
||||||
|
let tx =
|
||||||
|
unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
|
||||||
|
if error.is_null() {
|
||||||
|
let _ = tx.send(Ok(LocalTrackPublication(publication)));
|
||||||
|
} else {
|
||||||
|
let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
|
||||||
|
let _ = tx.send(Err(anyhow!(error)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
unsafe {
|
||||||
|
LKRoomPublishAudioTrack(
|
||||||
|
self.native_room,
|
||||||
|
track.0,
|
||||||
|
callback,
|
||||||
|
Box::into_raw(Box::new(tx)) as *mut c_void,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
async { rx.await.unwrap().context("error publishing video track") }
|
||||||
|
}
|
||||||
|
|
||||||
pub fn unpublish_track(&self, publication: LocalTrackPublication) {
|
pub fn unpublish_track(&self, publication: LocalTrackPublication) {
|
||||||
unsafe {
|
unsafe {
|
||||||
LKRoomUnpublishTrack(self.native_room, publication.0);
|
LKRoomUnpublishTrack(self.native_room, publication.0);
|
||||||
|
@ -226,12 +284,65 @@ impl Room {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
|
||||||
|
unsafe {
|
||||||
|
let tracks = LKRoomAudioTracksForRemoteParticipant(
|
||||||
|
self.native_room,
|
||||||
|
CFString::new(participant_id).as_concrete_TypeRef(),
|
||||||
|
);
|
||||||
|
|
||||||
|
if tracks.is_null() {
|
||||||
|
Vec::new()
|
||||||
|
} else {
|
||||||
|
let tracks = CFArray::wrap_under_get_rule(tracks);
|
||||||
|
tracks
|
||||||
|
.into_iter()
|
||||||
|
.map(|native_track| {
|
||||||
|
let native_track = *native_track;
|
||||||
|
let id =
|
||||||
|
CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
|
||||||
|
.to_string();
|
||||||
|
Arc::new(RemoteAudioTrack::new(
|
||||||
|
native_track,
|
||||||
|
id,
|
||||||
|
participant_id.into(),
|
||||||
|
))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remote_audio_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteAudioTrackUpdate> {
|
||||||
|
let (tx, rx) = mpsc::unbounded();
|
||||||
|
self.remote_audio_track_subscribers.lock().push(tx);
|
||||||
|
rx
|
||||||
|
}
|
||||||
|
|
||||||
pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> {
|
pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> {
|
||||||
let (tx, rx) = mpsc::unbounded();
|
let (tx, rx) = mpsc::unbounded();
|
||||||
self.remote_video_track_subscribers.lock().push(tx);
|
self.remote_video_track_subscribers.lock().push(tx);
|
||||||
rx
|
rx
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn did_subscribe_to_remote_audio_track(&self, track: RemoteAudioTrack) {
|
||||||
|
let track = Arc::new(track);
|
||||||
|
self.remote_audio_track_subscribers.lock().retain(|tx| {
|
||||||
|
tx.unbounded_send(RemoteAudioTrackUpdate::Subscribed(track.clone()))
|
||||||
|
.is_ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
|
||||||
|
self.remote_audio_track_subscribers.lock().retain(|tx| {
|
||||||
|
tx.unbounded_send(RemoteAudioTrackUpdate::Unsubscribed {
|
||||||
|
publisher_id: publisher_id.clone(),
|
||||||
|
track_id: track_id.clone(),
|
||||||
|
})
|
||||||
|
.is_ok()
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
|
fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
|
||||||
let track = Arc::new(track);
|
let track = Arc::new(track);
|
||||||
self.remote_video_track_subscribers.lock().retain(|tx| {
|
self.remote_video_track_subscribers.lock().retain(|tx| {
|
||||||
|
@ -294,6 +405,8 @@ impl RoomDelegate {
|
||||||
LKRoomDelegateCreate(
|
LKRoomDelegateCreate(
|
||||||
weak_room as *mut c_void,
|
weak_room as *mut c_void,
|
||||||
Self::on_did_disconnect,
|
Self::on_did_disconnect,
|
||||||
|
Self::on_did_subscribe_to_remote_audio_track,
|
||||||
|
Self::on_did_unsubscribe_from_remote_audio_track,
|
||||||
Self::on_did_subscribe_to_remote_video_track,
|
Self::on_did_subscribe_to_remote_video_track,
|
||||||
Self::on_did_unsubscribe_from_remote_video_track,
|
Self::on_did_unsubscribe_from_remote_video_track,
|
||||||
)
|
)
|
||||||
|
@ -312,6 +425,36 @@ impl RoomDelegate {
|
||||||
let _ = Weak::into_raw(room);
|
let _ = Weak::into_raw(room);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
extern "C" fn on_did_subscribe_to_remote_audio_track(
|
||||||
|
room: *mut c_void,
|
||||||
|
publisher_id: CFStringRef,
|
||||||
|
track_id: CFStringRef,
|
||||||
|
track: *const c_void,
|
||||||
|
) {
|
||||||
|
let room = unsafe { Weak::from_raw(room as *mut Room) };
|
||||||
|
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
|
||||||
|
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
|
||||||
|
let track = RemoteAudioTrack::new(track, track_id, publisher_id);
|
||||||
|
if let Some(room) = room.upgrade() {
|
||||||
|
room.did_subscribe_to_remote_audio_track(track);
|
||||||
|
}
|
||||||
|
let _ = Weak::into_raw(room);
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C" fn on_did_unsubscribe_from_remote_audio_track(
|
||||||
|
room: *mut c_void,
|
||||||
|
publisher_id: CFStringRef,
|
||||||
|
track_id: CFStringRef,
|
||||||
|
) {
|
||||||
|
let room = unsafe { Weak::from_raw(room as *mut Room) };
|
||||||
|
let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
|
||||||
|
let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
|
||||||
|
if let Some(room) = room.upgrade() {
|
||||||
|
room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
|
||||||
|
}
|
||||||
|
let _ = Weak::into_raw(room);
|
||||||
|
}
|
||||||
|
|
||||||
extern "C" fn on_did_subscribe_to_remote_video_track(
|
extern "C" fn on_did_subscribe_to_remote_video_track(
|
||||||
room: *mut c_void,
|
room: *mut c_void,
|
||||||
publisher_id: CFStringRef,
|
publisher_id: CFStringRef,
|
||||||
|
@ -352,6 +495,20 @@ impl Drop for RoomDelegate {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct LocalAudioTrack(*const c_void);
|
||||||
|
|
||||||
|
impl LocalAudioTrack {
|
||||||
|
pub fn create() -> Self {
|
||||||
|
Self(unsafe { LKLocalAudioTrackCreateTrack() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Drop for LocalAudioTrack {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
unsafe { CFRelease(self.0) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct LocalVideoTrack(*const c_void);
|
pub struct LocalVideoTrack(*const c_void);
|
||||||
|
|
||||||
impl LocalVideoTrack {
|
impl LocalVideoTrack {
|
||||||
|
@ -374,6 +531,34 @@ impl Drop for LocalTrackPublication {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct RemoteAudioTrack {
|
||||||
|
_native_track: *const c_void,
|
||||||
|
sid: Sid,
|
||||||
|
publisher_id: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RemoteAudioTrack {
|
||||||
|
fn new(native_track: *const c_void, sid: Sid, publisher_id: String) -> Self {
|
||||||
|
unsafe {
|
||||||
|
CFRetain(native_track);
|
||||||
|
}
|
||||||
|
Self {
|
||||||
|
_native_track: native_track,
|
||||||
|
sid,
|
||||||
|
publisher_id,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn sid(&self) -> &str {
|
||||||
|
&self.sid
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn publisher_id(&self) -> &str {
|
||||||
|
&self.publisher_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct RemoteVideoTrack {
|
pub struct RemoteVideoTrack {
|
||||||
native_track: *const c_void,
|
native_track: *const c_void,
|
||||||
|
@ -453,6 +638,11 @@ pub enum RemoteVideoTrackUpdate {
|
||||||
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub enum RemoteAudioTrackUpdate {
|
||||||
|
Subscribed(Arc<RemoteAudioTrack>),
|
||||||
|
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
||||||
|
}
|
||||||
|
|
||||||
pub struct MacOSDisplay(*const c_void);
|
pub struct MacOSDisplay(*const c_void);
|
||||||
|
|
||||||
impl MacOSDisplay {
|
impl MacOSDisplay {
|
||||||
|
|
|
@ -67,7 +67,7 @@ impl TestServer {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn create_room(&self, room: String) -> Result<()> {
|
pub async fn create_room(&self, room: String) -> Result<()> {
|
||||||
self.background.simulate_random_delay().await;
|
self.background.simulate_random_delay().await;
|
||||||
let mut server_rooms = self.rooms.lock();
|
let mut server_rooms = self.rooms.lock();
|
||||||
if server_rooms.contains_key(&room) {
|
if server_rooms.contains_key(&room) {
|
||||||
|
@ -104,7 +104,7 @@ impl TestServer {
|
||||||
room_name
|
room_name
|
||||||
))
|
))
|
||||||
} else {
|
} else {
|
||||||
for track in &room.tracks {
|
for track in &room.video_tracks {
|
||||||
client_room
|
client_room
|
||||||
.0
|
.0
|
||||||
.lock()
|
.lock()
|
||||||
|
@ -182,7 +182,7 @@ impl TestServer {
|
||||||
frames_rx: local_track.frames_rx.clone(),
|
frames_rx: local_track.frames_rx.clone(),
|
||||||
});
|
});
|
||||||
|
|
||||||
room.tracks.push(track.clone());
|
room.video_tracks.push(track.clone());
|
||||||
|
|
||||||
for (id, client_room) in &room.client_rooms {
|
for (id, client_room) in &room.client_rooms {
|
||||||
if *id != identity {
|
if *id != identity {
|
||||||
|
@ -199,6 +199,43 @@ impl TestServer {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async fn publish_audio_track(
|
||||||
|
&self,
|
||||||
|
token: String,
|
||||||
|
_local_track: &LocalAudioTrack,
|
||||||
|
) -> Result<()> {
|
||||||
|
self.background.simulate_random_delay().await;
|
||||||
|
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
|
||||||
|
let identity = claims.sub.unwrap().to_string();
|
||||||
|
let room_name = claims.video.room.unwrap();
|
||||||
|
|
||||||
|
let mut server_rooms = self.rooms.lock();
|
||||||
|
let room = server_rooms
|
||||||
|
.get_mut(&*room_name)
|
||||||
|
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
|
||||||
|
|
||||||
|
let track = Arc::new(RemoteAudioTrack {
|
||||||
|
sid: nanoid::nanoid!(17),
|
||||||
|
publisher_id: identity.clone(),
|
||||||
|
});
|
||||||
|
|
||||||
|
room.audio_tracks.push(track.clone());
|
||||||
|
|
||||||
|
for (id, client_room) in &room.client_rooms {
|
||||||
|
if *id != identity {
|
||||||
|
let _ = client_room
|
||||||
|
.0
|
||||||
|
.lock()
|
||||||
|
.audio_track_updates
|
||||||
|
.0
|
||||||
|
.try_broadcast(RemoteAudioTrackUpdate::Subscribed(track.clone()))
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
|
fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
|
||||||
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
|
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
|
||||||
let room_name = claims.video.room.unwrap();
|
let room_name = claims.video.room.unwrap();
|
||||||
|
@ -207,14 +244,26 @@ impl TestServer {
|
||||||
let room = server_rooms
|
let room = server_rooms
|
||||||
.get_mut(&*room_name)
|
.get_mut(&*room_name)
|
||||||
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
|
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
|
||||||
Ok(room.tracks.clone())
|
Ok(room.video_tracks.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn audio_tracks(&self, token: String) -> Result<Vec<Arc<RemoteAudioTrack>>> {
|
||||||
|
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
|
||||||
|
let room_name = claims.video.room.unwrap();
|
||||||
|
|
||||||
|
let mut server_rooms = self.rooms.lock();
|
||||||
|
let room = server_rooms
|
||||||
|
.get_mut(&*room_name)
|
||||||
|
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
|
||||||
|
Ok(room.audio_tracks.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
struct TestServerRoom {
|
struct TestServerRoom {
|
||||||
client_rooms: HashMap<Sid, Arc<Room>>,
|
client_rooms: HashMap<Sid, Arc<Room>>,
|
||||||
tracks: Vec<Arc<RemoteVideoTrack>>,
|
video_tracks: Vec<Arc<RemoteVideoTrack>>,
|
||||||
|
audio_tracks: Vec<Arc<RemoteAudioTrack>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TestServerRoom {}
|
impl TestServerRoom {}
|
||||||
|
@ -266,6 +315,10 @@ struct RoomState {
|
||||||
watch::Receiver<ConnectionState>,
|
watch::Receiver<ConnectionState>,
|
||||||
),
|
),
|
||||||
display_sources: Vec<MacOSDisplay>,
|
display_sources: Vec<MacOSDisplay>,
|
||||||
|
audio_track_updates: (
|
||||||
|
async_broadcast::Sender<RemoteAudioTrackUpdate>,
|
||||||
|
async_broadcast::Receiver<RemoteAudioTrackUpdate>,
|
||||||
|
),
|
||||||
video_track_updates: (
|
video_track_updates: (
|
||||||
async_broadcast::Sender<RemoteVideoTrackUpdate>,
|
async_broadcast::Sender<RemoteVideoTrackUpdate>,
|
||||||
async_broadcast::Receiver<RemoteVideoTrackUpdate>,
|
async_broadcast::Receiver<RemoteVideoTrackUpdate>,
|
||||||
|
@ -286,6 +339,7 @@ impl Room {
|
||||||
connection: watch::channel_with(ConnectionState::Disconnected),
|
connection: watch::channel_with(ConnectionState::Disconnected),
|
||||||
display_sources: Default::default(),
|
display_sources: Default::default(),
|
||||||
video_track_updates: async_broadcast::broadcast(128),
|
video_track_updates: async_broadcast::broadcast(128),
|
||||||
|
audio_track_updates: async_broadcast::broadcast(128),
|
||||||
})))
|
})))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -327,8 +381,34 @@ impl Room {
|
||||||
Ok(LocalTrackPublication)
|
Ok(LocalTrackPublication)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
pub fn publish_audio_track(
|
||||||
|
self: &Arc<Self>,
|
||||||
|
track: &LocalAudioTrack,
|
||||||
|
) -> impl Future<Output = Result<LocalTrackPublication>> {
|
||||||
|
let this = self.clone();
|
||||||
|
let track = track.clone();
|
||||||
|
async move {
|
||||||
|
this.test_server()
|
||||||
|
.publish_audio_track(this.token(), &track)
|
||||||
|
.await?;
|
||||||
|
Ok(LocalTrackPublication)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn unpublish_track(&self, _: LocalTrackPublication) {}
|
pub fn unpublish_track(&self, _publication: LocalTrackPublication) {}
|
||||||
|
|
||||||
|
pub fn remote_audio_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
|
||||||
|
if !self.is_connected() {
|
||||||
|
return Vec::new();
|
||||||
|
}
|
||||||
|
|
||||||
|
self.test_server()
|
||||||
|
.audio_tracks(self.token())
|
||||||
|
.unwrap()
|
||||||
|
.into_iter()
|
||||||
|
.filter(|track| track.publisher_id() == publisher_id)
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
|
pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
|
||||||
if !self.is_connected() {
|
if !self.is_connected() {
|
||||||
|
@ -343,6 +423,10 @@ impl Room {
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn remote_audio_track_updates(&self) -> impl Stream<Item = RemoteAudioTrackUpdate> {
|
||||||
|
self.0.lock().audio_track_updates.1.clone()
|
||||||
|
}
|
||||||
|
|
||||||
pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> {
|
pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> {
|
||||||
self.0.lock().video_track_updates.1.clone()
|
self.0.lock().video_track_updates.1.clone()
|
||||||
}
|
}
|
||||||
|
@ -404,6 +488,15 @@ impl LocalVideoTrack {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct LocalAudioTrack;
|
||||||
|
|
||||||
|
impl LocalAudioTrack {
|
||||||
|
pub fn create() -> Self {
|
||||||
|
Self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub struct RemoteVideoTrack {
|
pub struct RemoteVideoTrack {
|
||||||
sid: Sid,
|
sid: Sid,
|
||||||
publisher_id: Sid,
|
publisher_id: Sid,
|
||||||
|
@ -424,12 +517,33 @@ impl RemoteVideoTrack {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub struct RemoteAudioTrack {
|
||||||
|
sid: Sid,
|
||||||
|
publisher_id: Sid,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RemoteAudioTrack {
|
||||||
|
pub fn sid(&self) -> &str {
|
||||||
|
&self.sid
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn publisher_id(&self) -> &str {
|
||||||
|
&self.publisher_id
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub enum RemoteVideoTrackUpdate {
|
pub enum RemoteVideoTrackUpdate {
|
||||||
Subscribed(Arc<RemoteVideoTrack>),
|
Subscribed(Arc<RemoteVideoTrack>),
|
||||||
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub enum RemoteAudioTrackUpdate {
|
||||||
|
Subscribed(Arc<RemoteAudioTrack>),
|
||||||
|
Unsubscribed { publisher_id: Sid, track_id: Sid },
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct MacOSDisplay {
|
pub struct MacOSDisplay {
|
||||||
frames: (
|
frames: (
|
||||||
|
|
|
@ -103,14 +103,14 @@ struct Notification<'a, T> {
|
||||||
params: T,
|
params: T,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Debug, Clone, Deserialize)]
|
||||||
struct AnyNotification<'a> {
|
struct AnyNotification<'a> {
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
id: Option<usize>,
|
id: Option<usize>,
|
||||||
#[serde(borrow)]
|
#[serde(borrow)]
|
||||||
method: &'a str,
|
method: &'a str,
|
||||||
#[serde(borrow)]
|
#[serde(borrow, default)]
|
||||||
params: &'a RawValue,
|
params: Option<&'a RawValue>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
@ -157,9 +157,12 @@ impl LanguageServer {
|
||||||
"unhandled notification {}:\n{}",
|
"unhandled notification {}:\n{}",
|
||||||
notification.method,
|
notification.method,
|
||||||
serde_json::to_string_pretty(
|
serde_json::to_string_pretty(
|
||||||
&Value::from_str(notification.params.get()).unwrap()
|
¬ification
|
||||||
|
.params
|
||||||
|
.and_then(|params| Value::from_str(params.get()).ok())
|
||||||
|
.unwrap_or(Value::Null)
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap(),
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
@ -279,7 +282,11 @@ impl LanguageServer {
|
||||||
|
|
||||||
if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
|
if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
|
||||||
if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
|
if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
|
||||||
handler(msg.id, msg.params.get(), cx.clone());
|
handler(
|
||||||
|
msg.id,
|
||||||
|
&msg.params.map(|params| params.get()).unwrap_or("null"),
|
||||||
|
cx.clone(),
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
on_unhandled_notification(msg);
|
on_unhandled_notification(msg);
|
||||||
}
|
}
|
||||||
|
@ -828,7 +835,13 @@ impl LanguageServer {
|
||||||
cx,
|
cx,
|
||||||
move |msg| {
|
move |msg| {
|
||||||
notifications_tx
|
notifications_tx
|
||||||
.try_send((msg.method.to_string(), msg.params.get().to_string()))
|
.try_send((
|
||||||
|
msg.method.to_string(),
|
||||||
|
msg.params
|
||||||
|
.map(|raw_value| raw_value.get())
|
||||||
|
.unwrap_or("null")
|
||||||
|
.to_string(),
|
||||||
|
))
|
||||||
.ok();
|
.ok();
|
||||||
},
|
},
|
||||||
)),
|
)),
|
||||||
|
|
|
@ -38,9 +38,9 @@ use language::{
|
||||||
},
|
},
|
||||||
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
|
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
|
||||||
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
|
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
|
||||||
Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
|
Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
|
||||||
PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
|
Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
|
||||||
Unclipped,
|
ToPointUtf16, Transaction, Unclipped,
|
||||||
};
|
};
|
||||||
use log::error;
|
use log::error;
|
||||||
use lsp::{
|
use lsp::{
|
||||||
|
@ -75,8 +75,8 @@ use std::{
|
||||||
};
|
};
|
||||||
use terminals::Terminals;
|
use terminals::Terminals;
|
||||||
use util::{
|
use util::{
|
||||||
debug_panic, defer, merge_json_value_into, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc,
|
debug_panic, defer, http::HttpClient, merge_json_value_into,
|
||||||
ResultExt, TryFutureExt as _,
|
paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub use fs::*;
|
pub use fs::*;
|
||||||
|
@ -252,6 +252,7 @@ pub enum Event {
|
||||||
LanguageServerAdded(LanguageServerId),
|
LanguageServerAdded(LanguageServerId),
|
||||||
LanguageServerRemoved(LanguageServerId),
|
LanguageServerRemoved(LanguageServerId),
|
||||||
LanguageServerLog(LanguageServerId, String),
|
LanguageServerLog(LanguageServerId, String),
|
||||||
|
Notification(String),
|
||||||
ActiveEntryChanged(Option<ProjectEntryId>),
|
ActiveEntryChanged(Option<ProjectEntryId>),
|
||||||
WorktreeAdded,
|
WorktreeAdded,
|
||||||
WorktreeRemoved(WorktreeId),
|
WorktreeRemoved(WorktreeId),
|
||||||
|
@ -435,6 +436,11 @@ pub enum FormatTrigger {
|
||||||
Manual,
|
Manual,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct ProjectLspAdapterDelegate {
|
||||||
|
project: ModelHandle<Project>,
|
||||||
|
http_client: Arc<dyn HttpClient>,
|
||||||
|
}
|
||||||
|
|
||||||
impl FormatTrigger {
|
impl FormatTrigger {
|
||||||
fn from_proto(value: i32) -> FormatTrigger {
|
fn from_proto(value: i32) -> FormatTrigger {
|
||||||
match value {
|
match value {
|
||||||
|
@ -2407,7 +2413,7 @@ impl Project {
|
||||||
language.clone(),
|
language.clone(),
|
||||||
adapter.clone(),
|
adapter.clone(),
|
||||||
worktree_path.clone(),
|
worktree_path.clone(),
|
||||||
self.client.http_client(),
|
ProjectLspAdapterDelegate::new(self, cx),
|
||||||
cx,
|
cx,
|
||||||
) {
|
) {
|
||||||
Some(pending_server) => pending_server,
|
Some(pending_server) => pending_server,
|
||||||
|
@ -7188,6 +7194,26 @@ impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl ProjectLspAdapterDelegate {
|
||||||
|
fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
|
||||||
|
Arc::new(Self {
|
||||||
|
project: cx.handle(),
|
||||||
|
http_client: project.client.http_client(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl LspAdapterDelegate for ProjectLspAdapterDelegate {
|
||||||
|
fn show_notification(&self, message: &str, cx: &mut AppContext) {
|
||||||
|
self.project
|
||||||
|
.update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn http_client(&self) -> Arc<dyn HttpClient> {
|
||||||
|
self.http_client.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn split_operations(
|
fn split_operations(
|
||||||
mut operations: Vec<proto::Operation>,
|
mut operations: Vec<proto::Operation>,
|
||||||
) -> impl Iterator<Item = Vec<proto::Operation>> {
|
) -> impl Iterator<Item = Vec<proto::Operation>> {
|
||||||
|
|
|
@ -1470,7 +1470,7 @@ impl Snapshot {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
new_entries_by_path.push_tree(cursor.suffix(&()), &());
|
new_entries_by_path.append(cursor.suffix(&()), &());
|
||||||
new_entries_by_path
|
new_entries_by_path
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -2259,7 +2259,7 @@ impl BackgroundScannerState {
|
||||||
let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
|
let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
|
||||||
new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
|
new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
|
||||||
removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
|
removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
|
||||||
new_entries.push_tree(cursor.suffix(&()), &());
|
new_entries.append(cursor.suffix(&()), &());
|
||||||
}
|
}
|
||||||
self.snapshot.entries_by_path = new_entries;
|
self.snapshot.entries_by_path = new_entries;
|
||||||
|
|
||||||
|
|
|
@ -53,7 +53,7 @@ impl Rope {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
self.chunks.push_tree(chunks.suffix(&()), &());
|
self.chunks.append(chunks.suffix(&()), &());
|
||||||
self.check_invariants();
|
self.check_invariants();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -21,7 +21,7 @@ util = { path = "../util" }
|
||||||
|
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
futures.workspace = true
|
futures.workspace = true
|
||||||
json_comments = "0.2"
|
serde_json_lenient = {version = "0.1", features = ["preserve_order", "raw_value"]}
|
||||||
lazy_static.workspace = true
|
lazy_static.workspace = true
|
||||||
postage.workspace = true
|
postage.workspace = true
|
||||||
rust-embed.workspace = true
|
rust-embed.workspace = true
|
||||||
|
@ -37,6 +37,6 @@ tree-sitter-json = "*"
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
gpui = { path = "../gpui", features = ["test-support"] }
|
gpui = { path = "../gpui", features = ["test-support"] }
|
||||||
fs = { path = "../fs", features = ["test-support"] }
|
fs = { path = "../fs", features = ["test-support"] }
|
||||||
|
indoc.workspace = true
|
||||||
pretty_assertions = "1.3.0"
|
pretty_assertions = "1.3.0"
|
||||||
unindent.workspace = true
|
unindent.workspace = true
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use crate::{settings_store::parse_json_with_comments, SettingsAssets};
|
use crate::{settings_store::parse_json_with_comments, SettingsAssets};
|
||||||
use anyhow::{Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use collections::BTreeMap;
|
use collections::BTreeMap;
|
||||||
use gpui::{keymap_matcher::Binding, AppContext};
|
use gpui::{keymap_matcher::Binding, AppContext};
|
||||||
use schemars::{
|
use schemars::{
|
||||||
|
@ -8,7 +8,7 @@ use schemars::{
|
||||||
JsonSchema,
|
JsonSchema,
|
||||||
};
|
};
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use serde_json::{value::RawValue, Value};
|
use serde_json::Value;
|
||||||
use util::{asset_str, ResultExt};
|
use util::{asset_str, ResultExt};
|
||||||
|
|
||||||
#[derive(Deserialize, Default, Clone, JsonSchema)]
|
#[derive(Deserialize, Default, Clone, JsonSchema)]
|
||||||
|
@ -24,7 +24,7 @@ pub struct KeymapBlock {
|
||||||
|
|
||||||
#[derive(Deserialize, Default, Clone)]
|
#[derive(Deserialize, Default, Clone)]
|
||||||
#[serde(transparent)]
|
#[serde(transparent)]
|
||||||
pub struct KeymapAction(Box<RawValue>);
|
pub struct KeymapAction(Value);
|
||||||
|
|
||||||
impl JsonSchema for KeymapAction {
|
impl JsonSchema for KeymapAction {
|
||||||
fn schema_name() -> String {
|
fn schema_name() -> String {
|
||||||
|
@ -37,11 +37,12 @@ impl JsonSchema for KeymapAction {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
struct ActionWithData(Box<str>, Box<RawValue>);
|
struct ActionWithData(Box<str>, Value);
|
||||||
|
|
||||||
impl KeymapFile {
|
impl KeymapFile {
|
||||||
pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> {
|
pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> {
|
||||||
let content = asset_str::<SettingsAssets>(asset_path);
|
let content = asset_str::<SettingsAssets>(asset_path);
|
||||||
|
|
||||||
Self::parse(content.as_ref())?.add_to_cx(cx)
|
Self::parse(content.as_ref())?.add_to_cx(cx)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -54,18 +55,27 @@ impl KeymapFile {
|
||||||
let bindings = bindings
|
let bindings = bindings
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter_map(|(keystroke, action)| {
|
.filter_map(|(keystroke, action)| {
|
||||||
let action = action.0.get();
|
let action = action.0;
|
||||||
|
|
||||||
// This is a workaround for a limitation in serde: serde-rs/json#497
|
// This is a workaround for a limitation in serde: serde-rs/json#497
|
||||||
// We want to deserialize the action data as a `RawValue` so that we can
|
// We want to deserialize the action data as a `RawValue` so that we can
|
||||||
// deserialize the action itself dynamically directly from the JSON
|
// deserialize the action itself dynamically directly from the JSON
|
||||||
// string. But `RawValue` currently does not work inside of an untagged enum.
|
// string. But `RawValue` currently does not work inside of an untagged enum.
|
||||||
if action.starts_with('[') {
|
if let Value::Array(items) = action {
|
||||||
let ActionWithData(name, data) = serde_json::from_str(action).log_err()?;
|
let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else {
|
||||||
cx.deserialize_action(&name, Some(data.get()))
|
return Some(Err(anyhow!("Expected array of length 2")));
|
||||||
|
};
|
||||||
|
let serde_json::Value::String(name) = name else {
|
||||||
|
return Some(Err(anyhow!("Expected first item in array to be a string.")))
|
||||||
|
};
|
||||||
|
cx.deserialize_action(
|
||||||
|
&name,
|
||||||
|
Some(data),
|
||||||
|
)
|
||||||
|
} else if let Value::String(name) = action {
|
||||||
|
cx.deserialize_action(&name, None)
|
||||||
} else {
|
} else {
|
||||||
let name = serde_json::from_str(action).log_err()?;
|
return Some(Err(anyhow!("Expected two-element array, got {:?}", action)));
|
||||||
cx.deserialize_action(name, None)
|
|
||||||
}
|
}
|
||||||
.with_context(|| {
|
.with_context(|| {
|
||||||
format!(
|
format!(
|
||||||
|
@ -118,3 +128,24 @@ impl KeymapFile {
|
||||||
serde_json::to_value(root_schema).unwrap()
|
serde_json::to_value(root_schema).unwrap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::KeymapFile;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn can_deserialize_keymap_with_trailing_comma() {
|
||||||
|
let json = indoc::indoc! {"[
|
||||||
|
// Standard macOS bindings
|
||||||
|
{
|
||||||
|
\"bindings\": {
|
||||||
|
\"up\": \"menu::SelectPrev\",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
"
|
||||||
|
|
||||||
|
};
|
||||||
|
KeymapFile::parse(json).unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -834,11 +834,8 @@ fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len:
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
|
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
|
||||||
Ok(serde_json::from_reader(
|
Ok(serde_json_lenient::from_str(content)?)
|
||||||
json_comments::CommentSettings::c_style().strip_comments(content.as_bytes()),
|
|
||||||
)?)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
|
@ -669,7 +669,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for () {
|
||||||
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
|
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
|
||||||
fn begin_leaf(&mut self) {}
|
fn begin_leaf(&mut self) {}
|
||||||
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
|
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
|
||||||
self.tree.push_tree(
|
self.tree.append(
|
||||||
SumTree(Arc::new(Node::Leaf {
|
SumTree(Arc::new(Node::Leaf {
|
||||||
summary: mem::take(&mut self.leaf_summary),
|
summary: mem::take(&mut self.leaf_summary),
|
||||||
items: mem::take(&mut self.leaf_items),
|
items: mem::take(&mut self.leaf_items),
|
||||||
|
@ -689,7 +689,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
|
||||||
_: &T::Summary,
|
_: &T::Summary,
|
||||||
cx: &<T::Summary as Summary>::Context,
|
cx: &<T::Summary as Summary>::Context,
|
||||||
) {
|
) {
|
||||||
self.tree.push_tree(tree.clone(), cx);
|
self.tree.append(tree.clone(), cx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -268,7 +268,7 @@ impl<T: Item> SumTree<T> {
|
||||||
|
|
||||||
for item in iter {
|
for item in iter {
|
||||||
if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE {
|
if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE {
|
||||||
self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx);
|
self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
|
||||||
}
|
}
|
||||||
|
|
||||||
if leaf.is_none() {
|
if leaf.is_none() {
|
||||||
|
@ -295,13 +295,13 @@ impl<T: Item> SumTree<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
if leaf.is_some() {
|
if leaf.is_some() {
|
||||||
self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx);
|
self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn push(&mut self, item: T, cx: &<T::Summary as Summary>::Context) {
|
pub fn push(&mut self, item: T, cx: &<T::Summary as Summary>::Context) {
|
||||||
let summary = item.summary();
|
let summary = item.summary();
|
||||||
self.push_tree(
|
self.append(
|
||||||
SumTree(Arc::new(Node::Leaf {
|
SumTree(Arc::new(Node::Leaf {
|
||||||
summary: summary.clone(),
|
summary: summary.clone(),
|
||||||
items: ArrayVec::from_iter(Some(item)),
|
items: ArrayVec::from_iter(Some(item)),
|
||||||
|
@ -311,11 +311,11 @@ impl<T: Item> SumTree<T> {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn push_tree(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) {
|
pub fn append(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) {
|
||||||
if !other.0.is_leaf() || !other.0.items().is_empty() {
|
if !other.0.is_leaf() || !other.0.items().is_empty() {
|
||||||
if self.0.height() < other.0.height() {
|
if self.0.height() < other.0.height() {
|
||||||
for tree in other.0.child_trees() {
|
for tree in other.0.child_trees() {
|
||||||
self.push_tree(tree.clone(), cx);
|
self.append(tree.clone(), cx);
|
||||||
}
|
}
|
||||||
} else if let Some(split_tree) = self.push_tree_recursive(other, cx) {
|
} else if let Some(split_tree) = self.push_tree_recursive(other, cx) {
|
||||||
*self = Self::from_child_trees(self.clone(), split_tree, cx);
|
*self = Self::from_child_trees(self.clone(), split_tree, cx);
|
||||||
|
@ -512,7 +512,7 @@ impl<T: KeyedItem> SumTree<T> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
new_tree.push(item, cx);
|
new_tree.push(item, cx);
|
||||||
new_tree.push_tree(cursor.suffix(cx), cx);
|
new_tree.append(cursor.suffix(cx), cx);
|
||||||
new_tree
|
new_tree
|
||||||
};
|
};
|
||||||
replaced
|
replaced
|
||||||
|
@ -529,7 +529,7 @@ impl<T: KeyedItem> SumTree<T> {
|
||||||
cursor.next(cx);
|
cursor.next(cx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
new_tree.push_tree(cursor.suffix(cx), cx);
|
new_tree.append(cursor.suffix(cx), cx);
|
||||||
new_tree
|
new_tree
|
||||||
};
|
};
|
||||||
removed
|
removed
|
||||||
|
@ -563,7 +563,7 @@ impl<T: KeyedItem> SumTree<T> {
|
||||||
{
|
{
|
||||||
new_tree.extend(buffered_items.drain(..), cx);
|
new_tree.extend(buffered_items.drain(..), cx);
|
||||||
let slice = cursor.slice(&new_key, Bias::Left, cx);
|
let slice = cursor.slice(&new_key, Bias::Left, cx);
|
||||||
new_tree.push_tree(slice, cx);
|
new_tree.append(slice, cx);
|
||||||
old_item = cursor.item();
|
old_item = cursor.item();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -583,7 +583,7 @@ impl<T: KeyedItem> SumTree<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
new_tree.extend(buffered_items, cx);
|
new_tree.extend(buffered_items, cx);
|
||||||
new_tree.push_tree(cursor.suffix(cx), cx);
|
new_tree.append(cursor.suffix(cx), cx);
|
||||||
new_tree
|
new_tree
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -719,7 +719,7 @@ mod tests {
|
||||||
let mut tree2 = SumTree::new();
|
let mut tree2 = SumTree::new();
|
||||||
tree2.extend(50..100, &());
|
tree2.extend(50..100, &());
|
||||||
|
|
||||||
tree1.push_tree(tree2, &());
|
tree1.append(tree2, &());
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tree1.items(&()),
|
tree1.items(&()),
|
||||||
(0..20).chain(50..100).collect::<Vec<u8>>()
|
(0..20).chain(50..100).collect::<Vec<u8>>()
|
||||||
|
@ -766,7 +766,7 @@ mod tests {
|
||||||
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
|
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
|
||||||
new_tree.extend(new_items, &());
|
new_tree.extend(new_items, &());
|
||||||
cursor.seek(&Count(splice_end), Bias::Right, &());
|
cursor.seek(&Count(splice_end), Bias::Right, &());
|
||||||
new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &());
|
new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &());
|
||||||
new_tree
|
new_tree
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -67,7 +67,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
|
||||||
removed = Some(cursor.item().unwrap().value.clone());
|
removed = Some(cursor.item().unwrap().value.clone());
|
||||||
cursor.next(&());
|
cursor.next(&());
|
||||||
}
|
}
|
||||||
new_tree.push_tree(cursor.suffix(&()), &());
|
new_tree.append(cursor.suffix(&()), &());
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
self.0 = new_tree;
|
self.0 = new_tree;
|
||||||
removed
|
removed
|
||||||
|
@ -79,7 +79,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
|
||||||
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
|
||||||
let mut new_tree = cursor.slice(&start, Bias::Left, &());
|
let mut new_tree = cursor.slice(&start, Bias::Left, &());
|
||||||
cursor.seek(&end, Bias::Left, &());
|
cursor.seek(&end, Bias::Left, &());
|
||||||
new_tree.push_tree(cursor.suffix(&()), &());
|
new_tree.append(cursor.suffix(&()), &());
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
self.0 = new_tree;
|
self.0 = new_tree;
|
||||||
}
|
}
|
||||||
|
@ -117,7 +117,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
|
||||||
new_tree.push(updated, &());
|
new_tree.push(updated, &());
|
||||||
cursor.next(&());
|
cursor.next(&());
|
||||||
}
|
}
|
||||||
new_tree.push_tree(cursor.suffix(&()), &());
|
new_tree.append(cursor.suffix(&()), &());
|
||||||
drop(cursor);
|
drop(cursor);
|
||||||
self.0 = new_tree;
|
self.0 = new_tree;
|
||||||
result
|
result
|
||||||
|
|
|
@ -600,7 +600,7 @@ impl Buffer {
|
||||||
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
|
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
|
||||||
let mut new_fragments =
|
let mut new_fragments =
|
||||||
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
|
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
|
||||||
new_ropes.push_tree(new_fragments.summary().text);
|
new_ropes.append(new_fragments.summary().text);
|
||||||
|
|
||||||
let mut fragment_start = old_fragments.start().visible;
|
let mut fragment_start = old_fragments.start().visible;
|
||||||
for (range, new_text) in edits {
|
for (range, new_text) in edits {
|
||||||
|
@ -625,8 +625,8 @@ impl Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
let slice = old_fragments.slice(&range.start, Bias::Right, &None);
|
let slice = old_fragments.slice(&range.start, Bias::Right, &None);
|
||||||
new_ropes.push_tree(slice.summary().text);
|
new_ropes.append(slice.summary().text);
|
||||||
new_fragments.push_tree(slice, &None);
|
new_fragments.append(slice, &None);
|
||||||
fragment_start = old_fragments.start().visible;
|
fragment_start = old_fragments.start().visible;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -728,8 +728,8 @@ impl Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
let suffix = old_fragments.suffix(&None);
|
let suffix = old_fragments.suffix(&None);
|
||||||
new_ropes.push_tree(suffix.summary().text);
|
new_ropes.append(suffix.summary().text);
|
||||||
new_fragments.push_tree(suffix, &None);
|
new_fragments.append(suffix, &None);
|
||||||
let (visible_text, deleted_text) = new_ropes.finish();
|
let (visible_text, deleted_text) = new_ropes.finish();
|
||||||
drop(old_fragments);
|
drop(old_fragments);
|
||||||
|
|
||||||
|
@ -828,7 +828,7 @@ impl Buffer {
|
||||||
Bias::Left,
|
Bias::Left,
|
||||||
&cx,
|
&cx,
|
||||||
);
|
);
|
||||||
new_ropes.push_tree(new_fragments.summary().text);
|
new_ropes.append(new_fragments.summary().text);
|
||||||
|
|
||||||
let mut fragment_start = old_fragments.start().0.full_offset();
|
let mut fragment_start = old_fragments.start().0.full_offset();
|
||||||
for (range, new_text) in edits {
|
for (range, new_text) in edits {
|
||||||
|
@ -854,8 +854,8 @@ impl Buffer {
|
||||||
|
|
||||||
let slice =
|
let slice =
|
||||||
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
|
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
|
||||||
new_ropes.push_tree(slice.summary().text);
|
new_ropes.append(slice.summary().text);
|
||||||
new_fragments.push_tree(slice, &None);
|
new_fragments.append(slice, &None);
|
||||||
fragment_start = old_fragments.start().0.full_offset();
|
fragment_start = old_fragments.start().0.full_offset();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -986,8 +986,8 @@ impl Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
let suffix = old_fragments.suffix(&cx);
|
let suffix = old_fragments.suffix(&cx);
|
||||||
new_ropes.push_tree(suffix.summary().text);
|
new_ropes.append(suffix.summary().text);
|
||||||
new_fragments.push_tree(suffix, &None);
|
new_fragments.append(suffix, &None);
|
||||||
let (visible_text, deleted_text) = new_ropes.finish();
|
let (visible_text, deleted_text) = new_ropes.finish();
|
||||||
drop(old_fragments);
|
drop(old_fragments);
|
||||||
|
|
||||||
|
@ -1056,8 +1056,8 @@ impl Buffer {
|
||||||
|
|
||||||
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
|
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
|
||||||
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
|
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
|
||||||
new_ropes.push_tree(preceding_fragments.summary().text);
|
new_ropes.append(preceding_fragments.summary().text);
|
||||||
new_fragments.push_tree(preceding_fragments, &None);
|
new_fragments.append(preceding_fragments, &None);
|
||||||
|
|
||||||
if let Some(fragment) = old_fragments.item() {
|
if let Some(fragment) = old_fragments.item() {
|
||||||
let mut fragment = fragment.clone();
|
let mut fragment = fragment.clone();
|
||||||
|
@ -1087,8 +1087,8 @@ impl Buffer {
|
||||||
}
|
}
|
||||||
|
|
||||||
let suffix = old_fragments.suffix(&None);
|
let suffix = old_fragments.suffix(&None);
|
||||||
new_ropes.push_tree(suffix.summary().text);
|
new_ropes.append(suffix.summary().text);
|
||||||
new_fragments.push_tree(suffix, &None);
|
new_fragments.append(suffix, &None);
|
||||||
|
|
||||||
drop(old_fragments);
|
drop(old_fragments);
|
||||||
let (visible_text, deleted_text) = new_ropes.finish();
|
let (visible_text, deleted_text) = new_ropes.finish();
|
||||||
|
@ -2070,7 +2070,7 @@ impl<'a> RopeBuilder<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn push_tree(&mut self, len: FragmentTextSummary) {
|
fn append(&mut self, len: FragmentTextSummary) {
|
||||||
self.push(len.visible, true, true);
|
self.push(len.visible, true, true);
|
||||||
self.push(len.deleted, false, false);
|
self.push(len.deleted, false, false);
|
||||||
}
|
}
|
||||||
|
|
|
@ -598,8 +598,8 @@ impl StatusItemView for PanelButtons {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
pub(crate) mod test {
|
pub mod test {
|
||||||
use super::*;
|
use super::*;
|
||||||
use gpui::{ViewContext, WindowContext};
|
use gpui::{ViewContext, WindowContext};
|
||||||
|
|
||||||
|
|
|
@ -710,8 +710,8 @@ impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(any(test, feature = "test-support"))]
|
||||||
pub(crate) mod test {
|
pub mod test {
|
||||||
use super::{Item, ItemEvent};
|
use super::{Item, ItemEvent};
|
||||||
use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
|
use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
|
||||||
use gpui::{
|
use gpui::{
|
||||||
|
|
|
@ -162,6 +162,12 @@ define_connection! {
|
||||||
ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
|
ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
|
||||||
ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
|
ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
|
||||||
ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
|
ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
|
||||||
|
),
|
||||||
|
// Add panel zoom persistence
|
||||||
|
sql!(
|
||||||
|
ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
|
||||||
|
ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
|
||||||
|
ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
|
||||||
)];
|
)];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -196,10 +202,13 @@ impl WorkspaceDb {
|
||||||
display,
|
display,
|
||||||
left_dock_visible,
|
left_dock_visible,
|
||||||
left_dock_active_panel,
|
left_dock_active_panel,
|
||||||
|
left_dock_zoom,
|
||||||
right_dock_visible,
|
right_dock_visible,
|
||||||
right_dock_active_panel,
|
right_dock_active_panel,
|
||||||
|
right_dock_zoom,
|
||||||
bottom_dock_visible,
|
bottom_dock_visible,
|
||||||
bottom_dock_active_panel
|
bottom_dock_active_panel,
|
||||||
|
bottom_dock_zoom
|
||||||
FROM workspaces
|
FROM workspaces
|
||||||
WHERE workspace_location = ?
|
WHERE workspace_location = ?
|
||||||
})
|
})
|
||||||
|
@ -244,22 +253,28 @@ impl WorkspaceDb {
|
||||||
workspace_location,
|
workspace_location,
|
||||||
left_dock_visible,
|
left_dock_visible,
|
||||||
left_dock_active_panel,
|
left_dock_active_panel,
|
||||||
|
left_dock_zoom,
|
||||||
right_dock_visible,
|
right_dock_visible,
|
||||||
right_dock_active_panel,
|
right_dock_active_panel,
|
||||||
|
right_dock_zoom,
|
||||||
bottom_dock_visible,
|
bottom_dock_visible,
|
||||||
bottom_dock_active_panel,
|
bottom_dock_active_panel,
|
||||||
|
bottom_dock_zoom,
|
||||||
timestamp
|
timestamp
|
||||||
)
|
)
|
||||||
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, CURRENT_TIMESTAMP)
|
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
|
||||||
ON CONFLICT DO
|
ON CONFLICT DO
|
||||||
UPDATE SET
|
UPDATE SET
|
||||||
workspace_location = ?2,
|
workspace_location = ?2,
|
||||||
left_dock_visible = ?3,
|
left_dock_visible = ?3,
|
||||||
left_dock_active_panel = ?4,
|
left_dock_active_panel = ?4,
|
||||||
right_dock_visible = ?5,
|
left_dock_zoom = ?5,
|
||||||
right_dock_active_panel = ?6,
|
right_dock_visible = ?6,
|
||||||
bottom_dock_visible = ?7,
|
right_dock_active_panel = ?7,
|
||||||
bottom_dock_active_panel = ?8,
|
right_dock_zoom = ?8,
|
||||||
|
bottom_dock_visible = ?9,
|
||||||
|
bottom_dock_active_panel = ?10,
|
||||||
|
bottom_dock_zoom = ?11,
|
||||||
timestamp = CURRENT_TIMESTAMP
|
timestamp = CURRENT_TIMESTAMP
|
||||||
))?((workspace.id, &workspace.location, workspace.docks))
|
))?((workspace.id, &workspace.location, workspace.docks))
|
||||||
.context("Updating workspace")?;
|
.context("Updating workspace")?;
|
||||||
|
|
|
@ -100,16 +100,19 @@ impl Bind for DockStructure {
|
||||||
pub struct DockData {
|
pub struct DockData {
|
||||||
pub(crate) visible: bool,
|
pub(crate) visible: bool,
|
||||||
pub(crate) active_panel: Option<String>,
|
pub(crate) active_panel: Option<String>,
|
||||||
|
pub(crate) zoom: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Column for DockData {
|
impl Column for DockData {
|
||||||
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
|
||||||
let (visible, next_index) = Option::<bool>::column(statement, start_index)?;
|
let (visible, next_index) = Option::<bool>::column(statement, start_index)?;
|
||||||
let (active_panel, next_index) = Option::<String>::column(statement, next_index)?;
|
let (active_panel, next_index) = Option::<String>::column(statement, next_index)?;
|
||||||
|
let (zoom, next_index) = Option::<bool>::column(statement, next_index)?;
|
||||||
Ok((
|
Ok((
|
||||||
DockData {
|
DockData {
|
||||||
visible: visible.unwrap_or(false),
|
visible: visible.unwrap_or(false),
|
||||||
active_panel,
|
active_panel,
|
||||||
|
zoom: zoom.unwrap_or(false),
|
||||||
},
|
},
|
||||||
next_index,
|
next_index,
|
||||||
))
|
))
|
||||||
|
@ -119,7 +122,8 @@ impl Column for DockData {
|
||||||
impl Bind for DockData {
|
impl Bind for DockData {
|
||||||
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
|
||||||
let next_index = statement.bind(&self.visible, start_index)?;
|
let next_index = statement.bind(&self.visible, start_index)?;
|
||||||
statement.bind(&self.active_panel, next_index)
|
let next_index = statement.bind(&self.active_panel, next_index)?;
|
||||||
|
statement.bind(&self.zoom, next_index)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -553,6 +553,10 @@ impl Workspace {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
project::Event::Notification(message) => this.show_notification(0, cx, |cx| {
|
||||||
|
cx.add_view(|_| MessageNotification::new(message.clone()))
|
||||||
|
}),
|
||||||
|
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
cx.notify()
|
cx.notify()
|
||||||
|
@ -919,6 +923,7 @@ impl Workspace {
|
||||||
this.zoomed = None;
|
this.zoomed = None;
|
||||||
this.zoomed_position = None;
|
this.zoomed_position = None;
|
||||||
}
|
}
|
||||||
|
this.update_active_view_for_followers(cx);
|
||||||
cx.notify();
|
cx.notify();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1598,9 +1603,7 @@ impl Workspace {
|
||||||
focus_center = true;
|
focus_center = true;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if active_panel.is_zoomed(cx) {
|
cx.focus(active_panel.as_any());
|
||||||
cx.focus(active_panel.as_any());
|
|
||||||
}
|
|
||||||
reveal_dock = true;
|
reveal_dock = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1946,18 +1949,7 @@ impl Workspace {
|
||||||
self.zoomed = None;
|
self.zoomed = None;
|
||||||
}
|
}
|
||||||
self.zoomed_position = None;
|
self.zoomed_position = None;
|
||||||
|
self.update_active_view_for_followers(cx);
|
||||||
self.update_followers(
|
|
||||||
proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView {
|
|
||||||
id: self.active_item(cx).and_then(|item| {
|
|
||||||
item.to_followable_item_handle(cx)?
|
|
||||||
.remote_id(&self.app_state.client, cx)
|
|
||||||
.map(|id| id.to_proto())
|
|
||||||
}),
|
|
||||||
leader_id: self.leader_for_pane(&pane),
|
|
||||||
}),
|
|
||||||
cx,
|
|
||||||
);
|
|
||||||
|
|
||||||
cx.notify();
|
cx.notify();
|
||||||
}
|
}
|
||||||
|
@ -2646,6 +2638,30 @@ impl Workspace {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn update_active_view_for_followers(&self, cx: &AppContext) {
|
||||||
|
if self.active_pane.read(cx).has_focus() {
|
||||||
|
self.update_followers(
|
||||||
|
proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView {
|
||||||
|
id: self.active_item(cx).and_then(|item| {
|
||||||
|
item.to_followable_item_handle(cx)?
|
||||||
|
.remote_id(&self.app_state.client, cx)
|
||||||
|
.map(|id| id.to_proto())
|
||||||
|
}),
|
||||||
|
leader_id: self.leader_for_pane(&self.active_pane),
|
||||||
|
}),
|
||||||
|
cx,
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
self.update_followers(
|
||||||
|
proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView {
|
||||||
|
id: None,
|
||||||
|
leader_id: None,
|
||||||
|
}),
|
||||||
|
cx,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn update_followers(
|
fn update_followers(
|
||||||
&self,
|
&self,
|
||||||
update: proto::update_followers::Variant,
|
update: proto::update_followers::Variant,
|
||||||
|
@ -2693,12 +2709,10 @@ impl Workspace {
|
||||||
.and_then(|id| state.items_by_leader_view_id.get(&id))
|
.and_then(|id| state.items_by_leader_view_id.get(&id))
|
||||||
{
|
{
|
||||||
items_to_activate.push((pane.clone(), item.boxed_clone()));
|
items_to_activate.push((pane.clone(), item.boxed_clone()));
|
||||||
} else {
|
} else if let Some(shared_screen) =
|
||||||
if let Some(shared_screen) =
|
self.shared_screen_for_peer(leader_id, pane, cx)
|
||||||
self.shared_screen_for_peer(leader_id, pane, cx)
|
{
|
||||||
{
|
items_to_activate.push((pane.clone(), Box::new(shared_screen)));
|
||||||
items_to_activate.push((pane.clone(), Box::new(shared_screen)));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2838,7 +2852,7 @@ impl Workspace {
|
||||||
cx.notify();
|
cx.notify();
|
||||||
}
|
}
|
||||||
|
|
||||||
fn serialize_workspace(&self, cx: &AppContext) {
|
fn serialize_workspace(&self, cx: &ViewContext<Self>) {
|
||||||
fn serialize_pane_handle(
|
fn serialize_pane_handle(
|
||||||
pane_handle: &ViewHandle<Pane>,
|
pane_handle: &ViewHandle<Pane>,
|
||||||
cx: &AppContext,
|
cx: &AppContext,
|
||||||
|
@ -2881,7 +2895,7 @@ impl Workspace {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_serialized_docks(this: &Workspace, cx: &AppContext) -> DockStructure {
|
fn build_serialized_docks(this: &Workspace, cx: &ViewContext<Workspace>) -> DockStructure {
|
||||||
let left_dock = this.left_dock.read(cx);
|
let left_dock = this.left_dock.read(cx);
|
||||||
let left_visible = left_dock.is_open();
|
let left_visible = left_dock.is_open();
|
||||||
let left_active_panel = left_dock.visible_panel().and_then(|panel| {
|
let left_active_panel = left_dock.visible_panel().and_then(|panel| {
|
||||||
|
@ -2890,6 +2904,10 @@ impl Workspace {
|
||||||
.to_string(),
|
.to_string(),
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
let left_dock_zoom = left_dock
|
||||||
|
.visible_panel()
|
||||||
|
.map(|panel| panel.is_zoomed(cx))
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
let right_dock = this.right_dock.read(cx);
|
let right_dock = this.right_dock.read(cx);
|
||||||
let right_visible = right_dock.is_open();
|
let right_visible = right_dock.is_open();
|
||||||
|
@ -2899,6 +2917,10 @@ impl Workspace {
|
||||||
.to_string(),
|
.to_string(),
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
let right_dock_zoom = right_dock
|
||||||
|
.visible_panel()
|
||||||
|
.map(|panel| panel.is_zoomed(cx))
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
let bottom_dock = this.bottom_dock.read(cx);
|
let bottom_dock = this.bottom_dock.read(cx);
|
||||||
let bottom_visible = bottom_dock.is_open();
|
let bottom_visible = bottom_dock.is_open();
|
||||||
|
@ -2908,19 +2930,26 @@ impl Workspace {
|
||||||
.to_string(),
|
.to_string(),
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
let bottom_dock_zoom = bottom_dock
|
||||||
|
.visible_panel()
|
||||||
|
.map(|panel| panel.is_zoomed(cx))
|
||||||
|
.unwrap_or(false);
|
||||||
|
|
||||||
DockStructure {
|
DockStructure {
|
||||||
left: DockData {
|
left: DockData {
|
||||||
visible: left_visible,
|
visible: left_visible,
|
||||||
active_panel: left_active_panel,
|
active_panel: left_active_panel,
|
||||||
|
zoom: left_dock_zoom,
|
||||||
},
|
},
|
||||||
right: DockData {
|
right: DockData {
|
||||||
visible: right_visible,
|
visible: right_visible,
|
||||||
active_panel: right_active_panel,
|
active_panel: right_active_panel,
|
||||||
|
zoom: right_dock_zoom,
|
||||||
},
|
},
|
||||||
bottom: DockData {
|
bottom: DockData {
|
||||||
visible: bottom_visible,
|
visible: bottom_visible,
|
||||||
active_panel: bottom_active_panel,
|
active_panel: bottom_active_panel,
|
||||||
|
zoom: bottom_dock_zoom,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3033,14 +3062,31 @@ impl Workspace {
|
||||||
dock.activate_panel(ix, cx);
|
dock.activate_panel(ix, cx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
dock.active_panel()
|
||||||
|
.map(|panel| {
|
||||||
|
panel.set_zoomed(docks.left.zoom, cx)
|
||||||
|
});
|
||||||
|
if docks.left.visible && docks.left.zoom {
|
||||||
|
cx.focus_self()
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
// TODO: I think the bug is that setting zoom or active undoes the bottom zoom or something
|
||||||
workspace.right_dock.update(cx, |dock, cx| {
|
workspace.right_dock.update(cx, |dock, cx| {
|
||||||
dock.set_open(docks.right.visible, cx);
|
dock.set_open(docks.right.visible, cx);
|
||||||
if let Some(active_panel) = docks.right.active_panel {
|
if let Some(active_panel) = docks.right.active_panel {
|
||||||
if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) {
|
if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) {
|
||||||
dock.activate_panel(ix, cx);
|
dock.activate_panel(ix, cx);
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
dock.active_panel()
|
||||||
|
.map(|panel| {
|
||||||
|
panel.set_zoomed(docks.right.zoom, cx)
|
||||||
|
});
|
||||||
|
|
||||||
|
if docks.right.visible && docks.right.zoom {
|
||||||
|
cx.focus_self()
|
||||||
|
}
|
||||||
});
|
});
|
||||||
workspace.bottom_dock.update(cx, |dock, cx| {
|
workspace.bottom_dock.update(cx, |dock, cx| {
|
||||||
dock.set_open(docks.bottom.visible, cx);
|
dock.set_open(docks.bottom.visible, cx);
|
||||||
|
@ -3049,8 +3095,18 @@ impl Workspace {
|
||||||
dock.activate_panel(ix, cx);
|
dock.activate_panel(ix, cx);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dock.active_panel()
|
||||||
|
.map(|panel| {
|
||||||
|
panel.set_zoomed(docks.bottom.zoom, cx)
|
||||||
|
});
|
||||||
|
|
||||||
|
if docks.bottom.visible && docks.bottom.zoom {
|
||||||
|
cx.focus_self()
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
cx.notify();
|
cx.notify();
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
|
@ -4413,7 +4469,7 @@ mod tests {
|
||||||
workspace.read_with(cx, |workspace, cx| {
|
workspace.read_with(cx, |workspace, cx| {
|
||||||
assert!(workspace.right_dock().read(cx).is_open());
|
assert!(workspace.right_dock().read(cx).is_open());
|
||||||
assert!(!panel.is_zoomed(cx));
|
assert!(!panel.is_zoomed(cx));
|
||||||
assert!(!panel.has_focus(cx));
|
assert!(panel.has_focus(cx));
|
||||||
});
|
});
|
||||||
|
|
||||||
// Focus and zoom panel
|
// Focus and zoom panel
|
||||||
|
@ -4488,7 +4544,7 @@ mod tests {
|
||||||
workspace.read_with(cx, |workspace, cx| {
|
workspace.read_with(cx, |workspace, cx| {
|
||||||
let pane = pane.read(cx);
|
let pane = pane.read(cx);
|
||||||
assert!(!pane.is_zoomed());
|
assert!(!pane.is_zoomed());
|
||||||
assert!(pane.has_focus());
|
assert!(!pane.has_focus());
|
||||||
assert!(workspace.right_dock().read(cx).is_open());
|
assert!(workspace.right_dock().read(cx).is_open());
|
||||||
assert!(workspace.zoomed.is_none());
|
assert!(workspace.zoomed.is_none());
|
||||||
});
|
});
|
||||||
|
|
|
@ -4,12 +4,11 @@ use futures::StreamExt;
|
||||||
pub use language::*;
|
pub use language::*;
|
||||||
use smol::fs::{self, File};
|
use smol::fs::{self, File};
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||||
use util::fs::remove_matching;
|
use util::{
|
||||||
use util::github::latest_github_release;
|
fs::remove_matching,
|
||||||
use util::http::HttpClient;
|
github::{latest_github_release, GitHubLspBinaryVersion},
|
||||||
use util::ResultExt;
|
ResultExt,
|
||||||
|
};
|
||||||
use util::github::GitHubLspBinaryVersion;
|
|
||||||
|
|
||||||
pub struct CLspAdapter;
|
pub struct CLspAdapter;
|
||||||
|
|
||||||
|
@ -21,9 +20,9 @@ impl super::LspAdapter for CLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let release = latest_github_release("clangd/clangd", false, http).await?;
|
let release = latest_github_release("clangd/clangd", false, delegate.http_client()).await?;
|
||||||
let asset_name = format!("clangd-mac-{}.zip", release.name);
|
let asset_name = format!("clangd-mac-{}.zip", release.name);
|
||||||
let asset = release
|
let asset = release
|
||||||
.assets
|
.assets
|
||||||
|
@ -40,8 +39,8 @@ impl super::LspAdapter for CLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||||
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
|
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
|
||||||
|
@ -49,7 +48,8 @@ impl super::LspAdapter for CLspAdapter {
|
||||||
let binary_path = version_dir.join("bin/clangd");
|
let binary_path = version_dir.join("bin/clangd");
|
||||||
|
|
||||||
if fs::metadata(&binary_path).await.is_err() {
|
if fs::metadata(&binary_path).await.is_err() {
|
||||||
let mut response = http
|
let mut response = delegate
|
||||||
|
.http_client()
|
||||||
.get(&version.url, Default::default(), true)
|
.get(&version.url, Default::default(), true)
|
||||||
.await
|
.await
|
||||||
.context("error downloading release")?;
|
.context("error downloading release")?;
|
||||||
|
@ -81,7 +81,11 @@ impl super::LspAdapter for CLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_clangd_dir = None;
|
let mut last_clangd_dir = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -1,16 +1,23 @@
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
|
use gpui::{AsyncAppContext, Task};
|
||||||
pub use language::*;
|
pub use language::*;
|
||||||
use lsp::{CompletionItemKind, SymbolKind};
|
use lsp::{CompletionItemKind, SymbolKind};
|
||||||
use smol::fs::{self, File};
|
use smol::fs::{self, File};
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
use std::{
|
||||||
use util::fs::remove_matching;
|
any::Any,
|
||||||
use util::github::latest_github_release;
|
path::PathBuf,
|
||||||
use util::http::HttpClient;
|
sync::{
|
||||||
use util::ResultExt;
|
atomic::{AtomicBool, Ordering::SeqCst},
|
||||||
|
Arc,
|
||||||
use util::github::GitHubLspBinaryVersion;
|
},
|
||||||
|
};
|
||||||
|
use util::{
|
||||||
|
fs::remove_matching,
|
||||||
|
github::{latest_github_release, GitHubLspBinaryVersion},
|
||||||
|
ResultExt,
|
||||||
|
};
|
||||||
|
|
||||||
pub struct ElixirLspAdapter;
|
pub struct ElixirLspAdapter;
|
||||||
|
|
||||||
|
@ -20,11 +27,43 @@ impl LspAdapter for ElixirLspAdapter {
|
||||||
LanguageServerName("elixir-ls".into())
|
LanguageServerName("elixir-ls".into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn will_start_server(
|
||||||
|
&self,
|
||||||
|
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
cx: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
|
||||||
|
|
||||||
|
const NOTIFICATION_MESSAGE: &str = "Could not run the elixir language server, `elixir-ls`, because `elixir` was not found.";
|
||||||
|
|
||||||
|
let delegate = delegate.clone();
|
||||||
|
Some(cx.spawn(|mut cx| async move {
|
||||||
|
let elixir_output = smol::process::Command::new("elixir")
|
||||||
|
.args(["--version"])
|
||||||
|
.output()
|
||||||
|
.await;
|
||||||
|
if elixir_output.is_err() {
|
||||||
|
if DID_SHOW_NOTIFICATION
|
||||||
|
.compare_exchange(false, true, SeqCst, SeqCst)
|
||||||
|
.is_ok()
|
||||||
|
{
|
||||||
|
cx.update(|cx| {
|
||||||
|
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return Err(anyhow!("cannot run elixir-ls"));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let release = latest_github_release("elixir-lsp/elixir-ls", false, http).await?;
|
let release =
|
||||||
|
latest_github_release("elixir-lsp/elixir-ls", false, delegate.http_client()).await?;
|
||||||
let asset_name = "elixir-ls.zip";
|
let asset_name = "elixir-ls.zip";
|
||||||
let asset = release
|
let asset = release
|
||||||
.assets
|
.assets
|
||||||
|
@ -41,8 +80,8 @@ impl LspAdapter for ElixirLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||||
let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name));
|
let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name));
|
||||||
|
@ -50,7 +89,8 @@ impl LspAdapter for ElixirLspAdapter {
|
||||||
let binary_path = version_dir.join("language_server.sh");
|
let binary_path = version_dir.join("language_server.sh");
|
||||||
|
|
||||||
if fs::metadata(&binary_path).await.is_err() {
|
if fs::metadata(&binary_path).await.is_err() {
|
||||||
let mut response = http
|
let mut response = delegate
|
||||||
|
.http_client()
|
||||||
.get(&version.url, Default::default(), true)
|
.get(&version.url, Default::default(), true)
|
||||||
.await
|
.await
|
||||||
.context("error downloading release")?;
|
.context("error downloading release")?;
|
||||||
|
@ -88,7 +128,11 @@ impl LspAdapter for ElixirLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last = None;
|
let mut last = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -36,8 +36,6 @@
|
||||||
|
|
||||||
(char) @constant
|
(char) @constant
|
||||||
|
|
||||||
(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
|
|
||||||
|
|
||||||
(escape_sequence) @string.escape
|
(escape_sequence) @string.escape
|
||||||
|
|
||||||
[
|
[
|
||||||
|
@ -146,3 +144,10 @@
|
||||||
"<<"
|
"<<"
|
||||||
">>"
|
">>"
|
||||||
] @punctuation.bracket
|
] @punctuation.bracket
|
||||||
|
|
||||||
|
(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
|
||||||
|
|
||||||
|
((sigil
|
||||||
|
(sigil_name) @_sigil_name
|
||||||
|
(quoted_content) @embedded)
|
||||||
|
(#eq? @_sigil_name "H"))
|
||||||
|
|
|
@ -1,16 +1,23 @@
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
|
use gpui::{AsyncAppContext, Task};
|
||||||
pub use language::*;
|
pub use language::*;
|
||||||
use lazy_static::lazy_static;
|
use lazy_static::lazy_static;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use smol::{fs, process};
|
use smol::{fs, process};
|
||||||
use std::ffi::{OsStr, OsString};
|
use std::{
|
||||||
use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc};
|
any::Any,
|
||||||
use util::fs::remove_matching;
|
ffi::{OsStr, OsString},
|
||||||
use util::github::latest_github_release;
|
ops::Range,
|
||||||
use util::http::HttpClient;
|
path::PathBuf,
|
||||||
use util::ResultExt;
|
str,
|
||||||
|
sync::{
|
||||||
|
atomic::{AtomicBool, Ordering::SeqCst},
|
||||||
|
Arc,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
use util::{fs::remove_matching, github::latest_github_release, ResultExt};
|
||||||
|
|
||||||
fn server_binary_arguments() -> Vec<OsString> {
|
fn server_binary_arguments() -> Vec<OsString> {
|
||||||
vec!["-mode=stdio".into()]
|
vec!["-mode=stdio".into()]
|
||||||
|
@ -31,9 +38,9 @@ impl super::LspAdapter for GoLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let release = latest_github_release("golang/tools", false, http).await?;
|
let release = latest_github_release("golang/tools", false, delegate.http_client()).await?;
|
||||||
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
|
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
|
||||||
if version.is_none() {
|
if version.is_none() {
|
||||||
log::warn!(
|
log::warn!(
|
||||||
|
@ -44,11 +51,39 @@ impl super::LspAdapter for GoLspAdapter {
|
||||||
Ok(Box::new(version) as Box<_>)
|
Ok(Box::new(version) as Box<_>)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn will_fetch_server(
|
||||||
|
&self,
|
||||||
|
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||||
|
cx: &mut AsyncAppContext,
|
||||||
|
) -> Option<Task<Result<()>>> {
|
||||||
|
static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
|
||||||
|
|
||||||
|
const NOTIFICATION_MESSAGE: &str =
|
||||||
|
"Could not install the Go language server `gopls`, because `go` was not found.";
|
||||||
|
|
||||||
|
let delegate = delegate.clone();
|
||||||
|
Some(cx.spawn(|mut cx| async move {
|
||||||
|
let install_output = process::Command::new("go").args(["version"]).output().await;
|
||||||
|
if install_output.is_err() {
|
||||||
|
if DID_SHOW_NOTIFICATION
|
||||||
|
.compare_exchange(false, true, SeqCst, SeqCst)
|
||||||
|
.is_ok()
|
||||||
|
{
|
||||||
|
cx.update(|cx| {
|
||||||
|
delegate.show_notification(NOTIFICATION_MESSAGE, cx);
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return Err(anyhow!("cannot install gopls"));
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<Option<String>>().unwrap();
|
let version = version.downcast::<Option<String>>().unwrap();
|
||||||
let this = *self;
|
let this = *self;
|
||||||
|
@ -68,7 +103,10 @@ impl super::LspAdapter for GoLspAdapter {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
|
} else if let Some(path) = this
|
||||||
|
.cached_server_binary(container_dir.clone(), delegate)
|
||||||
|
.await
|
||||||
|
{
|
||||||
return Ok(path);
|
return Ok(path);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -105,7 +143,11 @@ impl super::LspAdapter for GoLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_binary_path = None;
|
let mut last_binary_path = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -1,17 +1,11 @@
|
||||||
; HEEx delimiters
|
; HEEx delimiters
|
||||||
[
|
[
|
||||||
"%>"
|
|
||||||
"--%>"
|
"--%>"
|
||||||
"-->"
|
"-->"
|
||||||
"/>"
|
"/>"
|
||||||
"<!"
|
"<!"
|
||||||
"<!--"
|
"<!--"
|
||||||
"<"
|
"<"
|
||||||
"<%!--"
|
|
||||||
"<%"
|
|
||||||
"<%#"
|
|
||||||
"<%%="
|
|
||||||
"<%="
|
|
||||||
"</"
|
"</"
|
||||||
"</:"
|
"</:"
|
||||||
"<:"
|
"<:"
|
||||||
|
@ -20,6 +14,15 @@
|
||||||
"}"
|
"}"
|
||||||
] @punctuation.bracket
|
] @punctuation.bracket
|
||||||
|
|
||||||
|
[
|
||||||
|
"<%!--"
|
||||||
|
"<%"
|
||||||
|
"<%#"
|
||||||
|
"<%%="
|
||||||
|
"<%="
|
||||||
|
"%>"
|
||||||
|
] @keyword
|
||||||
|
|
||||||
; HEEx operators are highlighted as such
|
; HEEx operators are highlighted as such
|
||||||
"=" @operator
|
"=" @operator
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,13 @@
|
||||||
((directive (partial_expression_value) @content)
|
(
|
||||||
(#set! language "elixir")
|
(directive
|
||||||
(#set! include-children)
|
[
|
||||||
(#set! combined))
|
(partial_expression_value)
|
||||||
|
(expression_value)
|
||||||
; Regular expression_values do not need to be combined
|
(ending_expression_value)
|
||||||
((directive (expression_value) @content)
|
] @content)
|
||||||
(#set! language "elixir"))
|
(#set! language "elixir")
|
||||||
|
(#set! combined)
|
||||||
|
)
|
||||||
|
|
||||||
; expressions live within HTML tags, and do not need to be combined
|
; expressions live within HTML tags, and do not need to be combined
|
||||||
; <link href={ Routes.static_path(..) } />
|
; <link href={ Routes.static_path(..) } />
|
||||||
|
|
|
@ -1,14 +1,16 @@
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use node_runtime::NodeRuntime;
|
use node_runtime::NodeRuntime;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use smol::fs;
|
use smol::fs;
|
||||||
use std::ffi::OsString;
|
use std::{
|
||||||
use std::path::Path;
|
any::Any,
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
ffi::OsString,
|
||||||
use util::http::HttpClient;
|
path::{Path, PathBuf},
|
||||||
|
sync::Arc,
|
||||||
|
};
|
||||||
use util::ResultExt;
|
use util::ResultExt;
|
||||||
|
|
||||||
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||||
|
@ -36,7 +38,7 @@ impl LspAdapter for HtmlLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Any + Send>> {
|
) -> Result<Box<dyn 'static + Any + Send>> {
|
||||||
Ok(Box::new(
|
Ok(Box::new(
|
||||||
self.node
|
self.node
|
||||||
|
@ -48,8 +50,8 @@ impl LspAdapter for HtmlLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<String>().unwrap();
|
let version = version.downcast::<String>().unwrap();
|
||||||
let server_path = container_dir.join(Self::SERVER_PATH);
|
let server_path = container_dir.join(Self::SERVER_PATH);
|
||||||
|
@ -69,7 +71,11 @@ impl LspAdapter for HtmlLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_version_dir = None;
|
let mut last_version_dir = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -3,7 +3,9 @@ use async_trait::async_trait;
|
||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||||
use gpui::AppContext;
|
use gpui::AppContext;
|
||||||
use language::{LanguageRegistry, LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{
|
||||||
|
LanguageRegistry, LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate,
|
||||||
|
};
|
||||||
use node_runtime::NodeRuntime;
|
use node_runtime::NodeRuntime;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
|
use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore};
|
||||||
|
@ -16,7 +18,6 @@ use std::{
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use util::http::HttpClient;
|
|
||||||
use util::{paths, ResultExt};
|
use util::{paths, ResultExt};
|
||||||
|
|
||||||
const SERVER_PATH: &'static str =
|
const SERVER_PATH: &'static str =
|
||||||
|
@ -45,7 +46,7 @@ impl LspAdapter for JsonLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
Ok(Box::new(
|
Ok(Box::new(
|
||||||
self.node
|
self.node
|
||||||
|
@ -57,8 +58,8 @@ impl LspAdapter for JsonLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<String>().unwrap();
|
let version = version.downcast::<String>().unwrap();
|
||||||
let server_path = container_dir.join(SERVER_PATH);
|
let server_path = container_dir.join(SERVER_PATH);
|
||||||
|
@ -78,7 +79,11 @@ impl LspAdapter for JsonLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_version_dir = None;
|
let mut last_version_dir = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -3,10 +3,9 @@ use async_trait::async_trait;
|
||||||
use collections::HashMap;
|
use collections::HashMap;
|
||||||
use futures::lock::Mutex;
|
use futures::lock::Mutex;
|
||||||
use gpui::executor::Background;
|
use gpui::executor::Background;
|
||||||
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
|
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||||
use util::http::HttpClient;
|
|
||||||
use util::ResultExt;
|
use util::ResultExt;
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
|
@ -72,7 +71,7 @@ impl LspAdapter for PluginLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let runtime = self.runtime.clone();
|
let runtime = self.runtime.clone();
|
||||||
let function = self.fetch_latest_server_version;
|
let function = self.fetch_latest_server_version;
|
||||||
|
@ -92,8 +91,8 @@ impl LspAdapter for PluginLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = *version.downcast::<String>().unwrap();
|
let version = *version.downcast::<String>().unwrap();
|
||||||
let runtime = self.runtime.clone();
|
let runtime = self.runtime.clone();
|
||||||
|
@ -110,7 +109,11 @@ impl LspAdapter for PluginLspAdapter {
|
||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
let runtime = self.runtime.clone();
|
let runtime = self.runtime.clone();
|
||||||
let function = self.cached_server_binary;
|
let function = self.cached_server_binary;
|
||||||
|
|
||||||
|
|
|
@ -3,12 +3,14 @@ use async_compression::futures::bufread::GzipDecoder;
|
||||||
use async_tar::Archive;
|
use async_tar::Archive;
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::{io::BufReader, StreamExt};
|
use futures::{io::BufReader, StreamExt};
|
||||||
use language::{LanguageServerBinary, LanguageServerName};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapterDelegate};
|
||||||
use smol::fs;
|
use smol::fs;
|
||||||
use std::{any::Any, env::consts, ffi::OsString, path::PathBuf, sync::Arc};
|
use std::{any::Any, env::consts, ffi::OsString, path::PathBuf};
|
||||||
use util::{async_iife, github::latest_github_release, http::HttpClient, ResultExt};
|
use util::{
|
||||||
|
async_iife,
|
||||||
use util::github::GitHubLspBinaryVersion;
|
github::{latest_github_release, GitHubLspBinaryVersion},
|
||||||
|
ResultExt,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
pub struct LuaLspAdapter;
|
pub struct LuaLspAdapter;
|
||||||
|
@ -28,9 +30,11 @@ impl super::LspAdapter for LuaLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let release = latest_github_release("LuaLS/lua-language-server", false, http).await?;
|
let release =
|
||||||
|
latest_github_release("LuaLS/lua-language-server", false, delegate.http_client())
|
||||||
|
.await?;
|
||||||
let version = release.name.clone();
|
let version = release.name.clone();
|
||||||
let platform = match consts::ARCH {
|
let platform = match consts::ARCH {
|
||||||
"x86_64" => "x64",
|
"x86_64" => "x64",
|
||||||
|
@ -53,15 +57,16 @@ impl super::LspAdapter for LuaLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||||
|
|
||||||
let binary_path = container_dir.join("bin/lua-language-server");
|
let binary_path = container_dir.join("bin/lua-language-server");
|
||||||
|
|
||||||
if fs::metadata(&binary_path).await.is_err() {
|
if fs::metadata(&binary_path).await.is_err() {
|
||||||
let mut response = http
|
let mut response = delegate
|
||||||
|
.http_client()
|
||||||
.get(&version.url, Default::default(), true)
|
.get(&version.url, Default::default(), true)
|
||||||
.await
|
.await
|
||||||
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
||||||
|
@ -81,7 +86,11 @@ impl super::LspAdapter for LuaLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
async_iife!({
|
async_iife!({
|
||||||
let mut last_binary_path = None;
|
let mut last_binary_path = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use node_runtime::NodeRuntime;
|
use node_runtime::NodeRuntime;
|
||||||
use smol::fs;
|
use smol::fs;
|
||||||
use std::{
|
use std::{
|
||||||
|
@ -10,7 +10,6 @@ use std::{
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use util::http::HttpClient;
|
|
||||||
use util::ResultExt;
|
use util::ResultExt;
|
||||||
|
|
||||||
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||||
|
@ -37,7 +36,7 @@ impl LspAdapter for PythonLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Any + Send>> {
|
) -> Result<Box<dyn 'static + Any + Send>> {
|
||||||
Ok(Box::new(self.node.npm_package_latest_version("pyright").await?) as Box<_>)
|
Ok(Box::new(self.node.npm_package_latest_version("pyright").await?) as Box<_>)
|
||||||
}
|
}
|
||||||
|
@ -45,8 +44,8 @@ impl LspAdapter for PythonLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<String>().unwrap();
|
let version = version.downcast::<String>().unwrap();
|
||||||
let server_path = container_dir.join(Self::SERVER_PATH);
|
let server_path = container_dir.join(Self::SERVER_PATH);
|
||||||
|
@ -63,7 +62,11 @@ impl LspAdapter for PythonLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_version_dir = None;
|
let mut last_version_dir = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
use anyhow::{anyhow, Result};
|
use anyhow::{anyhow, Result};
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use std::{any::Any, path::PathBuf, sync::Arc};
|
use std::{any::Any, path::PathBuf, sync::Arc};
|
||||||
use util::http::HttpClient;
|
|
||||||
|
|
||||||
pub struct RubyLanguageServer;
|
pub struct RubyLanguageServer;
|
||||||
|
|
||||||
|
@ -14,7 +13,7 @@ impl LspAdapter for RubyLanguageServer {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Any + Send>> {
|
) -> Result<Box<dyn 'static + Any + Send>> {
|
||||||
Ok(Box::new(()))
|
Ok(Box::new(()))
|
||||||
}
|
}
|
||||||
|
@ -22,13 +21,17 @@ impl LspAdapter for RubyLanguageServer {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
_version: Box<dyn 'static + Send + Any>,
|
_version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
_container_dir: PathBuf,
|
_container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
Err(anyhow!("solargraph must be installed manually"))
|
Err(anyhow!("solargraph must be installed manually"))
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, _container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
_: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
Some(LanguageServerBinary {
|
Some(LanguageServerBinary {
|
||||||
path: "solargraph".into(),
|
path: "solargraph".into(),
|
||||||
arguments: vec!["stdio".into()],
|
arguments: vec!["stdio".into()],
|
||||||
|
|
|
@ -7,10 +7,11 @@ use lazy_static::lazy_static;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use smol::fs::{self, File};
|
use smol::fs::{self, File};
|
||||||
use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
|
use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
|
||||||
use util::fs::remove_matching;
|
use util::{
|
||||||
use util::github::{latest_github_release, GitHubLspBinaryVersion};
|
fs::remove_matching,
|
||||||
use util::http::HttpClient;
|
github::{latest_github_release, GitHubLspBinaryVersion},
|
||||||
use util::ResultExt;
|
ResultExt,
|
||||||
|
};
|
||||||
|
|
||||||
pub struct RustLspAdapter;
|
pub struct RustLspAdapter;
|
||||||
|
|
||||||
|
@ -22,9 +23,11 @@ impl LspAdapter for RustLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
let release = latest_github_release("rust-analyzer/rust-analyzer", false, http).await?;
|
let release =
|
||||||
|
latest_github_release("rust-analyzer/rust-analyzer", false, delegate.http_client())
|
||||||
|
.await?;
|
||||||
let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
|
let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
|
||||||
let asset = release
|
let asset = release
|
||||||
.assets
|
.assets
|
||||||
|
@ -40,14 +43,15 @@ impl LspAdapter for RustLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||||
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
|
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
|
||||||
|
|
||||||
if fs::metadata(&destination_path).await.is_err() {
|
if fs::metadata(&destination_path).await.is_err() {
|
||||||
let mut response = http
|
let mut response = delegate
|
||||||
|
.http_client()
|
||||||
.get(&version.url, Default::default(), true)
|
.get(&version.url, Default::default(), true)
|
||||||
.await
|
.await
|
||||||
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
||||||
|
@ -69,7 +73,11 @@ impl LspAdapter for RustLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last = None;
|
let mut last = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -4,7 +4,7 @@ use async_tar::Archive;
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
use futures::{future::BoxFuture, FutureExt};
|
use futures::{future::BoxFuture, FutureExt};
|
||||||
use gpui::AppContext;
|
use gpui::AppContext;
|
||||||
use language::{LanguageServerBinary, LanguageServerName, LspAdapter};
|
use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||||
use lsp::CodeActionKind;
|
use lsp::CodeActionKind;
|
||||||
use node_runtime::NodeRuntime;
|
use node_runtime::NodeRuntime;
|
||||||
use serde_json::{json, Value};
|
use serde_json::{json, Value};
|
||||||
|
@ -16,7 +16,7 @@ use std::{
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use util::{fs::remove_matching, github::latest_github_release, http::HttpClient};
|
use util::{fs::remove_matching, github::latest_github_release};
|
||||||
use util::{github::GitHubLspBinaryVersion, ResultExt};
|
use util::{github::GitHubLspBinaryVersion, ResultExt};
|
||||||
|
|
||||||
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||||
|
@ -58,7 +58,7 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
Ok(Box::new(TypeScriptVersions {
|
Ok(Box::new(TypeScriptVersions {
|
||||||
typescript_version: self.node.npm_package_latest_version("typescript").await?,
|
typescript_version: self.node.npm_package_latest_version("typescript").await?,
|
||||||
|
@ -72,8 +72,8 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<TypeScriptVersions>().unwrap();
|
let version = version.downcast::<TypeScriptVersions>().unwrap();
|
||||||
let server_path = container_dir.join(Self::NEW_SERVER_PATH);
|
let server_path = container_dir.join(Self::NEW_SERVER_PATH);
|
||||||
|
@ -99,7 +99,11 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let old_server_path = container_dir.join(Self::OLD_SERVER_PATH);
|
let old_server_path = container_dir.join(Self::OLD_SERVER_PATH);
|
||||||
let new_server_path = container_dir.join(Self::NEW_SERVER_PATH);
|
let new_server_path = container_dir.join(Self::NEW_SERVER_PATH);
|
||||||
|
@ -204,12 +208,13 @@ impl LspAdapter for EsLintLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
http: Arc<dyn HttpClient>,
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Send + Any>> {
|
) -> Result<Box<dyn 'static + Send + Any>> {
|
||||||
// At the time of writing the latest vscode-eslint release was released in 2020 and requires
|
// At the time of writing the latest vscode-eslint release was released in 2020 and requires
|
||||||
// special custom LSP protocol extensions be handled to fully initialize. Download the latest
|
// special custom LSP protocol extensions be handled to fully initialize. Download the latest
|
||||||
// prerelease instead to sidestep this issue
|
// prerelease instead to sidestep this issue
|
||||||
let release = latest_github_release("microsoft/vscode-eslint", true, http).await?;
|
let release =
|
||||||
|
latest_github_release("microsoft/vscode-eslint", true, delegate.http_client()).await?;
|
||||||
Ok(Box::new(GitHubLspBinaryVersion {
|
Ok(Box::new(GitHubLspBinaryVersion {
|
||||||
name: release.name,
|
name: release.name,
|
||||||
url: release.tarball_url,
|
url: release.tarball_url,
|
||||||
|
@ -219,8 +224,8 @@ impl LspAdapter for EsLintLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
http: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
delegate: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
|
||||||
let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name));
|
let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name));
|
||||||
|
@ -229,7 +234,8 @@ impl LspAdapter for EsLintLspAdapter {
|
||||||
if fs::metadata(&server_path).await.is_err() {
|
if fs::metadata(&server_path).await.is_err() {
|
||||||
remove_matching(&container_dir, |entry| entry != destination_path).await;
|
remove_matching(&container_dir, |entry| entry != destination_path).await;
|
||||||
|
|
||||||
let mut response = http
|
let mut response = delegate
|
||||||
|
.http_client()
|
||||||
.get(&version.url, Default::default(), true)
|
.get(&version.url, Default::default(), true)
|
||||||
.await
|
.await
|
||||||
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
.map_err(|err| anyhow!("error downloading release: {}", err))?;
|
||||||
|
@ -257,7 +263,11 @@ impl LspAdapter for EsLintLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
// This is unfortunate but we don't know what the version is to build a path directly
|
// This is unfortunate but we don't know what the version is to build a path directly
|
||||||
let mut dir = fs::read_dir(&container_dir).await?;
|
let mut dir = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -4,6 +4,7 @@ use futures::{future::BoxFuture, FutureExt, StreamExt};
|
||||||
use gpui::AppContext;
|
use gpui::AppContext;
|
||||||
use language::{
|
use language::{
|
||||||
language_settings::all_language_settings, LanguageServerBinary, LanguageServerName, LspAdapter,
|
language_settings::all_language_settings, LanguageServerBinary, LanguageServerName, LspAdapter,
|
||||||
|
LspAdapterDelegate,
|
||||||
};
|
};
|
||||||
use node_runtime::NodeRuntime;
|
use node_runtime::NodeRuntime;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
@ -15,7 +16,6 @@ use std::{
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use util::http::HttpClient;
|
|
||||||
use util::ResultExt;
|
use util::ResultExt;
|
||||||
|
|
||||||
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||||
|
@ -42,7 +42,7 @@ impl LspAdapter for YamlLspAdapter {
|
||||||
|
|
||||||
async fn fetch_latest_server_version(
|
async fn fetch_latest_server_version(
|
||||||
&self,
|
&self,
|
||||||
_: Arc<dyn HttpClient>,
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<Box<dyn 'static + Any + Send>> {
|
) -> Result<Box<dyn 'static + Any + Send>> {
|
||||||
Ok(Box::new(
|
Ok(Box::new(
|
||||||
self.node
|
self.node
|
||||||
|
@ -54,8 +54,8 @@ impl LspAdapter for YamlLspAdapter {
|
||||||
async fn fetch_server_binary(
|
async fn fetch_server_binary(
|
||||||
&self,
|
&self,
|
||||||
version: Box<dyn 'static + Send + Any>,
|
version: Box<dyn 'static + Send + Any>,
|
||||||
_: Arc<dyn HttpClient>,
|
|
||||||
container_dir: PathBuf,
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
) -> Result<LanguageServerBinary> {
|
) -> Result<LanguageServerBinary> {
|
||||||
let version = version.downcast::<String>().unwrap();
|
let version = version.downcast::<String>().unwrap();
|
||||||
let server_path = container_dir.join(Self::SERVER_PATH);
|
let server_path = container_dir.join(Self::SERVER_PATH);
|
||||||
|
@ -72,7 +72,11 @@ impl LspAdapter for YamlLspAdapter {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
|
async fn cached_server_binary(
|
||||||
|
&self,
|
||||||
|
container_dir: PathBuf,
|
||||||
|
_: &dyn LspAdapterDelegate,
|
||||||
|
) -> Option<LanguageServerBinary> {
|
||||||
(|| async move {
|
(|| async move {
|
||||||
let mut last_version_dir = None;
|
let mut last_version_dir = None;
|
||||||
let mut entries = fs::read_dir(&container_dir).await?;
|
let mut entries = fs::read_dir(&container_dir).await?;
|
||||||
|
|
|
@ -31,7 +31,6 @@ use std::{
|
||||||
ffi::OsStr,
|
ffi::OsStr,
|
||||||
fs::OpenOptions,
|
fs::OpenOptions,
|
||||||
io::Write as _,
|
io::Write as _,
|
||||||
ops::Not,
|
|
||||||
os::unix::prelude::OsStrExt,
|
os::unix::prelude::OsStrExt,
|
||||||
panic,
|
panic,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
@ -373,7 +372,6 @@ struct Panic {
|
||||||
os_version: Option<String>,
|
os_version: Option<String>,
|
||||||
architecture: String,
|
architecture: String,
|
||||||
panicked_on: u128,
|
panicked_on: u128,
|
||||||
identifying_backtrace: Option<Vec<String>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
|
@ -401,61 +399,18 @@ fn init_panic_hook(app: &App) {
|
||||||
.unwrap_or_else(|| "Box<Any>".to_string());
|
.unwrap_or_else(|| "Box<Any>".to_string());
|
||||||
|
|
||||||
let backtrace = Backtrace::new();
|
let backtrace = Backtrace::new();
|
||||||
let backtrace = backtrace
|
let mut backtrace = backtrace
|
||||||
.frames()
|
.frames()
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|frame| {
|
.filter_map(|frame| Some(format!("{:#}", frame.symbols().first()?.name()?)))
|
||||||
let symbol = frame.symbols().first()?;
|
|
||||||
let path = symbol.filename()?;
|
|
||||||
Some((path, symbol.lineno(), format!("{:#}", symbol.name()?)))
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let this_file_path = Path::new(file!());
|
// Strip out leading stack frames for rust panic-handling.
|
||||||
|
if let Some(ix) = backtrace
|
||||||
// Find the first frame in the backtrace for this panic hook itself. Exclude
|
.iter()
|
||||||
// that frame and all frames before it.
|
.position(|name| name == "rust_begin_unwind")
|
||||||
let mut start_frame_ix = 0;
|
{
|
||||||
let mut codebase_root_path = None;
|
backtrace.drain(0..=ix);
|
||||||
for (ix, (path, _, _)) in backtrace.iter().enumerate() {
|
|
||||||
if path.ends_with(this_file_path) {
|
|
||||||
start_frame_ix = ix + 1;
|
|
||||||
codebase_root_path = path.ancestors().nth(this_file_path.components().count());
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Exclude any subsequent frames inside of rust's panic handling system.
|
|
||||||
while let Some((path, _, _)) = backtrace.get(start_frame_ix) {
|
|
||||||
if path.starts_with("/rustc") {
|
|
||||||
start_frame_ix += 1;
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build two backtraces:
|
|
||||||
// * one for display, which includes symbol names for all frames, and files
|
|
||||||
// and line numbers for symbols in this codebase
|
|
||||||
// * one for identification and de-duplication, which only includes symbol
|
|
||||||
// names for symbols in this codebase.
|
|
||||||
let mut display_backtrace = Vec::new();
|
|
||||||
let mut identifying_backtrace = Vec::new();
|
|
||||||
for (path, line, symbol) in &backtrace[start_frame_ix..] {
|
|
||||||
display_backtrace.push(symbol.clone());
|
|
||||||
|
|
||||||
if let Some(codebase_root_path) = &codebase_root_path {
|
|
||||||
if let Ok(suffix) = path.strip_prefix(&codebase_root_path) {
|
|
||||||
identifying_backtrace.push(symbol.clone());
|
|
||||||
|
|
||||||
let display_path = suffix.to_string_lossy();
|
|
||||||
if let Some(line) = line {
|
|
||||||
display_backtrace.push(format!(" {display_path}:{line}"));
|
|
||||||
} else {
|
|
||||||
display_backtrace.push(format!(" {display_path}"));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let panic_data = Panic {
|
let panic_data = Panic {
|
||||||
|
@ -477,29 +432,27 @@ fn init_panic_hook(app: &App) {
|
||||||
.duration_since(UNIX_EPOCH)
|
.duration_since(UNIX_EPOCH)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.as_millis(),
|
.as_millis(),
|
||||||
backtrace: display_backtrace,
|
backtrace,
|
||||||
identifying_backtrace: identifying_backtrace
|
|
||||||
.is_empty()
|
|
||||||
.not()
|
|
||||||
.then_some(identifying_backtrace),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
|
if is_pty {
|
||||||
if is_pty {
|
if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
|
||||||
eprintln!("{}", panic_data_json);
|
eprintln!("{}", panic_data_json);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
|
if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
|
||||||
let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
|
let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
|
||||||
let panic_file = std::fs::OpenOptions::new()
|
let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
|
||||||
.append(true)
|
let panic_file = std::fs::OpenOptions::new()
|
||||||
.create(true)
|
.append(true)
|
||||||
.open(&panic_file_path)
|
.create(true)
|
||||||
.log_err();
|
.open(&panic_file_path)
|
||||||
if let Some(mut panic_file) = panic_file {
|
.log_err();
|
||||||
write!(&mut panic_file, "{}", panic_data_json).log_err();
|
if let Some(mut panic_file) = panic_file {
|
||||||
panic_file.flush().log_err();
|
writeln!(&mut panic_file, "{}", panic_data_json).log_err();
|
||||||
|
panic_file.flush().log_err();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
@ -531,23 +484,45 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
|
||||||
}
|
}
|
||||||
|
|
||||||
if telemetry_settings.diagnostics {
|
if telemetry_settings.diagnostics {
|
||||||
let panic_data_text = smol::fs::read_to_string(&child_path)
|
let panic_file_content = smol::fs::read_to_string(&child_path)
|
||||||
.await
|
.await
|
||||||
.context("error reading panic file")?;
|
.context("error reading panic file")?;
|
||||||
|
|
||||||
let body = serde_json::to_string(&PanicRequest {
|
let panic = serde_json::from_str(&panic_file_content)
|
||||||
panic: serde_json::from_str(&panic_data_text)?,
|
.ok()
|
||||||
token: ZED_SECRET_CLIENT_TOKEN.into(),
|
.or_else(|| {
|
||||||
})
|
panic_file_content
|
||||||
.unwrap();
|
.lines()
|
||||||
|
.next()
|
||||||
|
.and_then(|line| serde_json::from_str(line).ok())
|
||||||
|
})
|
||||||
|
.unwrap_or_else(|| {
|
||||||
|
log::error!(
|
||||||
|
"failed to deserialize panic file {:?}",
|
||||||
|
panic_file_content
|
||||||
|
);
|
||||||
|
None
|
||||||
|
});
|
||||||
|
|
||||||
let request = Request::post(&panic_report_url)
|
if let Some(panic) = panic {
|
||||||
.redirect_policy(isahc::config::RedirectPolicy::Follow)
|
let body = serde_json::to_string(&PanicRequest {
|
||||||
.header("Content-Type", "application/json")
|
panic,
|
||||||
.body(body.into())?;
|
token: ZED_SECRET_CLIENT_TOKEN.into(),
|
||||||
let response = http.send(request).await.context("error sending panic")?;
|
})
|
||||||
if !response.status().is_success() {
|
.unwrap();
|
||||||
log::error!("Error uploading panic to server: {}", response.status());
|
|
||||||
|
let request = Request::post(&panic_report_url)
|
||||||
|
.redirect_policy(isahc::config::RedirectPolicy::Follow)
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.body(body.into())?;
|
||||||
|
let response =
|
||||||
|
http.send(request).await.context("error sending panic")?;
|
||||||
|
if !response.status().is_success() {
|
||||||
|
log::error!(
|
||||||
|
"Error uploading panic to server: {}",
|
||||||
|
response.status()
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -384,6 +384,8 @@ pub fn initialize_workspace(
|
||||||
workspace.toggle_dock(project_panel_position, cx);
|
workspace.toggle_dock(project_panel_position, cx);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
cx.focus_self();
|
||||||
|
|
||||||
workspace.add_panel(terminal_panel, cx);
|
workspace.add_panel(terminal_panel, cx);
|
||||||
if let Some(assistant_panel) = assistant_panel {
|
if let Some(assistant_panel) = assistant_panel {
|
||||||
workspace.add_panel(assistant_panel, cx);
|
workspace.add_panel(assistant_panel, cx);
|
||||||
|
|
52
docs/backend-development.md
Normal file
52
docs/backend-development.md
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Developing Zed's Backend
|
||||||
|
|
||||||
|
Zed's backend consists of the following components:
|
||||||
|
|
||||||
|
- The Zed.dev web site
|
||||||
|
- implemented in the [`zed.dev`](https://github.com/zed-industries/zed.dev) repository
|
||||||
|
- hosted on [Vercel](https://vercel.com/zed-industries/zed-dev).
|
||||||
|
- The Zed Collaboration server
|
||||||
|
- implemented in the [`crates/collab`](https://github.com/zed-industries/zed/tree/main/crates/collab) directory of the main `zed` repository
|
||||||
|
- hosted on [DigitalOcean](https://cloud.digitalocean.com/projects/6c680a82-9d3b-4f1a-91e5-63a6ca4a8611), using Kubernetes
|
||||||
|
- The Zed Postgres database
|
||||||
|
- defined via migrations in the [`crates/collab/migrations`](https://github.com/zed-industries/zed/tree/main/crates/collab/migrations) directory
|
||||||
|
- hosted on DigitalOcean
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Local Development
|
||||||
|
|
||||||
|
Here's some things you need to develop backend code locally.
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
- **Postgres** - download [Postgres.app](https://postgresapp.com).
|
||||||
|
|
||||||
|
### Setup
|
||||||
|
|
||||||
|
1. Check out the `zed` and `zed.dev` repositories into a common parent directory
|
||||||
|
2. Set the `GITHUB_TOKEN` environment variable to one of your GitHub personal access tokens (PATs).
|
||||||
|
|
||||||
|
- You can create a PAT [here](https://github.com/settings/tokens).
|
||||||
|
- You may want to add something like this to your `~/.zshrc`:
|
||||||
|
|
||||||
|
```
|
||||||
|
export GITHUB_TOKEN=<the personal access token>
|
||||||
|
```
|
||||||
|
|
||||||
|
3. In the `zed.dev` directory, run `npm install` to install dependencies.
|
||||||
|
4. In the `zed directory`, run `script/bootstrap` to set up the database
|
||||||
|
5. In the `zed directory`, run `foreman start` to start both servers
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Production Debugging
|
||||||
|
|
||||||
|
### Datadog
|
||||||
|
|
||||||
|
Zed uses Datadog to collect metrics and logs from backend services. The Zed organization lives within Datadog's _US5_ [site](https://docs.datadoghq.com/getting_started/site/), so it can be accessed at [us5.datadoghq.com](https://us5.datadoghq.com). Useful things to look at in Datadog:
|
||||||
|
|
||||||
|
- The [Logs](https://us5.datadoghq.com/logs) page shows logs from Zed.dev and the Collab server, and the internals of Zed's Kubernetes cluster.
|
||||||
|
- The [collab metrics dashboard](https://us5.datadoghq.com/dashboard/y2d-gxz-h4h/collab?from_ts=1660517946462&to_ts=1660604346462&live=true) shows metrics about the running collab server
|
79
docs/building-zed.md
Normal file
79
docs/building-zed.md
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Building Zed
|
||||||
|
|
||||||
|
How to build Zed from source for the first time.
|
||||||
|
|
||||||
|
## Process
|
||||||
|
|
||||||
|
Expect this to take 30min to an hour! Some of these steps will take quite a while based on your connection speed, and how long your first build will be.
|
||||||
|
|
||||||
|
1. Install the [GitHub CLI](https://cli.github.com/):
|
||||||
|
- `brew install gh`
|
||||||
|
1. Clone the `zed` repo
|
||||||
|
- `gh repo clone zed-industries/zed`
|
||||||
|
1. Install Xcode from the macOS App Store
|
||||||
|
1. Install [Postgres](https://postgresapp.com)
|
||||||
|
1. Install rust/rustup
|
||||||
|
- `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh`
|
||||||
|
1. Install the wasm toolchain
|
||||||
|
- `rustup target add wasm32-wasi`
|
||||||
|
1. Generate an GitHub API Key
|
||||||
|
- Go to https://github.com/settings/tokens and Generate new token
|
||||||
|
- GitHub currently provides two kinds of tokens:
|
||||||
|
- Classic Tokens, where only `repo` (Full control of private repositories) OAuth scope has to be selected
|
||||||
|
Unfortunately, unselecting `repo` scope and selecting every its inner scope instead does not allow the token users to read from private repositories
|
||||||
|
- (not applicable) Fine-grained Tokens, at the moment of writing, did not allow any kind of access of non-owned private repos
|
||||||
|
- Keep the token in the browser tab/editor for the next two steps
|
||||||
|
1. Open Postgres.app
|
||||||
|
1. From `./path/to/zed/`:
|
||||||
|
- Run:
|
||||||
|
- `GITHUB_TOKEN={yourGithubAPIToken} script/bootstrap`
|
||||||
|
- Replace `{yourGithubAPIToken}` with the API token you generated above.
|
||||||
|
- Consider removing the token (if it's fine for you to crecreate such tokens during occasional migrations) or store this token somewhere safe (like your Zed 1Password vault).
|
||||||
|
- If you get:
|
||||||
|
- ```bash
|
||||||
|
Error: Cannot install in Homebrew on ARM processor in Intel default prefix (/usr/local)!
|
||||||
|
Please create a new installation in /opt/homebrew using one of the
|
||||||
|
"Alternative Installs" from:
|
||||||
|
https://docs.brew.sh/Installation
|
||||||
|
```
|
||||||
|
- In that case try:
|
||||||
|
- `/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"`
|
||||||
|
- If Homebrew is not in your PATH:
|
||||||
|
- Replace `{username}` with your home folder name (usually your login name)
|
||||||
|
- `echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> /Users/{username}/.zprofile`
|
||||||
|
- `eval "$(/opt/homebrew/bin/brew shellenv)"`
|
||||||
|
1. To run the Zed app:
|
||||||
|
- If you are working on zed:
|
||||||
|
- `cargo run`
|
||||||
|
- If you are just using the latest version, but not working on zed:
|
||||||
|
- `cargo run --release`
|
||||||
|
- If you need to run the collaboration server locally:
|
||||||
|
- `script/zed-with-local-servers`
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### `error: failed to run custom build command for gpui v0.1.0 (/Users/path/to/zed)`
|
||||||
|
|
||||||
|
- Try `xcode-select --switch /Applications/Xcode.app/Contents/Developer`
|
||||||
|
|
||||||
|
### `xcrun: error: unable to find utility "metal", not a developer tool or in PATH`
|
||||||
|
|
||||||
|
### Seeding errors during `script/bootstrap` runs
|
||||||
|
|
||||||
|
```
|
||||||
|
seeding database...
|
||||||
|
thread 'main' panicked at 'failed to deserialize github user from 'https://api.github.com/orgs/zed-industries/teams/staff/members': reqwest::Error { kind: Decode, source: Error("invalid type: map, expected a sequence", line: 1, column: 0) }', crates/collab/src/bin/seed.rs:111:10
|
||||||
|
```
|
||||||
|
|
||||||
|
Wrong permissions for `GITHUB_TOKEN` token used, the token needs to be able to read from private repos.
|
||||||
|
For Classic GitHub Tokens, that required OAuth scope `repo` (seacrh the scope name above for more details)
|
||||||
|
|
||||||
|
Same command
|
||||||
|
|
||||||
|
`sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer`
|
||||||
|
|
||||||
|
### If you experience errors that mention some dependency is using unstable features
|
||||||
|
|
||||||
|
Try `cargo clean` and `cargo build`
|
34
docs/company-and-vision.md
Normal file
34
docs/company-and-vision.md
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Company & Vision
|
||||||
|
|
||||||
|
## Vision
|
||||||
|
|
||||||
|
Our goal is to make Zed the primary tool software teams use to collaborate.
|
||||||
|
|
||||||
|
To do this, Zed will...
|
||||||
|
|
||||||
|
* Make collaboration a first-class feature of the code authoring environment.
|
||||||
|
* Enable text-based conversations about any piece of text, independent of whether/when it was committed to version control.
|
||||||
|
* Make it smooth to edit and discuss code with teammates in real time.
|
||||||
|
* Make it easy to recall past conversations any area of the code.
|
||||||
|
|
||||||
|
We believe the best way to make collaboration amazing is to build it into a new editor rather than retrofitting an existing editor. This means that in order for a team to adopt Zed for collaboration, each team member will need to adopt it as their editor as well.
|
||||||
|
|
||||||
|
For this reason, we need to deliver a clearly superior experience as a single-user code editor in addition to being an excellent collaboration tool. This will take time, but we believe the dominance of VS Code demonstrates that it's possible for a single tool to capture substantial market share. We can proceed incrementally, capturing one team at a time and gradually transitioning conversations away from GitHub.
|
||||||
|
|
||||||
|
## Zed Values
|
||||||
|
|
||||||
|
Everyone wants to work quickly and have a lot of users. What are we unwilling to sacrifice in pursuit of those goals?
|
||||||
|
|
||||||
|
- **Performance.** Speed is core to our brand and value proposition. It's important that we consistently deliver a response in less than 8ms on modern hardware for fine-grained actions. Coarse-grained actions should render feedback within 50ms. We consider the performance goals of the product at all times, and take the time to ensure our code meets them with reasonable usage. Once we have met our goals, we assess the impact vs effort of further performance investment and know when to say when. We measure our performance in the field and make an effort to maintain or improve real-world performance and promptly address regressions.
|
||||||
|
|
||||||
|
- **Craftsmanship.** Zed is a premium product, and we put care into design and user experience. We can always cut scope, but what we do ship should be quality. Incomplete is okay, so long as we're executing on a coherent subset well. Half-baked, unintuitive, or broken is not okay.
|
||||||
|
|
||||||
|
- **Shipping.** Knowledge matters only in as much as it drives results. We're here to build a real product in the real world. We care a lot about the experience of developing Zed, but we care about the user's experience more.
|
||||||
|
|
||||||
|
- **Code quality.** This enables craftsmanship. Nobody is creative in a trash heap, and we're willing to dedicate time to keep our codebase clean. If we're spending no time refactoring, we are likely underinvesting. When we realize a design flaw, we assess its centrality to the rest of the system and consider budgeting time to address it. If we're spending all of our time refactoring, we are likely either overinvesting or paying off debt from past underinvestment. It's up to each engineer to allocate a reasonable refactoring budget. We shouldn't be navel gazing, but we also shouldn't be afraid to invest.
|
||||||
|
|
||||||
|
- **Pairing.** Zed depends on regular pair programming to promote cohesion on our remote team. We believe pairing is a powerful substitute for beuracratic management, excessive documentation, and tedious code review. Nobody has to pair all day, every day, but everyone is responsible for pairing at least 2 hours a week with a variety of other engineers. If anyone wants to pair all day every day, that is explicitly endorsed and credited. If pairing temporarily reduces our throughput due to working on one thing instead of two, we trust that it will pay for itself in the long term by increasing our velocity and allowing us to more effectively grow our team.
|
||||||
|
|
||||||
|
- **Long-term thinking.** The Zed vision began several years ago, and we expect Zed to be around many years from today. We must always be mindful to avoid overengineering for the future, but we should also keep the long-term in mind. Are we building a system our future selves would want to work on in 5 years?
|
74
docs/design-tools.md
Normal file
74
docs/design-tools.md
Normal file
|
@ -0,0 +1,74 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Design Tools & Links
|
||||||
|
|
||||||
|
Generally useful tools and resources for design.
|
||||||
|
|
||||||
|
## General
|
||||||
|
|
||||||
|
[Names of Signs & Symbols](https://www.prepressure.com/fonts/basics/character-names#curlybrackets)
|
||||||
|
|
||||||
|
[The Noun Project](https://thenounproject.com/) - Icons for everything, attempts to describe all of human language visually.
|
||||||
|
|
||||||
|
[SVG Repo](https://www.svgrepo.com/) - Open-licensed SVG Vector and Icons
|
||||||
|
|
||||||
|
[Font Awsesome](https://fontawesome.com/) - High quality icons, has been around for many years.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Color
|
||||||
|
|
||||||
|
[Opacity/Transparency Hex Values](https://gist.github.com/lopspower/03fb1cc0ac9f32ef38f4)
|
||||||
|
|
||||||
|
[Color Ramp Generator](https://lyft-colorbox.herokuapp.com)
|
||||||
|
|
||||||
|
[Designing a Comprehensive Color System
|
||||||
|
](https://www.rethinkhq.com/videos/designing-a-comprehensive-color-system-for-lyft) - [Linda Dong](https://twitter.com/lindadong)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Figma & Plugins
|
||||||
|
|
||||||
|
[Figma Plugins for Designers](https://www.uiprep.com/blog/21-best-figma-plugins-for-designers-in-2021)
|
||||||
|
|
||||||
|
[Icon Resizer](https://www.figma.com/community/plugin/739117729229117975/Icon-Resizer)
|
||||||
|
|
||||||
|
[Code Syntax Highlighter](https://www.figma.com/community/plugin/938793197191698232/Code-Syntax-Highlighter)
|
||||||
|
|
||||||
|
[Proportional Scale](https://www.figma.com/community/plugin/756895186298946525/Proportional-Scale)
|
||||||
|
|
||||||
|
[LilGrid](https://www.figma.com/community/plugin/795397421598343178/LilGrid)
|
||||||
|
|
||||||
|
Organize your selection into a grid.
|
||||||
|
|
||||||
|
[Automator](https://www.figma.com/community/plugin/1005114571859948695/Automator)
|
||||||
|
|
||||||
|
Build photoshop-style batch actions to automate things.
|
||||||
|
|
||||||
|
[Figma Tokens](https://www.figma.com/community/plugin/843461159747178978/Figma-Tokens)
|
||||||
|
|
||||||
|
Use tokens in Figma and generate JSON from them.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Design Systems
|
||||||
|
|
||||||
|
### Naming
|
||||||
|
|
||||||
|
[Naming Design Tokens](https://uxdesign.cc/naming-design-tokens-9454818ed7cb)
|
||||||
|
|
||||||
|
### Storybook
|
||||||
|
|
||||||
|
[Collaboration with design tokens and storybook](https://zure.com/blog/collaboration-with-design-tokens-and-storybook/)
|
||||||
|
|
||||||
|
### Example DS Documentation
|
||||||
|
|
||||||
|
[Tailwind CSS Documentation](https://tailwindcss.com/docs/container)
|
||||||
|
|
||||||
|
[Material Design Docs](https://material.io/design/color/the-color-system.html#color-usage-and-palettes)
|
||||||
|
|
||||||
|
[Carbon Design System Docs](https://www.carbondesignsystem.com)
|
||||||
|
|
||||||
|
[Adobe Spectrum](https://spectrum.adobe.com/)
|
||||||
|
- Great documentation, like [Color System](https://spectrum.adobe.com/page/color-system/) and [Design Tokens](https://spectrum.adobe.com/page/design-tokens/).
|
||||||
|
- A good place to start if thinking about building a design system.
|
14
docs/index.md
Normal file
14
docs/index.md
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Welcome to Zed
|
||||||
|
|
||||||
|
Welcome! These internal docs are a work in progress. You can contribute to them by submitting a PR directly!
|
||||||
|
|
||||||
|
## Contents
|
||||||
|
|
||||||
|
- [The Company](./company-and-vision.md)
|
||||||
|
- [Tools We Use](./tools.md)
|
||||||
|
- [Building Zed](./building-zed.md)
|
||||||
|
- [Release Process](./release-process.md)
|
||||||
|
- [Backend Development](./backend-development.md)
|
||||||
|
- [Design Tools & Links](./design-tools.md)
|
96
docs/release-process.md
Normal file
96
docs/release-process.md
Normal file
|
@ -0,0 +1,96 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Zed's Release Process
|
||||||
|
|
||||||
|
The process to create and ship a Zed release
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
### Release Channels
|
||||||
|
|
||||||
|
Users of Zed can choose between two _release channels_ - 'Stable' and 'Preview'. Most people use Stable, but Preview exists so that the Zed team and other early-adopters can test new features before they are released to our general user-base.
|
||||||
|
|
||||||
|
### Weekly (Minor) Releases
|
||||||
|
|
||||||
|
We normally publish new releases of Zed on Wednesdays, for both the Stable and Preview channels. For each of these releases, we bump Zed's _minor_ version number.
|
||||||
|
|
||||||
|
For the Preview channel, we build the new release based on what's on the `main` branch. For the Stable channel, we build the new release based on the last Preview release.
|
||||||
|
|
||||||
|
### Hotfix (Patch) Releases
|
||||||
|
|
||||||
|
When we find a _regression_ in Zed (a bug that wasn't present in an earlier version), or find a significant bug in a newly-released feature, we typically publish a hotfix release. For these releases, we bump Zed's _patch_ version number.
|
||||||
|
|
||||||
|
### Server Deployments
|
||||||
|
|
||||||
|
Often, changes in the Zed app require corresponding changes in the `collab` server. At the current stage of our company, we don't attempt to keep our server backwards-compatible with older versions of the app. Instead, when making a change, we simply bump Zed's _protocol version_ number (in the `rpc` crate), which causes the server to recognize that it isn't compatible with earlier versions of the Zed app.
|
||||||
|
|
||||||
|
This means that when releasing a new version of Zed that has changes to the RPC protocol, we need to deploy a new version of the `collab` server at the same time.
|
||||||
|
|
||||||
|
## Instructions
|
||||||
|
|
||||||
|
### Publishing a Minor Release
|
||||||
|
|
||||||
|
1. Announce your intent to publish a new version in Discord. This gives other people a chance to raise concerns or postpone the release if they want to get something merged before publishing a new version.
|
||||||
|
1. Open your terminal and `cd` into your local copy of Zed. Checkout `main` and perform a `git pull` to ensure you have the latest version.
|
||||||
|
1. Run the following command, which will update two git branches and two git tags (one for each release channel):
|
||||||
|
|
||||||
|
```
|
||||||
|
script/bump-zed-minor-versions
|
||||||
|
```
|
||||||
|
|
||||||
|
1. The script will make local changes only, and print out a shell command that you can use to push all of these branches and tags.
|
||||||
|
1. Pushing the two new tags will trigger two CI builds that, when finished, will create two draft releases (Stable and Preview) containing `Zed.dmg` files.
|
||||||
|
1. Now you need to write the release notes for the Stable and Preview releases. For the Stable release, you can just copy the release notes from the last week's Preview release, plus any hotfixes that were published on the Preview channel since then. Some of the hotfixes may not be relevant for the Stable release notes, if they were fixing bugs that were only present in Preview.
|
||||||
|
1. For the Preview release, you can retrieve the list of changes by running this command (make sure you have at least `Node 18` installed):
|
||||||
|
|
||||||
|
```
|
||||||
|
GITHUB_ACCESS_TOKEN=your_access_token script/get-preview-channel-changes
|
||||||
|
```
|
||||||
|
|
||||||
|
1. The script will list all the merged pull requests and you can use it as a reference to write the release notes. If there were protocol changes, it will also emit a warning.
|
||||||
|
1. Once CI creates the draft releases, add each release's notes and save the drafts.
|
||||||
|
1. If there have been server-side changes since the last release, you'll need to re-deploy the `collab` server. See below.
|
||||||
|
1. Before publishing, download the Zed.dmg and smoke test it to ensure everything looks good.
|
||||||
|
|
||||||
|
### Publishing a Patch Release
|
||||||
|
|
||||||
|
1. Announce your intent to publish a new patch version in Discord.
|
||||||
|
1. Open your terminal and `cd` into your local copy of Zed. Check out the branch corresponding to the release channel where the fix is needed. For example, if the fix is for a bug in Stable, and the current stable version is `0.63.0`, then checkout the branch `v0.63.x`. Run `git pull` to ensure your branch is up-to-date.
|
||||||
|
1. Find the merge commit where your bug-fix landed on `main`. You can browse the merged pull requests on main by running `git log main --grep Merge`.
|
||||||
|
1. Cherry-pick those commits onto the current release branch:
|
||||||
|
|
||||||
|
```
|
||||||
|
git cherry-pick -m1 <THE-COMMIT-SHA>
|
||||||
|
```
|
||||||
|
|
||||||
|
1. Run the following command, which will bump the version of Zed and create a new tag:
|
||||||
|
|
||||||
|
```
|
||||||
|
script/bump-zed-patch-version
|
||||||
|
```
|
||||||
|
|
||||||
|
1. The script will make local changes only, and print out a shell command that you can use to push the branch and tag.
|
||||||
|
1. Pushing the new tag will trigger a CI build that, when finished, will create a draft release containing a `Zed.dmg` file.
|
||||||
|
1. Once the draft release is created, fill in the release notes based on the bugfixes that you cherry-picked.
|
||||||
|
1. If any of the bug-fixes require server-side changes, you'll need to re-deploy the `collab` server. See below.
|
||||||
|
1. Before publishing, download the Zed.dmg and smoke test it to ensure everything looks good.
|
||||||
|
1. Clicking publish on the release will cause old Zed instances to auto-update and the Zed.dev releases page to re-build and display the new release.
|
||||||
|
|
||||||
|
### Deploying the Server
|
||||||
|
|
||||||
|
1. Deploying the server is a two-step process that begins with pushing a tag.
1. Check out the commit you'd like to deploy. Often it will be the head of `main`, but it could be on any branch.
|
||||||
|
1. Run the following script, which will bump the version of the `collab` crate and create a new tag. The script takes an argument `minor` or `patch`, to indicate how to increment the version. If you're releasing new features, use `minor`. If it's just a bugfix, use `patch`
|
||||||
|
|
||||||
|
```
|
||||||
|
script/bump-collab-version patch
|
||||||
|
```
|
||||||
|
|
||||||
|
1. This script will make local changes only, and print out a shell command that you can use to push the branch and tag.
|
||||||
|
1. Pushing the new tag will trigger a CI build that, when finished, will upload a new versioned docker image to the DigitalOcean docker registry.
|
||||||
|
1. Once that CI job completes, you will be able to run the following command to deploy that docker image. The script takes two arguments: an environment (`production`, `preview`, or `staging`), and a version number (e.g. `0.10.1`).
|
||||||
|
|
||||||
|
```
|
||||||
|
script/deploy preview 0.10.1
|
||||||
|
```
|
||||||
|
|
||||||
|
1. This command should complete quickly, updating the given environment to use the given version number of the `collab` server.
|
82
docs/tools.md
Normal file
82
docs/tools.md
Normal file
|
@ -0,0 +1,82 @@
|
||||||
|
[⬅ Back to Index](./index.md)
|
||||||
|
|
||||||
|
# Tools
|
||||||
|
|
||||||
|
Tools to get started at Zed. Work in progress, submit a PR to add any missing tools here!
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Everyday Tools
|
||||||
|
|
||||||
|
### Calendar
|
||||||
|
|
||||||
|
To gain access to company calendar, visit [this link](https://calendar.google.com/calendar/u/0/r?cid=Y18xOGdzcGE1aG5wdHJocGRoNWtlb2tlbWxzc0Bncm91cC5jYWxlbmRhci5nb29nbGUuY29t).
|
||||||
|
|
||||||
|
If you would like the company calendar to be synced with a calendar application (Apple Calendar, etc.):
|
||||||
|
|
||||||
|
- Add your company account (i.e. `joseph@zed.dev`) to your calendar application
|
||||||
|
- Visit [this link](https://calendar.google.com/calendar/u/0/syncselect), check `Zed Industries (Read Only)` under `Shared Calendars`, and save it.
|
||||||
|
|
||||||
|
### 1Password
|
||||||
|
|
||||||
|
We have a shared company 1Password with all of our credentials. To gain access:
|
||||||
|
|
||||||
|
1. Go to [zed-industries.1password.com](https://zed-industries.1password.com).
|
||||||
|
1. Sign in with your `@zed.dev` email address.
|
||||||
|
1. Make your account and let an admin know you've signed up.
|
||||||
|
1. Once they approve your sign up, you'll have access to all of the company credentials.
|
||||||
|
|
||||||
|
### Slack
|
||||||
|
|
||||||
|
Have a team member add you to the [Zed Industries](https://zed-industries.slack.com/) slack.
|
||||||
|
|
||||||
|
### Discord
|
||||||
|
|
||||||
|
We have a Discord community. You can use [this link](https://discord.gg/SSD9eJrn6s) to join. **Don't share this link — it is specifically for team members!**
|
||||||
|
|
||||||
|
Once you have joined the community, let a team member know and we can add your correct role.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Engineering
|
||||||
|
|
||||||
|
### Github
|
||||||
|
|
||||||
|
For now, all the Zed source code lives on [Github](https://github.com/zed-industries). A founder will need to add you to the team and set up the appropriate permissions.
|
||||||
|
|
||||||
|
Useful repos:
|
||||||
|
- [zed-industries/zed](https://github.com/zed-industries/zed) - Zed source
|
||||||
|
- [zed-industries/zed.dev](https://github.com/zed-industries/zed.dev) - Zed.dev site and collab API
|
||||||
|
- [zed-industries/docs](https://github.com/zed-industries/docs) - Zed public docs
|
||||||
|
- [zed-industries/community](https://github.com/zed-industries/community) - Zed community feedback & discussion
|
||||||
|
|
||||||
|
### Vercel
|
||||||
|
|
||||||
|
We use Vercel for all of our web deployments and some backend things. If you sign up with your `@zed.dev` email you should be invited to join the team automatically. If not, ask a founder to invite you to the Vercel team.
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
You can get access to many of our shared environment variables through 1Password and Vercel. In 1Password, search for the value you are looking for, or sort by passwords or API credentials.
|
||||||
|
|
||||||
|
For Vercel, go to `settings` -> `Environment Variables` (either on the entire org, or on a specific project depending on where it is shared.) For a given Vercel project if you have their CLI installed you can use `vercel pull` or `vercel env` to pull values down directly. More on those in their [CLI docs](https://vercel.com/docs/cli/env).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Design
|
||||||
|
|
||||||
|
### Figma
|
||||||
|
|
||||||
|
We use Figma for all of our design work. To gain access:
|
||||||
|
|
||||||
|
1. Use [this link](https://www.figma.com/team_invite/redeem/Xg4RcNXHhwP5netIvVBmKQ) to join the Figma team.
|
||||||
|
1. You should now have access to all of the company files.
|
||||||
|
1. You should go to the team page and "favorite" (star) any relevant projects so they show up in your sidebar.
|
||||||
|
1. Download the [Figma app](https://www.figma.com/downloads/) for easier access on desktop.
|
||||||
|
|
||||||
|
### Campsite
|
||||||
|
|
||||||
|
We use Campsite to review and discuss designs. To gain access:
|
||||||
|
|
||||||
|
1. Download the [Campsite app](https://campsite.design/desktop/download).
|
||||||
|
1. Open it and sign in with your `@zed.dev` email address.
|
||||||
|
1. You can access our company campsite directly: [app.campsite.design/zed](https://app.campsite.design/zed)
|
Loading…
Add table
Add a link
Reference in a new issue