Merge remote-tracking branch 'origin' into divs

Nathan Sobo 2023-08-29 21:14:14 -06:00
commit 81957c49d5
177 changed files with 11085 additions and 3836 deletions

View file

@@ -22,6 +22,9 @@ jobs:
- name: Sign into DigitalOcean docker registry
run: doctl registry login
+- name: Prune Docker system
+run: docker system prune
- name: Checkout repo
uses: actions/checkout@v3
with:

Cargo.lock (generated, 587 changes)

File diff suppressed because it is too large

View file

@@ -6,6 +6,7 @@ members = [
"crates/auto_update",
"crates/breadcrumbs",
"crates/call",
+"crates/channel",
"crates/cli",
"crates/client",
"crates/clock",
@@ -13,6 +14,7 @@ members = [
"crates/collab_ui",
"crates/collections",
"crates/command_palette",
+"crates/component_test",
"crates/context_menu",
"crates/copilot",
"crates/copilot_button",
@@ -60,7 +62,7 @@ members = [
"crates/snippet",
"crates/sqlez",
"crates/sqlez_macros",
-"crates/staff_mode",
+"crates/feature_flags",
"crates/sum_tree",
"crates/terminal",
"crates/text",
@@ -95,10 +97,11 @@ log = { version = "0.4.16", features = ["kv_unstable_serde"] }
ordered-float = { version = "2.1.1" }
parking_lot = { version = "0.11.1" }
postage = { version = "0.5", features = ["futures-traits"] }
+prost = { version = "0.8" }
rand = { version = "0.8.5" }
refineable = { path = "./crates/refineable" }
regex = { version = "1.5" }
-rust-embed = { version = "6.3", features = ["include-exclude"] }
+rust-embed = { version = "8.0", features = ["include-exclude"] }
schemars = { version = "0.8" }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }

View file

@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2
-FROM rust:1.71-bullseye as builder
+FROM rust:1.72-bullseye as builder
WORKDIR app
COPY . .

View file

@@ -521,7 +521,8 @@
"ctrl-alt-cmd-f": "workspace::FollowNextCollaborator",
// TODO: Move this to a dock open action
"cmd-shift-c": "collab_panel::ToggleFocus",
-"cmd-alt-i": "zed::DebugElements"
+"cmd-alt-i": "zed::DebugElements",
+"ctrl-shift-:": "editor::ToggleInlayHints",
}
},
{

View file

@@ -137,10 +137,67 @@
"partialWord": true
}
],
"g j": [
"vim::Down",
{
"displayLines": true
}
],
"g down": [
"vim::Down",
{
"displayLines": true
}
],
"g k": [
"vim::Up",
{
"displayLines": true
}
],
"g up": [
"vim::Up",
{
"displayLines": true
}
],
"g $": [
"vim::EndOfLine",
{
"displayLines": true
}
],
"g end": [
"vim::EndOfLine",
{
"displayLines": true
}
],
"g 0": [
"vim::StartOfLine",
{
"displayLines": true
}
],
"g home": [
"vim::StartOfLine",
{
"displayLines": true
}
],
"g ^": [
"vim::FirstNonWhitespace",
{
"displayLines": true
}
],
// z commands
"z t": "editor::ScrollCursorTop",
"z z": "editor::ScrollCursorCenter",
"z b": "editor::ScrollCursorBottom",
"z c": "editor::Fold",
"z o": "editor::UnfoldLines",
"z f": "editor::FoldSelectedRanges",
// Count support
"1": [
"vim::Number",

View file

@@ -98,6 +98,7 @@
// Whether to show selections in the scrollbar.
"selections": true
},
+"relative_line_numbers": false,
// Inlay hint related settings
"inlay_hints": {
// Global switch to toggle hints on and off, switched off by default.
@@ -284,8 +285,6 @@
// "directory": "~/zed/projects/"
// }
// }
-//
-//
"working_directory": "current_project_directory",
// Set the cursor blinking behavior in the terminal.
// May take 4 values:
@@ -334,13 +333,32 @@
// "line_height": {
// "custom": 2
// },
-"line_height": "comfortable"
+"line_height": "comfortable",
// Activate the python virtual environment, if one is found, in the
// terminal's working directory (as resolved by the working_directory
// setting). Set this to "off" to disable this behavior.
"detect_venv": {
"on": {
// Default directories to search for virtual environments, relative
// to the current working directory. We recommend overriding this
// in your project's settings, rather than globally.
"directories": [
".env",
"env",
".venv",
"venv"
],
// Can also be 'csh' and 'fish'
"activate_script": "default"
}
}
// Set the terminal's font size. If this option is not included,
// the terminal will default to matching the buffer's font size.
-// "font_size": "15"
+// "font_size": "15",
// Set the terminal's font family. If this option is not included,
// the terminal will default to matching the buffer's font family.
-// "font_family": "Zed Mono"
+// "font_family": "Zed Mono",
+// ---
},
// Difference settings for semantic_index
"semantic_index": {

View file

@@ -855,14 +855,14 @@ impl Conversation {
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.add_model(|cx| {
-let mut buffer = Buffer::new(0, "", cx);
+let mut buffer = Buffer::new(0, cx.model_id() as u64, "");
buffer.set_language_registry(language_registry);
cx.spawn_weak(|buffer, mut cx| async move {
let markdown = markdown.await?;
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
-buffer.update(&mut cx, |buffer, cx| {
+buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -944,7 +944,7 @@ impl Conversation {
let mut message_anchors = Vec::new();
let mut next_message_id = MessageId(0);
let buffer = cx.add_model(|cx| {
-let mut buffer = Buffer::new(0, saved_conversation.text, cx);
+let mut buffer = Buffer::new(0, cx.model_id() as u64, saved_conversation.text);
for message in saved_conversation.messages {
message_anchors.push(MessageAnchor {
id: message.id,
@@ -958,7 +958,7 @@ impl Conversation {
let buffer = buffer
.upgrade(&cx)
.ok_or_else(|| anyhow!("buffer was dropped"))?;
-buffer.update(&mut cx, |buffer, cx| {
+buffer.update(&mut cx, |buffer: &mut Buffer, cx| {
buffer.set_language(Some(markdown), cx)
});
anyhow::Ok(())
@@ -1128,7 +1128,9 @@ impl Conversation {
stream: true,
};
-let Some(api_key) = self.api_key.borrow().clone() else { continue };
+let Some(api_key) = self.api_key.borrow().clone() else {
+continue;
+};
let stream = stream_completion(api_key, cx.background().clone(), request);
let assistant_message = self
.insert_message_after(
@@ -1484,7 +1486,9 @@ impl Conversation {
}) {
current_message = messages.next();
}
-let Some(message) = current_message.as_ref() else { break };
+let Some(message) = current_message.as_ref() else {
+break;
+};
// Skip offsets that are in the same message.
while offsets.peek().map_or(false, |offset| {
@@ -1921,7 +1925,10 @@ impl ConversationEditor {
let Some(panel) = workspace.panel::<AssistantPanel>(cx) else {
return;
};
-let Some(editor) = workspace.active_item(cx).and_then(|item| item.act_as::<Editor>(cx)) else {
+let Some(editor) = workspace
+.active_item(cx)
+.and_then(|item| item.act_as::<Editor>(cx))
+else {
return;
};

View file

@@ -20,6 +20,7 @@ test-support = [
[dependencies]
audio = { path = "../audio" }
+channel = { path = "../channel" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }

View file

@@ -7,9 +7,8 @@ use std::sync::Arc;
use anyhow::{anyhow, Result};
use audio::Audio;
use call_settings::CallSettings;
-use client::{
-proto, ChannelId, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore,
-};
+use channel::ChannelId;
+use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore};
use collections::HashSet;
use futures::{future::Shared, FutureExt};
use postage::watch;

View file

@@ -644,7 +644,9 @@ impl Room {
if let Some(participants) = remote_participants.log_err() {
for (participant, user) in room.participants.into_iter().zip(participants) {
-let Some(peer_id) = participant.peer_id else { continue };
+let Some(peer_id) = participant.peer_id else {
+continue;
+};
this.participant_user_ids.insert(participant.user_id);
let old_projects = this

crates/channel/Cargo.toml (new file, 51 lines)
View file

@@ -0,0 +1,51 @@
[package]
name = "channel"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/channel.rs"
doctest = false
[features]
test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"]
[dependencies]
client = { path = "../client" }
collections = { path = "../collections" }
db = { path = "../db" }
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
text = { path = "../text" }
language = { path = "../language" }
settings = { path = "../settings" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
anyhow.workspace = true
futures.workspace = true
image = "0.23"
lazy_static.workspace = true
log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand.workspace = true
schemars.workspace = true
smol.workspace = true
thiserror.workspace = true
time.workspace = true
tiny_http = "0.8"
uuid = { version = "1.1.2", features = ["v4"] }
url = "2.2"
serde.workspace = true
serde_derive.workspace = true
tempfile = "3"
[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }

View file

@@ -0,0 +1,14 @@
mod channel_store;
pub mod channel_buffer;
use std::sync::Arc;
pub use channel_store::*;
use client::Client;
#[cfg(test)]
mod channel_store_tests;
pub fn init(client: &Arc<Client>) {
channel_buffer::init(client);
}

View file

@@ -0,0 +1,197 @@
use crate::Channel;
use anyhow::Result;
use client::Client;
use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle};
use rpc::{proto, TypedEnvelope};
use std::sync::Arc;
use util::ResultExt;
pub(crate) fn init(client: &Arc<Client>) {
client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
}
pub struct ChannelBuffer {
pub(crate) channel: Arc<Channel>,
connected: bool,
collaborators: Vec<proto::Collaborator>,
buffer: ModelHandle<language::Buffer>,
client: Arc<Client>,
subscription: Option<client::Subscription>,
}
pub enum Event {
CollaboratorsChanged,
Disconnected,
}
impl Entity for ChannelBuffer {
type Event = Event;
fn release(&mut self, _: &mut AppContext) {
if self.connected {
self.client
.send(proto::LeaveChannelBuffer {
channel_id: self.channel.id,
})
.log_err();
}
}
}
impl ChannelBuffer {
pub(crate) async fn new(
channel: Arc<Channel>,
client: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<ModelHandle<Self>> {
let response = client
.request(proto::JoinChannelBuffer {
channel_id: channel.id,
})
.await?;
let base_text = response.base_text;
let operations = response
.operations
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>, _>>()?;
let collaborators = response.collaborators;
let buffer = cx.add_model(|_| {
language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text)
});
buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
let subscription = client.subscribe_to_entity(channel.id)?;
anyhow::Ok(cx.add_model(|cx| {
cx.subscribe(&buffer, Self::on_buffer_update).detach();
Self {
buffer,
client,
connected: true,
collaborators,
channel,
subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())),
}
}))
}
async fn handle_update_channel_buffer(
this: ModelHandle<Self>,
update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
let ops = update_channel_buffer
.payload
.operations
.into_iter()
.map(language::proto::deserialize_operation)
.collect::<Result<Vec<_>, _>>()?;
this.update(&mut cx, |this, cx| {
cx.notify();
this.buffer
.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
})?;
Ok(())
}
async fn handle_add_channel_buffer_collaborator(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::AddChannelBufferCollaborator>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
let collaborator = envelope.payload.collaborator.ok_or_else(|| {
anyhow::anyhow!(
"Should have gotten a collaborator in the AddChannelBufferCollaborator message"
)
})?;
this.update(&mut cx, |this, cx| {
this.collaborators.push(collaborator);
cx.emit(Event::CollaboratorsChanged);
cx.notify();
});
Ok(())
}
async fn handle_remove_channel_buffer_collaborator(
this: ModelHandle<Self>,
message: TypedEnvelope<proto::RemoveChannelBufferCollaborator>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
this.collaborators.retain(|collaborator| {
if collaborator.peer_id == message.payload.peer_id {
this.buffer.update(cx, |buffer, cx| {
buffer.remove_peer(collaborator.replica_id as u16, cx)
});
false
} else {
true
}
});
cx.emit(Event::CollaboratorsChanged);
cx.notify();
});
Ok(())
}
fn on_buffer_update(
&mut self,
_: ModelHandle<language::Buffer>,
event: &language::Event,
_: &mut ModelContext<Self>,
) {
if let language::Event::Operation(operation) = event {
let operation = language::proto::serialize_operation(operation);
self.client
.send(proto::UpdateChannelBuffer {
channel_id: self.channel.id,
operations: vec![operation],
})
.log_err();
}
}
pub fn buffer(&self) -> ModelHandle<language::Buffer> {
self.buffer.clone()
}
pub fn collaborators(&self) -> &[proto::Collaborator] {
&self.collaborators
}
pub fn channel(&self) -> Arc<Channel> {
self.channel.clone()
}
pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
if self.connected {
self.connected = false;
self.subscription.take();
cx.emit(Event::Disconnected);
cx.notify()
}
}
pub fn is_connected(&self) -> bool {
self.connected
}
pub fn replica_id(&self, cx: &AppContext) -> u16 {
self.buffer.read(cx).replica_id()
}
}
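For orientation, a rough usage sketch (illustrative only, not part of this commit) of the API this file introduces: it opens a channel's shared buffer through ChannelStore::open_channel_buffer (added later in this commit) and reads its text. Only the signatures visible in this diff are used; the exact gpui calls, imports, and the function name read_channel_notes are assumptions.

// Sketch: open (or reuse) the collaborative buffer for a channel and return its text.
use channel::{ChannelId, ChannelStore};
use gpui::{AsyncAppContext, ModelHandle};

async fn read_channel_notes(
    channel_store: ModelHandle<ChannelStore>,
    channel_id: ChannelId,
    mut cx: AsyncAppContext,
) -> anyhow::Result<String> {
    // Kick off (or join) the shared open task for this channel's buffer.
    let open_task = channel_store.update(&mut cx, |store, cx| {
        store.open_channel_buffer(channel_id, cx)
    });
    let channel_buffer = open_task.await?;
    // Read the synchronized text out of the underlying language::Buffer.
    Ok(channel_buffer.read_with(&cx, |this, cx| this.buffer().read(cx).text()))
}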

View file

@@ -1,19 +1,14 @@
-use crate::Status;
-use crate::{Client, Subscription, User, UserStore};
-use anyhow::anyhow;
-use anyhow::Result;
-use collections::HashMap;
-use collections::HashSet;
-use futures::channel::mpsc;
-use futures::Future;
-use futures::StreamExt;
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use crate::channel_buffer::ChannelBuffer;
+use anyhow::{anyhow, Result};
+use client::{Client, Status, Subscription, User, UserId, UserStore};
+use collections::{hash_map, HashMap, HashSet};
+use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
+use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
use rpc::{proto, TypedEnvelope};
use std::sync::Arc;
use util::ResultExt;
pub type ChannelId = u64;
-pub type UserId = u64;
pub struct ChannelStore {
channels_by_id: HashMap<ChannelId, Arc<Channel>>,
@@ -23,6 +18,7 @@ pub struct ChannelStore {
channels_with_admin_privileges: HashSet<ChannelId>,
outgoing_invites: HashSet<(ChannelId, UserId)>,
update_channels_tx: mpsc::UnboundedSender<proto::UpdateChannels>,
+opened_buffers: HashMap<ChannelId, OpenedChannelBuffer>,
client: Arc<Client>,
user_store: ModelHandle<UserStore>,
_rpc_subscription: Subscription,
@@ -57,6 +53,11 @@ pub enum ChannelMemberStatus {
NotMember,
}
+enum OpenedChannelBuffer {
+Open(WeakModelHandle<ChannelBuffer>),
+Loading(Shared<Task<Result<ModelHandle<ChannelBuffer>, Arc<anyhow::Error>>>>),
+}
impl ChannelStore {
pub fn new(
client: Arc<Client>,
@@ -70,16 +71,14 @@ impl ChannelStore {
let mut connection_status = client.status();
let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
while let Some(status) = connection_status.next().await {
-if matches!(status, Status::ConnectionLost | Status::SignedOut) {
+if !status.is_connected() {
if let Some(this) = this.upgrade(&cx) {
this.update(&mut cx, |this, cx| {
-this.channels_by_id.clear();
-this.channel_invitations.clear();
-this.channel_participants.clear();
-this.channels_with_admin_privileges.clear();
-this.channel_paths.clear();
-this.outgoing_invites.clear();
-cx.notify();
+if matches!(status, Status::ConnectionLost | Status::SignedOut) {
+this.handle_disconnect(cx);
+} else {
+this.disconnect_buffers(cx);
+}
});
} else {
break;
@@ -87,6 +86,7 @@ impl ChannelStore {
}
}
});
Self {
channels_by_id: HashMap::default(),
channel_invitations: Vec::default(),
@@ -94,6 +94,7 @@
channel_participants: Default::default(),
channels_with_admin_privileges: Default::default(),
outgoing_invites: Default::default(),
+opened_buffers: Default::default(),
update_channels_tx,
client,
user_store,
@@ -114,6 +115,16 @@
}
}
pub fn has_children(&self, channel_id: ChannelId) -> bool {
self.channel_paths.iter().any(|path| {
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
path.len() > ix + 1
} else {
false
}
})
}
pub fn channel_count(&self) -> usize {
self.channel_paths.len()
}
@@ -141,6 +152,74 @@ impl ChannelStore {
self.channels_by_id.get(&channel_id)
}
pub fn open_channel_buffer(
&mut self,
channel_id: ChannelId,
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<ChannelBuffer>>> {
// Make sure that a given channel buffer is only opened once per
// app instance, even if this method is called multiple times
// with the same channel id while the first task is still running.
let task = loop {
match self.opened_buffers.entry(channel_id) {
hash_map::Entry::Occupied(e) => match e.get() {
OpenedChannelBuffer::Open(buffer) => {
if let Some(buffer) = buffer.upgrade(cx) {
break Task::ready(Ok(buffer)).shared();
} else {
self.opened_buffers.remove(&channel_id);
continue;
}
}
OpenedChannelBuffer::Loading(task) => break task.clone(),
},
hash_map::Entry::Vacant(e) => {
let client = self.client.clone();
let task = cx
.spawn(|this, cx| async move {
let channel = this.read_with(&cx, |this, _| {
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
Arc::new(anyhow!("no channel for id: {}", channel_id))
})
})?;
ChannelBuffer::new(channel, client, cx)
.await
.map_err(Arc::new)
})
.shared();
e.insert(OpenedChannelBuffer::Loading(task.clone()));
cx.spawn({
let task = task.clone();
|this, mut cx| async move {
let result = task.await;
this.update(&mut cx, |this, cx| match result {
Ok(buffer) => {
cx.observe_release(&buffer, move |this, _, _| {
this.opened_buffers.remove(&channel_id);
})
.detach();
this.opened_buffers.insert(
channel_id,
OpenedChannelBuffer::Open(buffer.downgrade()),
);
}
Err(error) => {
log::error!("failed to open channel buffer {error:?}");
this.opened_buffers.remove(&channel_id);
}
});
}
})
.detach();
break task;
}
}
};
cx.foreground()
.spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
}
pub fn is_user_admin(&self, channel_id: ChannelId) -> bool {
self.channel_paths.iter().any(|path| {
if let Some(ix) = path.iter().position(|id| *id == channel_id) {
@@ -403,6 +482,27 @@
Ok(())
}
fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
self.disconnect_buffers(cx);
self.channels_by_id.clear();
self.channel_invitations.clear();
self.channel_participants.clear();
self.channels_with_admin_privileges.clear();
self.channel_paths.clear();
self.outgoing_invites.clear();
cx.notify();
}
fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
for (_, buffer) in self.opened_buffers.drain() {
if let OpenedChannelBuffer::Open(buffer) = buffer {
if let Some(buffer) = buffer.upgrade(cx) {
buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
}
}
}
}
pub(crate) fn update_channels(
&mut self,
payload: proto::UpdateChannels,
@@ -437,38 +537,44 @@
.retain(|channel_id, _| !payload.remove_channels.contains(channel_id));
self.channels_with_admin_privileges
.retain(|channel_id| !payload.remove_channels.contains(channel_id));
+for channel_id in &payload.remove_channels {
+let channel_id = *channel_id;
+if let Some(OpenedChannelBuffer::Open(buffer)) =
+self.opened_buffers.remove(&channel_id)
+{
+if let Some(buffer) = buffer.upgrade(cx) {
+buffer.update(cx, ChannelBuffer::disconnect);
+}
+}
+}
}
-for channel in payload.channels {
-if let Some(existing_channel) = self.channels_by_id.get_mut(&channel.id) {
-// FIXME: We may be missing a path for this existing channel in certain cases
-let existing_channel = Arc::make_mut(existing_channel);
-existing_channel.name = channel.name;
-continue;
-}
-self.channels_by_id.insert(
-channel.id,
-Arc::new(Channel {
-id: channel.id,
-name: channel.name,
-}),
-);
-if let Some(parent_id) = channel.parent_id {
-let mut ix = 0;
-while ix < self.channel_paths.len() {
-let path = &self.channel_paths[ix];
-if path.ends_with(&[parent_id]) {
-let mut new_path = path.clone();
-new_path.push(channel.id);
-self.channel_paths.insert(ix + 1, new_path);
-ix += 1;
-}
-ix += 1;
-}
-} else {
-self.channel_paths.push(vec![channel.id]);
-}
-}
+for channel_proto in payload.channels {
+if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) {
+Arc::make_mut(existing_channel).name = channel_proto.name;
+} else {
+let channel = Arc::new(Channel {
+id: channel_proto.id,
+name: channel_proto.name,
+});
+self.channels_by_id.insert(channel.id, channel.clone());
+if let Some(parent_id) = channel_proto.parent_id {
+let mut ix = 0;
+while ix < self.channel_paths.len() {
+let path = &self.channel_paths[ix];
+if path.ends_with(&[parent_id]) {
+let mut new_path = path.clone();
+new_path.push(channel.id);
+self.channel_paths.insert(ix + 1, new_path);
+ix += 1;
+}
+ix += 1;
+}
+} else {
+self.channel_paths.push(vec![channel.id]);
+}
+}
+}
}
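As an aside on the open_channel_buffer method above: it keeps at most one live ChannelBuffer per channel by caching either a weak handle (once open) or a shared in-flight task (while loading). A standalone sketch of that memoization pattern, with illustrative names and no gpui dependency, might look roughly like this:

use futures::{future::{BoxFuture, Shared}, FutureExt};
use std::{collections::HashMap, sync::{Arc, Weak}};

struct Buffer; // stands in for the real ChannelBuffer

type OpenResult = Result<Arc<Buffer>, String>;
type OpenTask = Shared<BoxFuture<'static, OpenResult>>;

enum Opened {
    Open(Weak<Buffer>),   // already open; may have been dropped since
    Loading(OpenTask),    // an open is in flight; later callers share it
}

#[derive(Default)]
struct Store {
    opened: HashMap<u64, Opened>,
}

impl Store {
    fn open(&mut self, id: u64) -> OpenTask {
        // Reuse a live buffer if one is still around.
        if let Some(Opened::Open(weak)) = self.opened.get(&id) {
            if let Some(buffer) = weak.upgrade() {
                return futures::future::ready(Ok(buffer)).boxed().shared();
            }
            self.opened.remove(&id); // stale entry; fall through and reload
        }
        // Join an in-flight load instead of starting a second one.
        if let Some(Opened::Loading(task)) = self.opened.get(&id) {
            return task.clone();
        }
        // Otherwise start a new load and publish it as the shared task.
        let task: OpenTask = async move {
            // ...join the channel buffer over RPC here...
            Ok(Arc::new(Buffer))
        }
        .boxed()
        .shared();
        self.opened.insert(id, Opened::Loading(task.clone()));
        task
    }
}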

View file

@@ -1,4 +1,7 @@
use super::*;
+use client::{Client, UserStore};
+use gpui::{AppContext, ModelHandle};
+use rpc::proto;
use util::http::FakeHttpClient;
#[gpui::test]

View file

@@ -17,8 +17,9 @@ db = { path = "../db" }
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
+text = { path = "../text" }
settings = { path = "../settings" }
-staff_mode = { path = "../staff_mode" }
+feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
anyhow.workspace = true

View file

@@ -1,10 +1,6 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;
-#[cfg(test)]
-mod channel_store_tests;
-pub mod channel_store;
pub mod telemetry;
pub mod user;
@@ -48,7 +44,6 @@ use util::channel::ReleaseChannel;
use util::http::HttpClient;
use util::{ResultExt, TryFutureExt};
-pub use channel_store::*;
pub use rpc::*;
pub use telemetry::ClickhouseEvent;
pub use user::*;

View file

@@ -135,8 +135,6 @@ impl Telemetry {
}
}
-/// This method takes the entire TelemetrySettings struct in order to force client code
-/// to pull the struct out of the settings global. Do not remove!
pub fn set_authenticated_user_info(
self: &Arc<Self>,
metrics_id: Option<String>,

View file

@@ -168,6 +168,7 @@ impl FakeServer {
GetPrivateUserInfoResponse {
metrics_id: "the-metrics-id".into(),
staff: false,
+flags: Default::default(),
},
)
.await;

View file

@@ -1,18 +1,20 @@
use super::{proto, Client, Status, TypedEnvelope};
use anyhow::{anyhow, Context, Result};
use collections::{hash_map::Entry, HashMap, HashSet};
+use feature_flags::FeatureFlagAppExt;
use futures::{channel::mpsc, future, AsyncReadExt, Future, StreamExt};
use gpui::{AsyncAppContext, Entity, ImageData, ModelContext, ModelHandle, Task};
use postage::{sink::Sink, watch};
use rpc::proto::{RequestMessage, UsersResponse};
-use staff_mode::StaffMode;
use std::sync::{Arc, Weak};
use util::http::HttpClient;
use util::TryFutureExt as _;
+pub type UserId = u64;
#[derive(Default, Debug)]
pub struct User {
-pub id: u64,
+pub id: UserId,
pub github_login: String,
pub avatar: Option<Arc<ImageData>>,
}
@@ -143,26 +145,23 @@ impl UserStore {
let fetch_metrics_id =
client.request(proto::GetPrivateUserInfo {}).log_err();
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
-cx.read(|cx| {
-client.telemetry.set_authenticated_user_info(
-info.as_ref().map(|info| info.metrics_id.clone()),
-info.as_ref().map(|info| info.staff).unwrap_or(false),
-cx,
-)
-});
-cx.update(|cx| {
-cx.update_default_global(|staff_mode: &mut StaffMode, _| {
-if !staff_mode.0 {
-*staff_mode = StaffMode(
-info.as_ref()
-.map(|info| info.staff)
-.unwrap_or_default(),
-)
-}
-()
-});
-});
+if let Some(info) = info {
+cx.update(|cx| {
+cx.update_flags(info.staff, info.flags);
+client.telemetry.set_authenticated_user_info(
+Some(info.metrics_id.clone()),
+info.staff,
+cx,
+)
+});
+} else {
+cx.read(|cx| {
+client
+.telemetry
+.set_authenticated_user_info(None, false, cx)
+});
+}
current_user_tx.send(user).await.ok();

View file

@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.17.0"
+version = "0.18.0"
publish = false
[[bin]]
@@ -14,8 +14,10 @@ name = "seed"
required-features = ["seed-support"]
[dependencies]
+clock = { path = "../clock" }
collections = { path = "../collections" }
live_kit_server = { path = "../live_kit_server" }
+text = { path = "../text" }
rpc = { path = "../rpc" }
util = { path = "../util" }
@@ -35,6 +37,7 @@ log.workspace = true
nanoid = "0.4"
parking_lot.workspace = true
prometheus = "0.13"
+prost.workspace = true
rand.workspace = true
reqwest = { version = "0.11", features = ["json"], optional = true }
scrypt = "0.7"
@@ -62,6 +65,7 @@ collections = { path = "../collections", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
+channel = { path = "../channel" }
editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
@@ -74,6 +78,7 @@ rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
theme = { path = "../theme" }
workspace = { path = "../workspace", features = ["test-support"] }
+collab_ui = { path = "../collab_ui", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true

View file

@@ -208,3 +208,63 @@ CREATE TABLE "channel_members" (
);
CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
CREATE TABLE "buffers" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL DEFAULT 0
);
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
CREATE TABLE "buffer_operations" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"lamport_timestamp" INTEGER NOT NULL,
"value" BLOB NOT NULL,
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
);
CREATE TABLE "buffer_snapshots" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"text" TEXT NOT NULL,
"operation_serialization_version" INTEGER NOT NULL,
PRIMARY KEY(buffer_id, epoch)
);
CREATE TABLE "channel_buffer_collaborators" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"connection_lost" BOOLEAN NOT NULL DEFAULT false,
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"replica_id" INTEGER NOT NULL
);
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
CREATE TABLE "feature_flags" (
"id" INTEGER PRIMARY KEY AUTOINCREMENT,
"flag" TEXT NOT NULL UNIQUE
);
CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");
CREATE TABLE "user_features" (
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE,
PRIMARY KEY (user_id, feature_id)
);
CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");

View file

@@ -0,0 +1,40 @@
CREATE TABLE "buffers" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL DEFAULT 0
);
CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
CREATE TABLE "buffer_operations" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"replica_id" INTEGER NOT NULL,
"lamport_timestamp" INTEGER NOT NULL,
"value" BYTEA NOT NULL,
PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
);
CREATE TABLE "buffer_snapshots" (
"buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
"epoch" INTEGER NOT NULL,
"text" TEXT NOT NULL,
"operation_serialization_version" INTEGER NOT NULL,
PRIMARY KEY(buffer_id, epoch)
);
CREATE TABLE "channel_buffer_collaborators" (
"id" SERIAL PRIMARY KEY,
"channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
"connection_id" INTEGER NOT NULL,
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
"connection_lost" BOOLEAN NOT NULL DEFAULT FALSE,
"user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
"replica_id" INTEGER NOT NULL
);
CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");

View file

@@ -0,0 +1,16 @@
CREATE TABLE "feature_flags" (
"id" SERIAL PRIMARY KEY,
"flag" VARCHAR(255) NOT NULL UNIQUE
);
CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id");
CREATE TABLE "user_features" (
"user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
"feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE,
PRIMARY KEY (user_id, feature_id)
);
CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");

View file

@@ -1,7 +1,8 @@
#[cfg(test)]
-mod db_tests;
+pub mod tests;
#[cfg(test)]
-pub mod test_db;
+pub use tests::TestDb;
mod ids;
mod queries;
@@ -52,6 +53,8 @@ pub struct Database {
runtime: Option<tokio::runtime::Runtime>,
}
+// The `Database` type has so many methods that its impl blocks are split into
+// separate files in the `queries` folder.
impl Database {
pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
Ok(Self {
@@ -246,7 +249,9 @@ impl Database {
let mut tx = Arc::new(Some(tx));
let result = f(TransactionHandle(tx.clone())).await;
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
-return Err(anyhow!("couldn't complete transaction because it's still in use"))?;
+return Err(anyhow!(
+"couldn't complete transaction because it's still in use"
+))?;
};
Ok((tx, result))

View file

@@ -110,6 +110,7 @@ fn value_to_integer(v: Value) -> Result<i32, ValueTypeErr> {
}
}
+id_type!(BufferId);
id_type!(AccessTokenId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
@@ -123,3 +124,5 @@ id_type!(ReplicaId);
id_type!(ServerId);
id_type!(SignupId);
id_type!(UserId);
+id_type!(ChannelBufferCollaboratorId);
+id_type!(FlagId);

View file

@@ -1,6 +1,7 @@
use super::*;
pub mod access_tokens;
+pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod projects;

View file

@@ -0,0 +1,588 @@
use super::*;
use prost::Message;
use text::{EditOperation, InsertionTimestamp, UndoOperation};
impl Database {
pub async fn join_channel_buffer(
&self,
channel_id: ChannelId,
user_id: UserId,
connection: ConnectionId,
) -> Result<proto::JoinChannelBufferResponse> {
self.transaction(|tx| async move {
let tx = tx;
self.check_user_is_channel_member(channel_id, user_id, &tx)
.await?;
let buffer = channel::Model {
id: channel_id,
..Default::default()
}
.find_related(buffer::Entity)
.one(&*tx)
.await?;
let buffer = if let Some(buffer) = buffer {
buffer
} else {
let buffer = buffer::ActiveModel {
channel_id: ActiveValue::Set(channel_id),
..Default::default()
}
.insert(&*tx)
.await?;
buffer_snapshot::ActiveModel {
buffer_id: ActiveValue::Set(buffer.id),
epoch: ActiveValue::Set(0),
text: ActiveValue::Set(String::new()),
operation_serialization_version: ActiveValue::Set(
storage::SERIALIZATION_VERSION,
),
}
.insert(&*tx)
.await?;
buffer
};
// Join the collaborators
let mut collaborators = channel_buffer_collaborator::Entity::find()
.filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
.all(&*tx)
.await?;
let replica_ids = collaborators
.iter()
.map(|c| c.replica_id)
.collect::<HashSet<_>>();
let mut replica_id = ReplicaId(0);
while replica_ids.contains(&replica_id) {
replica_id.0 += 1;
}
let collaborator = channel_buffer_collaborator::ActiveModel {
channel_id: ActiveValue::Set(channel_id),
connection_id: ActiveValue::Set(connection.id as i32),
connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)),
user_id: ActiveValue::Set(user_id),
replica_id: ActiveValue::Set(replica_id),
..Default::default()
}
.insert(&*tx)
.await?;
collaborators.push(collaborator);
// Assemble the buffer state
let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
Ok(proto::JoinChannelBufferResponse {
buffer_id: buffer.id.to_proto(),
replica_id: replica_id.to_proto() as u32,
base_text,
operations,
collaborators: collaborators
.into_iter()
.map(|collaborator| proto::Collaborator {
peer_id: Some(collaborator.connection().into()),
user_id: collaborator.user_id.to_proto(),
replica_id: collaborator.replica_id.0 as u32,
})
.collect(),
})
})
.await
}
pub async fn leave_channel_buffer(
&self,
channel_id: ChannelId,
connection: ConnectionId,
) -> Result<Vec<ConnectionId>> {
self.transaction(|tx| async move {
self.leave_channel_buffer_internal(channel_id, connection, &*tx)
.await
})
.await
}
pub async fn leave_channel_buffer_internal(
&self,
channel_id: ChannelId,
connection: ConnectionId,
tx: &DatabaseTransaction,
) -> Result<Vec<ConnectionId>> {
let result = channel_buffer_collaborator::Entity::delete_many()
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
.add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
.add(
channel_buffer_collaborator::Column::ConnectionServerId
.eq(connection.owner_id as i32),
),
)
.exec(&*tx)
.await?;
if result.rows_affected == 0 {
Err(anyhow!("not a collaborator on this project"))?;
}
let mut connections = Vec::new();
let mut rows = channel_buffer_collaborator::Entity::find()
.filter(
Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.stream(&*tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
connections.push(ConnectionId {
id: row.connection_id as u32,
owner_id: row.connection_server_id.0 as u32,
});
}
drop(rows);
if connections.is_empty() {
self.snapshot_buffer(channel_id, &tx).await?;
}
Ok(connections)
}
pub async fn leave_channel_buffers(
&self,
connection: ConnectionId,
) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
self.transaction(|tx| async move {
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryChannelIds {
ChannelId,
}
let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
.select_only()
.column(channel_buffer_collaborator::Column::ChannelId)
.filter(Condition::all().add(
channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
))
.into_values::<_, QueryChannelIds>()
.all(&*tx)
.await?;
let mut result = Vec::new();
for channel_id in channel_ids {
let collaborators = self
.leave_channel_buffer_internal(channel_id, connection, &*tx)
.await?;
result.push((channel_id, collaborators));
}
Ok(result)
})
.await
}
#[cfg(debug_assertions)]
pub async fn get_channel_buffer_collaborators(
&self,
channel_id: ChannelId,
) -> Result<Vec<UserId>> {
self.transaction(|tx| async move {
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryUserIds {
UserId,
}
let users: Vec<UserId> = channel_buffer_collaborator::Entity::find()
.select_only()
.column(channel_buffer_collaborator::Column::UserId)
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.into_values::<_, QueryUserIds>()
.all(&*tx)
.await?;
Ok(users)
})
.await
}
pub async fn update_channel_buffer(
&self,
channel_id: ChannelId,
user: UserId,
operations: &[proto::Operation],
) -> Result<Vec<ConnectionId>> {
self.transaction(move |tx| async move {
self.check_user_is_channel_member(channel_id, user, &*tx)
.await?;
let buffer = buffer::Entity::find()
.filter(buffer::Column::ChannelId.eq(channel_id))
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such buffer"))?;
#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
enum QueryVersion {
OperationSerializationVersion,
}
let serialization_version: i32 = buffer
.find_related(buffer_snapshot::Entity)
.select_only()
.column(buffer_snapshot::Column::OperationSerializationVersion)
.filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
.into_values::<_, QueryVersion>()
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("missing buffer snapshot"))?;
let operations = operations
.iter()
.filter_map(|op| operation_to_storage(op, &buffer, serialization_version))
.collect::<Vec<_>>();
if !operations.is_empty() {
buffer_operation::Entity::insert_many(operations)
.exec(&*tx)
.await?;
}
let mut connections = Vec::new();
let mut rows = channel_buffer_collaborator::Entity::find()
.filter(
Condition::all()
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
)
.stream(&*tx)
.await?;
while let Some(row) = rows.next().await {
let row = row?;
connections.push(ConnectionId {
id: row.connection_id as u32,
owner_id: row.connection_server_id.0 as u32,
});
}
Ok(connections)
})
.await
}
async fn get_buffer_state(
&self,
buffer: &buffer::Model,
tx: &DatabaseTransaction,
) -> Result<(String, Vec<proto::Operation>)> {
let id = buffer.id;
let (base_text, version) = if buffer.epoch > 0 {
let snapshot = buffer_snapshot::Entity::find()
.filter(
buffer_snapshot::Column::BufferId
.eq(id)
.and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such snapshot"))?;
let version = snapshot.operation_serialization_version;
(snapshot.text, version)
} else {
(String::new(), storage::SERIALIZATION_VERSION)
};
let mut rows = buffer_operation::Entity::find()
.filter(
buffer_operation::Column::BufferId
.eq(id)
.and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
)
.stream(&*tx)
.await?;
let mut operations = Vec::new();
while let Some(row) = rows.next().await {
let row = row?;
let operation = operation_from_storage(row, version)?;
operations.push(proto::Operation {
variant: Some(operation),
})
}
Ok((base_text, operations))
}
async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
let buffer = channel::Model {
id: channel_id,
..Default::default()
}
.find_related(buffer::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such buffer"))?;
let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
if operations.is_empty() {
return Ok(());
}
let mut text_buffer = text::Buffer::new(0, 0, base_text);
text_buffer
.apply_ops(operations.into_iter().filter_map(operation_from_wire))
.unwrap();
let base_text = text_buffer.text();
let epoch = buffer.epoch + 1;
buffer_snapshot::Model {
buffer_id: buffer.id,
epoch,
text: base_text,
operation_serialization_version: storage::SERIALIZATION_VERSION,
}
.into_active_model()
.insert(tx)
.await?;
buffer::ActiveModel {
id: ActiveValue::Unchanged(buffer.id),
epoch: ActiveValue::Set(epoch),
..Default::default()
}
.save(tx)
.await?;
Ok(())
}
}
fn operation_to_storage(
operation: &proto::Operation,
buffer: &buffer::Model,
_format: i32,
) -> Option<buffer_operation::ActiveModel> {
let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? {
proto::operation::Variant::Edit(operation) => (
operation.replica_id,
operation.lamport_timestamp,
storage::Operation {
local_timestamp: operation.local_timestamp,
version: version_to_storage(&operation.version),
is_undo: false,
edit_ranges: operation
.ranges
.iter()
.map(|range| storage::Range {
start: range.start,
end: range.end,
})
.collect(),
edit_texts: operation.new_text.clone(),
undo_counts: Vec::new(),
},
),
proto::operation::Variant::Undo(operation) => (
operation.replica_id,
operation.lamport_timestamp,
storage::Operation {
local_timestamp: operation.local_timestamp,
version: version_to_storage(&operation.version),
is_undo: true,
edit_ranges: Vec::new(),
edit_texts: Vec::new(),
undo_counts: operation
.counts
.iter()
.map(|entry| storage::UndoCount {
replica_id: entry.replica_id,
local_timestamp: entry.local_timestamp,
count: entry.count,
})
.collect(),
},
),
_ => None?,
};
Some(buffer_operation::ActiveModel {
buffer_id: ActiveValue::Set(buffer.id),
epoch: ActiveValue::Set(buffer.epoch),
replica_id: ActiveValue::Set(replica_id as i32),
lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32),
value: ActiveValue::Set(value.encode_to_vec()),
})
}
fn operation_from_storage(
row: buffer_operation::Model,
_format_version: i32,
) -> Result<proto::operation::Variant, Error> {
let operation =
storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
let version = version_from_storage(&operation.version);
Ok(if operation.is_undo {
proto::operation::Variant::Undo(proto::operation::Undo {
replica_id: row.replica_id as u32,
local_timestamp: operation.local_timestamp as u32,
lamport_timestamp: row.lamport_timestamp as u32,
version,
counts: operation
.undo_counts
.iter()
.map(|entry| proto::UndoCount {
replica_id: entry.replica_id,
local_timestamp: entry.local_timestamp,
count: entry.count,
})
.collect(),
})
} else {
proto::operation::Variant::Edit(proto::operation::Edit {
replica_id: row.replica_id as u32,
local_timestamp: operation.local_timestamp as u32,
lamport_timestamp: row.lamport_timestamp as u32,
version,
ranges: operation
.edit_ranges
.into_iter()
.map(|range| proto::Range {
start: range.start,
end: range.end,
})
.collect(),
new_text: operation.edit_texts,
})
})
}
fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
version
.iter()
.map(|entry| storage::VectorClockEntry {
replica_id: entry.replica_id,
timestamp: entry.timestamp,
})
.collect()
}
fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
version
.iter()
.map(|entry| proto::VectorClockEntry {
replica_id: entry.replica_id,
timestamp: entry.timestamp,
})
.collect()
}
// This is currently a manual copy of the deserialization code in the client's language crate
pub fn operation_from_wire(operation: proto::Operation) -> Option<text::Operation> {
match operation.variant? {
proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation {
timestamp: InsertionTimestamp {
replica_id: edit.replica_id as text::ReplicaId,
local: edit.local_timestamp,
lamport: edit.lamport_timestamp,
},
version: version_from_wire(&edit.version),
ranges: edit
.ranges
.into_iter()
.map(|range| {
text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize)
})
.collect(),
new_text: edit.new_text.into_iter().map(Arc::from).collect(),
})),
proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo {
lamport_timestamp: clock::Lamport {
replica_id: undo.replica_id as text::ReplicaId,
value: undo.lamport_timestamp,
},
undo: UndoOperation {
id: clock::Local {
replica_id: undo.replica_id as text::ReplicaId,
value: undo.local_timestamp,
},
version: version_from_wire(&undo.version),
counts: undo
.counts
.into_iter()
.map(|c| {
(
clock::Local {
replica_id: c.replica_id as text::ReplicaId,
value: c.local_timestamp,
},
c.count,
)
})
.collect(),
},
}),
_ => None,
}
}
fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
let mut version = clock::Global::new();
for entry in message {
version.observe(clock::Local {
replica_id: entry.replica_id as text::ReplicaId,
value: entry.timestamp,
});
}
version
}
mod storage {
#![allow(non_snake_case)]
use prost::Message;
pub const SERIALIZATION_VERSION: i32 = 1;
#[derive(Message)]
pub struct Operation {
#[prost(uint32, tag = "1")]
pub local_timestamp: u32,
#[prost(message, repeated, tag = "2")]
pub version: Vec<VectorClockEntry>,
#[prost(bool, tag = "3")]
pub is_undo: bool,
#[prost(message, repeated, tag = "4")]
pub edit_ranges: Vec<Range>,
#[prost(string, repeated, tag = "5")]
pub edit_texts: Vec<String>,
#[prost(message, repeated, tag = "6")]
pub undo_counts: Vec<UndoCount>,
}
#[derive(Message)]
pub struct VectorClockEntry {
#[prost(uint32, tag = "1")]
pub replica_id: u32,
#[prost(uint32, tag = "2")]
pub timestamp: u32,
}
#[derive(Message)]
pub struct Range {
#[prost(uint64, tag = "1")]
pub start: u64,
#[prost(uint64, tag = "2")]
pub end: u64,
}
#[derive(Message)]
pub struct UndoCount {
#[prost(uint32, tag = "1")]
pub replica_id: u32,
#[prost(uint32, tag = "2")]
pub local_timestamp: u32,
#[prost(uint32, tag = "3")]
pub count: u32,
}
}
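To make the storage encoding above concrete, a small round-trip sketch (an illustration, not part of this commit): it encodes a storage::Operation the way update_channel_buffer persists the value column, then decodes it the way operation_from_storage reads it back. The sample values and the function name storage_roundtrip are made up.

use prost::Message;

fn storage_roundtrip() -> anyhow::Result<()> {
    let op = storage::Operation {
        local_timestamp: 4,
        version: vec![storage::VectorClockEntry { replica_id: 1, timestamp: 3 }],
        is_undo: false,
        edit_ranges: vec![storage::Range { start: 0, end: 5 }],
        edit_texts: vec!["hello".to_string()],
        undo_counts: Vec::new(),
    };
    // This byte vector is what ends up in the `buffer_operations.value` BLOB column.
    let blob: Vec<u8> = op.encode_to_vec();
    // Reading it back mirrors what `operation_from_storage` does before converting to proto.
    let decoded = storage::Operation::decode(blob.as_slice())?;
    assert_eq!(decoded.edit_texts, vec!["hello".to_string()]);
    Ok(())
}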

View file

@@ -465,9 +465,9 @@ impl Database {
let mut rejoined_projects = Vec::new();
for rejoined_project in &rejoin_room.rejoined_projects {
let project_id = ProjectId::from_proto(rejoined_project.id);
-let Some(project) = project::Entity::find_by_id(project_id)
-.one(&*tx)
-.await? else { continue };
+let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
+continue;
+};
let mut worktrees = Vec::new();
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
@@ -903,15 +903,35 @@ impl Database {
),
)
.one(&*tx)
-.await?
-.ok_or_else(|| anyhow!("not a participant in any room"))?;
-room_participant::Entity::update(room_participant::ActiveModel {
-answering_connection_lost: ActiveValue::set(true),
-..participant.into_active_model()
-})
-.exec(&*tx)
-.await?;
+.await?;
+if let Some(participant) = participant {
+room_participant::Entity::update(room_participant::ActiveModel {
+answering_connection_lost: ActiveValue::set(true),
+..participant.into_active_model()
+})
+.exec(&*tx)
+.await?;
+}
+channel_buffer_collaborator::Entity::update_many()
+.filter(
+Condition::all()
+.add(
+channel_buffer_collaborator::Column::ConnectionId
+.eq(connection.id as i32),
+)
+.add(
+channel_buffer_collaborator::Column::ConnectionServerId
+.eq(connection.owner_id as i32),
+),
+)
+.set(channel_buffer_collaborator::ActiveModel {
+connection_lost: ActiveValue::set(true),
+..Default::default()
+})
+.exec(&*tx)
+.await?;
Ok(())
})

View file

@@ -240,4 +240,58 @@ impl Database {
result.push('%');
result
}
#[cfg(debug_assertions)]
pub async fn create_user_flag(&self, flag: &str) -> Result<FlagId> {
self.transaction(|tx| async move {
let flag = feature_flag::Entity::insert(feature_flag::ActiveModel {
flag: ActiveValue::set(flag.to_string()),
..Default::default()
})
.exec(&*tx)
.await?
.last_insert_id;
Ok(flag)
})
.await
}
#[cfg(debug_assertions)]
pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> {
self.transaction(|tx| async move {
user_feature::Entity::insert(user_feature::ActiveModel {
user_id: ActiveValue::set(user),
feature_id: ActiveValue::set(flag),
})
.exec(&*tx)
.await?;
Ok(())
})
.await
}
pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
self.transaction(|tx| async move {
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
enum QueryAs {
Flag,
}
let flags = user::Model {
id: user,
..Default::default()
}
.find_linked(user::UserFlags)
.select_only()
.column(feature_flag::Column::Flag)
.into_values::<_, QueryAs>()
.all(&*tx)
.await?;
Ok(flags)
})
.await
}
}
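Taken together, the three helpers above cover the feature-flag lifecycle. A hedged usage sketch, not part of this commit (the surrounding db: &Arc<Database> and user_id: UserId are assumed, and create_user_flag is only compiled in debug builds):

// Sketch only: enabling a flag for a user and reading it back.
async fn enable_channels_alpha(db: &Arc<Database>, user_id: UserId) -> Result<Vec<String>> {
    // Mirrors the debug-only create_user_flag helper above.
    let flag_id = db.create_user_flag("channels-alpha").await?;
    db.add_user_flag(user_id, flag_id).await?;
    // Returns every flag name enabled for the user, e.g. ["channels-alpha"].
    db.get_user_flags(user_id).await
}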

View file

@ -1,8 +1,13 @@
pub mod access_token;
pub mod buffer;
pub mod buffer_operation;
pub mod buffer_snapshot;
pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_member;
pub mod channel_path;
pub mod contact;
pub mod feature_flag;
pub mod follower;
pub mod language_server;
pub mod project;
@ -12,6 +17,7 @@ pub mod room_participant;
pub mod server;
pub mod signup;
pub mod user;
pub mod user_feature;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;

View file

@ -0,0 +1,45 @@
use crate::db::{BufferId, ChannelId};
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffers")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: BufferId,
pub epoch: i32,
pub channel_id: ChannelId,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::buffer_operation::Entity")]
Operations,
#[sea_orm(has_many = "super::buffer_snapshot::Entity")]
Snapshots,
#[sea_orm(
belongs_to = "super::channel::Entity",
from = "Column::ChannelId",
to = "super::channel::Column::Id"
)]
Channel,
}
impl Related<super::buffer_operation::Entity> for Entity {
fn to() -> RelationDef {
Relation::Operations.def()
}
}
impl Related<super::buffer_snapshot::Entity> for Entity {
fn to() -> RelationDef {
Relation::Snapshots.def()
}
}
impl Related<super::channel::Entity> for Entity {
fn to() -> RelationDef {
Relation::Channel.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View file

@ -0,0 +1,34 @@
use crate::db::BufferId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffer_operations")]
pub struct Model {
#[sea_orm(primary_key)]
pub buffer_id: BufferId,
#[sea_orm(primary_key)]
pub epoch: i32,
#[sea_orm(primary_key)]
pub lamport_timestamp: i32,
#[sea_orm(primary_key)]
pub replica_id: i32,
pub value: Vec<u8>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::buffer::Entity",
from = "Column::BufferId",
to = "super::buffer::Column::Id"
)]
Buffer,
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View file

@ -0,0 +1,31 @@
use crate::db::BufferId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffer_snapshots")]
pub struct Model {
#[sea_orm(primary_key)]
pub buffer_id: BufferId,
#[sea_orm(primary_key)]
pub epoch: i32,
pub text: String,
pub operation_serialization_version: i32,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::buffer::Entity",
from = "Column::BufferId",
to = "super::buffer::Column::Id"
)]
Buffer,
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View file

@ -15,8 +15,12 @@ impl ActiveModelBehavior for ActiveModel {}
pub enum Relation {
    #[sea_orm(has_one = "super::room::Entity")]
    Room,
    #[sea_orm(has_one = "super::buffer::Entity")]
    Buffer,
    #[sea_orm(has_many = "super::channel_member::Entity")]
    Member,
    #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
    BufferCollaborators,
}
impl Related<super::channel_member::Entity> for Entity {
@ -30,3 +34,15 @@ impl Related<super::room::Entity> for Entity {
        Relation::Room.def()
    }
}
impl Related<super::buffer::Entity> for Entity {
fn to() -> RelationDef {
Relation::Buffer.def()
}
}
impl Related<super::channel_buffer_collaborator::Entity> for Entity {
fn to() -> RelationDef {
Relation::BufferCollaborators.def()
}
}

View file

@ -0,0 +1,43 @@
use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_buffer_collaborators")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: ChannelBufferCollaboratorId,
pub channel_id: ChannelId,
pub connection_id: i32,
pub connection_server_id: ServerId,
pub connection_lost: bool,
pub user_id: UserId,
pub replica_id: ReplicaId,
}
impl Model {
pub fn connection(&self) -> ConnectionId {
ConnectionId {
owner_id: self.connection_server_id.0 as u32,
id: self.connection_id as u32,
}
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::channel::Entity",
from = "Column::ChannelId",
to = "super::channel::Column::Id"
)]
Channel,
}
impl Related<super::channel::Entity> for Entity {
fn to() -> RelationDef {
Relation::Channel.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View file

@ -0,0 +1,40 @@
use sea_orm::entity::prelude::*;
use crate::db::FlagId;
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "feature_flags")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: FlagId,
pub flag: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::user_feature::Entity")]
UserFeature,
}
impl Related<super::user_feature::Entity> for Entity {
fn to() -> RelationDef {
Relation::UserFeature.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
pub struct FlaggedUsers;
impl Linked for FlaggedUsers {
type FromEntity = Entity;
type ToEntity = super::user::Entity;
fn link(&self) -> Vec<RelationDef> {
vec![
super::user_feature::Relation::Flag.def().rev(),
super::user_feature::Relation::User.def(),
]
}
}
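The FlaggedUsers link is the mirror image of the user::UserFlags link used by Database::get_user_flags. As a hedged sketch that is not part of this commit, it could answer the reverse question, which users have a given flag (tx is an open sea-orm transaction; the helper name is made up):

// Sketch only: fetching every user that has a given flag enabled.
use sea_orm::{entity::prelude::*, DatabaseTransaction, DbErr, QueryFilter};

async fn users_with_flag(
    tx: &DatabaseTransaction,
    flag_name: &str,
) -> Result<Vec<super::user::Model>, DbErr> {
    // Look up the flag row by name; bail out with an empty list if it doesn't exist.
    let Some(flag) = Entity::find()
        .filter(Column::Flag.eq(flag_name))
        .one(tx)
        .await?
    else {
        return Ok(Vec::new());
    };
    // Walk the FlaggedUsers link (feature_flags -> user_features -> users).
    flag.find_linked(FlaggedUsers).all(tx).await
}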

View file

@ -28,6 +28,8 @@ pub enum Relation {
    HostedProjects,
    #[sea_orm(has_many = "super::channel_member::Entity")]
    ChannelMemberships,
    #[sea_orm(has_many = "super::user_feature::Entity")]
    UserFeatures,
}
impl Related<super::access_token::Entity> for Entity {
@ -54,4 +56,25 @@ impl Related<super::channel_member::Entity> for Entity {
    }
}
impl Related<super::user_feature::Entity> for Entity {
fn to() -> RelationDef {
Relation::UserFeatures.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
pub struct UserFlags;
impl Linked for UserFlags {
type FromEntity = Entity;
type ToEntity = super::feature_flag::Entity;
fn link(&self) -> Vec<RelationDef> {
vec![
super::user_feature::Relation::User.def().rev(),
super::user_feature::Relation::Flag.def(),
]
}
}

View file

@ -0,0 +1,42 @@
use sea_orm::entity::prelude::*;
use crate::db::{FlagId, UserId};
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "user_features")]
pub struct Model {
#[sea_orm(primary_key)]
pub user_id: UserId,
#[sea_orm(primary_key)]
pub feature_id: FlagId,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::feature_flag::Entity",
from = "Column::FeatureId",
to = "super::feature_flag::Column::Id"
)]
Flag,
#[sea_orm(
belongs_to = "super::user::Entity",
from = "Column::UserId",
to = "super::user::Column::Id"
)]
User,
}
impl Related<super::feature_flag::Entity> for Entity {
fn to() -> RelationDef {
Relation::Flag.def()
}
}
impl Related<super::user::Entity> for Entity {
fn to() -> RelationDef {
Relation::User.def()
}
}
impl ActiveModelBehavior for ActiveModel {}

View file

@ -1,3 +1,7 @@
mod buffer_tests;
mod db_tests;
mod feature_flag_tests;
use super::*;
use gpui::executor::Background;
use parking_lot::Mutex;
@ -91,6 +95,26 @@ impl TestDb {
    }
}
#[macro_export]
macro_rules! test_both_dbs {
($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
#[gpui::test]
async fn $postgres_test_name() {
let test_db = crate::db::TestDb::postgres(
gpui::executor::Deterministic::new(0).build_background(),
);
$test_name(test_db.db()).await;
}
#[gpui::test]
async fn $sqlite_test_name() {
let test_db =
crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background());
$test_name(test_db.db()).await;
}
};
}
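For reference, an invocation such as the one used by the channel-buffer tests later in this commit expands to two #[gpui::test] wrappers that run the same body against Postgres and SQLite. Roughly:

// Approximate expansion of:
// test_both_dbs!(test_channel_buffers, test_channel_buffers_postgres, test_channel_buffers_sqlite);
#[gpui::test]
async fn test_channel_buffers_postgres() {
    let test_db =
        crate::db::TestDb::postgres(gpui::executor::Deterministic::new(0).build_background());
    test_channel_buffers(test_db.db()).await;
}

#[gpui::test]
async fn test_channel_buffers_sqlite() {
    let test_db =
        crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background());
    test_channel_buffers(test_db.db()).await;
}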
impl Drop for TestDb {
    fn drop(&mut self) {
        let db = self.db.take().unwrap();

View file

@ -0,0 +1,165 @@
use super::*;
use crate::test_both_dbs;
use language::proto;
use text::Buffer;
test_both_dbs!(
test_channel_buffers,
test_channel_buffers_postgres,
test_channel_buffers_sqlite
);
async fn test_channel_buffers(db: &Arc<Database>) {
let a_id = db
.create_user(
"user_a@example.com",
false,
NewUserParams {
github_login: "user_a".into(),
github_user_id: 101,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let b_id = db
.create_user(
"user_b@example.com",
false,
NewUserParams {
github_login: "user_b".into(),
github_user_id: 102,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
// This user will not be a part of the channel
let c_id = db
.create_user(
"user_c@example.com",
false,
NewUserParams {
github_login: "user_c".into(),
                github_user_id: 103,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let owner_id = db.create_server("production").await.unwrap().0 as u32;
let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap();
db.invite_channel_member(zed_id, b_id, a_id, false)
.await
.unwrap();
db.respond_to_channel_invite(zed_id, b_id, true)
.await
.unwrap();
let connection_id_a = ConnectionId { owner_id, id: 1 };
let _ = db
.join_channel_buffer(zed_id, a_id, connection_id_a)
.await
.unwrap();
let mut buffer_a = Buffer::new(0, 0, "".to_string());
let mut operations = Vec::new();
operations.push(buffer_a.edit([(0..0, "hello world")]));
operations.push(buffer_a.edit([(5..5, ", cruel")]));
operations.push(buffer_a.edit([(0..5, "goodbye")]));
operations.push(buffer_a.undo().unwrap().1);
assert_eq!(buffer_a.text(), "hello, cruel world");
let operations = operations
.into_iter()
.map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
.collect::<Vec<_>>();
db.update_channel_buffer(zed_id, a_id, &operations)
.await
.unwrap();
let connection_id_b = ConnectionId { owner_id, id: 2 };
let buffer_response_b = db
.join_channel_buffer(zed_id, b_id, connection_id_b)
.await
.unwrap();
let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
buffer_b
.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}))
.unwrap();
assert_eq!(buffer_b.text(), "hello, cruel world");
// Ensure that C fails to open the buffer
assert!(db
.join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
.await
.is_err());
// Ensure that both collaborators have shown up
assert_eq!(
buffer_response_b.collaborators,
&[
rpc::proto::Collaborator {
user_id: a_id.to_proto(),
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
replica_id: 0,
},
rpc::proto::Collaborator {
user_id: b_id.to_proto(),
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
replica_id: 1,
}
]
);
// Ensure that get_channel_buffer_collaborators works
    let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
    assert_eq!(zed_collaborators, &[a_id, b_id]);
let collaborators = db
.leave_channel_buffer(zed_id, connection_id_b)
.await
.unwrap();
    assert_eq!(collaborators, &[connection_id_a]);
let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap();
let _ = db
.join_channel_buffer(cargo_id, a_id, connection_id_a)
.await
.unwrap();
db.leave_channel_buffers(connection_id_a).await.unwrap();
let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
assert_eq!(zed_collaborators, &[]);
assert_eq!(cargo_collaborators, &[]);
// When everyone has left the channel, the operations are collapsed into
// a new base text.
let buffer_response_b = db
.join_channel_buffer(zed_id, b_id, connection_id_b)
.await
.unwrap();
assert_eq!(buffer_response_b.base_text, "hello, cruel world");
assert_eq!(buffer_response_b.operations, &[]);
}

View file

@ -0,0 +1,60 @@
use crate::{
db::{Database, NewUserParams},
test_both_dbs,
};
use std::sync::Arc;
test_both_dbs!(
test_get_user_flags,
test_get_user_flags_postgres,
test_get_user_flags_sqlite
);
async fn test_get_user_flags(db: &Arc<Database>) {
let user_1 = db
.create_user(
&format!("user1@example.com"),
false,
NewUserParams {
github_login: format!("user1"),
github_user_id: 1,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
let user_2 = db
.create_user(
&format!("user2@example.com"),
false,
NewUserParams {
github_login: format!("user2"),
github_user_id: 2,
invite_count: 0,
},
)
.await
.unwrap()
.user_id;
    const CHANNELS_ALPHA: &str = "channels-alpha";
    const NEW_SEARCH: &str = "new-search";
let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap();
let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap();
db.add_user_flag(user_1, channels_flag).await.unwrap();
db.add_user_flag(user_1, search_flag).await.unwrap();
db.add_user_flag(user_2, channels_flag).await.unwrap();
let mut user_1_flags = db.get_user_flags(user_1).await.unwrap();
user_1_flags.sort();
assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]);
let mut user_2_flags = db.get_user_flags(user_2).await.unwrap();
user_2_flags.sort();
assert_eq!(user_2_flags, &[CHANNELS_ALPHA]);
}

View file

@ -35,8 +35,8 @@ use lazy_static::lazy_static;
use prometheus::{register_int_gauge, IntGauge};
use rpc::{
    proto::{
-       self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
-       RequestMessage,
+       self, Ack, AddChannelBufferCollaborator, AnyTypedEnvelope, EntityMessage, EnvelopedMessage,
+       LiveKitConnectionInfo, RequestMessage,
    },
    Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
};
@ -248,6 +248,9 @@ impl Server {
            .add_request_handler(remove_channel_member)
            .add_request_handler(set_channel_member_admin)
            .add_request_handler(rename_channel)
            .add_request_handler(join_channel_buffer)
            .add_request_handler(leave_channel_buffer)
            .add_message_handler(update_channel_buffer)
            .add_request_handler(get_channel_members)
            .add_request_handler(respond_to_channel_invite)
            .add_request_handler(join_channel)
@ -851,6 +854,10 @@ async fn connection_lost(
        .await
        .trace_err();
    leave_channel_buffers_for_session(&session)
        .await
        .trace_err();
    futures::select_biased! {
        _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
            leave_room_for_session(&session).await.trace_err();
@ -866,6 +873,8 @@ async fn connection_lost(
                }
            }
            update_user_contacts(session.user_id, &session).await?;
        }
        _ = teardown.changed().fuse() => {}
    }
@ -2478,6 +2487,104 @@ async fn join_channel(
    Ok(())
}
async fn join_channel_buffer(
request: proto::JoinChannelBuffer,
response: Response<proto::JoinChannelBuffer>,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let open_response = db
.join_channel_buffer(channel_id, session.user_id, session.connection_id)
.await?;
let replica_id = open_response.replica_id;
let collaborators = open_response.collaborators.clone();
response.send(open_response)?;
let update = AddChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
collaborator: Some(proto::Collaborator {
user_id: session.user_id.to_proto(),
peer_id: Some(session.connection_id.into()),
replica_id,
}),
};
channel_buffer_updated(
session.connection_id,
collaborators
.iter()
.filter_map(|collaborator| Some(collaborator.peer_id?.into())),
&update,
&session.peer,
);
Ok(())
}
async fn update_channel_buffer(
request: proto::UpdateChannelBuffer,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let collaborators = db
.update_channel_buffer(channel_id, session.user_id, &request.operations)
.await?;
channel_buffer_updated(
session.connection_id,
collaborators,
&proto::UpdateChannelBuffer {
channel_id: channel_id.to_proto(),
operations: request.operations,
},
&session.peer,
);
Ok(())
}
async fn leave_channel_buffer(
request: proto::LeaveChannelBuffer,
response: Response<proto::LeaveChannelBuffer>,
session: Session,
) -> Result<()> {
let db = session.db().await;
let channel_id = ChannelId::from_proto(request.channel_id);
let collaborators_to_notify = db
.leave_channel_buffer(channel_id, session.connection_id)
.await?;
response.send(Ack {})?;
channel_buffer_updated(
session.connection_id,
collaborators_to_notify,
&proto::RemoveChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
peer_id: Some(session.connection_id.into()),
},
&session.peer,
);
Ok(())
}
fn channel_buffer_updated<T: EnvelopedMessage>(
sender_id: ConnectionId,
collaborators: impl IntoIterator<Item = ConnectionId>,
message: &T,
peer: &Peer,
) {
broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| {
peer.send(peer_id.into(), message.clone())
});
}
async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
    let project_id = ProjectId::from_proto(request.project_id);
    let project_connection_ids = session
@ -2502,20 +2609,19 @@ async fn get_private_user_info(
    response: Response<proto::GetPrivateUserInfo>,
    session: Session,
) -> Result<()> {
-   let metrics_id = session
-       .db()
-       .await
-       .get_user_metrics_id(session.user_id)
-       .await?;
-   let user = session
-       .db()
-       .await
+   let db = session.db().await;
+   let metrics_id = db.get_user_metrics_id(session.user_id).await?;
+   let user = db
        .get_user_by_id(session.user_id)
        .await?
        .ok_or_else(|| anyhow!("user not found"))?;
    let flags = db.get_user_flags(session.user_id).await?;
    response.send(proto::GetPrivateUserInfoResponse {
        metrics_id,
        staff: user.admin,
        flags,
    })?;
    Ok(())
}
@ -2803,6 +2909,28 @@ async fn leave_room_for_session(session: &Session) -> Result<()> {
    Ok(())
}
async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
let left_channel_buffers = session
.db()
.await
.leave_channel_buffers(session.connection_id)
.await?;
for (channel_id, connections) in left_channel_buffers {
channel_buffer_updated(
session.connection_id,
connections,
&proto::RemoveChannelBufferCollaborator {
channel_id: channel_id.to_proto(),
peer_id: Some(session.connection_id.into()),
},
&session.peer,
);
}
Ok(())
}
fn project_left(project: &db::LeftProject, session: &Session) {
    for connection_id in &project.connection_ids {
        if project.host_user_id == session.user_id {

View file

@ -1,14 +1,14 @@
use crate::{
-   db::{test_db::TestDb, NewUserParams, UserId},
+   db::{tests::TestDb, NewUserParams, UserId},
    executor::Executor,
    rpc::{Server, CLEANUP_TIMEOUT},
    AppState,
};
use anyhow::anyhow;
use call::{ActiveCall, Room};
use channel::ChannelStore;
use client::{
-   self, proto::PeerId, ChannelStore, Client, Connection, Credentials, EstablishConnectionError,
-   UserStore,
+   self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
};
use collections::{HashMap, HashSet};
use fs::FakeFs;
@ -31,6 +31,7 @@ use std::{
use util::http::FakeHttpClient;
use workspace::Workspace;
mod channel_buffer_tests;
mod channel_tests;
mod integration_tests;
mod randomized_integration_tests;
@ -210,6 +211,7 @@ impl TestServer {
            workspace::init(app_state.clone(), cx);
            audio::init((), cx);
            call::init(client.clone(), user_store.clone(), cx);
            channel::init(&client);
        });
        client

View file

@ -0,0 +1,426 @@
use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
use call::ActiveCall;
use channel::Channel;
use client::UserId;
use collab_ui::channel_view::ChannelView;
use collections::HashMap;
use futures::future;
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
use rpc::{proto, RECEIVE_TIMEOUT};
use serde_json::json;
use std::sync::Arc;
#[gpui::test]
async fn test_core_channel_buffers(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let zed_id = server
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
.await;
// Client A joins the channel buffer
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
// Client A edits the buffer
let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(0..0, "hello world")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(5..5, ", cruel")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(0..5, "goodbye")], None, cx)
});
buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
deterministic.run_until_parked();
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
// Client B joins the channel buffer
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(
buffer.collaborators(),
&[client_a.user_id(), client_b.user_id()],
);
});
// Client B sees the correct text, and then edits it
let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer());
assert_eq!(
buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()),
buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id())
);
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world");
buffer_b.update(cx_b, |buffer, cx| {
buffer.edit([(7..12, "beautiful")], None, cx)
});
// Both A and B see the new edit
deterministic.run_until_parked();
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
// Client A closes the channel buffer.
cx_a.update(|_| drop(channel_buffer_a));
deterministic.run_until_parked();
// Client B sees that client A is gone from the channel buffer.
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});
// Client A rejoins the channel buffer
let _channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channels, cx| {
channels.open_channel_buffer(zed_id, cx)
})
.await
.unwrap();
deterministic.run_until_parked();
// Sanity test, make sure we saw A rejoining
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(
&buffer.collaborators(),
&[client_b.user_id(), client_a.user_id()],
);
});
// Client A loses connection.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
// Client B observes A disconnect
channel_buffer_b.read_with(cx_b, |buffer, _| {
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
});
// TODO:
// - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects
// - Test interaction with channel deletion while buffer is open
}
#[gpui::test]
async fn test_channel_buffer_replica_ids(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
let channel_id = server
.make_channel(
"zed",
(&client_a, cx_a),
&mut [(&client_b, cx_b), (&client_c, cx_c)],
)
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
let active_call_c = cx_c.read(ActiveCall::global);
// Clients A and B join a channel.
active_call_a
.update(cx_a, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
active_call_b
.update(cx_b, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
// Clients A, B, and C join a channel buffer
// C first so that the replica IDs in the project and the channel buffer are different
let channel_buffer_c = client_c
.channel_store()
.update(cx_c, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
// Client B shares a project
client_b
.fs()
.insert_tree("/dir", json!({ "file.txt": "contents" }))
.await;
let (project_b, _) = client_b.build_local_project("/dir", cx_b).await;
let shared_project_id = active_call_b
.update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
.await
.unwrap();
// Client A joins the project
let project_a = client_a.build_remote_project(shared_project_id, cx_a).await;
deterministic.run_until_parked();
// Client C is in a separate project.
client_c.fs().insert_tree("/dir", json!({})).await;
let (separate_project_c, _) = client_c.build_local_project("/dir", cx_c).await;
// Note that each user has a different replica id in the projects vs the
// channel buffer.
channel_buffer_a.read_with(cx_a, |channel_buffer, cx| {
assert_eq!(project_a.read(cx).replica_id(), 1);
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 2);
});
channel_buffer_b.read_with(cx_b, |channel_buffer, cx| {
assert_eq!(project_b.read(cx).replica_id(), 0);
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 1);
});
channel_buffer_c.read_with(cx_c, |channel_buffer, cx| {
// C is not in the project
assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0);
});
let channel_window_a =
cx_a.add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), cx));
let channel_window_b =
cx_b.add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), cx));
let channel_window_c = cx_c.add_window(|cx| {
ChannelView::new(separate_project_c.clone(), channel_buffer_c.clone(), cx)
});
let channel_view_a = channel_window_a.root(cx_a);
let channel_view_b = channel_window_b.root(cx_b);
let channel_view_c = channel_window_c.root(cx_c);
// For clients A and B, the replica ids in the channel buffer are mapped
// so that they match the same users' replica ids in their shared project.
channel_view_a.read_with(cx_a, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<_, _>>()
);
});
channel_view_b.read_with(cx_b, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1)].into_iter().collect::<HashMap<u16, u16>>(),
)
});
// Client C only sees themself, as they're not part of any shared project
channel_view_c.read_with(cx_c, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(0, 0)].into_iter().collect::<HashMap<u16, u16>>(),
);
});
// Client C joins the project that clients A and B are in.
active_call_c
.update(cx_c, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
let project_c = client_c.build_remote_project(shared_project_id, cx_c).await;
deterministic.run_until_parked();
project_c.read_with(cx_c, |project, _| {
assert_eq!(project.replica_id(), 2);
});
// For clients A and B, client C's replica id in the channel buffer is
// now mapped to their replica id in the shared project.
channel_view_a.read_with(cx_a, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1), (0, 2)]
.into_iter()
.collect::<HashMap<_, _>>()
);
});
channel_view_b.read_with(cx_b, |view, cx| {
assert_eq!(
view.editor.read(cx).replica_id_map().unwrap(),
&[(1, 0), (2, 1), (0, 2)]
.into_iter()
.collect::<HashMap<_, _>>(),
)
});
}
#[gpui::test]
async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await;
let channel_buffer_1 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
let channel_buffer_2 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
let channel_buffer_3 = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx));
// All concurrent tasks for opening a channel buffer return the same model handle.
let (channel_buffer_1, channel_buffer_2, channel_buffer_3) =
future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
.await
.unwrap();
let model_id = channel_buffer_1.id();
assert_eq!(channel_buffer_1, channel_buffer_2);
assert_eq!(channel_buffer_1, channel_buffer_3);
channel_buffer_1.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, cx| {
buffer.edit([(0..0, "hello")], None, cx);
})
});
deterministic.run_until_parked();
cx_a.update(|_| {
drop(channel_buffer_1);
drop(channel_buffer_2);
drop(channel_buffer_3);
});
deterministic.run_until_parked();
// The channel buffer can be reopened after dropping it.
let channel_buffer = client_a
.channel_store()
.update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx))
.await
.unwrap();
assert_ne!(channel_buffer.id(), model_id);
channel_buffer.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, _| {
assert_eq!(buffer.text(), "hello");
})
});
}
#[gpui::test]
async fn test_channel_buffer_disconnect(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let channel_id = server
.make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
.await;
let channel_buffer_a = client_a
.channel_store()
.update(cx_a, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |channel, cx| {
channel.open_channel_buffer(channel_id, cx)
})
.await
.unwrap();
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
channel_buffer_a.update(cx_a, |buffer, _| {
assert_eq!(
buffer.channel().as_ref(),
&Channel {
id: channel_id,
name: "zed".to_string()
}
);
assert!(!buffer.is_connected());
});
deterministic.run_until_parked();
server.allow_connections();
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
deterministic.run_until_parked();
client_a
.channel_store()
.update(cx_a, |channel_store, _| {
channel_store.remove_channel(channel_id)
})
.await
.unwrap();
deterministic.run_until_parked();
// Channel buffer observed the deletion
channel_buffer_b.update(cx_b, |buffer, _| {
assert_eq!(
buffer.channel().as_ref(),
&Channel {
id: channel_id,
name: "zed".to_string()
}
);
assert!(!buffer.is_connected());
});
}
#[track_caller]
fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
assert_eq!(
collaborators
.into_iter()
.map(|collaborator| collaborator.user_id)
.collect::<Vec<_>>(),
ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
);
}
fn buffer_text(channel_buffer: &ModelHandle<language::Buffer>, cx: &mut TestAppContext) -> String {
channel_buffer.read_with(cx, |buffer, _| buffer.text())
}

View file

@ -3,7 +3,8 @@ use crate::{
    tests::{room_participants, RoomParticipants, TestServer},
};
use call::ActiveCall;
-use client::{ChannelId, ChannelMembership, ChannelStore, User};
+use channel::{ChannelId, ChannelMembership, ChannelStore};
+use client::User;
use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
use rpc::{proto, RECEIVE_TIMEOUT};
use std::sync::Arc;
@ -798,7 +799,7 @@ async fn test_lost_channel_creation(
    deterministic.run_until_parked();
-   // Sanity check
+   // Sanity check, B has the invitation
    assert_channel_invitations(
        client_b.channel_store(),
        cx_b,
@ -810,6 +811,7 @@ async fn test_lost_channel_creation(
        }],
    );
    // A creates a subchannel while the invite is still pending.
    let subchannel_id = client_a
        .channel_store()
        .update(cx_a, |channel_store, cx| {
@ -840,7 +842,7 @@ async fn test_lost_channel_creation(
        ],
    );
-   // Accept the invite
+   // Client B accepts the invite
    client_b
        .channel_store()
        .update(cx_b, |channel_store, _| {
@ -851,7 +853,7 @@ async fn test_lost_channel_creation(
    deterministic.run_until_parked();
-   // B should now see the channel
+   // Client B should now see the channel
    assert_channels(
        client_b.channel_store(),
        cx_b,

View file

@ -4,7 +4,7 @@ use crate::{
};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
-use collections::HashSet;
+use collections::{BTreeMap, HashSet} is not the change here; the change is:
+use collections::{HashMap, HashSet};
use editor::{
    test::editor_test_context::EditorTestContext, ConfirmCodeAction, ConfirmCompletion,
    ConfirmRename, Editor, ExcerptRange, MultiBuffer, Redo, Rename, ToggleCodeActions, Undo,
@ -4821,15 +4821,16 @@ async fn test_project_search(
    let project_b = client_b.build_remote_project(project_id, cx_b).await;
    // Perform a search as the guest.
-   let results = project_b
-       .update(cx_b, |project, cx| {
-           project.search(
-               SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
-               cx,
-           )
-       })
-       .await
-       .unwrap();
+   let mut results = HashMap::default();
+   let mut search_rx = project_b.update(cx_b, |project, cx| {
+       project.search(
+           SearchQuery::text("world", false, false, Vec::new(), Vec::new()),
+           cx,
+       )
+   });
+   while let Some((buffer, ranges)) = search_rx.next().await {
+       results.entry(buffer).or_insert(ranges);
+   }
    let mut ranges_by_path = results
        .into_iter()
@ -5320,7 +5321,7 @@ async fn test_collaborating_with_code_actions(
        .unwrap();
    let mut fake_language_server = fake_language_servers.next().await.unwrap();
-   fake_language_server
+   let mut requests = fake_language_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri,
@ -5329,9 +5330,9 @@ async fn test_collaborating_with_code_actions(
            assert_eq!(params.range.start, lsp::Position::new(0, 0));
            assert_eq!(params.range.end, lsp::Position::new(0, 0));
            Ok(None)
-       })
-       .next()
-       .await;
+       });
+   deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+   requests.next().await;
    // Move cursor to a location that contains code actions.
    editor_b.update(cx_b, |editor, cx| {
@ -5341,7 +5342,7 @@ async fn test_collaborating_with_code_actions(
        cx.focus(&editor_b);
    });
-   fake_language_server
+   let mut requests = fake_language_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri,
@ -5393,9 +5394,9 @@ async fn test_collaborating_with_code_actions(
                    ..Default::default()
                },
            )]))
-       })
-       .next()
-       .await;
+       });
+   deterministic.advance_clock(editor::CODE_ACTIONS_DEBOUNCE_TIMEOUT * 2);
+   requests.next().await;
    // Toggle code actions and wait for them to display.
    editor_b.update(cx_b, |editor, cx| {
@ -7863,6 +7864,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
    client_a.language_registry().add(Arc::clone(&language));
    client_b.language_registry().add(language);
    // Client A opens a project.
    client_a
        .fs()
        .insert_tree(
@ -7883,6 +7885,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        .await
        .unwrap();
    // Client B joins the project
    let project_b = client_b.build_remote_project(project_id, cx_b).await;
    active_call_b
        .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
@ -7892,6 +7895,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
    let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
    cx_a.foreground().start_waiting();
    // The host opens a rust file.
    let _buffer_a = project_a
        .update(cx_a, |project, cx| {
            project.open_local_buffer("/a/main.rs", cx)
@ -7899,7 +7903,6 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        .await
        .unwrap();
    let fake_language_server = fake_language_servers.next().await.unwrap();
-   let next_call_id = Arc::new(AtomicU32::new(0));
    let editor_a = workspace_a
        .update(cx_a, |workspace, cx| {
            workspace.open_path((worktree_id, "main.rs"), None, true, cx)
@ -7908,6 +7911,9 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        .unwrap()
        .downcast::<Editor>()
        .unwrap();
    // Set up the language server to return an additional inlay hint on each request.
    let next_call_id = Arc::new(AtomicU32::new(0));
    fake_language_server
        .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
            let task_next_call_id = Arc::clone(&next_call_id);
@ -7916,33 +7922,28 @@ async fn test_mutual_editor_inlay_hint_cache_update(
                params.text_document.uri,
                lsp::Url::from_file_path("/a/main.rs").unwrap(),
            );
-           let mut current_call_id = Arc::clone(&task_next_call_id).fetch_add(1, SeqCst);
-           let mut new_hints = Vec::with_capacity(current_call_id as usize);
-           loop {
-               new_hints.push(lsp::InlayHint {
-                   position: lsp::Position::new(0, current_call_id),
-                   label: lsp::InlayHintLabel::String(current_call_id.to_string()),
-                   kind: None,
-                   text_edits: None,
-                   tooltip: None,
-                   padding_left: None,
-                   padding_right: None,
-                   data: None,
-               });
-               if current_call_id == 0 {
-                   break;
-               }
-               current_call_id -= 1;
-           }
-           Ok(Some(new_hints))
+           let call_count = task_next_call_id.fetch_add(1, SeqCst);
+           Ok(Some(
+               (0..=call_count)
+                   .map(|ix| lsp::InlayHint {
+                       position: lsp::Position::new(0, ix),
+                       label: lsp::InlayHintLabel::String(ix.to_string()),
+                       kind: None,
+                       text_edits: None,
+                       tooltip: None,
+                       padding_left: None,
+                       padding_right: None,
+                       data: None,
+                   })
+                   .collect(),
+           ))
        }
    })
    .next()
    .await
    .unwrap();
-   cx_a.foreground().finish_waiting();
-   cx_a.foreground().run_until_parked();
+   deterministic.run_until_parked();
    let mut edits_made = 1;
    editor_a.update(cx_a, |editor, _| {
@ -7968,7 +7969,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        .downcast::<Editor>()
        .unwrap();
-   cx_b.foreground().run_until_parked();
+   deterministic.run_until_parked();
    editor_b.update(cx_b, |editor, _| {
        assert_eq!(
            vec!["0".to_string(), "1".to_string()],
@ -7989,18 +7990,9 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        cx.focus(&editor_b);
        edits_made += 1;
    });
-   cx_a.foreground().run_until_parked();
-   cx_b.foreground().run_until_parked();
+   deterministic.run_until_parked();
    editor_a.update(cx_a, |editor, _| {
-       assert_eq!(
-           vec!["0".to_string(), "1".to_string(), "2".to_string()],
-           extract_hint_labels(editor),
-           "Host should get hints from the 1st edit and 1st LSP query"
-       );
-       let inlay_cache = editor.inlay_hint_cache();
-       assert_eq!(inlay_cache.version(), edits_made);
-   });
-   editor_b.update(cx_b, |editor, _| {
        assert_eq!(
            vec![
                "0".to_string(),
@ -8014,6 +8006,15 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        let inlay_cache = editor.inlay_hint_cache();
        assert_eq!(inlay_cache.version(), edits_made);
    });
editor_b.update(cx_b, |editor, _| {
assert_eq!(
vec!["0".to_string(), "1".to_string(), "2".to_string(),],
extract_hint_labels(editor),
"Guest should get hints the 1st edit and 2nd LSP query"
);
let inlay_cache = editor.inlay_hint_cache();
assert_eq!(inlay_cache.version(), edits_made);
});
    editor_a.update(cx_a, |editor, cx| {
        editor.change_selections(None, cx, |s| s.select_ranges([13..13]));
@ -8021,8 +8022,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        cx.focus(&editor_a);
        edits_made += 1;
    });
-   cx_a.foreground().run_until_parked();
-   cx_b.foreground().run_until_parked();
+   deterministic.run_until_parked();
    editor_a.update(cx_a, |editor, _| {
        assert_eq!(
            vec![
@ -8061,8 +8062,8 @@ async fn test_mutual_editor_inlay_hint_cache_update(
        .await
        .expect("inlay refresh request failed");
    edits_made += 1;
-   cx_a.foreground().run_until_parked();
-   cx_b.foreground().run_until_parked();
+   deterministic.run_until_parked();
    editor_a.update(cx_a, |editor, _| {
        assert_eq!(
            vec![

View file

@ -6,7 +6,7 @@ use crate::{
use anyhow::{anyhow, Result};
use call::ActiveCall;
use client::RECEIVE_TIMEOUT;
-use collections::BTreeMap;
+use collections::{BTreeMap, HashMap};
use editor::Bias;
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt as _;
@ -121,7 +121,9 @@ async fn test_random_collaboration(
    let mut operation_channels = Vec::new();
    loop {
-       let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else { break };
+       let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
+           break;
+       };
        applied.store(true, SeqCst);
        let did_apply = apply_server_operation(
            deterministic.clone(),
@ -224,7 +226,9 @@ async fn apply_server_operation(
            let client_ix = clients
                .iter()
                .position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
-           let Some(client_ix) = client_ix else { return false };
+           let Some(client_ix) = client_ix else {
+               return false;
+           };
            let user_connection_ids = server
                .connection_pool
                .lock()
@ -718,7 +722,7 @@ async fn apply_client_operation(
if detach { "detaching" } else { "awaiting" } if detach { "detaching" } else { "awaiting" }
); );
let search = project.update(cx, |project, cx| { let mut search = project.update(cx, |project, cx| {
project.search( project.search(
SearchQuery::text(query, false, false, Vec::new(), Vec::new()), SearchQuery::text(query, false, false, Vec::new(), Vec::new()),
cx, cx,
@ -726,15 +730,13 @@ async fn apply_client_operation(
            });
            drop(project);
            let search = cx.background().spawn(async move {
-               search
-                   .await
-                   .map_err(|err| anyhow!("search request failed: {:?}", err))
+               let mut results = HashMap::default();
+               while let Some((buffer, ranges)) = search.next().await {
+                   results.entry(buffer).or_insert(ranges);
+               }
+               results
            });
-           if detach {
-               cx.update(|cx| search.detach_and_log_err(cx));
-           } else {
-               search.await?;
-           }
+           search.await;
        }
        ClientOperation::WriteFsEntry {
@ -1591,10 +1593,11 @@ impl TestPlan {
                81.. => match self.rng.gen_range(0..100_u32) {
                    // Add a worktree to a local project
                    0..=50 => {
-                       let Some(project) = client
-                           .local_projects()
-                           .choose(&mut self.rng)
-                           .cloned() else { continue };
+                       let Some(project) =
+                           client.local_projects().choose(&mut self.rng).cloned()
+                       else {
+                           continue;
+                       };
                        let project_root_name = root_name_for_project(&project, cx);
                        let mut paths = client.fs().paths(false);
                        paths.remove(0);
@ -1611,7 +1614,9 @@ impl TestPlan {
                    // Add an entry to a worktree
                    _ => {
-                       let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
+                       let Some(project) = choose_random_project(client, &mut self.rng) else {
+                           continue;
+                       };
                        let project_root_name = root_name_for_project(&project, cx);
                        let is_local = project.read_with(cx, |project, _| project.is_local());
                        let worktree = project.read_with(cx, |project, cx| {
@ -1645,7 +1650,9 @@ impl TestPlan {
                    // Query and mutate buffers
                    60..=90 => {
-                       let Some(project) = choose_random_project(client, &mut self.rng) else { continue };
+                       let Some(project) = choose_random_project(client, &mut self.rng) else {
+                           continue;
+                       };
                        let project_root_name = root_name_for_project(&project, cx);
                        let is_local = project.read_with(cx, |project, _| project.is_local());
@ -1656,7 +1663,10 @@ impl TestPlan {
                            .buffers_for_project(&project)
                            .iter()
                            .choose(&mut self.rng)
-                           .cloned() else { continue };
+                           .cloned()
+                       else {
+                           continue;
+                       };
                        let full_path = buffer
                            .read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
@ -2026,7 +2036,10 @@ async fn simulate_client(
    client.app_state.languages.add(Arc::new(language));
    while let Some(batch_id) = operation_rx.next().await {
-       let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx) else { break };
+       let Some((operation, applied)) = plan.lock().next_client_operation(&client, batch_id, &cx)
+       else {
+           break;
+       };
        applied.store(true, SeqCst);
        match apply_client_operation(&client, operation, &mut cx).await {
            Ok(()) => {}

View file

@ -26,6 +26,7 @@ auto_update = { path = "../auto_update" }
db = { path = "../db" } db = { path = "../db" }
call = { path = "../call" } call = { path = "../call" }
client = { path = "../client" } client = { path = "../client" }
channel = { path = "../channel" }
clock = { path = "../clock" } clock = { path = "../clock" }
collections = { path = "../collections" } collections = { path = "../collections" }
context_menu = { path = "../context_menu" } context_menu = { path = "../context_menu" }
@ -33,12 +34,13 @@ editor = { path = "../editor" }
feedback = { path = "../feedback" } feedback = { path = "../feedback" }
fuzzy = { path = "../fuzzy" } fuzzy = { path = "../fuzzy" }
gpui = { path = "../gpui" } gpui = { path = "../gpui" }
language = { path = "../language" }
menu = { path = "../menu" } menu = { path = "../menu" }
picker = { path = "../picker" } picker = { path = "../picker" }
project = { path = "../project" } project = { path = "../project" }
recent_projects = {path = "../recent_projects"} recent_projects = {path = "../recent_projects"}
settings = { path = "../settings" } settings = { path = "../settings" }
staff_mode = {path = "../staff_mode"} feature_flags = {path = "../feature_flags"}
theme = { path = "../theme" } theme = { path = "../theme" }
theme_selector = { path = "../theme_selector" } theme_selector = { path = "../theme_selector" }
vcs_menu = { path = "../vcs_menu" } vcs_menu = { path = "../vcs_menu" }

View file

@ -0,0 +1,355 @@
use anyhow::{anyhow, Result};
use channel::{
channel_buffer::{self, ChannelBuffer},
ChannelId,
};
use client::proto;
use clock::ReplicaId;
use collections::HashMap;
use editor::Editor;
use gpui::{
actions,
elements::{ChildView, Label},
geometry::vector::Vector2F,
AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View,
ViewContext, ViewHandle,
};
use project::Project;
use std::any::Any;
use workspace::{
item::{FollowableItem, Item, ItemHandle},
register_followable_item,
searchable::SearchableItemHandle,
ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId,
};
actions!(channel_view, [Deploy]);
pub(crate) fn init(cx: &mut AppContext) {
register_followable_item::<ChannelView>(cx)
}
pub struct ChannelView {
pub editor: ViewHandle<Editor>,
project: ModelHandle<Project>,
channel_buffer: ModelHandle<ChannelBuffer>,
remote_id: Option<ViewId>,
_editor_event_subscription: Subscription,
}
impl ChannelView {
pub fn open(
channel_id: ChannelId,
pane: ViewHandle<Pane>,
workspace: ViewHandle<Workspace>,
cx: &mut AppContext,
) -> Task<Result<ViewHandle<Self>>> {
let workspace = workspace.read(cx);
let project = workspace.project().to_owned();
let channel_store = workspace.app_state().channel_store.clone();
let markdown = workspace
.app_state()
.languages
.language_for_name("Markdown");
let channel_buffer =
channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx));
cx.spawn(|mut cx| async move {
let channel_buffer = channel_buffer.await?;
let markdown = markdown.await?;
channel_buffer.update(&mut cx, |buffer, cx| {
buffer.buffer().update(cx, |buffer, cx| {
buffer.set_language(Some(markdown), cx);
})
});
pane.update(&mut cx, |pane, cx| {
pane.items_of_type::<Self>()
.find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer)
.unwrap_or_else(|| cx.add_view(|cx| Self::new(project, channel_buffer, cx)))
})
.ok_or_else(|| anyhow!("pane was dropped"))
})
}
pub fn new(
project: ModelHandle<Project>,
channel_buffer: ModelHandle<ChannelBuffer>,
cx: &mut ViewContext<Self>,
) -> Self {
let buffer = channel_buffer.read(cx).buffer();
// buffer.update(cx, |buffer, cx| buffer.set_language(language, cx));
let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx));
let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone()));
cx.subscribe(&project, Self::handle_project_event).detach();
cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event)
.detach();
let this = Self {
editor,
project,
channel_buffer,
remote_id: None,
_editor_event_subscription,
};
this.refresh_replica_id_map(cx);
this
}
fn handle_project_event(
&mut self,
_: ModelHandle<Project>,
event: &project::Event,
cx: &mut ViewContext<Self>,
) {
match event {
project::Event::RemoteIdChanged(_) => {}
project::Event::DisconnectedFromHost => {}
project::Event::Closed => {}
project::Event::CollaboratorUpdated { .. } => {}
project::Event::CollaboratorLeft(_) => {}
project::Event::CollaboratorJoined(_) => {}
_ => return,
}
self.refresh_replica_id_map(cx);
}
fn handle_channel_buffer_event(
&mut self,
_: ModelHandle<ChannelBuffer>,
event: &channel_buffer::Event,
cx: &mut ViewContext<Self>,
) {
match event {
channel_buffer::Event::CollaboratorsChanged => {
self.refresh_replica_id_map(cx);
}
channel_buffer::Event::Disconnected => self.editor.update(cx, |editor, cx| {
editor.set_read_only(true);
cx.notify();
}),
}
}
/// Build a mapping of channel buffer replica ids to the corresponding
/// replica ids in the current project.
///
/// Using this mapping, a given user can be displayed with the same color
/// in the channel buffer as in other files in the project. Users who are
/// in the channel buffer but not the project will not have a color.
fn refresh_replica_id_map(&self, cx: &mut ViewContext<Self>) {
let mut project_replica_ids_by_channel_buffer_replica_id = HashMap::default();
let project = self.project.read(cx);
let channel_buffer = self.channel_buffer.read(cx);
project_replica_ids_by_channel_buffer_replica_id
.insert(channel_buffer.replica_id(cx), project.replica_id());
project_replica_ids_by_channel_buffer_replica_id.extend(
channel_buffer
.collaborators()
.iter()
.filter_map(|channel_buffer_collaborator| {
project
.collaborators()
.values()
.find_map(|project_collaborator| {
(project_collaborator.user_id == channel_buffer_collaborator.user_id)
.then_some((
channel_buffer_collaborator.replica_id as ReplicaId,
project_collaborator.replica_id,
))
})
}),
);
self.editor.update(cx, |editor, cx| {
editor.set_replica_id_map(Some(project_replica_ids_by_channel_buffer_replica_id), cx)
});
}
}
impl Entity for ChannelView {
type Event = editor::Event;
}
impl View for ChannelView {
fn ui_name() -> &'static str {
"ChannelView"
}
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
ChildView::new(self.editor.as_any(), cx).into_any()
}
fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext<Self>) {
if cx.is_self_focused() {
cx.focus(self.editor.as_any())
}
}
}
impl Item for ChannelView {
fn tab_content<V: 'static>(
&self,
_: Option<usize>,
style: &theme::Tab,
cx: &gpui::AppContext,
) -> AnyElement<V> {
let channel_name = &self.channel_buffer.read(cx).channel().name;
let label = if self.channel_buffer.read(cx).is_connected() {
format!("#{}", channel_name)
} else {
format!("#{} (disconnected)", channel_name)
};
Label::new(label, style.label.to_owned()).into_any()
}
fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self> {
Some(Self::new(
self.project.clone(),
self.channel_buffer.clone(),
cx,
))
}
fn is_singleton(&self, _cx: &AppContext) -> bool {
true
}
fn navigate(&mut self, data: Box<dyn Any>, cx: &mut ViewContext<Self>) -> bool {
self.editor
.update(cx, |editor, cx| editor.navigate(data, cx))
}
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |editor, cx| Item::deactivated(editor, cx))
}
fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext<Self>) {
self.editor
.update(cx, |editor, cx| Item::set_nav_history(editor, history, cx))
}
fn as_searchable(&self, _: &ViewHandle<Self>) -> Option<Box<dyn SearchableItemHandle>> {
Some(Box::new(self.editor.clone()))
}
fn show_toolbar(&self) -> bool {
true
}
fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option<Vector2F> {
self.editor.read(cx).pixel_position_of_cursor(cx)
}
}
impl FollowableItem for ChannelView {
fn remote_id(&self) -> Option<workspace::ViewId> {
self.remote_id
}
fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
let channel = self.channel_buffer.read(cx).channel();
Some(proto::view::Variant::ChannelView(
proto::view::ChannelView {
channel_id: channel.id,
editor: if let Some(proto::view::Variant::Editor(proto)) =
self.editor.read(cx).to_state_proto(cx)
{
Some(proto)
} else {
None
},
},
))
}
fn from_state_proto(
pane: ViewHandle<workspace::Pane>,
workspace: ViewHandle<workspace::Workspace>,
remote_id: workspace::ViewId,
state: &mut Option<proto::view::Variant>,
cx: &mut AppContext,
) -> Option<gpui::Task<anyhow::Result<ViewHandle<Self>>>> {
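        // Peek at the variant without taking it, so that the state is left in
        // place for other followable item types if this isn't a ChannelView.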
let Some(proto::view::Variant::ChannelView(_)) = state else {
return None;
};
let Some(proto::view::Variant::ChannelView(state)) = state.take() else {
unreachable!()
};
let open = ChannelView::open(state.channel_id, pane, workspace, cx);
Some(cx.spawn(|mut cx| async move {
let this = open.await?;
let task = this
.update(&mut cx, |this, cx| {
this.remote_id = Some(remote_id);
if let Some(state) = state.editor {
Some(this.editor.update(cx, |editor, cx| {
editor.apply_update_proto(
&this.project,
proto::update_view::Variant::Editor(proto::update_view::Editor {
selections: state.selections,
pending_selection: state.pending_selection,
scroll_top_anchor: state.scroll_top_anchor,
scroll_x: state.scroll_x,
scroll_y: state.scroll_y,
..Default::default()
}),
cx,
)
}))
} else {
None
}
})
.ok_or_else(|| anyhow!("window was closed"))?;
if let Some(task) = task {
task.await?;
}
Ok(this)
}))
}
fn add_event_to_update_proto(
&self,
event: &Self::Event,
update: &mut Option<proto::update_view::Variant>,
cx: &AppContext,
) -> bool {
self.editor
.read(cx)
.add_event_to_update_proto(event, update, cx)
}
fn apply_update_proto(
&mut self,
project: &ModelHandle<Project>,
message: proto::update_view::Variant,
cx: &mut ViewContext<Self>,
) -> gpui::Task<anyhow::Result<()>> {
self.editor.update(cx, |editor, cx| {
editor.apply_update_proto(project, message, cx)
})
}
fn set_leader_replica_id(
&mut self,
leader_replica_id: Option<u16>,
cx: &mut ViewContext<Self>,
) {
self.editor.update(cx, |editor, cx| {
editor.set_leader_replica_id(leader_replica_id, cx)
})
}
fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool {
Editor::should_unfollow_on_event(event, cx)
}
}
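
// A minimal sketch of the mapping that `refresh_replica_id_map` builds above,
// using hypothetical stand-in types rather than the real ChannelBuffer and
// Project collaborator lists; the join key is the shared user id.
use std::collections::HashMap;

type ReplicaId = u16;
type UserId = u64;

struct Collaborator {
    user_id: UserId,
    replica_id: ReplicaId,
}

// Users who are in the channel buffer but not in the project get no entry, so
// they fall back to an uncolored presentation in the editor.
fn replica_id_map(
    buffer_collaborators: &[Collaborator],
    project_collaborators: &[Collaborator],
) -> HashMap<ReplicaId, ReplicaId> {
    buffer_collaborators
        .iter()
        .filter_map(|buffer_collab| {
            project_collaborators
                .iter()
                .find(|project_collab| project_collab.user_id == buffer_collab.user_id)
                .map(|project_collab| (buffer_collab.replica_id, project_collab.replica_id))
        })
        .collect()
}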

View file

@ -4,28 +4,30 @@ mod panel_settings;
use anyhow::Result; use anyhow::Result;
use call::ActiveCall; use call::ActiveCall;
use client::{ use channel::{Channel, ChannelEvent, ChannelId, ChannelStore};
proto::PeerId, Channel, ChannelEvent, ChannelId, ChannelStore, Client, Contact, User, UserStore, use client::{proto::PeerId, Client, Contact, User, UserStore};
};
use context_menu::{ContextMenu, ContextMenuItem}; use context_menu::{ContextMenu, ContextMenuItem};
use db::kvp::KEY_VALUE_STORE; use db::kvp::KEY_VALUE_STORE;
use editor::{Cancel, Editor}; use editor::{Cancel, Editor};
use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
use futures::StreamExt; use futures::StreamExt;
use fuzzy::{match_strings, StringMatchCandidate}; use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{ use gpui::{
actions, actions,
elements::{ elements::{
Canvas, ChildView, Empty, Flex, Image, Label, List, ListOffset, ListState, Canvas, ChildView, Component, Empty, Flex, Image, Label, List, ListOffset, ListState,
MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, Stack, Svg, MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, SafeStylable,
Stack, Svg,
}, },
fonts::TextStyle,
geometry::{ geometry::{
rect::RectF, rect::RectF,
vector::{vec2f, Vector2F}, vector::{vec2f, Vector2F},
}, },
impl_actions, impl_actions,
platform::{CursorStyle, MouseButton, PromptLevel}, platform::{CursorStyle, MouseButton, PromptLevel},
serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, ModelHandle, serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle,
Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
}; };
use menu::{Confirm, SelectNext, SelectPrev}; use menu::{Confirm, SelectNext, SelectPrev};
@ -33,9 +35,8 @@ use panel_settings::{CollaborationPanelDockPosition, CollaborationPanelSettings}
use project::{Fs, Project}; use project::{Fs, Project};
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use settings::SettingsStore; use settings::SettingsStore;
use staff_mode::StaffMode;
use std::{borrow::Cow, mem, sync::Arc}; use std::{borrow::Cow, mem, sync::Arc};
use theme::IconButton; use theme::{components::ComponentExt, IconButton};
use util::{iife, ResultExt, TryFutureExt}; use util::{iife, ResultExt, TryFutureExt};
use workspace::{ use workspace::{
dock::{DockPosition, Panel}, dock::{DockPosition, Panel},
@ -43,7 +44,10 @@ use workspace::{
Workspace, Workspace,
}; };
use crate::face_pile::FacePile; use crate::{
channel_view::{self, ChannelView},
face_pile::FacePile,
};
use channel_modal::ChannelModal; use channel_modal::ChannelModal;
use self::contact_finder::ContactFinder; use self::contact_finder::ContactFinder;
@ -53,6 +57,11 @@ struct RemoveChannel {
channel_id: u64, channel_id: u64,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct ToggleCollapse {
channel_id: u64,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct NewChannel { struct NewChannel {
channel_id: u64, channel_id: u64,
@ -73,7 +82,21 @@ struct RenameChannel {
channel_id: u64, channel_id: u64,
} }
actions!(collab_panel, [ToggleFocus, Remove, Secondary]); #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct OpenChannelBuffer {
channel_id: u64,
}
actions!(
collab_panel,
[
ToggleFocus,
Remove,
Secondary,
CollapseSelectedChannel,
ExpandSelectedChannel
]
);
impl_actions!( impl_actions!(
collab_panel, collab_panel,
@ -82,7 +105,9 @@ impl_actions!(
NewChannel, NewChannel,
InviteMembers, InviteMembers,
ManageMembers, ManageMembers,
RenameChannel RenameChannel,
ToggleCollapse,
OpenChannelBuffer
] ]
); );
@ -92,6 +117,7 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
settings::register::<panel_settings::CollaborationPanelSettings>(cx); settings::register::<panel_settings::CollaborationPanelSettings>(cx);
contact_finder::init(cx); contact_finder::init(cx);
channel_modal::init(cx); channel_modal::init(cx);
channel_view::init(cx);
cx.add_action(CollabPanel::cancel); cx.add_action(CollabPanel::cancel);
cx.add_action(CollabPanel::select_next); cx.add_action(CollabPanel::select_next);
@ -105,6 +131,10 @@ pub fn init(_client: Arc<Client>, cx: &mut AppContext) {
cx.add_action(CollabPanel::manage_members); cx.add_action(CollabPanel::manage_members);
cx.add_action(CollabPanel::rename_selected_channel); cx.add_action(CollabPanel::rename_selected_channel);
cx.add_action(CollabPanel::rename_channel); cx.add_action(CollabPanel::rename_channel);
cx.add_action(CollabPanel::toggle_channel_collapsed);
cx.add_action(CollabPanel::collapse_selected_channel);
cx.add_action(CollabPanel::expand_selected_channel);
cx.add_action(CollabPanel::open_channel_buffer);
} }
#[derive(Debug)] #[derive(Debug)]
@ -147,13 +177,15 @@ pub struct CollabPanel {
list_state: ListState<Self>, list_state: ListState<Self>,
subscriptions: Vec<Subscription>, subscriptions: Vec<Subscription>,
collapsed_sections: Vec<Section>, collapsed_sections: Vec<Section>,
collapsed_channels: Vec<ChannelId>,
workspace: WeakViewHandle<Workspace>, workspace: WeakViewHandle<Workspace>,
context_menu_on_selected: bool, context_menu_on_selected: bool,
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
struct SerializedChannelsPanel { struct SerializedCollabPanel {
width: Option<f32>, width: Option<f32>,
collapsed_channels: Option<Vec<ChannelId>>,
} }
#[derive(Debug)] #[derive(Debug)]
@ -198,6 +230,9 @@ enum ListEntry {
channel: Arc<Channel>, channel: Arc<Channel>,
depth: usize, depth: usize,
}, },
ChannelNotes {
channel_id: ChannelId,
},
ChannelEditor { ChannelEditor {
depth: usize, depth: usize,
}, },
@ -341,6 +376,12 @@ impl CollabPanel {
return channel_row; return channel_row;
} }
} }
ListEntry::ChannelNotes { channel_id } => this.render_channel_notes(
*channel_id,
&theme.collab_panel,
is_selected,
cx,
),
ListEntry::ChannelInvite(channel) => Self::render_channel_invite( ListEntry::ChannelInvite(channel) => Self::render_channel_invite(
channel.clone(), channel.clone(),
this.channel_store.clone(), this.channel_store.clone(),
@ -398,6 +439,7 @@ impl CollabPanel {
subscriptions: Vec::default(), subscriptions: Vec::default(),
match_candidates: Vec::default(), match_candidates: Vec::default(),
collapsed_sections: vec![Section::Offline], collapsed_sections: vec![Section::Offline],
collapsed_channels: Vec::default(),
workspace: workspace.weak_handle(), workspace: workspace.weak_handle(),
client: workspace.app_state().client.clone(), client: workspace.app_state().client.clone(),
context_menu_on_selected: true, context_menu_on_selected: true,
@ -431,9 +473,10 @@ impl CollabPanel {
})); }));
this.subscriptions this.subscriptions
.push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx))); .push(cx.observe(&active_call, |this, _, cx| this.update_entries(true, cx)));
this.subscriptions.push( this.subscriptions
cx.observe_global::<StaffMode, _>(move |this, cx| this.update_entries(true, cx)), .push(cx.observe_flag::<ChannelsAlpha, _>(move |_, this, cx| {
); this.update_entries(true, cx)
}));
this.subscriptions.push(cx.subscribe( this.subscriptions.push(cx.subscribe(
&this.channel_store, &this.channel_store,
|this, _channel_store, e, cx| match e { |this, _channel_store, e, cx| match e {
@ -469,7 +512,7 @@ impl CollabPanel {
.log_err() .log_err()
.flatten() .flatten()
{ {
Some(serde_json::from_str::<SerializedChannelsPanel>(&panel)?) Some(serde_json::from_str::<SerializedCollabPanel>(&panel)?)
} else { } else {
None None
}; };
@ -479,6 +522,9 @@ impl CollabPanel {
if let Some(serialized_panel) = serialized_panel { if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| { panel.update(cx, |panel, cx| {
panel.width = serialized_panel.width; panel.width = serialized_panel.width;
panel.collapsed_channels = serialized_panel
.collapsed_channels
.unwrap_or_else(|| Vec::new());
cx.notify(); cx.notify();
}); });
} }
@ -489,12 +535,16 @@ impl CollabPanel {
fn serialize(&mut self, cx: &mut ViewContext<Self>) { fn serialize(&mut self, cx: &mut ViewContext<Self>) {
let width = self.width; let width = self.width;
let collapsed_channels = self.collapsed_channels.clone();
self.pending_serialization = cx.background().spawn( self.pending_serialization = cx.background().spawn(
async move { async move {
KEY_VALUE_STORE KEY_VALUE_STORE
.write_kvp( .write_kvp(
COLLABORATION_PANEL_KEY.into(), COLLABORATION_PANEL_KEY.into(),
serde_json::to_string(&SerializedChannelsPanel { width })?, serde_json::to_string(&SerializedCollabPanel {
width,
collapsed_channels: Some(collapsed_channels),
})?,
) )
.await?; .await?;
anyhow::Ok(()) anyhow::Ok(())
@ -518,6 +568,10 @@ impl CollabPanel {
if !self.collapsed_sections.contains(&Section::ActiveCall) { if !self.collapsed_sections.contains(&Section::ActiveCall) {
let room = room.read(cx); let room = room.read(cx);
if let Some(channel_id) = room.channel_id() {
self.entries.push(ListEntry::ChannelNotes { channel_id })
}
// Populate the active user. // Populate the active user.
if let Some(user) = user_store.current_user() { if let Some(user) = user_store.current_user() {
self.match_candidates.clear(); self.match_candidates.clear();
@ -622,7 +676,8 @@ impl CollabPanel {
} }
let mut request_entries = Vec::new(); let mut request_entries = Vec::new();
if self.include_channels_section(cx) {
if cx.has_flag::<ChannelsAlpha>() {
self.entries.push(ListEntry::Header(Section::Channels, 0)); self.entries.push(ListEntry::Header(Section::Channels, 0));
if channel_store.channel_count() > 0 || self.channel_editing_state.is_some() { if channel_store.channel_count() > 0 || self.channel_editing_state.is_some() {
@ -657,10 +712,24 @@ impl CollabPanel {
self.entries.push(ListEntry::ChannelEditor { depth: 0 }); self.entries.push(ListEntry::ChannelEditor { depth: 0 });
} }
} }
let mut collapse_depth = None;
for mat in matches { for mat in matches {
let (depth, channel) = let (depth, channel) =
channel_store.channel_at_index(mat.candidate_id).unwrap(); channel_store.channel_at_index(mat.candidate_id).unwrap();
if collapse_depth.is_none() && self.is_channel_collapsed(channel.id) {
collapse_depth = Some(depth);
} else if let Some(collapsed_depth) = collapse_depth {
if depth > collapsed_depth {
continue;
}
if self.is_channel_collapsed(channel.id) {
collapse_depth = Some(depth);
} else {
collapse_depth = None;
}
}
match &self.channel_editing_state { match &self.channel_editing_state {
Some(ChannelEditingState::Create { parent_id, .. }) Some(ChannelEditingState::Create { parent_id, .. })
if *parent_id == Some(channel.id) => if *parent_id == Some(channel.id) =>
@ -963,25 +1032,19 @@ impl CollabPanel {
) -> AnyElement<Self> { ) -> AnyElement<Self> {
enum JoinProject {} enum JoinProject {}
let font_cache = cx.font_cache(); let host_avatar_width = theme
let host_avatar_height = theme
.contact_avatar .contact_avatar
.width .width
.or(theme.contact_avatar.height) .or(theme.contact_avatar.height)
.unwrap_or(0.); .unwrap_or(0.);
let row = &theme.project_row.inactive_state().default;
let tree_branch = theme.tree_branch; let tree_branch = theme.tree_branch;
let line_height = row.name.text.line_height(font_cache);
let cap_height = row.name.text.cap_height(font_cache);
let baseline_offset =
row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.;
let project_name = if worktree_root_names.is_empty() { let project_name = if worktree_root_names.is_empty() {
"untitled".to_string() "untitled".to_string()
} else { } else {
worktree_root_names.join(", ") worktree_root_names.join(", ")
}; };
MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, _| { MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state); let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
let row = theme let row = theme
.project_row .project_row
@ -989,39 +1052,20 @@ impl CollabPanel {
.style_for(mouse_state); .style_for(mouse_state);
Flex::row() Flex::row()
.with_child(render_tree_branch(
tree_branch,
&row.name.text,
is_last,
vec2f(host_avatar_width, theme.row_height),
cx.font_cache(),
))
.with_child( .with_child(
Stack::new() Svg::new("icons/file_icons/folder.svg")
.with_child(Canvas::new(move |scene, bounds, _, _, _| { .with_color(theme.channel_hash.color)
let start_x =
bounds.min_x() + (bounds.width() / 2.) - (tree_branch.width / 2.);
let end_x = bounds.max_x();
let start_y = bounds.min_y();
let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, start_y),
vec2f(
start_x + tree_branch.width,
if is_last { end_y } else { bounds.max_y() },
),
),
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, end_y),
vec2f(end_x, end_y + tree_branch.width),
),
background: Some(tree_branch.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
}))
.constrained() .constrained()
.with_width(host_avatar_height), .with_width(theme.channel_hash.width)
.aligned()
.left(),
) )
.with_child( .with_child(
Label::new(project_name, row.name.text.clone()) Label::new(project_name, row.name.text.clone())
@ -1196,7 +1240,7 @@ impl CollabPanel {
}); });
if let Some(name) = channel_name { if let Some(name) = channel_name {
Cow::Owned(format!("Current Call - #{}", name)) Cow::Owned(format!("#{}", name))
} else { } else {
Cow::Borrowed("Current Call") Cow::Borrowed("Current Call")
} }
@ -1332,7 +1376,7 @@ impl CollabPanel {
.with_cursor_style(CursorStyle::PointingHand) .with_cursor_style(CursorStyle::PointingHand)
.on_click(MouseButton::Left, move |_, this, cx| { .on_click(MouseButton::Left, move |_, this, cx| {
if can_collapse { if can_collapse {
this.toggle_expanded(section, cx); this.toggle_section_expanded(section, cx);
} }
}) })
} }
@ -1479,6 +1523,11 @@ impl CollabPanel {
cx: &AppContext, cx: &AppContext,
) -> AnyElement<Self> { ) -> AnyElement<Self> {
Flex::row() Flex::row()
.with_child(
Empty::new()
.constrained()
.with_width(theme.collab_panel.disclosure.button_space()),
)
.with_child( .with_child(
Svg::new("icons/hash.svg") Svg::new("icons/hash.svg")
.with_color(theme.collab_panel.channel_hash.color) .with_color(theme.collab_panel.channel_hash.color)
@ -1537,6 +1586,10 @@ impl CollabPanel {
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) -> AnyElement<Self> { ) -> AnyElement<Self> {
let channel_id = channel.id; let channel_id = channel.id;
let has_children = self.channel_store.read(cx).has_children(channel_id);
let disclosed =
has_children.then(|| !self.collapsed_channels.binary_search(&channel_id).is_ok());
let is_active = iife!({ let is_active = iife!({
let call_channel = ActiveCall::global(cx) let call_channel = ActiveCall::global(cx)
.read(cx) .read(cx)
@ -1550,7 +1603,7 @@ impl CollabPanel {
const FACEPILE_LIMIT: usize = 3; const FACEPILE_LIMIT: usize = 3;
MouseEventHandler::new::<Channel, _>(channel.id as usize, cx, |state, cx| { MouseEventHandler::new::<Channel, _>(channel.id as usize, cx, |state, cx| {
Flex::row() Flex::<Self>::row()
.with_child( .with_child(
Svg::new("icons/hash.svg") Svg::new("icons/hash.svg")
.with_color(theme.channel_hash.color) .with_color(theme.channel_hash.color)
@ -1599,6 +1652,11 @@ impl CollabPanel {
} }
}) })
.align_children_center() .align_children_center()
.styleable_component()
.disclosable(disclosed, Box::new(ToggleCollapse { channel_id }))
.with_id(channel_id as usize)
.with_style(theme.disclosure.clone())
.element()
.constrained() .constrained()
.with_height(theme.row_height) .with_height(theme.row_height)
.contained() .contained()
@ -1618,6 +1676,61 @@ impl CollabPanel {
.into_any() .into_any()
} }
fn render_channel_notes(
&self,
channel_id: ChannelId,
theme: &theme::CollabPanel,
is_selected: bool,
cx: &mut ViewContext<Self>,
) -> AnyElement<Self> {
enum ChannelNotes {}
let host_avatar_width = theme
.contact_avatar
.width
.or(theme.contact_avatar.height)
.unwrap_or(0.);
MouseEventHandler::new::<ChannelNotes, _>(channel_id as usize, cx, |state, cx| {
let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
let row = theme.project_row.in_state(is_selected).style_for(state);
Flex::<Self>::row()
.with_child(render_tree_branch(
tree_branch,
&row.name.text,
true,
vec2f(host_avatar_width, theme.row_height),
cx.font_cache(),
))
.with_child(
Svg::new("icons/radix/file.svg")
.with_color(theme.channel_hash.color)
.constrained()
.with_width(theme.channel_hash.width)
.aligned()
.left(),
)
.with_child(
Label::new("notes", theme.channel_name.text.clone())
.contained()
.with_style(theme.channel_name.container)
.aligned()
.left()
.flex(1., true),
)
.constrained()
.with_height(theme.row_height)
.contained()
.with_style(*theme.channel_row.style_for(is_selected, state))
.with_padding_left(theme.channel_row.default_style().padding.left)
})
.on_click(MouseButton::Left, move |_, this, cx| {
this.open_channel_buffer(&OpenChannelBuffer { channel_id }, cx);
})
.with_cursor_style(CursorStyle::PointingHand)
.into_any()
}
fn render_channel_invite( fn render_channel_invite(
channel: Arc<Channel>, channel: Arc<Channel>,
channel_store: ModelHandle<ChannelStore>, channel_store: ModelHandle<ChannelStore>,
@ -1801,53 +1914,58 @@ impl CollabPanel {
.into_any() .into_any()
} }
fn include_channels_section(&self, cx: &AppContext) -> bool {
if cx.has_global::<StaffMode>() {
cx.global::<StaffMode>().0
} else {
false
}
}
fn deploy_channel_context_menu( fn deploy_channel_context_menu(
&mut self, &mut self,
position: Option<Vector2F>, position: Option<Vector2F>,
channel_id: u64, channel_id: u64,
cx: &mut ViewContext<Self>, cx: &mut ViewContext<Self>,
) { ) {
if self.channel_store.read(cx).is_user_admin(channel_id) { self.context_menu_on_selected = position.is_none();
self.context_menu_on_selected = position.is_none();
self.context_menu.update(cx, |context_menu, cx| { self.context_menu.update(cx, |context_menu, cx| {
context_menu.set_position_mode(if self.context_menu_on_selected { context_menu.set_position_mode(if self.context_menu_on_selected {
OverlayPositionMode::Local OverlayPositionMode::Local
} else { } else {
OverlayPositionMode::Window OverlayPositionMode::Window
});
context_menu.show(
position.unwrap_or_default(),
if self.context_menu_on_selected {
gpui::elements::AnchorCorner::TopRight
} else {
gpui::elements::AnchorCorner::BottomLeft
},
vec![
ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
ContextMenuItem::Separator,
ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }),
ContextMenuItem::Separator,
ContextMenuItem::action("Rename", RenameChannel { channel_id }),
ContextMenuItem::action("Manage", ManageMembers { channel_id }),
ContextMenuItem::Separator,
ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
],
cx,
);
}); });
cx.notify(); let expand_action_name = if self.is_channel_collapsed(channel_id) {
} "Expand Subchannels"
} else {
"Collapse Subchannels"
};
let mut items = vec![
ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }),
ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }),
];
if self.channel_store.read(cx).is_user_admin(channel_id) {
items.extend([
ContextMenuItem::Separator,
ContextMenuItem::action("New Subchannel", NewChannel { channel_id }),
ContextMenuItem::action("Rename", RenameChannel { channel_id }),
ContextMenuItem::Separator,
ContextMenuItem::action("Invite Members", InviteMembers { channel_id }),
ContextMenuItem::action("Manage Members", ManageMembers { channel_id }),
ContextMenuItem::Separator,
ContextMenuItem::action("Delete", RemoveChannel { channel_id }),
]);
}
context_menu.show(
position.unwrap_or_default(),
if self.context_menu_on_selected {
gpui::elements::AnchorCorner::TopRight
} else {
gpui::elements::AnchorCorner::BottomLeft
},
items,
cx,
);
});
cx.notify();
} }
fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) { fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext<Self>) {
@ -1912,7 +2030,7 @@ impl CollabPanel {
| Section::Online | Section::Online
| Section::Offline | Section::Offline
| Section::ChannelInvites => { | Section::ChannelInvites => {
self.toggle_expanded(*section, cx); self.toggle_section_expanded(*section, cx);
} }
}, },
ListEntry::Contact { contact, calling } => { ListEntry::Contact { contact, calling } => {
@ -2000,7 +2118,7 @@ impl CollabPanel {
} }
} }
fn toggle_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) { fn toggle_section_expanded(&mut self, section: Section, cx: &mut ViewContext<Self>) {
if let Some(ix) = self.collapsed_sections.iter().position(|s| *s == section) { if let Some(ix) = self.collapsed_sections.iter().position(|s| *s == section) {
self.collapsed_sections.remove(ix); self.collapsed_sections.remove(ix);
} else { } else {
@ -2009,6 +2127,55 @@ impl CollabPanel {
self.update_entries(false, cx); self.update_entries(false, cx);
} }
fn collapse_selected_channel(
&mut self,
_: &CollapseSelectedChannel,
cx: &mut ViewContext<Self>,
) {
let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
return;
};
if self.is_channel_collapsed(channel_id) {
return;
}
self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
}
fn expand_selected_channel(&mut self, _: &ExpandSelectedChannel, cx: &mut ViewContext<Self>) {
let Some(channel_id) = self.selected_channel().map(|channel| channel.id) else {
return;
};
if !self.is_channel_collapsed(channel_id) {
return;
}
self.toggle_channel_collapsed(&ToggleCollapse { channel_id }, cx)
}
fn toggle_channel_collapsed(&mut self, action: &ToggleCollapse, cx: &mut ViewContext<Self>) {
let channel_id = action.channel_id;
match self.collapsed_channels.binary_search(&channel_id) {
Ok(ix) => {
self.collapsed_channels.remove(ix);
}
Err(ix) => {
self.collapsed_channels.insert(ix, channel_id);
}
};
self.serialize(cx);
self.update_entries(true, cx);
cx.notify();
cx.focus_self();
}
fn is_channel_collapsed(&self, channel: ChannelId) -> bool {
self.collapsed_channels.binary_search(&channel).is_ok()
}
fn leave_call(cx: &mut ViewContext<Self>) { fn leave_call(cx: &mut ViewContext<Self>) {
ActiveCall::global(cx) ActiveCall::global(cx)
.update(cx, |call, cx| call.hang_up(cx)) .update(cx, |call, cx| call.hang_up(cx))
@ -2048,6 +2215,8 @@ impl CollabPanel {
} }
fn new_subchannel(&mut self, action: &NewChannel, cx: &mut ViewContext<Self>) { fn new_subchannel(&mut self, action: &NewChannel, cx: &mut ViewContext<Self>) {
self.collapsed_channels
.retain(|&channel| channel != action.channel_id);
self.channel_editing_state = Some(ChannelEditingState::Create { self.channel_editing_state = Some(ChannelEditingState::Create {
parent_id: Some(action.channel_id), parent_id: Some(action.channel_id),
pending_name: None, pending_name: None,
@ -2103,6 +2272,21 @@ impl CollabPanel {
} }
} }
fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext<Self>) {
if let Some(workspace) = self.workspace.upgrade(cx) {
let pane = workspace.read(cx).active_pane().clone();
let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx);
cx.spawn(|_, mut cx| async move {
let channel_view = channel_view.await?;
pane.update(&mut cx, |pane, cx| {
pane.add_item(Box::new(channel_view), true, true, None, cx)
});
anyhow::Ok(())
})
.detach();
}
}
fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) { fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext<Self>) {
let Some(channel) = self.selected_channel() else { let Some(channel) = self.selected_channel() else {
return; return;
@ -2261,6 +2445,51 @@ impl CollabPanel {
} }
} }
fn render_tree_branch(
branch_style: theme::TreeBranch,
row_style: &TextStyle,
is_last: bool,
size: Vector2F,
font_cache: &FontCache,
) -> gpui::elements::ConstrainedBox<CollabPanel> {
let line_height = row_style.line_height(font_cache);
let cap_height = row_style.cap_height(font_cache);
let baseline_offset = row_style.baseline_offset(font_cache) + (size.y() - line_height) / 2.;
Canvas::new(move |scene, bounds, _, _, _| {
scene.paint_layer(None, |scene| {
let start_x = bounds.min_x() + (bounds.width() / 2.) - (branch_style.width / 2.);
let end_x = bounds.max_x();
let start_y = bounds.min_y();
let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.);
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, start_y),
vec2f(
start_x + branch_style.width,
if is_last { end_y } else { bounds.max_y() },
),
),
background: Some(branch_style.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
scene.push_quad(gpui::Quad {
bounds: RectF::from_points(
vec2f(start_x, end_y),
vec2f(end_x, end_y + branch_style.width),
),
background: Some(branch_style.color),
border: gpui::Border::default(),
corner_radii: (0.).into(),
});
})
})
.constrained()
.with_width(size.x())
}
impl View for CollabPanel { impl View for CollabPanel {
fn ui_name() -> &'static str { fn ui_name() -> &'static str {
"CollabPanel" "CollabPanel"
@ -2470,6 +2699,14 @@ impl PartialEq for ListEntry {
return channel_1.id == channel_2.id && depth_1 == depth_2; return channel_1.id == channel_2.id && depth_1 == depth_2;
} }
} }
ListEntry::ChannelNotes { channel_id } => {
if let ListEntry::ChannelNotes {
channel_id: other_id,
} = other
{
return channel_id == other_id;
}
}
ListEntry::ChannelInvite(channel_1) => { ListEntry::ChannelInvite(channel_1) => {
if let ListEntry::ChannelInvite(channel_2) = other { if let ListEntry::ChannelInvite(channel_2) = other {
return channel_1.id == channel_2.id; return channel_1.id == channel_2.id;
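
// A minimal sketch of the collapse filtering used in `update_entries` above,
// assuming channels arrive as a depth-first list of (depth, id) pairs; the
// function and parameter names here are hypothetical.
fn visible_channels(channels: &[(usize, u64)], is_collapsed: impl Fn(u64) -> bool) -> Vec<u64> {
    let mut visible = Vec::new();
    let mut collapse_depth: Option<usize> = None;
    for &(depth, id) in channels {
        if collapse_depth.is_none() && is_collapsed(id) {
            // Keep the collapsed channel itself visible, but start hiding its subtree.
            collapse_depth = Some(depth);
        } else if let Some(collapsed_depth) = collapse_depth {
            if depth > collapsed_depth {
                // Still inside the collapsed subtree: hide this entry.
                continue;
            }
            // Back at or above the collapsed ancestor: either this channel is
            // itself collapsed, or hiding stops entirely.
            collapse_depth = if is_collapsed(id) { Some(depth) } else { None };
        }
        visible.push(id);
    }
    visible
}

// For example, with channels [(0, 1), (1, 2), (2, 3), (1, 4)] and channel 2
// collapsed, the visible list is [1, 2, 4]: channel 3 is hidden while the
// collapsed channel 2 itself stays visible.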

View file

@ -1,4 +1,5 @@
use client::{proto, ChannelId, ChannelMembership, ChannelStore, User, UserId, UserStore}; use channel::{ChannelId, ChannelMembership, ChannelStore};
use client::{proto, User, UserId, UserStore};
use context_menu::{ContextMenu, ContextMenuItem}; use context_menu::{ContextMenu, ContextMenuItem};
use fuzzy::{match_strings, StringMatchCandidate}; use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{ use gpui::{
@ -151,12 +152,9 @@ impl View for ChannelModal {
let theme = &theme::current(cx).collab_panel.tabbed_modal; let theme = &theme::current(cx).collab_panel.tabbed_modal;
let mode = self.picker.read(cx).delegate().mode; let mode = self.picker.read(cx).delegate().mode;
let Some(channel) = self let Some(channel) = self.channel_store.read(cx).channel_for_id(self.channel_id) else {
.channel_store return Empty::new().into_any();
.read(cx) };
.channel_for_id(self.channel_id) else {
return Empty::new().into_any()
};
enum InviteMembers {} enum InviteMembers {}
enum ManageMembers {} enum ManageMembers {}

View file

@ -1,3 +1,4 @@
pub mod channel_view;
pub mod collab_panel; pub mod collab_panel;
mod collab_titlebar_item; mod collab_titlebar_item;
mod contact_notification; mod contact_notification;

View file

@ -0,0 +1,18 @@
[package]
name = "component_test"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
path = "src/component_test.rs"
doctest = false
[dependencies]
anyhow.workspace = true
gpui = { path = "../gpui" }
settings = { path = "../settings" }
util = { path = "../util" }
theme = { path = "../theme" }
workspace = { path = "../workspace" }
project = { path = "../project" }

View file

@ -0,0 +1,121 @@
use gpui::{
actions,
elements::{Component, Flex, ParentElement, SafeStylable},
AppContext, Element, Entity, ModelHandle, Task, View, ViewContext, ViewHandle, WeakViewHandle,
};
use project::Project;
use theme::components::{action_button::Button, label::Label, ComponentExt};
use workspace::{
item::Item, register_deserializable_item, ItemId, Pane, PaneBackdrop, Workspace, WorkspaceId,
};
pub fn init(cx: &mut AppContext) {
cx.add_action(ComponentTest::toggle_disclosure);
cx.add_action(ComponentTest::toggle_toggle);
cx.add_action(ComponentTest::deploy);
register_deserializable_item::<ComponentTest>(cx);
}
actions!(
test,
[NoAction, ToggleDisclosure, ToggleToggle, NewComponentTest]
);
struct ComponentTest {
disclosed: bool,
toggled: bool,
}
impl ComponentTest {
fn new() -> Self {
Self {
disclosed: false,
toggled: false,
}
}
fn deploy(workspace: &mut Workspace, _: &NewComponentTest, cx: &mut ViewContext<Workspace>) {
workspace.add_item(Box::new(cx.add_view(|_| ComponentTest::new())), cx);
}
fn toggle_disclosure(&mut self, _: &ToggleDisclosure, cx: &mut ViewContext<Self>) {
self.disclosed = !self.disclosed;
cx.notify();
}
fn toggle_toggle(&mut self, _: &ToggleToggle, cx: &mut ViewContext<Self>) {
self.toggled = !self.toggled;
cx.notify();
}
}
impl Entity for ComponentTest {
type Event = ();
}
impl View for ComponentTest {
fn ui_name() -> &'static str {
"Component Test"
}
fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
let theme = theme::current(cx);
PaneBackdrop::new(
cx.view_id(),
Flex::column()
.with_spacing(10.)
.with_child(
Button::action(NoAction)
.with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
.with_contents(Label::new("Click me!"))
.with_style(theme.component_test.button.clone())
.element(),
)
.with_child(
Button::action(ToggleToggle)
.with_tooltip("Here's what a tooltip looks like", theme.tooltip.clone())
.with_contents(Label::new("Toggle me!"))
.toggleable(self.toggled)
.with_style(theme.component_test.toggle.clone())
.element(),
)
.with_child(
Label::new("A disclosure")
.disclosable(Some(self.disclosed), Box::new(ToggleDisclosure))
.with_style(theme.component_test.disclosure.clone())
.element(),
)
.constrained()
.with_width(200.)
.aligned()
.into_any(),
)
.into_any()
}
}
impl Item for ComponentTest {
fn tab_content<V: 'static>(
&self,
_: Option<usize>,
style: &theme::Tab,
_: &AppContext,
) -> gpui::AnyElement<V> {
gpui::elements::Label::new("Component test", style.label.clone()).into_any()
}
fn serialized_item_kind() -> Option<&'static str> {
Some("ComponentTest")
}
fn deserialize(
_project: ModelHandle<Project>,
_workspace: WeakViewHandle<Workspace>,
_workspace_id: WorkspaceId,
_item_id: ItemId,
cx: &mut ViewContext<Pane>,
) -> Task<anyhow::Result<ViewHandle<Self>>> {
Task::ready(Ok(cx.add_view(|_| Self::new())))
}
}

View file

@ -980,7 +980,7 @@ mod tests {
deterministic.forbid_parking(); deterministic.forbid_parking();
let (copilot, mut lsp) = Copilot::fake(cx); let (copilot, mut lsp) = Copilot::fake(cx);
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "Hello", cx)); let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello"));
let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap(); let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx)); copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
assert_eq!( assert_eq!(
@ -996,7 +996,7 @@ mod tests {
} }
); );
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "Goodbye", cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye"));
let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap(); let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap();
copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx)); copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
assert_eq!( assert_eq!(

View file

@ -4,7 +4,10 @@ mod inlay_map;
mod tab_map; mod tab_map;
mod wrap_map; mod wrap_map;
use crate::{Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint}; use crate::{
link_go_to_definition::{DocumentRange, InlayRange},
Anchor, AnchorRangeExt, InlayId, MultiBuffer, MultiBufferSnapshot, ToOffset, ToPoint,
};
pub use block_map::{BlockMap, BlockPoint}; pub use block_map::{BlockMap, BlockPoint};
use collections::{HashMap, HashSet}; use collections::{HashMap, HashSet};
use fold_map::FoldMap; use fold_map::FoldMap;
@ -27,7 +30,8 @@ pub use block_map::{
BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock, TransformBlock, BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock, TransformBlock,
}; };
pub use self::inlay_map::Inlay; pub use self::fold_map::FoldPoint;
pub use self::inlay_map::{Inlay, InlayOffset, InlayPoint};
#[derive(Copy, Clone, Debug, PartialEq, Eq)] #[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FoldStatus { pub enum FoldStatus {
@ -39,7 +43,7 @@ pub trait ToDisplayPoint {
fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint; fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint;
} }
type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>; type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<DocumentRange>)>>;
pub struct DisplayMap { pub struct DisplayMap {
buffer: ModelHandle<MultiBuffer>, buffer: ModelHandle<MultiBuffer>,
@ -211,11 +215,28 @@ impl DisplayMap {
ranges: Vec<Range<Anchor>>, ranges: Vec<Range<Anchor>>,
style: HighlightStyle, style: HighlightStyle,
) { ) {
self.text_highlights self.text_highlights.insert(
.insert(Some(type_id), Arc::new((style, ranges))); Some(type_id),
Arc::new((style, ranges.into_iter().map(DocumentRange::Text).collect())),
);
} }
pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> { pub fn highlight_inlays(
&mut self,
type_id: TypeId,
ranges: Vec<InlayRange>,
style: HighlightStyle,
) {
self.text_highlights.insert(
Some(type_id),
Arc::new((
style,
ranges.into_iter().map(DocumentRange::Inlay).collect(),
)),
);
}
pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[DocumentRange])> {
let highlights = self.text_highlights.get(&Some(type_id))?; let highlights = self.text_highlights.get(&Some(type_id))?;
Some((highlights.0, &highlights.1)) Some((highlights.0, &highlights.1))
} }
@ -223,7 +244,7 @@ impl DisplayMap {
pub fn clear_text_highlights( pub fn clear_text_highlights(
&mut self, &mut self,
type_id: TypeId, type_id: TypeId,
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> { ) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
self.text_highlights.remove(&Some(type_id)) self.text_highlights.remove(&Some(type_id))
} }
@ -290,7 +311,7 @@ impl DisplayMap {
pub struct DisplaySnapshot { pub struct DisplaySnapshot {
pub buffer_snapshot: MultiBufferSnapshot, pub buffer_snapshot: MultiBufferSnapshot,
fold_snapshot: fold_map::FoldSnapshot, pub fold_snapshot: fold_map::FoldSnapshot,
inlay_snapshot: inlay_map::InlaySnapshot, inlay_snapshot: inlay_map::InlaySnapshot,
tab_snapshot: tab_map::TabSnapshot, tab_snapshot: tab_map::TabSnapshot,
wrap_snapshot: wrap_map::WrapSnapshot, wrap_snapshot: wrap_map::WrapSnapshot,
@ -387,12 +408,49 @@ impl DisplaySnapshot {
} }
fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point { fn display_point_to_point(&self, point: DisplayPoint, bias: Bias) -> Point {
self.inlay_snapshot
.to_buffer_point(self.display_point_to_inlay_point(point, bias))
}
pub fn display_point_to_inlay_offset(&self, point: DisplayPoint, bias: Bias) -> InlayOffset {
self.inlay_snapshot
.to_offset(self.display_point_to_inlay_point(point, bias))
}
pub fn anchor_to_inlay_offset(&self, anchor: Anchor) -> InlayOffset {
self.inlay_snapshot
.to_inlay_offset(anchor.to_offset(&self.buffer_snapshot))
}
pub fn inlay_offset_to_display_point(&self, offset: InlayOffset, bias: Bias) -> DisplayPoint {
let inlay_point = self.inlay_snapshot.to_point(offset);
let fold_point = self.fold_snapshot.to_fold_point(inlay_point, bias);
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
}
fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {
let block_point = point.0; let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point); let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point); let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0; let fold_point = self.tab_snapshot.to_fold_point(tab_point, bias).0;
let inlay_point = fold_point.to_inlay_point(&self.fold_snapshot); fold_point.to_inlay_point(&self.fold_snapshot)
self.inlay_snapshot.to_buffer_point(inlay_point) }
pub fn display_point_to_fold_point(&self, point: DisplayPoint, bias: Bias) -> FoldPoint {
let block_point = point.0;
let wrap_point = self.block_snapshot.to_wrap_point(block_point);
let tab_point = self.wrap_snapshot.to_tab_point(wrap_point);
self.tab_snapshot.to_fold_point(tab_point, bias).0
}
pub fn fold_point_to_display_point(&self, fold_point: FoldPoint) -> DisplayPoint {
let tab_point = self.tab_snapshot.to_tab_point(fold_point);
let wrap_point = self.wrap_snapshot.tab_point_to_wrap_point(tab_point);
let block_point = self.block_snapshot.to_block_point(wrap_point);
DisplayPoint(block_point)
} }
pub fn max_point(&self) -> DisplayPoint { pub fn max_point(&self) -> DisplayPoint {
@ -428,15 +486,15 @@ impl DisplaySnapshot {
&self, &self,
display_rows: Range<u32>, display_rows: Range<u32>,
language_aware: bool, language_aware: bool,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> DisplayChunks<'_> { ) -> DisplayChunks<'_> {
self.block_snapshot.chunks( self.block_snapshot.chunks(
display_rows, display_rows,
language_aware, language_aware,
Some(&self.text_highlights), Some(&self.text_highlights),
hint_highlights, hint_highlight_style,
suggestion_highlights, suggestion_highlight_style,
) )
} }
@ -757,7 +815,7 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
pub fn highlight_ranges<Tag: ?Sized + 'static>( pub fn highlight_ranges<Tag: ?Sized + 'static>(
&self, &self,
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> { ) -> Option<Arc<(HighlightStyle, Vec<DocumentRange>)>> {
let type_id = TypeId::of::<Tag>(); let type_id = TypeId::of::<Tag>();
self.text_highlights.get(&Some(type_id)).cloned() self.text_highlights.get(&Some(type_id)).cloned()
} }
@ -1319,7 +1377,8 @@ pub mod tests {
cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap()))); cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await; buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@ -1408,7 +1467,8 @@ pub mod tests {
cx.update(|cx| init_test(cx, |_| {})); cx.update(|cx| init_test(cx, |_| {}));
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await; buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
@ -1480,7 +1540,8 @@ pub mod tests {
let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false); let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer = cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
buffer.condition(cx, |buf, _| !buf.is_parsing()).await; buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));

View file

@ -589,8 +589,8 @@ impl BlockSnapshot {
rows: Range<u32>, rows: Range<u32>,
language_aware: bool, language_aware: bool,
text_highlights: Option<&'a TextHighlights>, text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> BlockChunks<'a> { ) -> BlockChunks<'a> {
let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows); let max_output_row = cmp::min(rows.end, self.transforms.summary().output_rows);
let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(); let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>();
@ -623,8 +623,8 @@ impl BlockSnapshot {
input_start..input_end, input_start..input_end,
language_aware, language_aware,
text_highlights, text_highlights,
hint_highlights, hint_highlight_style,
suggestion_highlights, suggestion_highlight_style,
), ),
input_chunk: Default::default(), input_chunk: Default::default(),
transforms: cursor, transforms: cursor,

View file

@ -652,8 +652,8 @@ impl FoldSnapshot {
range: Range<FoldOffset>, range: Range<FoldOffset>,
language_aware: bool, language_aware: bool,
text_highlights: Option<&'a TextHighlights>, text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> FoldChunks<'a> { ) -> FoldChunks<'a> {
let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>(); let mut transform_cursor = self.transforms.cursor::<(FoldOffset, InlayOffset)>();
@ -675,8 +675,8 @@ impl FoldSnapshot {
inlay_start..inlay_end, inlay_start..inlay_end,
language_aware, language_aware,
text_highlights, text_highlights,
hint_highlights, hint_highlight_style,
suggestion_highlights, suggestion_highlight_style,
), ),
inlay_chunk: None, inlay_chunk: None,
inlay_offset: inlay_start, inlay_offset: inlay_start,

View file

@ -1,4 +1,5 @@
use crate::{ use crate::{
link_go_to_definition::DocumentRange,
multi_buffer::{MultiBufferChunks, MultiBufferRows}, multi_buffer::{MultiBufferChunks, MultiBufferRows},
Anchor, InlayId, MultiBufferSnapshot, ToOffset, Anchor, InlayId, MultiBufferSnapshot, ToOffset,
}; };
@ -183,7 +184,7 @@ pub struct InlayBufferRows<'a> {
max_buffer_row: u32, max_buffer_row: u32,
} }
#[derive(Copy, Clone, Eq, PartialEq)] #[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct HighlightEndpoint { struct HighlightEndpoint {
offset: InlayOffset, offset: InlayOffset,
is_start: bool, is_start: bool,
@ -210,6 +211,7 @@ pub struct InlayChunks<'a> {
buffer_chunks: MultiBufferChunks<'a>, buffer_chunks: MultiBufferChunks<'a>,
buffer_chunk: Option<Chunk<'a>>, buffer_chunk: Option<Chunk<'a>>,
inlay_chunks: Option<text::Chunks<'a>>, inlay_chunks: Option<text::Chunks<'a>>,
inlay_chunk: Option<&'a str>,
output_offset: InlayOffset, output_offset: InlayOffset,
max_output_offset: InlayOffset, max_output_offset: InlayOffset,
hint_highlight_style: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
@ -297,13 +299,31 @@ impl<'a> Iterator for InlayChunks<'a> {
- self.transforms.start().0; - self.transforms.start().0;
inlay.text.chunks_in_range(start.0..end.0) inlay.text.chunks_in_range(start.0..end.0)
}); });
let inlay_chunk = self
.inlay_chunk
.get_or_insert_with(|| inlay_chunks.next().unwrap());
let (chunk, remainder) = inlay_chunk.split_at(
inlay_chunk
.len()
.min(next_highlight_endpoint.0 - self.output_offset.0),
);
*inlay_chunk = remainder;
if inlay_chunk.is_empty() {
self.inlay_chunk = None;
}
let chunk = inlay_chunks.next().unwrap();
self.output_offset.0 += chunk.len(); self.output_offset.0 += chunk.len();
let highlight_style = match inlay.id { let mut highlight_style = match inlay.id {
InlayId::Suggestion(_) => self.suggestion_highlight_style, InlayId::Suggestion(_) => self.suggestion_highlight_style,
InlayId::Hint(_) => self.hint_highlight_style, InlayId::Hint(_) => self.hint_highlight_style,
}; };
if !self.active_highlights.is_empty() {
for active_highlight in self.active_highlights.values() {
highlight_style
.get_or_insert(Default::default())
.highlight(*active_highlight);
}
}
Chunk { Chunk {
text: chunk, text: chunk,
highlight_style, highlight_style,
@ -973,8 +993,8 @@ impl InlaySnapshot {
range: Range<InlayOffset>, range: Range<InlayOffset>,
language_aware: bool, language_aware: bool,
text_highlights: Option<&'a TextHighlights>, text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> InlayChunks<'a> { ) -> InlayChunks<'a> {
let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>(); let mut cursor = self.transforms.cursor::<(InlayOffset, usize)>();
cursor.seek(&range.start, Bias::Right, &()); cursor.seek(&range.start, Bias::Right, &());
@ -983,52 +1003,56 @@ impl InlaySnapshot {
if let Some(text_highlights) = text_highlights { if let Some(text_highlights) = text_highlights {
if !text_highlights.is_empty() { if !text_highlights.is_empty() {
while cursor.start().0 < range.end { while cursor.start().0 < range.end {
if true { let transform_start = self.buffer.anchor_after(
let transform_start = self.buffer.anchor_after( self.to_buffer_offset(cmp::max(range.start, cursor.start().0)),
self.to_buffer_offset(cmp::max(range.start, cursor.start().0)), );
); let transform_start =
self.to_inlay_offset(transform_start.to_offset(&self.buffer));
let transform_end = { let transform_end = {
let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0); let overshoot = InlayOffset(range.end.0 - cursor.start().0 .0);
self.buffer.anchor_before(self.to_buffer_offset(cmp::min( self.buffer.anchor_before(self.to_buffer_offset(cmp::min(
cursor.end(&()).0, cursor.end(&()).0,
cursor.start().0 + overshoot, cursor.start().0 + overshoot,
))) )))
}; };
let transform_end = self.to_inlay_offset(transform_end.to_offset(&self.buffer));
for (tag, highlights) in text_highlights.iter() { for (tag, text_highlights) in text_highlights.iter() {
let style = highlights.0; let style = text_highlights.0;
let ranges = &highlights.1; let ranges = &text_highlights.1;
let start_ix = match ranges.binary_search_by(|probe| { let start_ix = match ranges.binary_search_by(|probe| {
let cmp = probe.end.cmp(&transform_start, &self.buffer); let cmp = self
if cmp.is_gt() { .document_to_inlay_range(probe)
cmp::Ordering::Greater .end
} else { .cmp(&transform_start);
cmp::Ordering::Less if cmp.is_gt() {
} cmp::Ordering::Greater
}) { } else {
Ok(i) | Err(i) => i, cmp::Ordering::Less
};
for range in &ranges[start_ix..] {
if range.start.cmp(&transform_end, &self.buffer).is_ge() {
break;
}
highlight_endpoints.push(HighlightEndpoint {
offset: self
.to_inlay_offset(range.start.to_offset(&self.buffer)),
is_start: true,
tag: *tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
offset: self.to_inlay_offset(range.end.to_offset(&self.buffer)),
is_start: false,
tag: *tag,
style,
});
} }
}) {
Ok(i) | Err(i) => i,
};
for range in &ranges[start_ix..] {
let range = self.document_to_inlay_range(range);
if range.start.cmp(&transform_end).is_ge() {
break;
}
highlight_endpoints.push(HighlightEndpoint {
offset: range.start,
is_start: true,
tag: *tag,
style,
});
highlight_endpoints.push(HighlightEndpoint {
offset: range.end,
is_start: false,
tag: *tag,
style,
});
} }
} }
@ -1046,17 +1070,30 @@ impl InlaySnapshot {
transforms: cursor, transforms: cursor,
buffer_chunks, buffer_chunks,
inlay_chunks: None, inlay_chunks: None,
inlay_chunk: None,
buffer_chunk: None, buffer_chunk: None,
output_offset: range.start, output_offset: range.start,
max_output_offset: range.end, max_output_offset: range.end,
hint_highlight_style: hint_highlights, hint_highlight_style,
suggestion_highlight_style: suggestion_highlights, suggestion_highlight_style,
highlight_endpoints: highlight_endpoints.into_iter().peekable(), highlight_endpoints: highlight_endpoints.into_iter().peekable(),
active_highlights: Default::default(), active_highlights: Default::default(),
snapshot: self, snapshot: self,
} }
} }
fn document_to_inlay_range(&self, range: &DocumentRange) -> Range<InlayOffset> {
match range {
DocumentRange::Text(text_range) => {
self.to_inlay_offset(text_range.start.to_offset(&self.buffer))
..self.to_inlay_offset(text_range.end.to_offset(&self.buffer))
}
DocumentRange::Inlay(inlay_range) => {
inlay_range.highlight_start..inlay_range.highlight_end
}
}
}
#[cfg(test)] #[cfg(test)]
pub fn text(&self) -> String { pub fn text(&self) -> String {
self.chunks(Default::default()..self.len(), false, None, None, None) self.chunks(Default::default()..self.len(), false, None, None, None)
@ -1107,13 +1144,12 @@ fn push_isomorphic(sum_tree: &mut SumTree<Transform>, summary: TextSummary) {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::{InlayId, MultiBuffer}; use crate::{link_go_to_definition::InlayRange, InlayId, MultiBuffer};
use gpui::AppContext; use gpui::AppContext;
use project::{InlayHint, InlayHintLabel}; use project::{InlayHint, InlayHintLabel, ResolveState};
use rand::prelude::*; use rand::prelude::*;
use settings::SettingsStore; use settings::SettingsStore;
use std::{cmp::Reverse, env, sync::Arc}; use std::{cmp::Reverse, env, sync::Arc};
use sum_tree::TreeMap;
use text::Patch; use text::Patch;
use util::post_inc; use util::post_inc;
@ -1125,12 +1161,12 @@ mod tests {
Anchor::min(), Anchor::min(),
&InlayHint { &InlayHint {
label: InlayHintLabel::String("a".to_string()), label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(), position: text::Anchor::default(),
padding_left: false, padding_left: false,
padding_right: false, padding_right: false,
tooltip: None, tooltip: None,
kind: None, kind: None,
resolve_state: ResolveState::Resolved,
}, },
) )
.text .text
@ -1145,12 +1181,12 @@ mod tests {
Anchor::min(), Anchor::min(),
&InlayHint { &InlayHint {
label: InlayHintLabel::String("a".to_string()), label: InlayHintLabel::String("a".to_string()),
buffer_id: 0,
position: text::Anchor::default(), position: text::Anchor::default(),
padding_left: true, padding_left: true,
padding_right: true, padding_right: true,
tooltip: None, tooltip: None,
kind: None, kind: None,
resolve_state: ResolveState::Resolved,
}, },
) )
.text .text
@ -1165,12 +1201,12 @@ mod tests {
Anchor::min(), Anchor::min(),
&InlayHint { &InlayHint {
label: InlayHintLabel::String(" a ".to_string()), label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(), position: text::Anchor::default(),
padding_left: false, padding_left: false,
padding_right: false, padding_right: false,
tooltip: None, tooltip: None,
kind: None, kind: None,
resolve_state: ResolveState::Resolved,
}, },
) )
.text .text
@ -1185,12 +1221,12 @@ mod tests {
Anchor::min(), Anchor::min(),
&InlayHint { &InlayHint {
label: InlayHintLabel::String(" a ".to_string()), label: InlayHintLabel::String(" a ".to_string()),
buffer_id: 0,
position: text::Anchor::default(), position: text::Anchor::default(),
padding_left: true, padding_left: true,
padding_right: true, padding_right: true,
tooltip: None, tooltip: None,
kind: None, kind: None,
resolve_state: ResolveState::Resolved,
}, },
) )
.text .text
@ -1542,26 +1578,6 @@ mod tests {
let mut buffer_snapshot = buffer.read(cx).snapshot(cx); let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
let mut next_inlay_id = 0; let mut next_inlay_id = 0;
log::info!("buffer text: {:?}", buffer_snapshot.text()); log::info!("buffer text: {:?}", buffer_snapshot.text());
let mut highlights = TreeMap::default();
let highlight_count = rng.gen_range(0_usize..10);
let mut highlight_ranges = (0..highlight_count)
.map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
.collect::<Vec<_>>();
highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
log::info!("highlighting ranges {:?}", highlight_ranges);
let highlight_ranges = highlight_ranges
.into_iter()
.map(|range| {
buffer_snapshot.anchor_before(range.start)..buffer_snapshot.anchor_after(range.end)
})
.collect::<Vec<_>>();
highlights.insert(
Some(TypeId::of::<()>()),
Arc::new((HighlightStyle::default(), highlight_ranges)),
);
let (mut inlay_map, mut inlay_snapshot) = InlayMap::new(buffer_snapshot.clone()); let (mut inlay_map, mut inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
for _ in 0..operations { for _ in 0..operations {
let mut inlay_edits = Patch::default(); let mut inlay_edits = Patch::default();
@ -1624,6 +1640,38 @@ mod tests {
); );
} }
let mut highlights = TextHighlights::default();
let highlight_count = rng.gen_range(0_usize..10);
let mut highlight_ranges = (0..highlight_count)
.map(|_| buffer_snapshot.random_byte_range(0, &mut rng))
.collect::<Vec<_>>();
highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
log::info!("highlighting ranges {:?}", highlight_ranges);
let highlight_ranges = if rng.gen_bool(0.5) {
highlight_ranges
.into_iter()
.map(|range| InlayRange {
inlay_position: buffer_snapshot.anchor_before(range.start),
highlight_start: inlay_snapshot.to_inlay_offset(range.start),
highlight_end: inlay_snapshot.to_inlay_offset(range.end),
})
.map(DocumentRange::Inlay)
.collect::<Vec<_>>()
} else {
highlight_ranges
.into_iter()
.map(|range| {
buffer_snapshot.anchor_before(range.start)
..buffer_snapshot.anchor_after(range.end)
})
.map(DocumentRange::Text)
.collect::<Vec<_>>()
};
highlights.insert(
Some(TypeId::of::<()>()),
Arc::new((HighlightStyle::default(), highlight_ranges)),
);
for _ in 0..5 { for _ in 0..5 {
let mut end = rng.gen_range(0..=inlay_snapshot.len().0); let mut end = rng.gen_range(0..=inlay_snapshot.len().0);
end = expected_text.clip_offset(end, Bias::Right); end = expected_text.clip_offset(end, Bias::Right);

View file

@ -224,8 +224,8 @@ impl TabSnapshot {
range: Range<TabPoint>, range: Range<TabPoint>,
language_aware: bool, language_aware: bool,
text_highlights: Option<&'a TextHighlights>, text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> TabChunks<'a> { ) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) = let (input_start, expanded_char_column, to_next_stop) =
self.to_fold_point(range.start, Bias::Left); self.to_fold_point(range.start, Bias::Left);
@ -246,8 +246,8 @@ impl TabSnapshot {
input_start..input_end, input_start..input_end,
language_aware, language_aware,
text_highlights, text_highlights,
hint_highlights, hint_highlight_style,
suggestion_highlights, suggestion_highlight_style,
), ),
input_column, input_column,
column: expanded_char_column, column: expanded_char_column,

View file

@ -576,8 +576,8 @@ impl WrapSnapshot {
rows: Range<u32>, rows: Range<u32>,
language_aware: bool, language_aware: bool,
text_highlights: Option<&'a TextHighlights>, text_highlights: Option<&'a TextHighlights>,
hint_highlights: Option<HighlightStyle>, hint_highlight_style: Option<HighlightStyle>,
suggestion_highlights: Option<HighlightStyle>, suggestion_highlight_style: Option<HighlightStyle>,
) -> WrapChunks<'a> { ) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0); let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0); let output_end = WrapPoint::new(rows.end, 0);
@ -595,8 +595,8 @@ impl WrapSnapshot {
input_start..input_end, input_start..input_end,
language_aware, language_aware,
text_highlights, text_highlights,
hint_highlights, hint_highlight_style,
suggestion_highlights, suggestion_highlight_style,
), ),
input_chunk: Default::default(), input_chunk: Default::default(),
output_position: output_start, output_position: output_start,

File diff suppressed because it is too large

View file

@ -9,6 +9,7 @@ pub struct EditorSettings {
pub show_completions_on_input: bool, pub show_completions_on_input: bool,
pub use_on_type_format: bool, pub use_on_type_format: bool,
pub scrollbar: Scrollbar, pub scrollbar: Scrollbar,
pub relative_line_numbers: bool,
} }
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
@ -34,6 +35,7 @@ pub struct EditorSettingsContent {
pub show_completions_on_input: Option<bool>, pub show_completions_on_input: Option<bool>,
pub use_on_type_format: Option<bool>, pub use_on_type_format: Option<bool>,
pub scrollbar: Option<ScrollbarContent>, pub scrollbar: Option<ScrollbarContent>,
pub relative_line_numbers: Option<bool>,
} }
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]

View file

@ -42,7 +42,7 @@ fn test_edit_events(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
let mut buffer = language::Buffer::new(0, "123456", cx); let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "123456");
buffer.set_group_interval(Duration::from_secs(1)); buffer.set_group_interval(Duration::from_secs(1));
buffer buffer
}); });
@ -174,7 +174,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let mut now = Instant::now(); let mut now = Instant::now();
let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx)); let buffer = cx.add_model(|cx| language::Buffer::new(0, cx.model_id() as u64, "123456"));
let group_interval = buffer.read_with(cx, |buffer, _| buffer.transaction_group_interval()); let group_interval = buffer.read_with(cx, |buffer, _| buffer.transaction_group_interval());
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx let editor = cx
@ -247,7 +247,7 @@ fn test_ime_composition(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
let mut buffer = language::Buffer::new(0, "abcde", cx); let mut buffer = language::Buffer::new(0, cx.model_id() as u64, "abcde");
// Ensure automatic grouping doesn't occur. // Ensure automatic grouping doesn't occur.
buffer.set_group_interval(Duration::ZERO); buffer.set_group_interval(Duration::ZERO);
buffer buffer
@ -1434,6 +1434,74 @@ async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
}); });
} }
#[gpui::test]
async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let line_height = cx.update_editor(|editor, cx| {
editor.set_vertical_scroll_margin(2, cx);
editor.style(cx).text.line_height(cx.font_cache())
});
let window = cx.window;
window.simulate_resize(vec2f(1000., 6.0 * line_height), &mut cx);
cx.set_state(
&r#"ˇone
two
three
four
five
six
seven
eight
nine
ten
"#,
);
cx.update_editor(|editor, cx| {
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 0.0));
});
// Add a cursor below the visible area. Since both cursors cannot fit
// on screen, the editor autoscrolls to reveal the newest cursor, and
// allows the vertical scroll margin below that cursor.
cx.update_editor(|editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
selections.select_ranges([
Point::new(0, 0)..Point::new(0, 0),
Point::new(6, 0)..Point::new(6, 0),
]);
})
});
cx.update_editor(|editor, cx| {
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 3.0));
});
// Move down. The editor cursor scrolls down to track the newest cursor.
cx.update_editor(|editor, cx| {
editor.move_down(&Default::default(), cx);
});
cx.update_editor(|editor, cx| {
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 4.0));
});
// Add a cursor above the visible area. Since both cursors fit on screen,
// the editor scrolls to show both.
cx.update_editor(|editor, cx| {
editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
selections.select_ranges([
Point::new(1, 0)..Point::new(1, 0),
Point::new(6, 0)..Point::new(6, 0),
]);
})
});
cx.update_editor(|editor, cx| {
assert_eq!(editor.snapshot(cx).scroll_position(), vec2f(0., 1.0));
});
}
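One way to read the expected scroll positions in this test (informal arithmetic, not editor code; the viewport is six lines tall and the vertical scroll margin is two):
// cursor on row 6, two-row margin below it: rows 3..=8 must be visible -> scroll_top 3.0
// after move_down the cursor is on row 7:   rows 4..=9 must be visible -> scroll_top 4.0
// cursors on rows 1 and 6 both fit at once: rows 1..=6 are shown       -> scroll_top 1.0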
#[gpui::test] #[gpui::test]
async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) { async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
@ -2213,10 +2281,12 @@ fn test_indent_outdent_with_excerpts(cx: &mut TestAppContext) {
None, None,
)); ));
let toml_buffer = let toml_buffer = cx.add_model(|cx| {
cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx)); Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n").with_language(toml_language, cx)
});
let rust_buffer = cx.add_model(|cx| { let rust_buffer = cx.add_model(|cx| {
Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx) Buffer::new(0, cx.model_id() as u64, "const c: usize = 3;\n")
.with_language(rust_language, cx)
}); });
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
@ -3686,7 +3756,8 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx); let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@ -3849,7 +3920,8 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}"; let text = "fn a() {}";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx); let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
editor editor
@ -4412,7 +4484,8 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx); let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@ -4560,7 +4633,8 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx); let editor = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
editor editor
@ -5766,7 +5840,7 @@ async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) { fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts( multibuffer.push_excerpts(
@ -5850,7 +5924,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
primary: None, primary: None,
} }
}); });
let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, initial_text));
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts(buffer, excerpt_ranges, cx); multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
@ -5908,7 +5982,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
fn test_refresh_selections(cx: &mut TestAppContext) { fn test_refresh_selections(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let mut excerpt1_id = None; let mut excerpt1_id = None;
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
@ -5995,7 +6069,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) { fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(3, 4, 'a')));
let mut excerpt1_id = None; let mut excerpt1_id = None;
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
@ -6092,7 +6166,8 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
"{{} }\n", // "{{} }\n", //
); );
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx)); let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx); let view = cx.add_window(|cx| build_editor(buffer, cx)).root(cx);
view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx)) view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
@ -6384,7 +6459,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
.update(|cx| { .update(|cx| {
Editor::from_state_proto( Editor::from_state_proto(
pane.clone(), pane.clone(),
project.clone(), workspace.clone(),
ViewId { ViewId {
creator: Default::default(), creator: Default::default(),
id: 0, id: 0,
@ -6479,7 +6554,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
.update(|cx| { .update(|cx| {
Editor::from_state_proto( Editor::from_state_proto(
pane.clone(), pane.clone(),
project.clone(), workspace.clone(),
ViewId { ViewId {
creator: Default::default(), creator: Default::default(),
id: 0, id: 0,
@ -7092,8 +7167,8 @@ async fn test_copilot_multibuffer(
let (copilot, copilot_lsp) = Copilot::fake(cx); let (copilot, copilot_lsp) = Copilot::fake(cx);
cx.update(|cx| cx.set_global(copilot)); cx.update(|cx| cx.set_global(copilot));
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx)); let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a = 1\nb = 2\n"));
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "c = 3\nd = 4\n", cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "c = 3\nd = 4\n"));
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts( multibuffer.push_excerpts(

View file

@ -13,6 +13,7 @@ use crate::{
}, },
link_go_to_definition::{ link_go_to_definition::{
go_to_fetched_definition, go_to_fetched_type_definition, update_go_to_definition_link, go_to_fetched_definition, go_to_fetched_type_definition, update_go_to_definition_link,
update_inlay_link_and_hover_points, GoToDefinitionTrigger,
}, },
mouse_context_menu, EditorSettings, EditorStyle, GutterHover, UnfoldAt, mouse_context_menu, EditorSettings, EditorStyle, GutterHover, UnfoldAt,
}; };
@ -62,6 +63,7 @@ struct SelectionLayout {
head: DisplayPoint, head: DisplayPoint,
cursor_shape: CursorShape, cursor_shape: CursorShape,
is_newest: bool, is_newest: bool,
is_local: bool,
range: Range<DisplayPoint>, range: Range<DisplayPoint>,
active_rows: Range<u32>, active_rows: Range<u32>,
} }
@ -73,6 +75,7 @@ impl SelectionLayout {
cursor_shape: CursorShape, cursor_shape: CursorShape,
map: &DisplaySnapshot, map: &DisplaySnapshot,
is_newest: bool, is_newest: bool,
is_local: bool,
) -> Self { ) -> Self {
let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot)); let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot));
let display_selection = point_selection.map(|p| p.to_display_point(map)); let display_selection = point_selection.map(|p| p.to_display_point(map));
@ -109,6 +112,7 @@ impl SelectionLayout {
head, head,
cursor_shape, cursor_shape,
is_newest, is_newest,
is_local,
range, range,
active_rows, active_rows,
} }
@ -284,13 +288,13 @@ impl EditorElement {
return false; return false;
} }
let (position, target_position) = position_map.point_for_position(text_bounds, position); let point_for_position = position_map.point_for_position(text_bounds, position);
let position = point_for_position.previous_valid;
if shift && alt { if shift && alt {
editor.select( editor.select(
SelectPhase::BeginColumnar { SelectPhase::BeginColumnar {
position, position,
goal_column: target_position.column(), goal_column: point_for_position.exact_unclipped.column(),
}, },
cx, cx,
); );
@ -326,9 +330,13 @@ impl EditorElement {
if !text_bounds.contains_point(position) { if !text_bounds.contains_point(position) {
return false; return false;
} }
let point_for_position = position_map.point_for_position(text_bounds, position);
let (point, _) = position_map.point_for_position(text_bounds, position); mouse_context_menu::deploy_context_menu(
mouse_context_menu::deploy_context_menu(editor, position, point, cx); editor,
position,
point_for_position.previous_valid,
cx,
);
true true
} }
@ -350,17 +358,15 @@ impl EditorElement {
} }
if !pending_nonempty_selections && cmd && text_bounds.contains_point(position) { if !pending_nonempty_selections && cmd && text_bounds.contains_point(position) {
let (point, target_point) = position_map.point_for_position(text_bounds, position); let point = position_map.point_for_position(text_bounds, position);
let could_be_inlay = point.as_valid().is_none();
if point == target_point { if shift || could_be_inlay {
if shift { go_to_fetched_type_definition(editor, point, alt, cx);
go_to_fetched_type_definition(editor, point, alt, cx); } else {
} else { go_to_fetched_definition(editor, point, alt, cx);
go_to_fetched_definition(editor, point, alt, cx);
}
return true;
} }
return true;
} }
end_selection end_selection
@ -380,17 +386,20 @@ impl EditorElement {
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed // This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
// Don't trigger hover popover if mouse is hovering over context menu // Don't trigger hover popover if mouse is hovering over context menu
let point = if text_bounds.contains_point(position) { let point = if text_bounds.contains_point(position) {
let (point, target_point) = position_map.point_for_position(text_bounds, position); position_map
if point == target_point { .point_for_position(text_bounds, position)
Some(point) .as_valid()
} else {
None
}
} else { } else {
None None
}; };
update_go_to_definition_link(editor, point, cmd, shift, cx); update_go_to_definition_link(
editor,
point.map(GoToDefinitionTrigger::Text),
cmd,
shift,
cx,
);
if editor.has_pending_selection() { if editor.has_pending_selection() {
let mut scroll_delta = Vector2F::zero(); let mut scroll_delta = Vector2F::zero();
@ -419,13 +428,12 @@ impl EditorElement {
)) ))
} }
let (position, target_position) = let point_for_position = position_map.point_for_position(text_bounds, position);
position_map.point_for_position(text_bounds, position);
editor.select( editor.select(
SelectPhase::Update { SelectPhase::Update {
position, position: point_for_position.previous_valid,
goal_column: target_position.column(), goal_column: point_for_position.exact_unclipped.column(),
scroll_position: (position_map.snapshot.scroll_position() + scroll_delta) scroll_position: (position_map.snapshot.scroll_position() + scroll_delta)
.clamp(Vector2F::zero(), position_map.scroll_max), .clamp(Vector2F::zero(), position_map.scroll_max),
}, },
@ -452,10 +460,34 @@ impl EditorElement {
) -> bool { ) -> bool {
// This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed // This will be handled more correctly once https://github.com/zed-industries/zed/issues/1218 is completed
// Don't trigger hover popover if mouse is hovering over context menu // Don't trigger hover popover if mouse is hovering over context menu
let point = position_to_display_point(position, text_bounds, position_map); if text_bounds.contains_point(position) {
let point_for_position = position_map.point_for_position(text_bounds, position);
update_go_to_definition_link(editor, point, cmd, shift, cx); match point_for_position.as_valid() {
hover_at(editor, point, cx); Some(point) => {
update_go_to_definition_link(
editor,
Some(GoToDefinitionTrigger::Text(point)),
cmd,
shift,
cx,
);
hover_at(editor, Some(point), cx);
}
None => {
update_inlay_link_and_hover_points(
&position_map.snapshot,
point_for_position,
editor,
cmd,
shift,
cx,
);
}
}
} else {
update_go_to_definition_link(editor, None, cmd, shift, cx);
hover_at(editor, None, cx);
}
true true
} }
@ -763,7 +795,6 @@ impl EditorElement {
cx: &mut PaintContext<Editor>, cx: &mut PaintContext<Editor>,
) { ) {
let style = &self.style; let style = &self.style;
let local_replica_id = editor.replica_id(cx);
let scroll_position = layout.position_map.snapshot.scroll_position(); let scroll_position = layout.position_map.snapshot.scroll_position();
let start_row = layout.visible_display_row_range.start; let start_row = layout.visible_display_row_range.start;
let scroll_top = scroll_position.y() * layout.position_map.line_height; let scroll_top = scroll_position.y() * layout.position_map.line_height;
@ -852,15 +883,13 @@ impl EditorElement {
for (replica_id, selections) in &layout.selections { for (replica_id, selections) in &layout.selections {
let replica_id = *replica_id; let replica_id = *replica_id;
let selection_style = style.replica_selection_style(replica_id); let selection_style = if let Some(replica_id) = replica_id {
style.replica_selection_style(replica_id)
} else {
&style.absent_selection
};
for selection in selections { for selection in selections {
if !selection.range.is_empty()
&& (replica_id == local_replica_id
|| Some(replica_id) == editor.leader_replica_id)
{
invisible_display_ranges.push(selection.range.clone());
}
self.paint_highlighted_range( self.paint_highlighted_range(
scene, scene,
selection.range.clone(), selection.range.clone(),
@ -874,7 +903,10 @@ impl EditorElement {
bounds, bounds,
); );
if editor.show_local_cursors(cx) || replica_id != local_replica_id { if selection.is_local && !selection.range.is_empty() {
invisible_display_ranges.push(selection.range.clone());
}
if !selection.is_local || editor.show_local_cursors(cx) {
let cursor_position = selection.head; let cursor_position = selection.head;
if layout if layout
.visible_display_row_range .visible_display_row_range
@ -906,7 +938,7 @@ impl EditorElement {
&text, &text,
cursor_row_layout.font_size(), cursor_row_layout.font_size(),
&[( &[(
text.len(), text.chars().count(),
RunStyle { RunStyle {
font_id, font_id,
color: style.background, color: style.background,
@ -1405,10 +1437,61 @@ impl EditorElement {
.collect() .collect()
} }
fn calculate_relative_line_numbers(
&self,
snapshot: &EditorSnapshot,
rows: &Range<u32>,
relative_to: Option<u32>,
) -> HashMap<u32, u32> {
let mut relative_rows: HashMap<u32, u32> = Default::default();
let Some(relative_to) = relative_to else {
return relative_rows;
};
let start = rows.start.min(relative_to);
let end = rows.end.max(relative_to);
let buffer_rows = snapshot
.buffer_rows(start)
.take(1 + (end - start) as usize)
.collect::<Vec<_>>();
let head_idx = relative_to - start;
let mut delta = 1;
let mut i = head_idx + 1;
while i < buffer_rows.len() as u32 {
if buffer_rows[i as usize].is_some() {
if rows.contains(&(i + start)) {
relative_rows.insert(i + start, delta);
}
delta += 1;
}
i += 1;
}
delta = 1;
i = head_idx.min(buffer_rows.len() as u32 - 1);
while i > 0 && buffer_rows[i as usize].is_none() {
i -= 1;
}
while i > 0 {
i -= 1;
if buffer_rows[i as usize].is_some() {
if rows.contains(&(i + start)) {
relative_rows.insert(i + start, delta);
}
delta += 1;
}
}
relative_rows
}
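A worked pass over the map this function builds, mirroring the assertions in test_layout_line_numbers further down in this diff: each visible row gets its distance from the cursor row, and the cursor's own row is omitted so it keeps its absolute number.
// rows = 0..6, relative_to = Some(3) => {0: 3, 1: 2, 2: 1, 4: 1, 5: 2}
// rows = 3..6, relative_to = Some(1) => {3: 2, 4: 3, 5: 4}  // cursor above the visible range
// rows = 0..3, relative_to = Some(6) => {0: 5, 1: 4, 2: 3}  // cursor past the last buffer row
//                                                           // (6 rows total), so counting
//                                                           // effectively starts from row 5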
fn layout_line_numbers( fn layout_line_numbers(
&self, &self,
rows: Range<u32>, rows: Range<u32>,
active_rows: &BTreeMap<u32, bool>, active_rows: &BTreeMap<u32, bool>,
newest_selection_head: DisplayPoint,
is_singleton: bool, is_singleton: bool,
snapshot: &EditorSnapshot, snapshot: &EditorSnapshot,
cx: &ViewContext<Editor>, cx: &ViewContext<Editor>,
@ -1421,6 +1504,15 @@ impl EditorElement {
let mut line_number_layouts = Vec::with_capacity(rows.len()); let mut line_number_layouts = Vec::with_capacity(rows.len());
let mut fold_statuses = Vec::with_capacity(rows.len()); let mut fold_statuses = Vec::with_capacity(rows.len());
let mut line_number = String::new(); let mut line_number = String::new();
let is_relative = settings::get::<EditorSettings>(cx).relative_line_numbers;
let relative_to = if is_relative {
Some(newest_selection_head.row())
} else {
None
};
let relative_rows = self.calculate_relative_line_numbers(&snapshot, &rows, relative_to);
for (ix, row) in snapshot for (ix, row) in snapshot
.buffer_rows(rows.start) .buffer_rows(rows.start)
.take((rows.end - rows.start) as usize) .take((rows.end - rows.start) as usize)
@ -1435,7 +1527,11 @@ impl EditorElement {
if let Some(buffer_row) = row { if let Some(buffer_row) = row {
if include_line_numbers { if include_line_numbers {
line_number.clear(); line_number.clear();
write!(&mut line_number, "{}", buffer_row + 1).unwrap(); let default_number = buffer_row + 1;
let number = relative_rows
.get(&(ix as u32 + rows.start))
.unwrap_or(&default_number);
write!(&mut line_number, "{}", number).unwrap();
line_number_layouts.push(Some(cx.text_layout_cache().layout_str( line_number_layouts.push(Some(cx.text_layout_cache().layout_str(
&line_number, &line_number,
style.text.font_size, style.text.font_size,
@ -2079,14 +2175,11 @@ impl Element<Editor> for EditorElement {
scroll_height scroll_height
.min(constraint.max_along(Axis::Vertical)) .min(constraint.max_along(Axis::Vertical))
.max(constraint.min_along(Axis::Vertical)) .max(constraint.min_along(Axis::Vertical))
.max(line_height)
.min(line_height * max_lines as f32), .min(line_height * max_lines as f32),
) )
} else if let EditorMode::SingleLine = snapshot.mode { } else if let EditorMode::SingleLine = snapshot.mode {
size.set_y( size.set_y(line_height.max(constraint.min_along(Axis::Vertical)))
line_height
.min(constraint.max_along(Axis::Vertical))
.max(constraint.min_along(Axis::Vertical)),
)
} else if size.y().is_infinite() { } else if size.y().is_infinite() {
size.set_y(scroll_height); size.set_y(scroll_height);
} }
@ -2124,7 +2217,7 @@ impl Element<Editor> for EditorElement {
.anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right)) .anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right))
}; };
let mut selections: Vec<(ReplicaId, Vec<SelectionLayout>)> = Vec::new(); let mut selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)> = Vec::new();
let mut active_rows = BTreeMap::new(); let mut active_rows = BTreeMap::new();
let mut fold_ranges = Vec::new(); let mut fold_ranges = Vec::new();
let is_singleton = editor.is_singleton(cx); let is_singleton = editor.is_singleton(cx);
@ -2155,8 +2248,14 @@ impl Element<Editor> for EditorElement {
.buffer_snapshot .buffer_snapshot
.remote_selections_in_range(&(start_anchor..end_anchor)) .remote_selections_in_range(&(start_anchor..end_anchor))
{ {
let replica_id = if let Some(mapping) = &editor.replica_id_mapping {
mapping.get(&replica_id).copied()
} else {
None
};
// The local selections match the leader's selections. // The local selections match the leader's selections.
if Some(replica_id) == editor.leader_replica_id { if replica_id.is_some() && replica_id == editor.leader_replica_id {
continue; continue;
} }
remote_selections remote_selections
@ -2168,6 +2267,7 @@ impl Element<Editor> for EditorElement {
cursor_shape, cursor_shape,
&snapshot.display_snapshot, &snapshot.display_snapshot,
false, false,
false,
)); ));
} }
selections.extend(remote_selections); selections.extend(remote_selections);
@ -2191,6 +2291,7 @@ impl Element<Editor> for EditorElement {
editor.cursor_shape, editor.cursor_shape,
&snapshot.display_snapshot, &snapshot.display_snapshot,
is_newest, is_newest,
true,
); );
if is_newest { if is_newest {
newest_selection_head = Some(layout.head); newest_selection_head = Some(layout.head);
@ -2206,11 +2307,18 @@ impl Element<Editor> for EditorElement {
} }
// Render the local selections in the leader's color when following. // Render the local selections in the leader's color when following.
let local_replica_id = editor let local_replica_id = if let Some(leader_replica_id) = editor.leader_replica_id {
.leader_replica_id leader_replica_id
.unwrap_or_else(|| editor.replica_id(cx)); } else {
let replica_id = editor.replica_id(cx);
if let Some(mapping) = &editor.replica_id_mapping {
mapping.get(&replica_id).copied().unwrap_or(replica_id)
} else {
replica_id
}
};
selections.push((local_replica_id, layouts)); selections.push((Some(local_replica_id), layouts));
} }
let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar; let scrollbar_settings = &settings::get::<EditorSettings>(cx).scrollbar;
@ -2244,9 +2352,23 @@ impl Element<Editor> for EditorElement {
}) })
.collect(); .collect();
let head_for_relative = newest_selection_head.unwrap_or_else(|| {
let newest = editor.selections.newest::<Point>(cx);
SelectionLayout::new(
newest,
editor.selections.line_mode,
editor.cursor_shape,
&snapshot.display_snapshot,
true,
true,
)
.head
});
let (line_number_layouts, fold_statuses) = self.layout_line_numbers( let (line_number_layouts, fold_statuses) = self.layout_line_numbers(
start_row..end_row, start_row..end_row,
&active_rows, &active_rows,
head_for_relative,
is_singleton, is_singleton,
&snapshot, &snapshot,
cx, cx,
@ -2591,7 +2713,7 @@ pub struct LayoutState {
blocks: Vec<BlockLayout>, blocks: Vec<BlockLayout>,
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>, highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>, fold_ranges: Vec<(BufferRow, Range<DisplayPoint>, Color)>,
selections: Vec<(ReplicaId, Vec<SelectionLayout>)>, selections: Vec<(Option<ReplicaId>, Vec<SelectionLayout>)>,
scrollbar_row_range: Range<f32>, scrollbar_row_range: Range<f32>,
show_scrollbars: bool, show_scrollbars: bool,
is_singleton: bool, is_singleton: bool,
@ -2614,22 +2736,42 @@ struct PositionMap {
snapshot: EditorSnapshot, snapshot: EditorSnapshot,
} }
#[derive(Debug, Copy, Clone)]
pub struct PointForPosition {
pub previous_valid: DisplayPoint,
pub next_valid: DisplayPoint,
pub exact_unclipped: DisplayPoint,
pub column_overshoot_after_line_end: u32,
}
impl PointForPosition {
#[cfg(test)]
pub fn valid(valid: DisplayPoint) -> Self {
Self {
previous_valid: valid,
next_valid: valid,
exact_unclipped: valid,
column_overshoot_after_line_end: 0,
}
}
pub fn as_valid(&self) -> Option<DisplayPoint> {
if self.previous_valid == self.exact_unclipped && self.next_valid == self.exact_unclipped {
Some(self.previous_valid)
} else {
None
}
}
}
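A sketch of the three points in practice (values invented for illustration): a click on ordinary buffer text leaves all fields equal, so as_valid() returns Some, while a click inside an inlay hint clips to different positions on either side of the hint, so as_valid() returns None and the inlay-specific code paths above take over.
// Click on real text at display position (2, 4); built with the test-only constructor.
let on_text = PointForPosition::valid(DisplayPoint::new(2, 4));
assert_eq!(on_text.as_valid(), Some(DisplayPoint::new(2, 4)));

// Click inside an inlay hint that occupies display columns 10..20 on row 2.
let over_hint = PointForPosition {
    previous_valid: DisplayPoint::new(2, 10),  // clipped with Bias::Left
    next_valid: DisplayPoint::new(2, 20),      // clipped with Bias::Right
    exact_unclipped: DisplayPoint::new(2, 14), // where the pointer actually is
    column_overshoot_after_line_end: 0,
};
assert_eq!(over_hint.as_valid(), None);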
impl PositionMap { impl PositionMap {
/// Returns two display points: fn point_for_position(&self, text_bounds: RectF, position: Vector2F) -> PointForPosition {
/// 1. The nearest *valid* position in the editor
/// 2. An unclipped, potentially *invalid* position that maps directly to
/// the given pixel position.
fn point_for_position(
&self,
text_bounds: RectF,
position: Vector2F,
) -> (DisplayPoint, DisplayPoint) {
let scroll_position = self.snapshot.scroll_position(); let scroll_position = self.snapshot.scroll_position();
let position = position - text_bounds.origin(); let position = position - text_bounds.origin();
let y = position.y().max(0.0).min(self.size.y()); let y = position.y().max(0.0).min(self.size.y());
let x = position.x() + (scroll_position.x() * self.em_width); let x = position.x() + (scroll_position.x() * self.em_width);
let row = (y / self.line_height + scroll_position.y()) as u32; let row = (y / self.line_height + scroll_position.y()) as u32;
let (column, x_overshoot) = if let Some(line) = self let (column, x_overshoot_after_line_end) = if let Some(line) = self
.line_layouts .line_layouts
.get(row as usize - scroll_position.y() as usize) .get(row as usize - scroll_position.y() as usize)
.map(|line_with_spaces| &line_with_spaces.line) .map(|line_with_spaces| &line_with_spaces.line)
@ -2643,11 +2785,18 @@ impl PositionMap {
(0, x) (0, x)
}; };
let mut target_point = DisplayPoint::new(row, column); let mut exact_unclipped = DisplayPoint::new(row, column);
let point = self.snapshot.clip_point(target_point, Bias::Left); let previous_valid = self.snapshot.clip_point(exact_unclipped, Bias::Left);
*target_point.column_mut() += (x_overshoot / self.em_advance) as u32; let next_valid = self.snapshot.clip_point(exact_unclipped, Bias::Right);
(point, target_point) let column_overshoot_after_line_end = (x_overshoot_after_line_end / self.em_advance) as u32;
*exact_unclipped.column_mut() += column_overshoot_after_line_end;
PointForPosition {
previous_valid,
next_valid,
exact_unclipped,
column_overshoot_after_line_end,
}
} }
} }
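Worked example of the overshoot bookkeeping (numbers are illustrative): clicking three character widths past the end of a five-column line gives
// x_overshoot_after_line_end ≈ 3.0 * em_advance
// column_overshoot_after_line_end = (x_overshoot_after_line_end / em_advance) as u32 = 3
// previous_valid == next_valid == DisplayPoint::new(row, 5)  // clipped to the line end
// exact_unclipped              == DisplayPoint::new(row, 8)  // 5 + 3
// as_valid() is None, and callers such as the columnar-selection handler above
// use exact_unclipped.column() as the goal column.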
@ -2901,23 +3050,6 @@ impl HighlightedRange {
} }
} }
fn position_to_display_point(
position: Vector2F,
text_bounds: RectF,
position_map: &PositionMap,
) -> Option<DisplayPoint> {
if text_bounds.contains_point(position) {
let (point, target_point) = position_map.point_for_position(text_bounds, position);
if point == target_point {
Some(point)
} else {
None
}
} else {
None
}
}
fn range_to_bounds( fn range_to_bounds(
range: &Range<DisplayPoint>, range: &Range<DisplayPoint>,
content_origin: Vector2F, content_origin: Vector2F,
@ -2995,7 +3127,6 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_layout_line_numbers(cx: &mut TestAppContext) { fn test_layout_line_numbers(cx: &mut TestAppContext) {
init_test(cx, |_| {}); init_test(cx, |_| {});
let editor = cx let editor = cx
.add_window(|cx| { .add_window(|cx| {
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx); let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
@ -3007,10 +3138,50 @@ mod tests {
let layouts = editor.update(cx, |editor, cx| { let layouts = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx); let snapshot = editor.snapshot(cx);
element element
.layout_line_numbers(0..6, &Default::default(), false, &snapshot, cx) .layout_line_numbers(
0..6,
&Default::default(),
DisplayPoint::new(0, 0),
false,
&snapshot,
cx,
)
.0 .0
}); });
assert_eq!(layouts.len(), 6); assert_eq!(layouts.len(), 6);
let relative_rows = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
});
assert_eq!(relative_rows[&0], 3);
assert_eq!(relative_rows[&1], 2);
assert_eq!(relative_rows[&2], 1);
// current line has no relative number
assert_eq!(relative_rows[&4], 1);
assert_eq!(relative_rows[&5], 2);
// works if cursor is before screen
let relative_rows = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
});
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&3], 2);
assert_eq!(relative_rows[&4], 3);
assert_eq!(relative_rows[&5], 4);
// works if cursor is after screen
let relative_rows = editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
});
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&0], 5);
assert_eq!(relative_rows[&1], 4);
assert_eq!(relative_rows[&2], 3);
} }
#[gpui::test] #[gpui::test]

View file

@ -1,6 +1,8 @@
use crate::{ use crate::{
display_map::ToDisplayPoint, Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, display_map::{InlayOffset, ToDisplayPoint},
EditorSnapshot, EditorStyle, RangeToAnchorExt, link_go_to_definition::{DocumentRange, InlayRange},
Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, EditorSnapshot, EditorStyle,
ExcerptId, RangeToAnchorExt,
}; };
use futures::FutureExt; use futures::FutureExt;
use gpui::{ use gpui::{
@ -11,7 +13,7 @@ use gpui::{
AnyElement, AppContext, CursorRegion, Element, ModelHandle, MouseRegion, Task, ViewContext, AnyElement, AppContext, CursorRegion, Element, ModelHandle, MouseRegion, Task, ViewContext,
}; };
use language::{Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry}; use language::{Bias, DiagnosticEntry, DiagnosticSeverity, Language, LanguageRegistry};
use project::{HoverBlock, HoverBlockKind, Project}; use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
use std::{ops::Range, sync::Arc, time::Duration}; use std::{ops::Range, sync::Arc, time::Duration};
use util::TryFutureExt; use util::TryFutureExt;
@ -46,6 +48,106 @@ pub fn hover_at(editor: &mut Editor, point: Option<DisplayPoint>, cx: &mut ViewC
} }
} }
pub struct InlayHover {
pub excerpt: ExcerptId,
pub triggered_from: InlayOffset,
pub range: InlayRange,
pub tooltip: HoverBlock,
}
pub fn find_hovered_hint_part(
label_parts: Vec<InlayHintLabelPart>,
hint_range: Range<InlayOffset>,
hovered_offset: InlayOffset,
) -> Option<(InlayHintLabelPart, Range<InlayOffset>)> {
if hovered_offset >= hint_range.start && hovered_offset <= hint_range.end {
let mut hovered_character = (hovered_offset - hint_range.start).0;
let mut part_start = hint_range.start;
for part in label_parts {
let part_len = part.value.chars().count();
if hovered_character > part_len {
hovered_character -= part_len;
part_start.0 += part_len;
} else {
let part_end = InlayOffset(part_start.0 + part_len);
return Some((part, part_start..part_end));
}
}
}
None
}
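A worked pass through this search, using the resolved hint from the test at the end of this file: label parts ": ", "TestNewType", "<", "TestStruct", ">" laid out from the hint's starting InlayOffset S.
// hover at S + 7 (inside "TestNewType"):
//   ": "          len 2,  7 > 2   -> remaining 5, part_start = S + 2
//   "TestNewType" len 11, 5 <= 11 -> Some(("TestNewType", S + 2 .. S + 13))
// hover at S + 16 (inside "TestStruct"):
//   ": "          -> remaining 14, part_start = S + 2
//   "TestNewType" -> remaining 3,  part_start = S + 13
//   "<"           -> remaining 2,  part_start = S + 14
//   "TestStruct"  len 10, 2 <= 10 -> Some(("TestStruct", S + 14 .. S + 24))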
pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut ViewContext<Editor>) {
if settings::get::<EditorSettings>(cx).hover_popover_enabled {
if editor.pending_rename.is_some() {
return;
}
let Some(project) = editor.project.clone() else {
return;
};
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if let DocumentRange::Inlay(range) = symbol_range {
if (range.highlight_start..range.highlight_end)
.contains(&inlay_hover.triggered_from)
{
// Hover triggered from same location as last time. Don't show again.
return;
}
}
hide_hover(editor, cx);
}
let snapshot = editor.snapshot(cx);
// Don't request again if the location is the same as the previous request
if let Some(triggered_from) = editor.hover_state.triggered_from {
if inlay_hover.triggered_from
== snapshot
.display_snapshot
.anchor_to_inlay_offset(triggered_from)
{
return;
}
}
let task = cx.spawn(|this, mut cx| {
async move {
cx.background()
.timer(Duration::from_millis(HOVER_DELAY_MILLIS))
.await;
this.update(&mut cx, |this, _| {
this.hover_state.diagnostic_popover = None;
})?;
let hover_popover = InfoPopover {
project: project.clone(),
symbol_range: DocumentRange::Inlay(inlay_hover.range),
blocks: vec![inlay_hover.tooltip],
language: None,
rendered_content: None,
};
this.update(&mut cx, |this, cx| {
// Highlight the selected symbol using a background highlight
this.highlight_inlay_background::<HoverState>(
vec![inlay_hover.range],
|theme| theme.editor.hover_popover.highlight,
cx,
);
this.hover_state.info_popover = Some(hover_popover);
cx.notify();
})?;
anyhow::Ok(())
}
.log_err()
});
editor.hover_state.info_task = Some(task);
}
}
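For orientation, a hypothetical call site (update_inlay_link_and_hover_points is the real caller elsewhere in this commit; the HoverBlock field names here are assumed from its use in this file):
hover_at_inlay(
    editor,
    InlayHover {
        excerpt: excerpt_id,                   // ExcerptId containing the hint
        triggered_from: hovered_offset,        // InlayOffset under the pointer
        range: InlayRange {
            inlay_position,                    // Anchor of the hint itself
            highlight_start: part_range.start, // just the hovered label part
            highlight_end: part_range.end,
        },
        tooltip: HoverBlock {
            text: format!("A tooltip for `{new_type_label}`"),
            kind: HoverBlockKind::Markdown,
        },
    },
    cx,
);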
/// Hides the type information popup. /// Hides the type information popup.
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the /// Triggered by the `Hover` action when the cursor is not over a symbol or when the
/// selections changed. /// selections changed.
@ -110,8 +212,13 @@ fn show_hover(
if !ignore_timeout { if !ignore_timeout {
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover { if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if symbol_range if symbol_range
.to_offset(&snapshot.buffer_snapshot) .as_text_range()
.contains(&multibuffer_offset) .map(|range| {
range
.to_offset(&snapshot.buffer_snapshot)
.contains(&multibuffer_offset)
})
.unwrap_or(false)
{ {
// Hover triggered from same location as last time. Don't show again. // Hover triggered from same location as last time. Don't show again.
return; return;
@ -219,7 +326,7 @@ fn show_hover(
Some(InfoPopover { Some(InfoPopover {
project: project.clone(), project: project.clone(),
symbol_range: range, symbol_range: DocumentRange::Text(range),
blocks: hover_result.contents, blocks: hover_result.contents,
language: hover_result.language, language: hover_result.language,
rendered_content: None, rendered_content: None,
@ -227,10 +334,13 @@ fn show_hover(
}); });
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
if let Some(hover_popover) = hover_popover.as_ref() { if let Some(symbol_range) = hover_popover
.as_ref()
.and_then(|hover_popover| hover_popover.symbol_range.as_text_range())
{
// Highlight the selected symbol using a background highlight // Highlight the selected symbol using a background highlight
this.highlight_background::<HoverState>( this.highlight_background::<HoverState>(
vec![hover_popover.symbol_range.clone()], vec![symbol_range],
|theme| theme.editor.hover_popover.highlight, |theme| theme.editor.hover_popover.highlight,
cx, cx,
); );
@ -497,7 +607,10 @@ impl HoverState {
.or_else(|| { .or_else(|| {
self.info_popover self.info_popover
.as_ref() .as_ref()
.map(|info_popover| &info_popover.symbol_range.start) .map(|info_popover| match &info_popover.symbol_range {
DocumentRange::Text(range) => &range.start,
DocumentRange::Inlay(range) => &range.inlay_position,
})
})?; })?;
let point = anchor.to_display_point(&snapshot.display_snapshot); let point = anchor.to_display_point(&snapshot.display_snapshot);
@ -522,7 +635,7 @@ impl HoverState {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct InfoPopover { pub struct InfoPopover {
pub project: ModelHandle<Project>, pub project: ModelHandle<Project>,
pub symbol_range: Range<Anchor>, symbol_range: DocumentRange,
pub blocks: Vec<HoverBlock>, pub blocks: Vec<HoverBlock>,
language: Option<Arc<Language>>, language: Option<Arc<Language>>,
rendered_content: Option<RenderedInfo>, rendered_content: Option<RenderedInfo>,
@ -692,10 +805,17 @@ impl DiagnosticPopover {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext}; use crate::{
editor_tests::init_test,
element::PointForPosition,
inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
link_go_to_definition::update_inlay_link_and_hover_points,
test::editor_lsp_test_context::EditorLspTestContext,
};
use collections::BTreeSet;
use gpui::fonts::Weight; use gpui::fonts::Weight;
use indoc::indoc; use indoc::indoc;
use language::{Diagnostic, DiagnosticSet}; use language::{language_settings::InlayHintSettings, Diagnostic, DiagnosticSet};
use lsp::LanguageServerId; use lsp::LanguageServerId;
use project::{HoverBlock, HoverBlockKind}; use project::{HoverBlock, HoverBlockKind};
use smol::stream::StreamExt; use smol::stream::StreamExt;
@ -1131,4 +1251,327 @@ mod tests {
editor editor
}); });
} }
#[gpui::test]
async fn test_hover_inlay_label_parts(cx: &mut gpui::TestAppContext) {
init_test(cx, |settings| {
settings.defaults.inlay_hints = Some(InlayHintSettings {
enabled: true,
show_type_hints: true,
show_parameter_hints: true,
show_other_hints: true,
})
});
let mut cx = EditorLspTestContext::new_rust(
lsp::ServerCapabilities {
inlay_hint_provider: Some(lsp::OneOf::Right(
lsp::InlayHintServerCapabilities::Options(lsp::InlayHintOptions {
resolve_provider: Some(true),
..Default::default()
}),
)),
..Default::default()
},
cx,
)
.await;
cx.set_state(indoc! {"
struct TestStruct;
// ==================
struct TestNewType<T>(T);
fn main() {
let variableˇ = TestNewType(TestStruct);
}
"});
let hint_start_offset = cx.ranges(indoc! {"
struct TestStruct;
// ==================
struct TestNewType<T>(T);
fn main() {
let variableˇ = TestNewType(TestStruct);
}
"})[0]
.start;
let hint_position = cx.to_lsp(hint_start_offset);
let new_type_target_range = cx.lsp_range(indoc! {"
struct TestStruct;
// ==================
struct «TestNewType»<T>(T);
fn main() {
let variable = TestNewType(TestStruct);
}
"});
let struct_target_range = cx.lsp_range(indoc! {"
struct «TestStruct»;
// ==================
struct TestNewType<T>(T);
fn main() {
let variable = TestNewType(TestStruct);
}
"});
let uri = cx.buffer_lsp_url.clone();
let new_type_label = "TestNewType";
let struct_label = "TestStruct";
let entire_hint_label = ": TestNewType<TestStruct>";
let closure_uri = uri.clone();
cx.lsp
.handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
let task_uri = closure_uri.clone();
async move {
assert_eq!(params.text_document.uri, task_uri);
Ok(Some(vec![lsp::InlayHint {
position: hint_position,
label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
value: entire_hint_label.to_string(),
..Default::default()
}]),
kind: Some(lsp::InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: Some(false),
padding_right: Some(false),
data: None,
}]))
}
})
.next()
.await;
cx.foreground().run_until_parked();
cx.update_editor(|editor, cx| {
let expected_layers = vec![entire_hint_label.to_string()];
assert_eq!(expected_layers, cached_hint_labels(editor));
assert_eq!(expected_layers, visible_hint_labels(editor, cx));
});
let inlay_range = cx
.ranges(indoc! {"
struct TestStruct;
// ==================
struct TestNewType<T>(T);
fn main() {
let variable« »= TestNewType(TestStruct);
}
"})
.get(0)
.cloned()
.unwrap();
let new_type_hint_part_hover_position = cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let previous_valid = inlay_range.start.to_display_point(&snapshot);
let next_valid = inlay_range.end.to_display_point(&snapshot);
assert_eq!(previous_valid.row(), next_valid.row());
assert!(previous_valid.column() < next_valid.column());
let exact_unclipped = DisplayPoint::new(
previous_valid.row(),
previous_valid.column()
+ (entire_hint_label.find(new_type_label).unwrap() + new_type_label.len() / 2)
as u32,
);
PointForPosition {
previous_valid,
next_valid,
exact_unclipped,
column_overshoot_after_line_end: 0,
}
});
cx.update_editor(|editor, cx| {
update_inlay_link_and_hover_points(
&editor.snapshot(cx),
new_type_hint_part_hover_position,
editor,
true,
false,
cx,
);
});
let resolve_closure_uri = uri.clone();
cx.lsp
.handle_request::<lsp::request::InlayHintResolveRequest, _, _>(
move |mut hint_to_resolve, _| {
let mut resolved_hint_positions = BTreeSet::new();
let task_uri = resolve_closure_uri.clone();
async move {
let inserted = resolved_hint_positions.insert(hint_to_resolve.position);
assert!(inserted, "Hint {hint_to_resolve:?} was resolved twice");
// `: TestNewType<TestStruct>`
hint_to_resolve.label = lsp::InlayHintLabel::LabelParts(vec![
lsp::InlayHintLabelPart {
value: ": ".to_string(),
..Default::default()
},
lsp::InlayHintLabelPart {
value: new_type_label.to_string(),
location: Some(lsp::Location {
uri: task_uri.clone(),
range: new_type_target_range,
}),
tooltip: Some(lsp::InlayHintLabelPartTooltip::String(format!(
"A tooltip for `{new_type_label}`"
))),
..Default::default()
},
lsp::InlayHintLabelPart {
value: "<".to_string(),
..Default::default()
},
lsp::InlayHintLabelPart {
value: struct_label.to_string(),
location: Some(lsp::Location {
uri: task_uri,
range: struct_target_range,
}),
tooltip: Some(lsp::InlayHintLabelPartTooltip::MarkupContent(
lsp::MarkupContent {
kind: lsp::MarkupKind::Markdown,
value: format!("A tooltip for `{struct_label}`"),
},
)),
..Default::default()
},
lsp::InlayHintLabelPart {
value: ">".to_string(),
..Default::default()
},
]);
Ok(hint_to_resolve)
}
},
)
.next()
.await;
cx.foreground().run_until_parked();
cx.update_editor(|editor, cx| {
update_inlay_link_and_hover_points(
&editor.snapshot(cx),
new_type_hint_part_hover_position,
editor,
true,
false,
cx,
);
});
cx.foreground()
.advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
cx.foreground().run_until_parked();
cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let hover_state = &editor.hover_state;
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
let entire_inlay_start = snapshot.display_point_to_inlay_offset(
inlay_range.start.to_display_point(&snapshot),
Bias::Left,
);
let expected_new_type_label_start = InlayOffset(entire_inlay_start.0 + ": ".len());
assert_eq!(
popover.symbol_range,
DocumentRange::Inlay(InlayRange {
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
highlight_start: expected_new_type_label_start,
highlight_end: InlayOffset(
expected_new_type_label_start.0 + new_type_label.len()
),
}),
"Popover range should match the new type label part"
);
assert_eq!(
popover
.rendered_content
.as_ref()
.expect("should have label text for new type hint")
.text,
format!("A tooltip for `{new_type_label}`"),
"Rendered text should not anyhow alter backticks"
);
});
let struct_hint_part_hover_position = cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let previous_valid = inlay_range.start.to_display_point(&snapshot);
let next_valid = inlay_range.end.to_display_point(&snapshot);
assert_eq!(previous_valid.row(), next_valid.row());
assert!(previous_valid.column() < next_valid.column());
let exact_unclipped = DisplayPoint::new(
previous_valid.row(),
previous_valid.column()
+ (entire_hint_label.find(struct_label).unwrap() + struct_label.len() / 2)
as u32,
);
PointForPosition {
previous_valid,
next_valid,
exact_unclipped,
column_overshoot_after_line_end: 0,
}
});
cx.update_editor(|editor, cx| {
update_inlay_link_and_hover_points(
&editor.snapshot(cx),
struct_hint_part_hover_position,
editor,
true,
false,
cx,
);
});
cx.foreground()
.advance_clock(Duration::from_millis(HOVER_DELAY_MILLIS + 100));
cx.foreground().run_until_parked();
cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let hover_state = &editor.hover_state;
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
let entire_inlay_start = snapshot.display_point_to_inlay_offset(
inlay_range.start.to_display_point(&snapshot),
Bias::Left,
);
let expected_struct_label_start =
InlayOffset(entire_inlay_start.0 + ": ".len() + new_type_label.len() + "<".len());
assert_eq!(
popover.symbol_range,
DocumentRange::Inlay(InlayRange {
inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
highlight_start: expected_struct_label_start,
highlight_end: InlayOffset(expected_struct_label_start.0 + struct_label.len()),
}),
"Popover range should match the struct label part"
);
assert_eq!(
popover
.rendered_content
.as_ref()
.expect("should have label text for struct hint")
.text,
format!("A tooltip for {struct_label}"),
"Rendered markdown element should remove backticks from text"
);
});
}
} }

File diff suppressed because it is too large

View file

@ -49,13 +49,18 @@ impl FollowableItem for Editor {
fn from_state_proto( fn from_state_proto(
pane: ViewHandle<workspace::Pane>, pane: ViewHandle<workspace::Pane>,
project: ModelHandle<Project>, workspace: ViewHandle<Workspace>,
remote_id: ViewId, remote_id: ViewId,
state: &mut Option<proto::view::Variant>, state: &mut Option<proto::view::Variant>,
cx: &mut AppContext, cx: &mut AppContext,
) -> Option<Task<Result<ViewHandle<Self>>>> { ) -> Option<Task<Result<ViewHandle<Self>>>> {
let Some(proto::view::Variant::Editor(_)) = state else { return None }; let project = workspace.read(cx).project().to_owned();
let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() }; let Some(proto::view::Variant::Editor(_)) = state else {
return None;
};
let Some(proto::view::Variant::Editor(state)) = state.take() else {
unreachable!()
};
let client = project.read(cx).client(); let client = project.read(cx).client();
let replica_id = project.read(cx).replica_id(); let replica_id = project.read(cx).replica_id();
@ -340,10 +345,16 @@ async fn update_editor_from_message(
let mut insertions = message.inserted_excerpts.into_iter().peekable(); let mut insertions = message.inserted_excerpts.into_iter().peekable();
while let Some(insertion) = insertions.next() { while let Some(insertion) = insertions.next() {
let Some(excerpt) = insertion.excerpt else { continue }; let Some(excerpt) = insertion.excerpt else {
let Some(previous_excerpt_id) = insertion.previous_excerpt_id else { continue }; continue;
};
let Some(previous_excerpt_id) = insertion.previous_excerpt_id else {
continue;
};
let buffer_id = excerpt.buffer_id; let buffer_id = excerpt.buffer_id;
let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else { continue }; let Some(buffer) = project.read(cx).buffer_for_id(buffer_id, cx) else {
continue;
};
let adjacent_excerpts = iter::from_fn(|| { let adjacent_excerpts = iter::from_fn(|| {
let insertion = insertions.peek()?; let insertion = insertions.peek()?;
@ -614,7 +625,7 @@ impl Item for Editor {
fn workspace_deactivated(&mut self, cx: &mut ViewContext<Self>) { fn workspace_deactivated(&mut self, cx: &mut ViewContext<Self>) {
hide_link_definition(self, cx); hide_link_definition(self, cx);
self.link_go_to_definition_state.last_mouse_location = None; self.link_go_to_definition_state.last_trigger_point = None;
} }
fn is_dirty(&self, cx: &AppContext) -> bool { fn is_dirty(&self, cx: &AppContext) -> bool {
@ -753,7 +764,7 @@ impl Item for Editor {
Some(Box::new(handle.clone())) Some(Box::new(handle.clone()))
} }
fn pixel_position_of_cursor(&self) -> Option<Vector2F> { fn pixel_position_of_cursor(&self, _: &AppContext) -> Option<Vector2F> {
self.pixel_position_of_newest_cursor self.pixel_position_of_newest_cursor
} }

File diff suppressed because it is too large

View file

@ -756,7 +756,8 @@ mod tests {
.select_font(family_id, &Default::default()) .select_font(family_id, &Default::default())
.unwrap(); .unwrap();
let buffer = cx.add_model(|cx| Buffer::new(0, "abc\ndefg\nhijkl\nmn", cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn"));
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts( multibuffer.push_excerpts(

View file

@ -6,7 +6,7 @@ use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet}; use collections::{BTreeMap, Bound, HashMap, HashSet};
use futures::{channel::mpsc, SinkExt}; use futures::{channel::mpsc, SinkExt};
use git::diff::DiffHunk; use git::diff::DiffHunk;
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task}; use gpui::{AppContext, Entity, ModelContext, ModelHandle};
pub use language::Completion; pub use language::Completion;
use language::{ use language::{
char_kind, char_kind,
@ -67,7 +67,9 @@ pub enum Event {
ExcerptsEdited { ExcerptsEdited {
ids: Vec<ExcerptId>, ids: Vec<ExcerptId>,
}, },
Edited, Edited {
sigleton_buffer_edited: bool,
},
Reloaded, Reloaded,
DiffBaseChanged, DiffBaseChanged,
LanguageChanged, LanguageChanged,
@ -788,59 +790,59 @@ impl MultiBuffer {
pub fn stream_excerpts_with_context_lines( pub fn stream_excerpts_with_context_lines(
&mut self, &mut self,
excerpts: Vec<(ModelHandle<Buffer>, Vec<Range<text::Anchor>>)>, buffer: ModelHandle<Buffer>,
ranges: Vec<Range<text::Anchor>>,
context_line_count: u32, context_line_count: u32,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> (Task<()>, mpsc::Receiver<Range<Anchor>>) { ) -> mpsc::Receiver<Range<Anchor>> {
let (mut tx, rx) = mpsc::channel(256); let (mut tx, rx) = mpsc::channel(256);
let task = cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
for (buffer, ranges) in excerpts { let (buffer_id, buffer_snapshot) =
let (buffer_id, buffer_snapshot) = buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
buffer.read_with(&cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
let mut excerpt_ranges = Vec::new(); let mut excerpt_ranges = Vec::new();
let mut range_counts = Vec::new(); let mut range_counts = Vec::new();
cx.background() cx.background()
.scoped(|scope| { .scoped(|scope| {
scope.spawn(async { scope.spawn(async {
let (ranges, counts) = let (ranges, counts) =
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count); build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
excerpt_ranges = ranges; excerpt_ranges = ranges;
range_counts = counts; range_counts = counts;
});
})
.await;
let mut ranges = ranges.into_iter();
let mut range_counts = range_counts.into_iter();
for excerpt_ranges in excerpt_ranges.chunks(100) {
let excerpt_ids = this.update(&mut cx, |this, cx| {
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
}); });
})
.await;
for (excerpt_id, range_count) in let mut ranges = ranges.into_iter();
excerpt_ids.into_iter().zip(range_counts.by_ref()) let mut range_counts = range_counts.into_iter();
{ for excerpt_ranges in excerpt_ranges.chunks(100) {
for range in ranges.by_ref().take(range_count) { let excerpt_ids = this.update(&mut cx, |this, cx| {
let start = Anchor { this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
buffer_id: Some(buffer_id), });
excerpt_id: excerpt_id.clone(),
text_anchor: range.start, for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref())
}; {
let end = Anchor { for range in ranges.by_ref().take(range_count) {
buffer_id: Some(buffer_id), let start = Anchor {
excerpt_id: excerpt_id.clone(), buffer_id: Some(buffer_id),
text_anchor: range.end, excerpt_id: excerpt_id.clone(),
}; text_anchor: range.start,
if tx.send(start..end).await.is_err() { };
break; let end = Anchor {
} buffer_id: Some(buffer_id),
excerpt_id: excerpt_id.clone(),
text_anchor: range.end,
};
if tx.send(start..end).await.is_err() {
break;
} }
} }
} }
} }
}); })
(task, rx) .detach();
rx
} }
pub fn push_excerpts<O>( pub fn push_excerpts<O>(
@ -1022,7 +1024,9 @@ impl MultiBuffer {
old: edit_start..edit_start, old: edit_start..edit_start,
new: edit_start..edit_end, new: edit_start..edit_end,
}]); }]);
cx.emit(Event::Edited); cx.emit(Event::Edited {
sigleton_buffer_edited: false,
});
cx.emit(Event::ExcerptsAdded { cx.emit(Event::ExcerptsAdded {
buffer, buffer,
predecessor: prev_excerpt_id, predecessor: prev_excerpt_id,
@ -1046,7 +1050,9 @@ impl MultiBuffer {
old: 0..prev_len, old: 0..prev_len,
new: 0..0, new: 0..0,
}]); }]);
cx.emit(Event::Edited); cx.emit(Event::Edited {
sigleton_buffer_edited: false,
});
cx.emit(Event::ExcerptsRemoved { ids }); cx.emit(Event::ExcerptsRemoved { ids });
cx.notify(); cx.notify();
} }
@ -1254,7 +1260,9 @@ impl MultiBuffer {
} }
self.subscriptions.publish_mut(edits); self.subscriptions.publish_mut(edits);
cx.emit(Event::Edited); cx.emit(Event::Edited {
sigleton_buffer_edited: false,
});
cx.emit(Event::ExcerptsRemoved { ids }); cx.emit(Event::ExcerptsRemoved { ids });
cx.notify(); cx.notify();
} }
@ -1315,7 +1323,9 @@ impl MultiBuffer {
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) { ) {
cx.emit(match event { cx.emit(match event {
language::Event::Edited => Event::Edited, language::Event::Edited => Event::Edited {
sigleton_buffer_edited: true,
},
language::Event::DirtyChanged => Event::DirtyChanged, language::Event::DirtyChanged => Event::DirtyChanged,
language::Event::Saved => Event::Saved, language::Event::Saved => Event::Saved,
language::Event::FileHandleChanged => Event::FileHandleChanged, language::Event::FileHandleChanged => Event::FileHandleChanged,
@ -1560,7 +1570,7 @@ impl MultiBuffer {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
impl MultiBuffer { impl MultiBuffer {
pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> ModelHandle<Self> { pub fn build_simple(text: &str, cx: &mut gpui::AppContext) -> ModelHandle<Self> {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
cx.add_model(|cx| Self::singleton(buffer, cx)) cx.add_model(|cx| Self::singleton(buffer, cx))
} }
@ -1570,7 +1580,7 @@ impl MultiBuffer {
) -> ModelHandle<Self> { ) -> ModelHandle<Self> {
let multi = cx.add_model(|_| Self::new(0)); let multi = cx.add_model(|_| Self::new(0));
for (text, ranges) in excerpts { for (text, ranges) in excerpts {
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange { let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange {
context: range, context: range,
primary: None, primary: None,
@ -1662,7 +1672,7 @@ impl MultiBuffer {
if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) { if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() { let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>(); let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
buffers.push(cx.add_model(|cx| Buffer::new(0, text, cx))); buffers.push(cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text)));
let buffer = buffers.last().unwrap().read(cx); let buffer = buffers.last().unwrap().read(cx);
log::info!( log::info!(
"Creating new buffer {} with text: {:?}", "Creating new buffer {} with text: {:?}",
@ -2756,7 +2766,9 @@ impl MultiBufferSnapshot {
// Get the ranges of the innermost pair of brackets. // Get the ranges of the innermost pair of brackets.
let mut result: Option<(Range<usize>, Range<usize>)> = None; let mut result: Option<(Range<usize>, Range<usize>)> = None;
let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else { return None; }; let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else {
return None;
};
for (open, close) in enclosing_bracket_ranges { for (open, close) in enclosing_bracket_ranges {
let len = close.end - open.start; let len = close.end - open.start;
@ -4010,7 +4022,8 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_singleton(cx: &mut AppContext) { fn test_singleton(cx: &mut AppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let snapshot = multibuffer.read(cx).snapshot(cx); let snapshot = multibuffer.read(cx).snapshot(cx);
@ -4037,7 +4050,7 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_remote(cx: &mut AppContext) { fn test_remote(cx: &mut AppContext) {
let host_buffer = cx.add_model(|cx| Buffer::new(0, "a", cx)); let host_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "a"));
let guest_buffer = cx.add_model(|cx| { let guest_buffer = cx.add_model(|cx| {
let state = host_buffer.read(cx).to_proto(); let state = host_buffer.read(cx).to_proto();
let ops = cx let ops = cx
@ -4068,15 +4081,17 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) { fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) {
let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'a'), cx)); let buffer_1 =
let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(6, 6, 'g'), cx)); cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'a')));
let buffer_2 =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(6, 6, 'g')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let events = Rc::new(RefCell::new(Vec::<Event>::new())); let events = Rc::new(RefCell::new(Vec::<Event>::new()));
multibuffer.update(cx, |_, cx| { multibuffer.update(cx, |_, cx| {
let events = events.clone(); let events = events.clone();
cx.subscribe(&multibuffer, move |_, _, event, _| { cx.subscribe(&multibuffer, move |_, _, event, _| {
if let Event::Edited = event { if let Event::Edited { .. } = event {
events.borrow_mut().push(event.clone()) events.borrow_mut().push(event.clone())
} }
}) })
@ -4131,7 +4146,17 @@ mod tests {
// Adding excerpts emits an edited event. // Adding excerpts emits an edited event.
assert_eq!( assert_eq!(
events.borrow().as_slice(), events.borrow().as_slice(),
&[Event::Edited, Event::Edited, Event::Edited] &[
Event::Edited {
sigleton_buffer_edited: false
},
Event::Edited {
sigleton_buffer_edited: false
},
Event::Edited {
sigleton_buffer_edited: false
}
]
); );
let snapshot = multibuffer.read(cx).snapshot(cx); let snapshot = multibuffer.read(cx).snapshot(cx);
@ -4292,8 +4317,10 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_excerpt_events(cx: &mut AppContext) { fn test_excerpt_events(cx: &mut AppContext) {
let buffer_1 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'a'), cx)); let buffer_1 =
let buffer_2 = cx.add_model(|cx| Buffer::new(0, sample_text(10, 3, 'm'), cx)); cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'a')));
let buffer_2 =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(10, 3, 'm')));
let leader_multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let leader_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let follower_multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let follower_multibuffer = cx.add_model(|_| MultiBuffer::new(0));
@ -4310,7 +4337,7 @@ mod tests {
excerpts, excerpts,
} => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx), } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx), Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx),
Event::Edited => { Event::Edited { .. } => {
*follower_edit_event_count.borrow_mut() += 1; *follower_edit_event_count.borrow_mut() += 1;
} }
_ => {} _ => {}
@ -4398,7 +4425,8 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_push_excerpts_with_context_lines(cx: &mut AppContext) { fn test_push_excerpts_with_context_lines(cx: &mut AppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| { let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
multibuffer.push_excerpts_with_context_lines( multibuffer.push_excerpts_with_context_lines(
@ -4434,9 +4462,10 @@ mod tests {
#[gpui::test] #[gpui::test]
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) { async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(20, 3, 'a'), cx)); let buffer =
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, sample_text(20, 3, 'a')));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let (task, anchor_ranges) = multibuffer.update(cx, |multibuffer, cx| { let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
let snapshot = buffer.read(cx); let snapshot = buffer.read(cx);
let ranges = vec![ let ranges = vec![
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)), snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
@ -4444,12 +4473,10 @@ mod tests {
snapshot.anchor_before(Point::new(15, 0)) snapshot.anchor_before(Point::new(15, 0))
..snapshot.anchor_before(Point::new(15, 0)), ..snapshot.anchor_before(Point::new(15, 0)),
]; ];
multibuffer.stream_excerpts_with_context_lines(vec![(buffer.clone(), ranges)], 2, cx) multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx)
}); });
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await; let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
// Ensure task is finished when stream completes.
task.await;
let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx)); let snapshot = multibuffer.read_with(cx, |multibuffer, cx| multibuffer.snapshot(cx));
assert_eq!( assert_eq!(
@ -4482,7 +4509,7 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_singleton_multibuffer_anchors(cx: &mut AppContext) { fn test_singleton_multibuffer_anchors(cx: &mut AppContext) {
let buffer = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx)); let multibuffer = cx.add_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
let old_snapshot = multibuffer.read(cx).snapshot(cx); let old_snapshot = multibuffer.read(cx).snapshot(cx);
buffer.update(cx, |buffer, cx| { buffer.update(cx, |buffer, cx| {
@ -4502,8 +4529,8 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_multibuffer_anchors(cx: &mut AppContext) { fn test_multibuffer_anchors(cx: &mut AppContext) {
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "efghi", cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "efghi"));
let multibuffer = cx.add_model(|cx| { let multibuffer = cx.add_model(|cx| {
let mut multibuffer = MultiBuffer::new(0); let mut multibuffer = MultiBuffer::new(0);
multibuffer.push_excerpts( multibuffer.push_excerpts(
@ -4560,8 +4587,8 @@ mod tests {
#[gpui::test] #[gpui::test]
fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) { fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) {
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "abcd", cx)); let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcd"));
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "ABCDEFGHIJKLMNOP", cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "ABCDEFGHIJKLMNOP"));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
// Create an insertion id in buffer 1 that doesn't exist in buffer 2. // Create an insertion id in buffer 1 that doesn't exist in buffer 2.
@ -4956,7 +4983,9 @@ mod tests {
let base_text = util::RandomCharIter::new(&mut rng) let base_text = util::RandomCharIter::new(&mut rng)
.take(10) .take(10)
.collect::<String>(); .collect::<String>();
buffers.push(cx.add_model(|cx| Buffer::new(0, base_text, cx))); buffers.push(
cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text)),
);
buffers.last().unwrap() buffers.last().unwrap()
} else { } else {
buffers.choose(&mut rng).unwrap() buffers.choose(&mut rng).unwrap()
@ -5297,8 +5326,8 @@ mod tests {
fn test_history(cx: &mut AppContext) { fn test_history(cx: &mut AppContext) {
cx.set_global(SettingsStore::test(cx)); cx.set_global(SettingsStore::test(cx));
let buffer_1 = cx.add_model(|cx| Buffer::new(0, "1234", cx)); let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "1234"));
let buffer_2 = cx.add_model(|cx| Buffer::new(0, "5678", cx)); let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "5678"));
let multibuffer = cx.add_model(|_| MultiBuffer::new(0)); let multibuffer = cx.add_model(|_| MultiBuffer::new(0));
let group_interval = multibuffer.read(cx).history.group_interval; let group_interval = multibuffer.read(cx).history.group_interval;
multibuffer.update(cx, |multibuffer, cx| { multibuffer.update(cx, |multibuffer, cx| {
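
The `stream_excerpts_with_context_lines` hunk earlier in this file's diff narrows the method to a single buffer plus its ranges, detaches the producer internally, and hands back only the `mpsc::Receiver` of anchor ranges instead of a `(Task, Receiver)` pair. The sketch below is a standalone illustration of that producer/consumer shape using plain `futures` primitives; the chunk size, range type, and function name are placeholders, not Zed's actual API.

```rust
use futures::{channel::mpsc, executor::block_on, SinkExt, StreamExt};
use std::ops::Range;

fn stream_chunks(ranges: Vec<Range<usize>>) -> mpsc::Receiver<Range<usize>> {
    let (mut tx, rx) = mpsc::channel(256);
    // The producer runs detached; the caller only ever sees the receiver.
    std::thread::spawn(move || {
        block_on(async move {
            for chunk in ranges.chunks(100) {
                for range in chunk {
                    if tx.send(range.clone()).await.is_err() {
                        return; // receiver dropped, stop producing
                    }
                }
            }
        })
    });
    rx
}

fn main() {
    let received = block_on(stream_chunks(vec![0..2, 5..9]).collect::<Vec<_>>());
    assert_eq!(received, vec![0..2, 5..9]);
}
```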

View file

@ -65,47 +65,52 @@ impl Editor {
self.set_scroll_position(scroll_position, cx); self.set_scroll_position(scroll_position, cx);
} }
let (autoscroll, local) = let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else {
if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() { return false;
autoscroll };
} else {
return false;
};
let first_cursor_top; let mut target_top;
let last_cursor_bottom; let mut target_bottom;
if let Some(highlighted_rows) = &self.highlighted_rows { if let Some(highlighted_rows) = &self.highlighted_rows {
first_cursor_top = highlighted_rows.start as f32; target_top = highlighted_rows.start as f32;
last_cursor_bottom = first_cursor_top + 1.; target_bottom = target_top + 1.;
} else if autoscroll == Autoscroll::newest() {
let newest_selection = self.selections.newest::<Point>(cx);
first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
last_cursor_bottom = first_cursor_top + 1.;
} else { } else {
let selections = self.selections.all::<Point>(cx); let selections = self.selections.all::<Point>(cx);
first_cursor_top = selections target_top = selections
.first() .first()
.unwrap() .unwrap()
.head() .head()
.to_display_point(&display_map) .to_display_point(&display_map)
.row() as f32; .row() as f32;
last_cursor_bottom = selections target_bottom = selections
.last() .last()
.unwrap() .unwrap()
.head() .head()
.to_display_point(&display_map) .to_display_point(&display_map)
.row() as f32 .row() as f32
+ 1.0; + 1.0;
// If the selections can't all fit on screen, scroll to the newest.
if autoscroll == Autoscroll::newest()
|| autoscroll == Autoscroll::fit() && target_bottom - target_top > visible_lines
{
let newest_selection_top = selections
.iter()
.max_by_key(|s| s.id)
.unwrap()
.head()
.to_display_point(&display_map)
.row() as f32;
target_top = newest_selection_top;
target_bottom = newest_selection_top + 1.;
}
} }
let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) { let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
0. 0.
} else { } else {
((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor() ((visible_lines - (target_bottom - target_top)) / 2.0).floor()
}; };
if margin < 0.0 {
return false;
}
let strategy = match autoscroll { let strategy = match autoscroll {
Autoscroll::Strategy(strategy) => strategy, Autoscroll::Strategy(strategy) => strategy,
@ -113,8 +118,8 @@ impl Editor {
let last_autoscroll = &self.scroll_manager.last_autoscroll; let last_autoscroll = &self.scroll_manager.last_autoscroll;
if let Some(last_autoscroll) = last_autoscroll { if let Some(last_autoscroll) = last_autoscroll {
if self.scroll_manager.anchor.offset == last_autoscroll.0 if self.scroll_manager.anchor.offset == last_autoscroll.0
&& first_cursor_top == last_autoscroll.1 && target_top == last_autoscroll.1
&& last_cursor_bottom == last_autoscroll.2 && target_bottom == last_autoscroll.2
{ {
last_autoscroll.3.next() last_autoscroll.3.next()
} else { } else {
@ -129,37 +134,41 @@ impl Editor {
match strategy { match strategy {
AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => { AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
let margin = margin.min(self.scroll_manager.vertical_scroll_margin); let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
let target_top = (first_cursor_top - margin).max(0.0); let target_top = (target_top - margin).max(0.0);
let target_bottom = last_cursor_bottom + margin; let target_bottom = target_bottom + margin;
let start_row = scroll_position.y(); let start_row = scroll_position.y();
let end_row = start_row + visible_lines; let end_row = start_row + visible_lines;
if target_top < start_row { let needs_scroll_up = target_top < start_row;
let needs_scroll_down = target_bottom >= end_row;
if needs_scroll_up && !needs_scroll_down {
scroll_position.set_y(target_top); scroll_position.set_y(target_top);
self.set_scroll_position_internal(scroll_position, local, true, cx); self.set_scroll_position_internal(scroll_position, local, true, cx);
} else if target_bottom >= end_row { }
if !needs_scroll_up && needs_scroll_down {
scroll_position.set_y(target_bottom - visible_lines); scroll_position.set_y(target_bottom - visible_lines);
self.set_scroll_position_internal(scroll_position, local, true, cx); self.set_scroll_position_internal(scroll_position, local, true, cx);
} }
} }
AutoscrollStrategy::Center => { AutoscrollStrategy::Center => {
scroll_position.set_y((first_cursor_top - margin).max(0.0)); scroll_position.set_y((target_top - margin).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx); self.set_scroll_position_internal(scroll_position, local, true, cx);
} }
AutoscrollStrategy::Top => { AutoscrollStrategy::Top => {
scroll_position.set_y((first_cursor_top).max(0.0)); scroll_position.set_y((target_top).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx); self.set_scroll_position_internal(scroll_position, local, true, cx);
} }
AutoscrollStrategy::Bottom => { AutoscrollStrategy::Bottom => {
scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0)); scroll_position.set_y((target_bottom - visible_lines).max(0.0));
self.set_scroll_position_internal(scroll_position, local, true, cx); self.set_scroll_position_internal(scroll_position, local, true, cx);
} }
} }
self.scroll_manager.last_autoscroll = Some(( self.scroll_manager.last_autoscroll = Some((
self.scroll_manager.anchor.offset, self.scroll_manager.anchor.offset,
first_cursor_top, target_top,
last_cursor_bottom, target_bottom,
strategy, strategy,
)); ));
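
The autoscroll rewrite above folds the "newest selection" special case into a single `target_top`/`target_bottom` pair and, for the fit strategies, scrolls only when the target needs scrolling in exactly one direction. A minimal standalone sketch of that decision, with a hypothetical helper name and no scroll-manager state:

```rust
// Returns the new scroll row, or None when the target is already visible
// (or too tall to fit in either direction, in which case we do nothing).
fn autoscroll_fit(
    start_row: f32,
    visible_lines: f32,
    target_top: f32,
    target_bottom: f32,
    margin: f32,
) -> Option<f32> {
    let target_top = (target_top - margin).max(0.0);
    let target_bottom = target_bottom + margin;
    let end_row = start_row + visible_lines;
    let needs_scroll_up = target_top < start_row;
    let needs_scroll_down = target_bottom >= end_row;
    if needs_scroll_up && !needs_scroll_down {
        Some(target_top)
    } else if !needs_scroll_up && needs_scroll_down {
        Some(target_bottom - visible_lines)
    } else {
        None
    }
}

fn main() {
    // Cursor above the viewport: scroll so its line becomes the first visible row.
    assert_eq!(autoscroll_fit(10.0, 20.0, 4.0, 5.0, 0.0), Some(4.0));
    // Cursor already on screen: no scroll.
    assert_eq!(autoscroll_fit(10.0, 20.0, 15.0, 16.0, 0.0), None);
}
```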

View file

@ -225,6 +225,7 @@ impl<'a> EditorTestContext<'a> {
.map(|h| h.1.clone()) .map(|h| h.1.clone())
.unwrap_or_default() .unwrap_or_default()
.into_iter() .into_iter()
.filter_map(|range| range.as_text_range())
.map(|range| range.to_offset(&snapshot.buffer_snapshot)) .map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect() .collect()
}); });
@ -240,6 +241,7 @@ impl<'a> EditorTestContext<'a> {
.map(|ranges| ranges.as_ref().clone().1) .map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default() .unwrap_or_default()
.into_iter() .into_iter()
.filter_map(|range| range.as_text_range())
.map(|range| range.to_offset(&snapshot.buffer_snapshot)) .map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect(); .collect();
assert_set_eq!(actual_ranges, expected_ranges); assert_set_eq!(actual_ranges, expected_ranges);

View file

@ -1,11 +1,11 @@
[package] [package]
name = "staff_mode" name = "feature_flags"
version = "0.1.0" version = "0.1.0"
edition = "2021" edition = "2021"
publish = false publish = false
[lib] [lib]
path = "src/staff_mode.rs" path = "src/feature_flags.rs"
[dependencies] [dependencies]
gpui = { path = "../gpui" } gpui = { path = "../gpui" }

View file

@ -0,0 +1,79 @@
use gpui::{AppContext, Subscription, ViewContext};
#[derive(Default)]
struct FeatureFlags {
flags: Vec<String>,
staff: bool,
}
impl FeatureFlags {
fn has_flag(&self, flag: &str) -> bool {
self.staff || self.flags.iter().find(|f| f.as_str() == flag).is_some()
}
}
pub trait FeatureFlag {
const NAME: &'static str;
}
pub enum ChannelsAlpha {}
impl FeatureFlag for ChannelsAlpha {
const NAME: &'static str = "channels_alpha";
}
pub trait FeatureFlagViewExt<V: 'static> {
fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
where
F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static;
}
impl<V: 'static> FeatureFlagViewExt<V> for ViewContext<'_, '_, V> {
fn observe_flag<T: FeatureFlag, F>(&mut self, callback: F) -> Subscription
where
F: Fn(bool, &mut V, &mut ViewContext<V>) + 'static,
{
self.observe_global::<FeatureFlags, _>(move |v, cx| {
let feature_flags = cx.global::<FeatureFlags>();
callback(feature_flags.has_flag(<T as FeatureFlag>::NAME), v, cx);
})
}
}
pub trait FeatureFlagAppExt {
fn update_flags(&mut self, staff: bool, flags: Vec<String>);
fn set_staff(&mut self, staff: bool);
fn has_flag<T: FeatureFlag>(&self) -> bool;
fn is_staff(&self) -> bool;
}
impl FeatureFlagAppExt for AppContext {
fn update_flags(&mut self, staff: bool, flags: Vec<String>) {
self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
feature_flags.staff = staff;
feature_flags.flags = flags;
})
}
fn set_staff(&mut self, staff: bool) {
self.update_default_global::<FeatureFlags, _, _>(|feature_flags, _| {
feature_flags.staff = staff;
})
}
fn has_flag<T: FeatureFlag>(&self) -> bool {
if self.has_global::<FeatureFlags>() {
self.global::<FeatureFlags>().has_flag(T::NAME)
} else {
false
}
}
fn is_staff(&self) -> bool {
if self.has_global::<FeatureFlags>() {
return self.global::<FeatureFlags>().staff;
} else {
false
}
}
}
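
For reference, a standalone sketch of the lookup this new crate performs: a flag check passes when the user is staff or the flag name appears in their list. `NewPanel` is a hypothetical flag type, mirroring how `ChannelsAlpha` is declared above; the trait and struct are re-declared here so the example compiles on its own.

```rust
trait FeatureFlag {
    const NAME: &'static str;
}

// Hypothetical flag, declared the same way `ChannelsAlpha` is above.
enum NewPanel {}

impl FeatureFlag for NewPanel {
    const NAME: &'static str = "new_panel";
}

#[derive(Default)]
struct FeatureFlags {
    flags: Vec<String>,
    staff: bool,
}

impl FeatureFlags {
    fn has_flag<T: FeatureFlag>(&self) -> bool {
        // Staff accounts pass every flag check; everyone else needs the flag by name.
        self.staff || self.flags.iter().any(|f| f.as_str() == T::NAME)
    }
}

fn main() {
    let user = FeatureFlags { flags: vec!["new_panel".into()], staff: false };
    assert!(user.has_flag::<NewPanel>());

    let staff = FeatureFlags { staff: true, ..Default::default() };
    assert!(staff.has_flag::<NewPanel>());
}
```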

View file

@ -2,7 +2,7 @@ use button_component::Button;
use gpui::{ use gpui::{
color::Color, color::Color,
elements::{Component, ContainerStyle, Flex, Label, ParentElement}, elements::{ContainerStyle, Flex, Label, ParentElement, StatefulComponent},
fonts::{self, TextStyle}, fonts::{self, TextStyle},
platform::WindowOptions, platform::WindowOptions,
AnyElement, App, Element, Entity, View, ViewContext, AnyElement, App, Element, Entity, View, ViewContext,
@ -114,7 +114,7 @@ mod theme {
// Component creation: // Component creation:
mod toggleable_button { mod toggleable_button {
use gpui::{ use gpui::{
elements::{Component, ContainerStyle, LabelStyle}, elements::{ContainerStyle, LabelStyle, StatefulComponent},
scene::MouseClick, scene::MouseClick,
EventContext, View, EventContext, View,
}; };
@ -156,7 +156,7 @@ mod toggleable_button {
} }
} }
impl<V: View> Component<V> for ToggleableButton<V> { impl<V: View> StatefulComponent<V> for ToggleableButton<V> {
fn render(self, v: &mut V, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> { fn render(self, v: &mut V, cx: &mut gpui::ViewContext<V>) -> gpui::AnyElement<V> {
let button = if let Some(style) = self.style { let button = if let Some(style) = self.style {
self.button.with_style(*style.style_for(self.active)) self.button.with_style(*style.style_for(self.active))
@ -171,7 +171,7 @@ mod toggleable_button {
mod button_component { mod button_component {
use gpui::{ use gpui::{
elements::{Component, ContainerStyle, Label, LabelStyle, MouseEventHandler}, elements::{ContainerStyle, Label, LabelStyle, MouseEventHandler, StatefulComponent},
platform::MouseButton, platform::MouseButton,
scene::MouseClick, scene::MouseClick,
AnyElement, Element, EventContext, TypeTag, View, ViewContext, AnyElement, Element, EventContext, TypeTag, View, ViewContext,
@ -212,7 +212,7 @@ mod button_component {
} }
} }
impl<V: View> Component<V> for Button<V> { impl<V: View> StatefulComponent<V> for Button<V> {
fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> { fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
let click_handler = self.click_handler; let click_handler = self.click_handler;

View file

@ -3299,15 +3299,15 @@ impl<'a, 'b, V: 'static> ViewContext<'a, 'b, V> {
let region_id = MouseRegionId::new(tag, self.view_id, region_id); let region_id = MouseRegionId::new(tag, self.view_id, region_id);
MouseState { MouseState {
hovered: self.window.hovered_region_ids.contains(&region_id), hovered: self.window.hovered_region_ids.contains(&region_id),
clicked: if let Some((clicked_region_id, button)) = self.window.clicked_region { mouse_down: !self.window.clicked_region_ids.is_empty(),
if region_id == clicked_region_id { clicked: self
Some(button) .window
} else { .clicked_region_ids
None .iter()
} .find(|click_region_id| **click_region_id == region_id)
} else { // If we've gotten here, there should always be a clicked region.
None // But let's be defensive and return None if there isn't.
}, .and_then(|_| self.window.clicked_region.map(|(_, button)| button)),
accessed_hovered: false, accessed_hovered: false,
accessed_clicked: false, accessed_clicked: false,
} }
@ -3798,14 +3798,20 @@ impl<'a, T> DerefMut for Reference<'a, T> {
pub struct MouseState { pub struct MouseState {
pub(crate) hovered: bool, pub(crate) hovered: bool,
pub(crate) clicked: Option<MouseButton>, pub(crate) clicked: Option<MouseButton>,
pub(crate) mouse_down: bool,
pub(crate) accessed_hovered: bool, pub(crate) accessed_hovered: bool,
pub(crate) accessed_clicked: bool, pub(crate) accessed_clicked: bool,
} }
impl MouseState { impl MouseState {
pub fn dragging(&mut self) -> bool {
self.accessed_hovered = true;
self.hovered && self.mouse_down
}
pub fn hovered(&mut self) -> bool { pub fn hovered(&mut self) -> bool {
self.accessed_hovered = true; self.accessed_hovered = true;
self.hovered self.hovered && (!self.mouse_down || self.clicked.is_some())
} }
pub fn clicked(&mut self) -> Option<MouseButton> { pub fn clicked(&mut self) -> Option<MouseButton> {
@ -4656,12 +4662,13 @@ impl AnyWeakModelHandle {
} }
} }
#[derive(Copy)]
pub struct WeakViewHandle<T> { pub struct WeakViewHandle<T> {
any_handle: AnyWeakViewHandle, any_handle: AnyWeakViewHandle,
view_type: PhantomData<T>, view_type: PhantomData<T>,
} }
impl<T> Copy for WeakViewHandle<T> {}
impl<T> Debug for WeakViewHandle<T> { impl<T> Debug for WeakViewHandle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct(&format!("WeakViewHandle<{}>", type_name::<T>())) f.debug_struct(&format!("WeakViewHandle<{}>", type_name::<T>()))
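
The `MouseState` changes above add a `mouse_down` bit so that `hovered()` stops reporting true while a click that started in another region is still held, and the new `dragging()` reports true while the pointer is over the region with a button down. A minimal standalone sketch of those semantics (plain structs, not gpui's actual types; `clicked_here` stands in for `clicked.is_some()`):

```rust
struct MouseStateSketch {
    hovered: bool,      // pointer is currently over this region
    mouse_down: bool,   // some mouse button is currently held down
    clicked_here: bool, // this region is the one that was clicked
}

impl MouseStateSketch {
    fn hovered(&self) -> bool {
        // Hover styling is suppressed while a click that started elsewhere is held.
        self.hovered && (!self.mouse_down || self.clicked_here)
    }

    fn dragging(&self) -> bool {
        // Pointer over this region while a button is held down.
        self.hovered && self.mouse_down
    }
}

fn main() {
    let drag_from_elsewhere = MouseStateSketch { hovered: true, mouse_down: true, clicked_here: false };
    assert!(!drag_from_elsewhere.hovered());
    assert!(drag_from_elsewhere.dragging());

    let plain_hover = MouseStateSketch { hovered: true, mouse_down: false, clicked_here: false };
    assert!(plain_hover.hovered());
    assert!(!plain_hover.dragging());
}
```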

View file

@ -624,10 +624,11 @@ impl<'a> WindowContext<'a> {
} }
} }
if self if pressed_button.is_none()
.window && self
.platform_window .window
.is_topmost_for_position(*position) .platform_window
.is_topmost_for_position(*position)
{ {
self.platform().set_cursor_style(style_to_assign); self.platform().set_cursor_style(style_to_assign);
} }
@ -791,6 +792,11 @@ impl<'a> WindowContext<'a> {
if clicked_region_ids.contains(&mouse_region.id()) { if clicked_region_ids.contains(&mouse_region.id()) {
if mouse_region.bounds.contains_point(self.mouse_position()) { if mouse_region.bounds.contains_point(self.mouse_position()) {
valid_regions.push(mouse_region.clone()); valid_regions.push(mouse_region.clone());
} else {
// Let the view know that it hasn't been clicked anymore
if mouse_region.notify_on_click {
notified_views.insert(mouse_region.id().view_id());
}
} }
} }
} }

View file

@ -234,6 +234,27 @@ pub trait Element<V: 'static>: 'static {
{ {
MouseEventHandler::for_child::<Tag>(self.into_any(), region_id) MouseEventHandler::for_child::<Tag>(self.into_any(), region_id)
} }
fn component(self) -> StatelessElementAdapter
where
Self: Sized,
{
StatelessElementAdapter::new(self.into_any())
}
fn stateful_component(self) -> StatefulElementAdapter<V>
where
Self: Sized,
{
StatefulElementAdapter::new(self.into_any())
}
fn styleable_component(self) -> StylableAdapter<StatelessElementAdapter>
where
Self: Sized,
{
StatelessElementAdapter::new(self.into_any()).stylable()
}
} }
trait AnyElementState<V> { trait AnyElementState<V> {

View file

@ -1,79 +1,81 @@
use std::marker::PhantomData; use std::{any::Any, marker::PhantomData};
use pathfinder_geometry::{rect::RectF, vector::Vector2F}; use pathfinder_geometry::{rect::RectF, vector::Vector2F};
use crate::{ use crate::{
AnyElement, Element, LayoutContext, PaintContext, SceneBuilder, SizeConstraint, View, AnyElement, Element, LayoutContext, PaintContext, SceneBuilder, SizeConstraint, ViewContext,
ViewContext,
}; };
use super::Empty; use super::Empty;
pub trait GeneralComponent { /// The core stateless component trait, simply rendering an element tree
fn render<V: View>(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V>; pub trait Component {
fn element<V: View>(self) -> ComponentAdapter<V, Self> fn render<V: 'static>(self, cx: &mut ViewContext<V>) -> AnyElement<V>;
fn element<V: 'static>(self) -> ComponentAdapter<V, Self>
where where
Self: Sized, Self: Sized,
{ {
ComponentAdapter::new(self) ComponentAdapter::new(self)
} }
fn stylable(self) -> StylableAdapter<Self>
where
Self: Sized,
{
StylableAdapter::new(self)
}
fn stateful<V: 'static>(self) -> StatefulAdapter<Self, V>
where
Self: Sized,
{
StatefulAdapter::new(self)
}
} }
pub trait StyleableComponent { /// Allows a component's styles to be rebound in a simple way.
pub trait Stylable: Component {
type Style: Clone; type Style: Clone;
type Output: GeneralComponent;
fn with_style(self, style: Self::Style) -> Self;
}
/// This trait models the typestate pattern for a component's style,
/// enforcing at compile time that a component is only usable after
/// it has been styled while still allowing for late binding of the
/// styling information
pub trait SafeStylable {
type Style: Clone;
type Output: Component;
fn with_style(self, style: Self::Style) -> Self::Output; fn with_style(self, style: Self::Style) -> Self::Output;
} }
impl GeneralComponent for () { /// All stylable components can trivially implement SafeStylable
fn render<V: View>(self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> { impl<C: Stylable> SafeStylable for C {
Empty::new().into_any() type Style = C::Style;
type Output = C;
fn with_style(self, style: Self::Style) -> Self::Output {
self.with_style(style)
} }
} }
impl StyleableComponent for () { /// Allows converting an unstylable component into a stylable one
type Style = (); /// by using `()` as the style type
type Output = (); pub struct StylableAdapter<C: Component> {
fn with_style(self, _: Self::Style) -> Self::Output {
()
}
}
pub trait Component<V: View> {
fn render(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V>;
fn element(self) -> ComponentAdapter<V, Self>
where
Self: Sized,
{
ComponentAdapter::new(self)
}
}
impl<V: View, C: GeneralComponent> Component<V> for C {
fn render(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
self.render(v, cx)
}
}
// StylableComponent -> GeneralComponent
pub struct StylableComponentAdapter<C: Component<V>, V: View> {
component: C, component: C,
phantom: std::marker::PhantomData<V>,
} }
impl<C: Component<V>, V: View> StylableComponentAdapter<C, V> { impl<C: Component> StylableAdapter<C> {
pub fn new(component: C) -> Self { pub fn new(component: C) -> Self {
Self { Self { component }
component,
phantom: std::marker::PhantomData,
}
} }
} }
impl<C: GeneralComponent, V: View> StyleableComponent for StylableComponentAdapter<C, V> { impl<C: Component> SafeStylable for StylableAdapter<C> {
type Style = (); type Style = ();
type Output = C; type Output = C;
@ -83,13 +85,150 @@ impl<C: GeneralComponent, V: View> StyleableComponent for StylableComponentAdapt
} }
} }
// Element -> Component /// This is a secondary trait for components that can be styled
pub struct ElementAdapter<V: View> { /// which rely on their view's state. This is useful for components that, for example,
/// want to take click handler callbacks. Unfortunately, the generic bound on the
/// Component trait makes it incompatible with the stateless components above.
// So let's just replicate them for now
pub trait StatefulComponent<V: 'static> {
fn render(self, v: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V>;
fn element(self) -> ComponentAdapter<V, Self>
where
Self: Sized,
{
ComponentAdapter::new(self)
}
fn styleable(self) -> StatefulStylableAdapter<Self, V>
where
Self: Sized,
{
StatefulStylableAdapter::new(self)
}
fn stateless(self) -> StatelessElementAdapter
where
Self: Sized + 'static,
{
StatelessElementAdapter::new(self.element().into_any())
}
}
/// It is trivial to convert stateless components to stateful components, so let's
/// do so en masse. Note that the reverse is impossible without a helper.
impl<V: 'static, C: Component> StatefulComponent<V> for C {
fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
self.render(cx)
}
}
/// Same as stylable, but generic over a view type
pub trait StatefulStylable<V: 'static>: StatefulComponent<V> {
type Style: Clone;
fn with_style(self, style: Self::Style) -> Self;
}
/// Same as SafeStylable, but generic over a view type
pub trait StatefulSafeStylable<V: 'static> {
type Style: Clone;
type Output: StatefulComponent<V>;
fn with_style(self, style: Self::Style) -> Self::Output;
}
/// Converting from stateless to stateful
impl<V: 'static, C: SafeStylable> StatefulSafeStylable<V> for C {
type Style = C::Style;
type Output = C::Output;
fn with_style(self, style: Self::Style) -> Self::Output {
self.with_style(style)
}
}
// A helper for converting stateless components into stateful ones
pub struct StatefulAdapter<C, V> {
component: C,
phantom: std::marker::PhantomData<V>,
}
impl<C: Component, V: 'static> StatefulAdapter<C, V> {
pub fn new(component: C) -> Self {
Self {
component,
phantom: std::marker::PhantomData,
}
}
}
impl<C: Component, V: 'static> StatefulComponent<V> for StatefulAdapter<C, V> {
fn render(self, _: &mut V, cx: &mut ViewContext<V>) -> AnyElement<V> {
self.component.render(cx)
}
}
// A helper for converting stateful but style-less components into stylable ones
// by using `()` as the style type
pub struct StatefulStylableAdapter<C: StatefulComponent<V>, V: 'static> {
component: C,
phantom: std::marker::PhantomData<V>,
}
impl<C: StatefulComponent<V>, V: 'static> StatefulStylableAdapter<C, V> {
pub fn new(component: C) -> Self {
Self {
component,
phantom: std::marker::PhantomData,
}
}
}
impl<C: StatefulComponent<V>, V: 'static> StatefulSafeStylable<V>
for StatefulStylableAdapter<C, V>
{
type Style = ();
type Output = C;
fn with_style(self, _: Self::Style) -> Self::Output {
self.component
}
}
/// A way of erasing the view generic from an element, useful
/// for wrapping up an explicit element tree into stateless
/// components
pub struct StatelessElementAdapter {
element: Box<dyn Any>,
}
impl StatelessElementAdapter {
pub fn new<V: 'static>(element: AnyElement<V>) -> Self {
StatelessElementAdapter {
element: Box::new(element) as Box<dyn Any>,
}
}
}
impl Component for StatelessElementAdapter {
fn render<V: 'static>(self, _: &mut ViewContext<V>) -> AnyElement<V> {
*self
.element
.downcast::<AnyElement<V>>()
.expect("Don't move elements out of their view :(")
}
}
// For converting elements into stateful components
pub struct StatefulElementAdapter<V: 'static> {
element: AnyElement<V>, element: AnyElement<V>,
_phantom: std::marker::PhantomData<V>, _phantom: std::marker::PhantomData<V>,
} }
impl<V: View> ElementAdapter<V> { impl<V: 'static> StatefulElementAdapter<V> {
pub fn new(element: AnyElement<V>) -> Self { pub fn new(element: AnyElement<V>) -> Self {
Self { Self {
element, element,
@ -98,20 +237,35 @@ impl<V: View> ElementAdapter<V> {
} }
} }
impl<V: View> Component<V> for ElementAdapter<V> { impl<V: 'static> StatefulComponent<V> for StatefulElementAdapter<V> {
fn render(self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> { fn render(self, _: &mut V, _: &mut ViewContext<V>) -> AnyElement<V> {
self.element self.element
} }
} }
// Component -> Element /// A convenient shorthand for creating an empty component.
pub struct ComponentAdapter<V: View, E> { impl Component for () {
fn render<V: 'static>(self, _: &mut ViewContext<V>) -> AnyElement<V> {
Empty::new().into_any()
}
}
impl Stylable for () {
type Style = ();
fn with_style(self, _: Self::Style) -> Self {
()
}
}
// For converting components back into Elements
pub struct ComponentAdapter<V: 'static, E> {
component: Option<E>, component: Option<E>,
element: Option<AnyElement<V>>, element: Option<AnyElement<V>>,
phantom: PhantomData<V>, phantom: PhantomData<V>,
} }
impl<E, V: View> ComponentAdapter<V, E> { impl<E, V: 'static> ComponentAdapter<V, E> {
pub fn new(e: E) -> Self { pub fn new(e: E) -> Self {
Self { Self {
component: Some(e), component: Some(e),
@ -121,7 +275,7 @@ impl<E, V: View> ComponentAdapter<V, E> {
} }
} }
impl<V: View, C: Component<V> + 'static> Element<V> for ComponentAdapter<V, C> { impl<V: 'static, C: StatefulComponent<V> + 'static> Element<V> for ComponentAdapter<V, C> {
type LayoutState = (); type LayoutState = ();
type PaintState = (); type PaintState = ();
@ -184,6 +338,7 @@ impl<V: View, C: Component<V> + 'static> Element<V> for ComponentAdapter<V, C> {
) -> serde_json::Value { ) -> serde_json::Value {
serde_json::json!({ serde_json::json!({
"type": "ComponentAdapter", "type": "ComponentAdapter",
"component": std::any::type_name::<C>(),
"child": self.element.as_ref().map(|el| el.debug(view, cx)), "child": self.element.as_ref().map(|el| el.debug(view, cx)),
}) })
} }
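
The doc comments above describe a typestate pattern: an unstyled component only becomes renderable once `with_style` has produced its `Output` type. A standalone illustration of that idea with toy types (nothing here is gpui's real API):

```rust
trait RenderSketch {
    fn render(self) -> String;
}

// The unstyled form cannot be rendered; only its styled `Output` can.
trait SafeStylableSketch {
    type Style;
    type Output: RenderSketch;
    fn with_style(self, style: Self::Style) -> Self::Output;
}

struct UnstyledButton {
    label: String,
}

struct StyledButton {
    label: String,
    color: &'static str,
}

impl SafeStylableSketch for UnstyledButton {
    type Style = &'static str;
    type Output = StyledButton;
    fn with_style(self, color: Self::Style) -> StyledButton {
        StyledButton { label: self.label, color }
    }
}

impl RenderSketch for StyledButton {
    fn render(self) -> String {
        format!("[{} in {}]", self.label, self.color)
    }
}

fn main() {
    // `UnstyledButton` has no `render`; styling is enforced at compile time.
    let styled = UnstyledButton { label: "Save".into() }.with_style("blue");
    assert_eq!(styled.render(), "[Save in blue]");
}
```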

View file

@ -44,6 +44,14 @@ impl ContainerStyle {
..Default::default() ..Default::default()
} }
} }
pub fn additional_length(&self) -> f32 {
self.padding.left
+ self.padding.right
+ self.border.width * 2.
+ self.margin.left
+ self.margin.right
}
} }
pub struct Container<V> { pub struct Container<V> {
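
A quick standalone check of the box-model sum the new `additional_length` helper returns, assuming it is meant as the extra horizontal space a container adds around its child (left/right padding, both borders, left/right margin):

```rust
struct Insets {
    left: f32,
    right: f32,
}

fn additional_length(padding: Insets, border_width: f32, margin: Insets) -> f32 {
    padding.left + padding.right + border_width * 2.0 + margin.left + margin.right
}

fn main() {
    let extra = additional_length(
        Insets { left: 4.0, right: 4.0 },
        1.0,
        Insets { left: 8.0, right: 8.0 },
    );
    assert_eq!(extra, 26.0);
}
```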

View file

@ -22,6 +22,7 @@ pub struct Flex<V> {
children: Vec<AnyElement<V>>, children: Vec<AnyElement<V>>,
scroll_state: Option<(ElementStateHandle<Rc<ScrollState>>, usize)>, scroll_state: Option<(ElementStateHandle<Rc<ScrollState>>, usize)>,
child_alignment: f32, child_alignment: f32,
spacing: f32,
} }
impl<V: 'static> Flex<V> { impl<V: 'static> Flex<V> {
@ -31,6 +32,7 @@ impl<V: 'static> Flex<V> {
children: Default::default(), children: Default::default(),
scroll_state: None, scroll_state: None,
child_alignment: -1., child_alignment: -1.,
spacing: 0.,
} }
} }
@ -51,6 +53,11 @@ impl<V: 'static> Flex<V> {
self self
} }
pub fn with_spacing(mut self, spacing: f32) -> Self {
self.spacing = spacing;
self
}
pub fn scrollable<Tag>( pub fn scrollable<Tag>(
mut self, mut self,
element_id: usize, element_id: usize,
@ -81,7 +88,7 @@ impl<V: 'static> Flex<V> {
cx: &mut LayoutContext<V>, cx: &mut LayoutContext<V>,
) { ) {
let cross_axis = self.axis.invert(); let cross_axis = self.axis.invert();
for child in &mut self.children { for child in self.children.iter_mut() {
if let Some(metadata) = child.metadata::<FlexParentData>() { if let Some(metadata) = child.metadata::<FlexParentData>() {
if let Some((flex, expanded)) = metadata.flex { if let Some((flex, expanded)) = metadata.flex {
if expanded != layout_expanded { if expanded != layout_expanded {
@ -132,12 +139,12 @@ impl<V: 'static> Element<V> for Flex<V> {
cx: &mut LayoutContext<V>, cx: &mut LayoutContext<V>,
) -> (Vector2F, Self::LayoutState) { ) -> (Vector2F, Self::LayoutState) {
let mut total_flex = None; let mut total_flex = None;
let mut fixed_space = 0.0; let mut fixed_space = self.children.len().saturating_sub(1) as f32 * self.spacing;
let mut contains_float = false; let mut contains_float = false;
let cross_axis = self.axis.invert(); let cross_axis = self.axis.invert();
let mut cross_axis_max: f32 = 0.0; let mut cross_axis_max: f32 = 0.0;
for child in &mut self.children { for child in self.children.iter_mut() {
let metadata = child.metadata::<FlexParentData>(); let metadata = child.metadata::<FlexParentData>();
contains_float |= metadata.map_or(false, |metadata| metadata.float); contains_float |= metadata.map_or(false, |metadata| metadata.float);
@ -315,7 +322,7 @@ impl<V: 'static> Element<V> for Flex<V> {
} }
} }
for child in &mut self.children { for child in self.children.iter_mut() {
if remaining_space > 0. { if remaining_space > 0. {
if let Some(metadata) = child.metadata::<FlexParentData>() { if let Some(metadata) = child.metadata::<FlexParentData>() {
if metadata.float { if metadata.float {
@ -354,8 +361,8 @@ impl<V: 'static> Element<V> for Flex<V> {
child.paint(scene, aligned_child_origin, visible_bounds, view, cx); child.paint(scene, aligned_child_origin, visible_bounds, view, cx);
match self.axis { match self.axis {
Axis::Horizontal => child_origin += vec2f(child.size().x(), 0.0), Axis::Horizontal => child_origin += vec2f(child.size().x() + self.spacing, 0.0),
Axis::Vertical => child_origin += vec2f(0.0, child.size().y()), Axis::Vertical => child_origin += vec2f(0.0, child.size().y() + self.spacing),
} }
} }
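
The `with_spacing` change above reserves `(children - 1) * spacing` as fixed space during measurement and advances each child's origin by its own extent plus the spacing. A standalone sketch of that arithmetic (hypothetical helper names):

```rust
// Fixed space consumed by gaps between children along the main axis.
fn fixed_space_for_spacing(child_count: usize, spacing: f32) -> f32 {
    child_count.saturating_sub(1) as f32 * spacing
}

// Main-axis origins for children of the given extents, with a gap between each pair.
fn child_origins(child_extents: &[f32], spacing: f32) -> Vec<f32> {
    let mut origin = 0.0;
    let mut origins = Vec::with_capacity(child_extents.len());
    for extent in child_extents {
        origins.push(origin);
        origin += extent + spacing;
    }
    origins
}

fn main() {
    assert_eq!(fixed_space_for_spacing(3, 4.0), 8.0);
    assert_eq!(child_origins(&[10.0, 20.0, 5.0], 4.0), vec![0.0, 14.0, 38.0]);
}
```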

View file

@ -67,7 +67,9 @@ impl KeymapContextPredicate {
} }
pub fn eval(&self, contexts: &[KeymapContext]) -> bool { pub fn eval(&self, contexts: &[KeymapContext]) -> bool {
let Some(context) = contexts.first() else { return false }; let Some(context) = contexts.first() else {
return false;
};
match self { match self {
Self::Identifier(name) => (&context.set).contains(name.as_str()), Self::Identifier(name) => (&context.set).contains(name.as_str()),
Self::Equal(left, right) => context Self::Equal(left, right) => context

View file

@ -14,7 +14,7 @@ use crate::{
CodeLabel, LanguageScope, Outline, CodeLabel, LanguageScope, Outline,
}; };
use anyhow::{anyhow, Result}; use anyhow::{anyhow, Result};
use clock::ReplicaId; pub use clock::ReplicaId;
use fs::LineEnding; use fs::LineEnding;
use futures::FutureExt as _; use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task}; use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
@ -347,13 +347,17 @@ impl CharKind {
} }
impl Buffer { impl Buffer {
pub fn new<T: Into<String>>( pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
replica_id: ReplicaId,
base_text: T,
cx: &mut ModelContext<Self>,
) -> Self {
Self::build( Self::build(
TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()), TextBuffer::new(replica_id, id, base_text.into()),
None,
None,
)
}
pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
Self::build(
TextBuffer::new(replica_id, remote_id, base_text),
None, None,
None, None,
) )
@ -2476,7 +2480,9 @@ impl BufferSnapshot {
matches.advance(); matches.advance();
let Some((open, close)) = open.zip(close) else { continue }; let Some((open, close)) = open.zip(close) else {
continue;
};
let bracket_range = open.start..=close.end; let bracket_range = open.start..=close.end;
if !bracket_range.overlaps(&range) { if !bracket_range.overlaps(&range) {

View file

@ -43,8 +43,8 @@ fn test_line_endings(cx: &mut gpui::AppContext) {
init_settings(cx, |_| {}); init_settings(cx, |_| {});
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = let mut buffer = Buffer::new(0, cx.model_id() as u64, "one\r\ntwo\rthree")
Buffer::new(0, "one\r\ntwo\rthree", cx).with_language(Arc::new(rust_lang()), cx); .with_language(Arc::new(rust_lang()), cx);
assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.text(), "one\ntwo\nthree");
assert_eq!(buffer.line_ending(), LineEnding::Windows); assert_eq!(buffer.line_ending(), LineEnding::Windows);
@ -138,8 +138,8 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
let buffer_1_events = Rc::new(RefCell::new(Vec::new())); let buffer_1_events = Rc::new(RefCell::new(Vec::new()));
let buffer_2_events = Rc::new(RefCell::new(Vec::new())); let buffer_2_events = Rc::new(RefCell::new(Vec::new()));
let buffer1 = cx.add_model(|cx| Buffer::new(0, "abcdef", cx)); let buffer1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abcdef"));
let buffer2 = cx.add_model(|cx| Buffer::new(1, "abcdef", cx)); let buffer2 = cx.add_model(|cx| Buffer::new(1, cx.model_id() as u64, "abcdef"));
let buffer1_ops = Rc::new(RefCell::new(Vec::new())); let buffer1_ops = Rc::new(RefCell::new(Vec::new()));
buffer1.update(cx, { buffer1.update(cx, {
let buffer1_ops = buffer1_ops.clone(); let buffer1_ops = buffer1_ops.clone();
@ -222,7 +222,7 @@ fn test_edit_events(cx: &mut gpui::AppContext) {
#[gpui::test] #[gpui::test]
async fn test_apply_diff(cx: &mut gpui::TestAppContext) { async fn test_apply_diff(cx: &mut gpui::TestAppContext) {
let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n"; let text = "a\nbb\nccc\ndddd\neeeee\nffffff\n";
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3))); let anchor = buffer.read_with(cx, |buffer, _| buffer.anchor_before(Point::new(3, 3)));
let text = "a\nccc\ndddd\nffffff\n"; let text = "a\nccc\ndddd\nffffff\n";
@ -254,7 +254,7 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
] ]
.join("\n"); .join("\n");
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, text));
// Spawn a task to format the buffer's whitespace. // Spawn a task to format the buffer's whitespace.
// Pause so that the formatting task starts running. // Pause so that the formatting task starts running.
@ -318,8 +318,9 @@ async fn test_normalize_whitespace(cx: &mut gpui::TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_reparse(cx: &mut gpui::TestAppContext) { async fn test_reparse(cx: &mut gpui::TestAppContext) {
let text = "fn a() {}"; let text = "fn a() {}";
let buffer = let buffer = cx.add_model(|cx| {
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)); Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
});
// Wait for the initial text to parse // Wait for the initial text to parse
buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await; buffer.condition(cx, |buffer, _| !buffer.is_parsing()).await;
@ -443,7 +444,8 @@ async fn test_reparse(cx: &mut gpui::TestAppContext) {
#[gpui::test] #[gpui::test]
async fn test_resetting_language(cx: &mut gpui::TestAppContext) { async fn test_resetting_language(cx: &mut gpui::TestAppContext) {
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "{}", cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, "{}").with_language(Arc::new(rust_lang()), cx);
buffer.set_sync_parse_timeout(Duration::ZERO); buffer.set_sync_parse_timeout(Duration::ZERO);
buffer buffer
}); });
@ -491,8 +493,9 @@ async fn test_outline(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = let buffer = cx.add_model(|cx| {
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)); Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
});
let outline = buffer let outline = buffer
.read_with(cx, |buffer, _| buffer.snapshot().outline(None)) .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap(); .unwrap();
@ -576,8 +579,9 @@ async fn test_outline_nodes_with_newlines(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = let buffer = cx.add_model(|cx| {
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)); Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
});
let outline = buffer let outline = buffer
.read_with(cx, |buffer, _| buffer.snapshot().outline(None)) .read_with(cx, |buffer, _| buffer.snapshot().outline(None))
.unwrap(); .unwrap();
@ -613,7 +617,9 @@ async fn test_outline_with_extra_context(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(language), cx)); let buffer = cx.add_model(|cx| {
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx)
});
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
// extra context nodes are included in the outline. // extra context nodes are included in the outline.
@ -655,8 +661,9 @@ async fn test_symbols_containing(cx: &mut gpui::TestAppContext) {
"# "#
.unindent(); .unindent();
let buffer = let buffer = cx.add_model(|cx| {
cx.add_model(|cx| Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx)); Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx)
});
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
// point is at the start of an item // point is at the start of an item
@ -877,7 +884,8 @@ fn test_enclosing_bracket_ranges_where_brackets_are_not_outermost_children(cx: &
fn test_range_for_syntax_ancestor(cx: &mut AppContext) { fn test_range_for_syntax_ancestor(cx: &mut AppContext) {
cx.add_model(|cx| { cx.add_model(|cx| {
let text = "fn a() { b(|c| {}) }"; let text = "fn a() { b(|c| {}) }";
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
let snapshot = buffer.snapshot(); let snapshot = buffer.snapshot();
assert_eq!( assert_eq!(
@ -917,7 +925,8 @@ fn test_autoindent_with_soft_tabs(cx: &mut AppContext) {
cx.add_model(|cx| { cx.add_model(|cx| {
let text = "fn a() {}"; let text = "fn a() {}";
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx); buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n \n}"); assert_eq!(buffer.text(), "fn a() {\n \n}");
@ -959,7 +968,8 @@ fn test_autoindent_with_hard_tabs(cx: &mut AppContext) {
cx.add_model(|cx| { cx.add_model(|cx| {
let text = "fn a() {}"; let text = "fn a() {}";
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx); buffer.edit([(8..8, "\n\n")], Some(AutoindentMode::EachLine), cx);
assert_eq!(buffer.text(), "fn a() {\n\t\n}"); assert_eq!(buffer.text(), "fn a() {\n\t\n}");
@ -1000,6 +1010,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
cx.model_id() as u64,
" "
fn a() { fn a() {
c; c;
@ -1007,7 +1018,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
} }
" "
.unindent(), .unindent(),
cx,
) )
.with_language(Arc::new(rust_lang()), cx); .with_language(Arc::new(rust_lang()), cx);
@ -1073,6 +1083,7 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
cx.model_id() as u64,
" "
fn a() { fn a() {
b(); b();
@ -1080,7 +1091,6 @@ fn test_autoindent_does_not_adjust_lines_with_unchanged_suggestion(cx: &mut AppC
" "
.replace("|", "") // marker to preserve trailing whitespace .replace("|", "") // marker to preserve trailing whitespace
.unindent(), .unindent(),
cx,
) )
.with_language(Arc::new(rust_lang()), cx); .with_language(Arc::new(rust_lang()), cx);
@ -1136,13 +1146,13 @@ fn test_autoindent_does_not_adjust_lines_within_newly_created_errors(cx: &mut Ap
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
cx.model_id() as u64,
" "
fn a() { fn a() {
i i
} }
" "
.unindent(), .unindent(),
cx,
) )
.with_language(Arc::new(rust_lang()), cx); .with_language(Arc::new(rust_lang()), cx);
@ -1198,11 +1208,11 @@ fn test_autoindent_adjusts_lines_when_only_text_changes(cx: &mut AppContext) {
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new( let mut buffer = Buffer::new(
0, 0,
cx.model_id() as u64,
" "
fn a() {} fn a() {}
" "
.unindent(), .unindent(),
cx,
) )
.with_language(Arc::new(rust_lang()), cx); .with_language(Arc::new(rust_lang()), cx);
@ -1254,7 +1264,8 @@ fn test_autoindent_with_edit_at_end_of_buffer(cx: &mut AppContext) {
cx.add_model(|cx| { cx.add_model(|cx| {
let text = "a\nb"; let text = "a\nb";
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit( buffer.edit(
[(0..1, "\n"), (2..3, "\n")], [(0..1, "\n"), (2..3, "\n")],
Some(AutoindentMode::EachLine), Some(AutoindentMode::EachLine),
@ -1280,7 +1291,8 @@ fn test_autoindent_multi_line_insertion(cx: &mut AppContext) {
" "
.unindent(); .unindent();
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
buffer.edit( buffer.edit(
[(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")], [(Point::new(3, 0)..Point::new(3, 0), "e(\n f()\n);\n")],
Some(AutoindentMode::EachLine), Some(AutoindentMode::EachLine),
@ -1317,7 +1329,8 @@ fn test_autoindent_block_mode(cx: &mut AppContext) {
} }
"# "#
.unindent(); .unindent();
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
// When this text was copied, both of the quotation marks were at the same // When this text was copied, both of the quotation marks were at the same
// indent level, but the indentation of the first line was not included in // indent level, but the indentation of the first line was not included in
@ -1402,7 +1415,8 @@ fn test_autoindent_block_mode_without_original_indent_columns(cx: &mut AppContex
} }
"# "#
.unindent(); .unindent();
let mut buffer = Buffer::new(0, text, cx).with_language(Arc::new(rust_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(rust_lang()), cx);
// The original indent columns are not known, so this text is // The original indent columns are not known, so this text is
// auto-indented in a block as if the first line was copied in // auto-indented in a block as if the first line was copied in
@ -1481,7 +1495,7 @@ fn test_autoindent_language_without_indents_query(cx: &mut AppContext) {
" "
.unindent(); .unindent();
let mut buffer = Buffer::new(0, text, cx).with_language( let mut buffer = Buffer::new(0, cx.model_id() as u64, text).with_language(
Arc::new(Language::new( Arc::new(Language::new(
LanguageConfig { LanguageConfig {
name: "Markdown".into(), name: "Markdown".into(),
@ -1557,7 +1571,7 @@ fn test_autoindent_with_injected_languages(cx: &mut AppContext) {
false, false,
); );
let mut buffer = Buffer::new(0, text, cx); let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
buffer.set_language_registry(language_registry); buffer.set_language_registry(language_registry);
buffer.set_language(Some(html_language), cx); buffer.set_language(Some(html_language), cx);
buffer.edit( buffer.edit(
@ -1593,7 +1607,8 @@ fn test_autoindent_query_with_outdent_captures(cx: &mut AppContext) {
}); });
cx.add_model(|cx| { cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(ruby_lang()), cx); let mut buffer =
Buffer::new(0, cx.model_id() as u64, "").with_language(Arc::new(ruby_lang()), cx);
let text = r#" let text = r#"
class C class C
@ -1683,7 +1698,8 @@ fn test_language_scope_at_with_javascript(cx: &mut AppContext) {
let text = r#"a["b"] = <C d="e"></C>;"#; let text = r#"a["b"] = <C d="e"></C>;"#;
let buffer = Buffer::new(0, text, cx).with_language(Arc::new(language), cx); let buffer =
Buffer::new(0, cx.model_id() as u64, text).with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot(); let snapshot = buffer.snapshot();
let config = snapshot.language_scope_at(0).unwrap(); let config = snapshot.language_scope_at(0).unwrap();
@ -1762,7 +1778,8 @@ fn test_language_scope_at_with_rust(cx: &mut AppContext) {
"# "#
.unindent(); .unindent();
let buffer = Buffer::new(0, text.clone(), cx).with_language(Arc::new(language), cx); let buffer = Buffer::new(0, cx.model_id() as u64, text.clone())
.with_language(Arc::new(language), cx);
let snapshot = buffer.snapshot(); let snapshot = buffer.snapshot();
// By default, all brackets are enabled // By default, all brackets are enabled
@ -1806,7 +1823,7 @@ fn test_language_scope_at_with_combined_injections(cx: &mut AppContext) {
language_registry.add(Arc::new(html_lang())); language_registry.add(Arc::new(html_lang()));
language_registry.add(Arc::new(erb_lang())); language_registry.add(Arc::new(erb_lang()));
let mut buffer = Buffer::new(0, text, cx); let mut buffer = Buffer::new(0, cx.model_id() as u64, text);
buffer.set_language_registry(language_registry.clone()); buffer.set_language_registry(language_registry.clone());
buffer.set_language( buffer.set_language(
language_registry language_registry
@ -1838,7 +1855,7 @@ fn test_serialization(cx: &mut gpui::AppContext) {
let mut now = Instant::now(); let mut now = Instant::now();
let buffer1 = cx.add_model(|cx| { let buffer1 = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "abc", cx); let mut buffer = Buffer::new(0, cx.model_id() as u64, "abc");
buffer.edit([(3..3, "D")], None, cx); buffer.edit([(3..3, "D")], None, cx);
now += Duration::from_secs(1); now += Duration::from_secs(1);
@ -1893,7 +1910,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
let mut replica_ids = Vec::new(); let mut replica_ids = Vec::new();
let mut buffers = Vec::new(); let mut buffers = Vec::new();
let network = Rc::new(RefCell::new(Network::new(rng.clone()))); let network = Rc::new(RefCell::new(Network::new(rng.clone())));
let base_buffer = cx.add_model(|cx| Buffer::new(0, base_text.as_str(), cx)); let base_buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, base_text.as_str()));
for i in 0..rng.gen_range(min_peers..=max_peers) { for i in 0..rng.gen_range(min_peers..=max_peers) {
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
@ -2394,7 +2411,8 @@ fn assert_bracket_pairs(
) { ) {
let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false); let (expected_text, selection_ranges) = marked_text_ranges(selection_text, false);
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
Buffer::new(0, expected_text.clone(), cx).with_language(Arc::new(language), cx) Buffer::new(0, cx.model_id() as u64, expected_text.clone())
.with_language(Arc::new(language), cx)
}); });
let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot()); let buffer = buffer.update(cx, |buffer, _cx| buffer.snapshot());

View file

@ -18,7 +18,7 @@ use futures::{
FutureExt, TryFutureExt as _, FutureExt, TryFutureExt as _,
}; };
use gpui::{executor::Background, AppContext, AsyncAppContext, Task}; use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
use highlight_map::HighlightMap; pub use highlight_map::HighlightMap;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinary}; use lsp::{CodeActionKind, LanguageServerBinary};
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};

View file

@ -207,6 +207,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
} }
} }
// This behavior is currently copied in the collab database, for snapshotting channel notes
pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> { pub fn deserialize_operation(message: proto::Operation) -> Result<crate::Operation> {
Ok( Ok(
match message match message

View file

@ -310,7 +310,9 @@ impl SyntaxSnapshot {
// Ignore edits that end before the start of this layer, and don't consider them // Ignore edits that end before the start of this layer, and don't consider them
// for any subsequent layers at this same depth. // for any subsequent layers at this same depth.
loop { loop {
let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else { continue 'outer }; let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) else {
continue 'outer;
};
if edit_range.end.cmp(&layer.range.start, text).is_le() { if edit_range.end.cmp(&layer.range.start, text).is_le() {
first_edit_ix_for_depth += 1; first_edit_ix_for_depth += 1;
} else { } else {
@ -391,7 +393,9 @@ impl SyntaxSnapshot {
.filter::<_, ()>(|summary| summary.contains_unknown_injections); .filter::<_, ()>(|summary| summary.contains_unknown_injections);
cursor.next(text); cursor.next(text);
while let Some(layer) = cursor.item() { while let Some(layer) = cursor.item() {
let SyntaxLayerContent::Pending { language_name } = &layer.content else { unreachable!() }; let SyntaxLayerContent::Pending { language_name } = &layer.content else {
unreachable!()
};
if registry if registry
.language_for_name_or_extension(language_name) .language_for_name_or_extension(language_name)
.now_or_never() .now_or_never()
@ -533,7 +537,9 @@ impl SyntaxSnapshot {
let content = match step.language { let content = match step.language {
ParseStepLanguage::Loaded { language } => { ParseStepLanguage::Loaded { language } => {
let Some(grammar) = language.grammar() else { continue }; let Some(grammar) = language.grammar() else {
continue;
};
let tree; let tree;
let changed_ranges; let changed_ranges;

View file

@ -932,8 +932,12 @@ fn check_interpolation(
.zip(new_syntax_map.layers.iter()) .zip(new_syntax_map.layers.iter())
{ {
assert_eq!(old_layer.range, new_layer.range); assert_eq!(old_layer.range, new_layer.range);
let Some(old_tree) = old_layer.content.tree() else { continue }; let Some(old_tree) = old_layer.content.tree() else {
let Some(new_tree) = new_layer.content.tree() else { continue }; continue;
};
let Some(new_tree) = new_layer.content.tree() else {
continue;
};
let old_start_byte = old_layer.range.start.to_offset(old_buffer); let old_start_byte = old_layer.range.start.to_offset(old_buffer);
let new_start_byte = new_layer.range.start.to_offset(new_buffer); let new_start_byte = new_layer.range.start.to_offset(new_buffer);
let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point(); let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();

View file

@ -176,7 +176,9 @@ impl LogStore {
cx.notify(); cx.notify();
LanguageServerState { LanguageServerState {
rpc_state: None, rpc_state: None,
log_buffer: cx.add_model(|cx| Buffer::new(0, "", cx)).clone(), log_buffer: cx
.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""))
.clone(),
} }
}) })
.log_buffer .log_buffer
@ -241,7 +243,7 @@ impl LogStore {
let rpc_state = server_state.rpc_state.get_or_insert_with(|| { let rpc_state = server_state.rpc_state.get_or_insert_with(|| {
let io_tx = self.io_tx.clone(); let io_tx = self.io_tx.clone();
let language = project.read(cx).languages().language_for_name("JSON"); let language = project.read(cx).languages().language_for_name("JSON");
let buffer = cx.add_model(|cx| Buffer::new(0, "", cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
cx.spawn_weak({ cx.spawn_weak({
let buffer = buffer.clone(); let buffer = buffer.clone();
|_, mut cx| async move { |_, mut cx| async move {
@ -327,7 +329,7 @@ impl LspLogView {
.projects .projects
.get(&project.downgrade()) .get(&project.downgrade())
.and_then(|project| project.servers.keys().copied().next()); .and_then(|project| project.servers.keys().copied().next());
let buffer = cx.add_model(|cx| Buffer::new(0, "", cx)); let buffer = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, ""));
let mut this = Self { let mut this = Self {
editor: Self::editor_for_buffer(project.clone(), buffer, cx), editor: Self::editor_for_buffer(project.clone(), buffer, cx),
project, project,
@ -549,7 +551,9 @@ impl View for LspLogToolbarItemView {
fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> { fn render(&mut self, cx: &mut ViewContext<Self>) -> AnyElement<Self> {
let theme = theme::current(cx).clone(); let theme = theme::current(cx).clone();
let Some(log_view) = self.log_view.as_ref() else { return Empty::new().into_any() }; let Some(log_view) = self.log_view.as_ref() else {
return Empty::new().into_any();
};
let log_view = log_view.read(cx); let log_view = log_view.read(cx);
let menu_rows = log_view.menu_items(cx).unwrap_or_default(); let menu_rows = log_view.menu_items(cx).unwrap_or_default();

View file

@ -77,7 +77,7 @@ pub enum Subscription {
} }
#[derive(Serialize, Deserialize)] #[derive(Serialize, Deserialize)]
struct Request<'a, T> { pub struct Request<'a, T> {
jsonrpc: &'static str, jsonrpc: &'static str,
id: usize, id: usize,
method: &'a str, method: &'a str,
@ -435,7 +435,13 @@ impl LanguageServer {
}), }),
inlay_hint: Some(InlayHintClientCapabilities { inlay_hint: Some(InlayHintClientCapabilities {
resolve_support: Some(InlayHintResolveClientCapabilities { resolve_support: Some(InlayHintResolveClientCapabilities {
properties: vec!["textEdits".to_string(), "tooltip".to_string()], properties: vec![
"textEdits".to_string(),
"tooltip".to_string(),
"label.tooltip".to_string(),
"label.location".to_string(),
"label.command".to_string(),
],
}), }),
dynamic_registration: Some(false), dynamic_registration: Some(false),
}), }),

View file

@ -1,21 +1,23 @@
use crate::{ use crate::{
DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
MarkupContent, Project, ProjectTransaction, MarkupContent, Project, ProjectTransaction, ResolveState,
}; };
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use async_trait::async_trait; use async_trait::async_trait;
use client::proto::{self, PeerId}; use client::proto::{self, PeerId};
use fs::LineEnding; use fs::LineEnding;
use futures::future;
use gpui::{AppContext, AsyncAppContext, ModelHandle}; use gpui::{AppContext, AsyncAppContext, ModelHandle};
use language::{ use language::{
language_settings::{language_settings, InlayHintKind}, language_settings::{language_settings, InlayHintKind},
point_from_lsp, point_to_lsp, point_from_lsp, point_to_lsp,
proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CharKind, CodeAction, range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
Unclipped,
}; };
use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, ServerCapabilities}; use lsp::{DocumentHighlightKind, LanguageServer, LanguageServerId, OneOf, ServerCapabilities};
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc}; use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions { pub fn lsp_formatting_options(tab_size: u32) -> lsp::FormattingOptions {
@ -1431,7 +1433,7 @@ impl LspCommand for GetCompletions {
}) })
}); });
Ok(futures::future::join_all(completions).await) Ok(future::join_all(completions).await)
} }
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
@ -1499,7 +1501,7 @@ impl LspCommand for GetCompletions {
let completions = message.completions.into_iter().map(|completion| { let completions = message.completions.into_iter().map(|completion| {
language::proto::deserialize_completion(completion, language.clone()) language::proto::deserialize_completion(completion, language.clone())
}); });
futures::future::try_join_all(completions).await future::try_join_all(completions).await
} }
fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 { fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
@ -1653,7 +1655,11 @@ impl LspCommand for OnTypeFormatting {
type ProtoRequest = proto::OnTypeFormatting; type ProtoRequest = proto::OnTypeFormatting;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool { fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
let Some(on_type_formatting_options) = &server_capabilities.document_on_type_formatting_provider else { return false }; let Some(on_type_formatting_options) =
&server_capabilities.document_on_type_formatting_provider
else {
return false;
};
on_type_formatting_options on_type_formatting_options
.first_trigger_character .first_trigger_character
.contains(&self.trigger) .contains(&self.trigger)
@ -1767,7 +1773,9 @@ impl LspCommand for OnTypeFormatting {
_: ModelHandle<Buffer>, _: ModelHandle<Buffer>,
_: AsyncAppContext, _: AsyncAppContext,
) -> Result<Option<Transaction>> { ) -> Result<Option<Transaction>> {
let Some(transaction) = message.transaction else { return Ok(None) }; let Some(transaction) = message.transaction else {
return Ok(None);
};
Ok(Some(language::proto::deserialize_transaction(transaction)?)) Ok(Some(language::proto::deserialize_transaction(transaction)?))
} }
@ -1776,6 +1784,377 @@ impl LspCommand for OnTypeFormatting {
} }
} }
impl InlayHints {
pub async fn lsp_to_project_hint(
lsp_hint: lsp::InlayHint,
buffer_handle: &ModelHandle<Buffer>,
server_id: LanguageServerId,
resolve_state: ResolveState,
force_no_type_left_padding: bool,
cx: &mut AsyncAppContext,
) -> anyhow::Result<InlayHint> {
let kind = lsp_hint.kind.and_then(|kind| match kind {
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
_ => None,
});
let position = cx.update(|cx| {
let buffer = buffer_handle.read(cx);
let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
if kind == Some(InlayHintKind::Parameter) {
buffer.anchor_before(position)
} else {
buffer.anchor_after(position)
}
});
let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
.await
.context("lsp to project inlay hint conversion")?;
let padding_left = if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
false
} else {
lsp_hint.padding_left.unwrap_or(false)
};
Ok(InlayHint {
position,
padding_left,
padding_right: lsp_hint.padding_right.unwrap_or(false),
label,
kind,
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: match markup_content.kind {
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
},
value: markup_content.value,
})
}
}),
resolve_state,
})
}
async fn lsp_inlay_label_to_project(
lsp_label: lsp::InlayHintLabel,
server_id: LanguageServerId,
) -> anyhow::Result<InlayHintLabel> {
let label = match lsp_label {
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
let mut parts = Vec::with_capacity(lsp_parts.len());
for lsp_part in lsp_parts {
parts.push(InlayHintLabelPart {
value: lsp_part.value,
tooltip: lsp_part.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintLabelPartTooltip::String(s) => {
InlayHintLabelPartTooltip::String(s)
}
lsp::InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: match markup_content.kind {
lsp::MarkupKind::PlainText => HoverBlockKind::PlainText,
lsp::MarkupKind::Markdown => HoverBlockKind::Markdown,
},
value: markup_content.value,
})
}
}),
location: Some(server_id).zip(lsp_part.location),
});
}
InlayHintLabel::LabelParts(parts)
}
};
Ok(label)
}
pub fn project_to_proto_hint(response_hint: InlayHint) -> proto::InlayHint {
let (state, lsp_resolve_state) = match response_hint.resolve_state {
ResolveState::Resolved => (0, None),
ResolveState::CanResolve(server_id, resolve_data) => (
1,
resolve_data
.map(|json_data| {
serde_json::to_string(&json_data)
.expect("failed to serialize resolve json data")
})
.map(|value| proto::resolve_state::LspResolveState {
server_id: server_id.0 as u64,
value,
}),
),
ResolveState::Resolving => (2, None),
};
let resolve_state = Some(proto::ResolveState {
state,
lsp_resolve_state,
});
proto::InlayHint {
position: Some(language::proto::serialize_anchor(&response_hint.position)),
padding_left: response_hint.padding_left,
padding_right: response_hint.padding_right,
label: Some(proto::InlayHintLabel {
label: Some(match response_hint.label {
InlayHintLabel::String(s) => proto::inlay_hint_label::Label::Value(s),
InlayHintLabel::LabelParts(label_parts) => {
proto::inlay_hint_label::Label::LabelParts(proto::InlayHintLabelParts {
parts: label_parts.into_iter().map(|label_part| {
let location_url = label_part.location.as_ref().map(|(_, location)| location.uri.to_string());
let location_range_start = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.start).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
let location_range_end = label_part.location.as_ref().map(|(_, location)| point_from_lsp(location.range.end).0).map(|point| proto::PointUtf16 { row: point.row, column: point.column });
proto::InlayHintLabelPart {
value: label_part.value,
tooltip: label_part.tooltip.map(|tooltip| {
let proto_tooltip = match tooltip {
InlayHintLabelPartTooltip::String(s) => proto::inlay_hint_label_part_tooltip::Content::Value(s),
InlayHintLabelPartTooltip::MarkupContent(markup_content) => proto::inlay_hint_label_part_tooltip::Content::MarkupContent(proto::MarkupContent {
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
value: markup_content.value,
}),
};
proto::InlayHintLabelPartTooltip {content: Some(proto_tooltip)}
}),
location_url,
location_range_start,
location_range_end,
language_server_id: label_part.location.as_ref().map(|(server_id, _)| server_id.0 as u64),
}}).collect()
})
}
}),
}),
kind: response_hint.kind.map(|kind| kind.name().to_string()),
tooltip: response_hint.tooltip.map(|response_tooltip| {
let proto_tooltip = match response_tooltip {
InlayHintTooltip::String(s) => proto::inlay_hint_tooltip::Content::Value(s),
InlayHintTooltip::MarkupContent(markup_content) => {
proto::inlay_hint_tooltip::Content::MarkupContent(proto::MarkupContent {
is_markdown: markup_content.kind == HoverBlockKind::Markdown,
value: markup_content.value,
})
}
};
proto::InlayHintTooltip {
content: Some(proto_tooltip),
}
}),
resolve_state,
}
}
pub fn proto_to_project_hint(message_hint: proto::InlayHint) -> anyhow::Result<InlayHint> {
let resolve_state = message_hint.resolve_state.as_ref().unwrap_or_else(|| {
panic!("incorrect proto inlay hint message: no resolve state in hint {message_hint:?}",)
});
let resolve_state_data = resolve_state
.lsp_resolve_state.as_ref()
.map(|lsp_resolve_state| {
serde_json::from_str::<Option<lsp::LSPAny>>(&lsp_resolve_state.value)
.with_context(|| format!("incorrect proto inlay hint message: non-json resolve state {lsp_resolve_state:?}"))
.map(|state| (LanguageServerId(lsp_resolve_state.server_id as usize), state))
})
.transpose()?;
let resolve_state = match resolve_state.state {
0 => ResolveState::Resolved,
1 => {
let (server_id, lsp_resolve_state) = resolve_state_data.with_context(|| {
format!(
"No lsp resolve data for the hint that can be resolved: {message_hint:?}"
)
})?;
ResolveState::CanResolve(server_id, lsp_resolve_state)
}
2 => ResolveState::Resolving,
invalid => {
anyhow::bail!("Unexpected resolve state {invalid} for hint {message_hint:?}")
}
};
Ok(InlayHint {
position: message_hint
.position
.and_then(language::proto::deserialize_anchor)
.context("invalid position")?,
label: match message_hint
.label
.and_then(|label| label.label)
.context("missing label")?
{
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
proto::inlay_hint_label::Label::LabelParts(parts) => {
let mut label_parts = Vec::new();
for part in parts.parts {
label_parts.push(InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => {
InlayHintLabelPartTooltip::String(s)
}
Some(
proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
markup_content,
),
) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: if markup_content.is_markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
value: markup_content.value,
}),
None => InlayHintLabelPartTooltip::String(String::new()),
}),
location: {
match part
.location_url
.zip(
part.location_range_start.and_then(|start| {
Some(start..part.location_range_end?)
}),
)
.zip(part.language_server_id)
{
Some(((uri, range), server_id)) => Some((
LanguageServerId(server_id as usize),
lsp::Location {
uri: lsp::Url::parse(&uri)
.context("invalid uri in hint part {part:?}")?,
range: lsp::Range::new(
point_to_lsp(PointUtf16::new(
range.start.row,
range.start.column,
)),
point_to_lsp(PointUtf16::new(
range.end.row,
range.end.column,
)),
),
},
)),
None => None,
}
},
});
}
InlayHintLabel::LabelParts(label_parts)
}
},
padding_left: message_hint.padding_left,
padding_right: message_hint.padding_right,
kind: message_hint
.kind
.as_deref()
.and_then(InlayHintKind::from_name),
tooltip: message_hint.tooltip.and_then(|tooltip| {
Some(match tooltip.content? {
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: if markup_content.is_markdown {
HoverBlockKind::Markdown
} else {
HoverBlockKind::PlainText
},
value: markup_content.value,
})
}
})
}),
resolve_state,
})
}
pub fn project_to_lsp_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp::InlayHint {
lsp::InlayHint {
position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
kind: hint.kind.map(|kind| match kind {
InlayHintKind::Type => lsp::InlayHintKind::TYPE,
InlayHintKind::Parameter => lsp::InlayHintKind::PARAMETER,
}),
text_edits: None,
tooltip: hint.tooltip.and_then(|tooltip| {
Some(match tooltip {
InlayHintTooltip::String(s) => lsp::InlayHintTooltip::String(s),
InlayHintTooltip::MarkupContent(markup_content) => {
lsp::InlayHintTooltip::MarkupContent(lsp::MarkupContent {
kind: match markup_content.kind {
HoverBlockKind::PlainText => lsp::MarkupKind::PlainText,
HoverBlockKind::Markdown => lsp::MarkupKind::Markdown,
HoverBlockKind::Code { .. } => return None,
},
value: markup_content.value,
})
}
})
}),
label: match hint.label {
InlayHintLabel::String(s) => lsp::InlayHintLabel::String(s),
InlayHintLabel::LabelParts(label_parts) => lsp::InlayHintLabel::LabelParts(
label_parts
.into_iter()
.map(|part| lsp::InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.and_then(|tooltip| {
Some(match tooltip {
InlayHintLabelPartTooltip::String(s) => {
lsp::InlayHintLabelPartTooltip::String(s)
}
InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
lsp::InlayHintLabelPartTooltip::MarkupContent(
lsp::MarkupContent {
kind: match markup_content.kind {
HoverBlockKind::PlainText => {
lsp::MarkupKind::PlainText
}
HoverBlockKind::Markdown => {
lsp::MarkupKind::Markdown
}
HoverBlockKind::Code { .. } => return None,
},
value: markup_content.value,
},
)
}
})
}),
location: part.location.map(|(_, location)| location),
command: None,
})
.collect(),
),
},
padding_left: Some(hint.padding_left),
padding_right: Some(hint.padding_right),
data: match hint.resolve_state {
ResolveState::CanResolve(_, data) => data,
ResolveState::Resolving | ResolveState::Resolved => None,
},
}
}
pub fn can_resolve_inlays(capabilities: &ServerCapabilities) -> bool {
capabilities
.inlay_hint_provider
.as_ref()
.and_then(|options| match options {
OneOf::Left(_is_supported) => None,
OneOf::Right(capabilities) => match capabilities {
lsp::InlayHintServerCapabilities::Options(o) => o.resolve_provider,
lsp::InlayHintServerCapabilities::RegistrationOptions(o) => {
o.inlay_hint_options.resolve_provider
}
},
})
.unwrap_or(false)
}
}
#[async_trait(?Send)] #[async_trait(?Send)]
impl LspCommand for InlayHints { impl LspCommand for InlayHints {
type Response = Vec<InlayHint>; type Response = Vec<InlayHint>;
@ -1783,7 +2162,9 @@ impl LspCommand for InlayHints {
type ProtoRequest = proto::InlayHints; type ProtoRequest = proto::InlayHints;
fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool { fn check_capabilities(&self, server_capabilities: &lsp::ServerCapabilities) -> bool {
let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else { return false }; let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else {
return false;
};
match inlay_hint_provider { match inlay_hint_provider {
lsp::OneOf::Left(enabled) => *enabled, lsp::OneOf::Left(enabled) => *enabled,
lsp::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities { lsp::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities {
@ -1816,8 +2197,9 @@ impl LspCommand for InlayHints {
buffer: ModelHandle<Buffer>, buffer: ModelHandle<Buffer>,
server_id: LanguageServerId, server_id: LanguageServerId,
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<Vec<InlayHint>> { ) -> anyhow::Result<Vec<InlayHint>> {
let (lsp_adapter, _) = language_server_for_buffer(&project, &buffer, server_id, &mut cx)?; let (lsp_adapter, lsp_server) =
language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
// `typescript-language-server` adds padding to the left for type hints, turning // `typescript-language-server` adds padding to the left for type hints, turning
// `const foo: boolean` into `const foo : boolean` which looks odd. // `const foo: boolean` into `const foo : boolean` which looks odd.
// `rust-analyzer` does not have the padding for this case, and we have to accommodate both. // `rust-analyzer` does not have the padding for this case, and we have to accommodate both.
@ -1827,93 +2209,32 @@ impl LspCommand for InlayHints {
// Hence let's use a heuristic first to handle the most awkward case and look for more. // Hence let's use a heuristic first to handle the most awkward case and look for more.
let force_no_type_left_padding = let force_no_type_left_padding =
lsp_adapter.name.0.as_ref() == "typescript-language-server"; lsp_adapter.name.0.as_ref() == "typescript-language-server";
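The comment above records a formatting quirk rather than a capability: typescript-language-server emits type hints with a leading space (`const foo : boolean`), rust-analyzer does not, so left padding is dropped only for type hints coming from the former. A minimal, self-contained sketch of that heuristic as a hypothetical free function (illustrative only; in the change itself the logic lives inline, driven by the `force_no_type_left_padding` flag and mirrored in `InlayHints::lsp_to_project_hint`):

    // Hypothetical helper mirroring the padding heuristic described above.
    #[allow(dead_code)]
    #[derive(PartialEq)]
    enum HintKind {
        Type,
        Parameter,
    }

    fn effective_left_padding(
        adapter_name: &str,
        kind: Option<HintKind>,
        lsp_padding_left: Option<bool>,
    ) -> bool {
        // Only typescript-language-server type hints get their left padding stripped;
        // every other server/kind keeps whatever the LSP response declared.
        let force_no_type_left_padding = adapter_name == "typescript-language-server";
        if force_no_type_left_padding && kind == Some(HintKind::Type) {
            false
        } else {
            lsp_padding_left.unwrap_or(false)
        }
    }

    fn main() {
        assert!(!effective_left_padding(
            "typescript-language-server",
            Some(HintKind::Type),
            Some(true)
        ));
        assert!(effective_left_padding(
            "rust-analyzer",
            Some(HintKind::Type),
            Some(true)
        ));
    }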
cx.read(|cx| {
let origin_buffer = buffer.read(cx); let hints = message.unwrap_or_default().into_iter().map(|lsp_hint| {
Ok(message let resolve_state = if InlayHints::can_resolve_inlays(lsp_server.capabilities()) {
.unwrap_or_default() ResolveState::CanResolve(lsp_server.server_id(), lsp_hint.data.clone())
.into_iter() } else {
.map(|lsp_hint| { ResolveState::Resolved
let kind = lsp_hint.kind.and_then(|kind| match kind { };
lsp::InlayHintKind::TYPE => Some(InlayHintKind::Type),
lsp::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter), let buffer = buffer.clone();
_ => None, cx.spawn(|mut cx| async move {
}); InlayHints::lsp_to_project_hint(
let position = origin_buffer lsp_hint,
.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left); &buffer,
let padding_left = server_id,
if force_no_type_left_padding && kind == Some(InlayHintKind::Type) { resolve_state,
false force_no_type_left_padding,
} else { &mut cx,
lsp_hint.padding_left.unwrap_or(false) )
}; .await
InlayHint { })
buffer_id: origin_buffer.remote_id(), });
position: if kind == Some(InlayHintKind::Parameter) { future::join_all(hints)
origin_buffer.anchor_before(position) .await
} else { .into_iter()
origin_buffer.anchor_after(position) .collect::<anyhow::Result<_>>()
}, .context("lsp to project inlay hints conversion")
padding_left,
padding_right: lsp_hint.padding_right.unwrap_or(false),
label: match lsp_hint.label {
lsp::InlayHintLabel::String(s) => InlayHintLabel::String(s),
lsp::InlayHintLabel::LabelParts(lsp_parts) => {
InlayHintLabel::LabelParts(
lsp_parts
.into_iter()
.map(|label_part| InlayHintLabelPart {
value: label_part.value,
tooltip: label_part.tooltip.map(
|tooltip| {
match tooltip {
lsp::InlayHintLabelPartTooltip::String(s) => {
InlayHintLabelPartTooltip::String(s)
}
lsp::InlayHintLabelPartTooltip::MarkupContent(
markup_content,
) => InlayHintLabelPartTooltip::MarkupContent(
MarkupContent {
kind: format!("{:?}", markup_content.kind),
value: markup_content.value,
},
),
}
},
),
location: label_part.location.map(|lsp_location| {
let target_start = origin_buffer.clip_point_utf16(
point_from_lsp(lsp_location.range.start),
Bias::Left,
);
let target_end = origin_buffer.clip_point_utf16(
point_from_lsp(lsp_location.range.end),
Bias::Left,
);
Location {
buffer: buffer.clone(),
range: origin_buffer.anchor_after(target_start)
..origin_buffer.anchor_before(target_end),
}
}),
})
.collect(),
)
}
},
kind,
tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
lsp::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
lsp::InlayHintTooltip::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: format!("{:?}", markup_content.kind),
value: markup_content.value,
})
}
}),
}
})
.collect())
})
} }
fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints { fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
@ -1959,23 +2280,7 @@ impl LspCommand for InlayHints {
proto::InlayHintsResponse { proto::InlayHintsResponse {
hints: response hints: response
.into_iter() .into_iter()
.map(|response_hint| proto::InlayHint { .map(|response_hint| InlayHints::project_to_proto_hint(response_hint))
position: Some(language::proto::serialize_anchor(&response_hint.position)),
padding_left: response_hint.padding_left,
padding_right: response_hint.padding_right,
kind: response_hint.kind.map(|kind| kind.name().to_string()),
// Do not pass extra data such as tooltips to clients: host can put tooltip data from the cache during resolution.
tooltip: None,
// Similarly, do not pass label parts to clients: host can return a detailed list during resolution.
label: Some(proto::InlayHintLabel {
label: Some(proto::inlay_hint_label::Label::Value(
match response_hint.label {
InlayHintLabel::String(s) => s,
InlayHintLabel::LabelParts(_) => response_hint.text(),
},
)),
}),
})
.collect(), .collect(),
version: serialize_version(buffer_version), version: serialize_version(buffer_version),
} }
@ -1984,10 +2289,10 @@ impl LspCommand for InlayHints {
async fn response_from_proto( async fn response_from_proto(
self, self,
message: proto::InlayHintsResponse, message: proto::InlayHintsResponse,
project: ModelHandle<Project>, _: ModelHandle<Project>,
buffer: ModelHandle<Buffer>, buffer: ModelHandle<Buffer>,
mut cx: AsyncAppContext, mut cx: AsyncAppContext,
) -> Result<Vec<InlayHint>> { ) -> anyhow::Result<Vec<InlayHint>> {
buffer buffer
.update(&mut cx, |buffer, _| { .update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version)) buffer.wait_for_version(deserialize_version(&message.version))
@ -1996,82 +2301,7 @@ impl LspCommand for InlayHints {
let mut hints = Vec::new(); let mut hints = Vec::new();
for message_hint in message.hints { for message_hint in message.hints {
let buffer_id = message_hint hints.push(InlayHints::proto_to_project_hint(message_hint)?);
.position
.as_ref()
.and_then(|location| location.buffer_id)
.context("missing buffer id")?;
let hint = InlayHint {
buffer_id,
position: message_hint
.position
.and_then(language::proto::deserialize_anchor)
.context("invalid position")?,
label: match message_hint
.label
.and_then(|label| label.label)
.context("missing label")?
{
proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
proto::inlay_hint_label::Label::LabelParts(parts) => {
let mut label_parts = Vec::new();
for part in parts.parts {
label_parts.push(InlayHintLabelPart {
value: part.value,
tooltip: part.tooltip.map(|tooltip| match tooltip.content {
Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => InlayHintLabelPartTooltip::String(s),
Some(proto::inlay_hint_label_part_tooltip::Content::MarkupContent(markup_content)) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
kind: markup_content.kind,
value: markup_content.value,
}),
None => InlayHintLabelPartTooltip::String(String::new()),
}),
location: match part.location {
Some(location) => {
let target_buffer = project
.update(&mut cx, |this, cx| {
this.wait_for_remote_buffer(location.buffer_id, cx)
})
.await?;
Some(Location {
range: location
.start
.and_then(language::proto::deserialize_anchor)
.context("invalid start")?
..location
.end
.and_then(language::proto::deserialize_anchor)
.context("invalid end")?,
buffer: target_buffer,
})},
None => None,
},
});
}
InlayHintLabel::LabelParts(label_parts)
}
},
padding_left: message_hint.padding_left,
padding_right: message_hint.padding_right,
kind: message_hint
.kind
.as_deref()
.and_then(InlayHintKind::from_name),
tooltip: message_hint.tooltip.and_then(|tooltip| {
Some(match tooltip.content? {
proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
InlayHintTooltip::MarkupContent(MarkupContent {
kind: markup_content.kind,
value: markup_content.value,
})
}
})
}),
};
hints.push(hint);
} }
Ok(hints) Ok(hints)

View file

@ -11,7 +11,7 @@ mod project_tests;
mod worktree_tests; mod worktree_tests;
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use client::{proto, Client, TypedEnvelope, UserStore}; use client::{proto, Client, TypedEnvelope, UserId, UserStore};
use clock::ReplicaId; use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet}; use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot; use copilot::Copilot;
@ -26,8 +26,8 @@ use futures::{
}; };
use globset::{Glob, GlobSet, GlobSetBuilder}; use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{ use gpui::{
AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext, executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
ModelHandle, Task, WeakModelHandle, ModelContext, ModelHandle, Task, WeakModelHandle,
}; };
use itertools::Itertools; use itertools::Itertools;
use language::{ use language::{
@ -37,11 +37,11 @@ use language::{
deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version, deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
serialize_anchor, serialize_version, serialize_anchor, serialize_version,
}, },
range_from_lsp, range_to_lsp, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel, range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
ToPointUtf16, Transaction, Unclipped, ToOffset, ToPointUtf16, Transaction, Unclipped,
}; };
use log::error; use log::error;
use lsp::{ use lsp::{
@ -57,8 +57,8 @@ use serde::Serialize;
use settings::SettingsStore; use settings::SettingsStore;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff}; use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use std::{ use std::{
cell::RefCell,
cmp::{self, Ordering}, cmp::{self, Ordering},
convert::TryInto, convert::TryInto,
hash::Hash, hash::Hash,
@ -67,7 +67,6 @@ use std::{
ops::Range, ops::Range,
path::{self, Component, Path, PathBuf}, path::{self, Component, Path, PathBuf},
process::Stdio, process::Stdio,
rc::Rc,
str, str,
sync::{ sync::{
atomic::{AtomicUsize, Ordering::SeqCst}, atomic::{AtomicUsize, Ordering::SeqCst},
@ -250,6 +249,7 @@ enum ProjectClientState {
pub struct Collaborator { pub struct Collaborator {
pub peer_id: proto::PeerId, pub peer_id: proto::PeerId,
pub replica_id: ReplicaId, pub replica_id: ReplicaId,
pub user_id: UserId,
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
@ -281,6 +281,7 @@ pub enum Event {
old_peer_id: proto::PeerId, old_peer_id: proto::PeerId,
new_peer_id: proto::PeerId, new_peer_id: proto::PeerId,
}, },
CollaboratorJoined(proto::PeerId),
CollaboratorLeft(proto::PeerId), CollaboratorLeft(proto::PeerId),
RefreshInlayHints, RefreshInlayHints,
} }
@ -331,15 +332,22 @@ pub struct Location {
pub range: Range<language::Anchor>, pub range: Range<language::Anchor>,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint { pub struct InlayHint {
pub buffer_id: u64,
pub position: language::Anchor, pub position: language::Anchor,
pub label: InlayHintLabel, pub label: InlayHintLabel,
pub kind: Option<InlayHintKind>, pub kind: Option<InlayHintKind>,
pub padding_left: bool, pub padding_left: bool,
pub padding_right: bool, pub padding_right: bool,
pub tooltip: Option<InlayHintTooltip>, pub tooltip: Option<InlayHintTooltip>,
pub resolve_state: ResolveState,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
Resolved,
CanResolve(LanguageServerId, Option<lsp::LSPAny>),
Resolving,
} }
impl InlayHint { impl InlayHint {
@ -351,34 +359,34 @@ impl InlayHint {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel { pub enum InlayHintLabel {
String(String), String(String),
LabelParts(Vec<InlayHintLabelPart>), LabelParts(Vec<InlayHintLabelPart>),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart { pub struct InlayHintLabelPart {
pub value: String, pub value: String,
pub tooltip: Option<InlayHintLabelPartTooltip>, pub tooltip: Option<InlayHintLabelPartTooltip>,
pub location: Option<Location>, pub location: Option<(LanguageServerId, lsp::Location)>,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip { pub enum InlayHintTooltip {
String(String), String(String),
MarkupContent(MarkupContent), MarkupContent(MarkupContent),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip { pub enum InlayHintLabelPartTooltip {
String(String), String(String),
MarkupContent(MarkupContent), MarkupContent(MarkupContent),
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent { pub struct MarkupContent {
pub kind: String, pub kind: HoverBlockKind,
pub value: String, pub value: String,
} }
@ -412,7 +420,7 @@ pub struct HoverBlock {
pub kind: HoverBlockKind, pub kind: HoverBlockKind,
} }
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind { pub enum HoverBlockKind {
PlainText, PlainText,
Markdown, Markdown,
@ -516,6 +524,28 @@ impl FormatTrigger {
} }
} }
} }
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
OpenBuffer {
buffer: ModelHandle<Buffer>,
// This might be an unnamed file without representation on filesystem
path: Option<Arc<Path>>,
},
Path {
worktree_id: WorktreeId,
path: Arc<Path>,
},
}
type SearchMatchCandidateIndex = usize;
impl SearchMatchCandidate {
fn path(&self) -> Option<Arc<Path>> {
match self {
SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
}
}
}
impl Project { impl Project {
pub fn init_settings(cx: &mut AppContext) { pub fn init_settings(cx: &mut AppContext) {
@ -549,6 +579,7 @@ impl Project {
client.add_model_request_handler(Self::handle_apply_code_action); client.add_model_request_handler(Self::handle_apply_code_action);
client.add_model_request_handler(Self::handle_on_type_formatting); client.add_model_request_handler(Self::handle_on_type_formatting);
client.add_model_request_handler(Self::handle_inlay_hints); client.add_model_request_handler(Self::handle_inlay_hints);
client.add_model_request_handler(Self::handle_resolve_inlay_hint);
client.add_model_request_handler(Self::handle_refresh_inlay_hints); client.add_model_request_handler(Self::handle_refresh_inlay_hints);
client.add_model_request_handler(Self::handle_reload_buffers); client.add_model_request_handler(Self::handle_reload_buffers);
client.add_model_request_handler(Self::handle_synchronize_buffers); client.add_model_request_handler(Self::handle_synchronize_buffers);
@ -1537,9 +1568,9 @@ impl Project {
if self.is_remote() { if self.is_remote() {
return Err(anyhow!("creating buffers as a guest is not supported yet")); return Err(anyhow!("creating buffers as a guest is not supported yet"));
} }
let id = post_inc(&mut self.next_buffer_id);
let buffer = cx.add_model(|cx| { let buffer = cx.add_model(|cx| {
Buffer::new(self.replica_id(), text, cx) Buffer::new(self.replica_id(), id, text)
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx) .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
}); });
self.register_buffer(&buffer, cx)?; self.register_buffer(&buffer, cx)?;
@ -1677,7 +1708,7 @@ impl Project {
} }
/// LanguageServerName is owned, because it is inserted into a map /// LanguageServerName is owned, because it is inserted into a map
fn open_local_buffer_via_lsp( pub fn open_local_buffer_via_lsp(
&mut self, &mut self,
abs_path: lsp::Url, abs_path: lsp::Url,
language_server_id: LanguageServerId, language_server_id: LanguageServerId,
@ -4967,7 +4998,7 @@ impl Project {
buffer_handle: ModelHandle<Buffer>, buffer_handle: ModelHandle<Buffer>,
range: Range<T>, range: Range<T>,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<InlayHint>>> { ) -> Task<anyhow::Result<Vec<InlayHint>>> {
let buffer = buffer_handle.read(cx); let buffer = buffer_handle.read(cx);
let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end); let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
let range_start = range.start; let range_start = range.start;
@ -5017,192 +5048,79 @@ impl Project {
} }
} }
pub fn resolve_inlay_hint(
&self,
hint: InlayHint,
buffer_handle: ModelHandle<Buffer>,
server_id: LanguageServerId,
cx: &mut ModelContext<Self>,
) -> Task<anyhow::Result<InlayHint>> {
if self.is_local() {
let buffer = buffer_handle.read(cx);
let (_, lang_server) = if let Some((adapter, server)) =
self.language_server_for_buffer(buffer, server_id, cx)
{
(adapter.clone(), server.clone())
} else {
return Task::ready(Ok(hint));
};
if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
return Task::ready(Ok(hint));
}
let buffer_snapshot = buffer.snapshot();
cx.spawn(|_, mut cx| async move {
let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
);
let resolved_hint = resolve_task
.await
.context("inlay hint resolve LSP request")?;
let resolved_hint = InlayHints::lsp_to_project_hint(
resolved_hint,
&buffer_handle,
server_id,
ResolveState::Resolved,
false,
&mut cx,
)
.await?;
Ok(resolved_hint)
})
} else if let Some(project_id) = self.remote_id() {
let client = self.client.clone();
let request = proto::ResolveInlayHint {
project_id,
buffer_id: buffer_handle.read(cx).remote_id(),
language_server_id: server_id.0 as u64,
hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
};
cx.spawn(|_, _| async move {
let response = client
.request(request)
.await
.context("inlay hints proto request")?;
match response.hint {
Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
.context("inlay hints proto resolve response conversion"),
None => Ok(hint),
}
})
} else {
Task::ready(Err(anyhow!("project does not have a remote id")))
}
}
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
pub fn search( pub fn search(
&self, &self,
query: SearchQuery, query: SearchQuery,
cx: &mut ModelContext<Self>, cx: &mut ModelContext<Self>,
) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> { ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
if self.is_local() { if self.is_local() {
let snapshots = self self.search_local(query, cx)
.visible_worktrees(cx)
.filter_map(|tree| {
let tree = tree.read(cx).as_local()?;
Some(tree.snapshot())
})
.collect::<Vec<_>>();
let background = cx.background().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
if path_count == 0 {
return Task::ready(Ok(Default::default()));
}
let workers = background.num_cpus().min(path_count);
let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
cx.background()
.spawn({
let fs = self.fs.clone();
let background = cx.background().clone();
let query = query.clone();
async move {
let fs = &fs;
let query = &query;
let matching_paths_tx = &matching_paths_tx;
let paths_per_worker = (path_count + workers - 1) / workers;
let snapshots = &snapshots;
background
.scoped(|scope| {
for worker_ix in 0..workers {
let worker_start_ix = worker_ix * paths_per_worker;
let worker_end_ix = worker_start_ix + paths_per_worker;
scope.spawn(async move {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
let snapshot_end_ix =
snapshot_start_ix + snapshot.visible_file_count();
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
snapshot_start_ix = snapshot_end_ix;
continue;
} else {
let start_in_snapshot = worker_start_ix
.saturating_sub(snapshot_start_ix);
let end_in_snapshot =
cmp::min(worker_end_ix, snapshot_end_ix)
- snapshot_start_ix;
for entry in snapshot
.files(false, start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
break;
}
let matches = if query
.file_matches(Some(&entry.path))
{
abs_path.clear();
abs_path.push(&snapshot.abs_path());
abs_path.push(&entry.path);
if let Some(file) =
fs.open_sync(&abs_path).await.log_err()
{
query.detect(file).unwrap_or(false)
} else {
false
}
} else {
false
};
if matches {
let project_path =
(snapshot.id(), entry.path.clone());
if matching_paths_tx
.send(project_path)
.await
.is_err()
{
break;
}
}
}
snapshot_start_ix = snapshot_end_ix;
}
}
});
}
})
.await;
}
})
.detach();
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
let open_buffers = self
.opened_buffers
.values()
.filter_map(|b| b.upgrade(cx))
.collect::<HashSet<_>>();
cx.spawn(|this, cx| async move {
for buffer in &open_buffers {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx.send((buffer.clone(), snapshot)).await?;
}
let open_buffers = Rc::new(RefCell::new(open_buffers));
while let Some(project_path) = matching_paths_rx.next().await {
if buffers_tx.is_closed() {
break;
}
let this = this.clone();
let open_buffers = open_buffers.clone();
let buffers_tx = buffers_tx.clone();
cx.spawn(|mut cx| async move {
if let Some(buffer) = this
.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
.await
.log_err()
{
if open_buffers.borrow_mut().insert(buffer.clone()) {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx.send((buffer, snapshot)).await?;
}
}
Ok::<_, anyhow::Error>(())
})
.detach();
}
Ok::<_, anyhow::Error>(())
})
.detach_and_log_err(cx);
let background = cx.background().clone();
cx.background().spawn(async move {
let query = &query;
let mut matched_buffers = Vec::new();
for _ in 0..workers {
matched_buffers.push(HashMap::default());
}
background
.scoped(|scope| {
for worker_matched_buffers in matched_buffers.iter_mut() {
let mut buffers_rx = buffers_rx.clone();
scope.spawn(async move {
while let Some((buffer, snapshot)) = buffers_rx.next().await {
let buffer_matches = if query.file_matches(
snapshot.file().map(|file| file.path().as_ref()),
) {
query
.search(&snapshot, None)
.await
.iter()
.map(|range| {
snapshot.anchor_before(range.start)
..snapshot.anchor_after(range.end)
})
.collect()
} else {
Vec::new()
};
if !buffer_matches.is_empty() {
worker_matched_buffers
.insert(buffer.clone(), buffer_matches);
}
}
});
}
})
.await;
Ok(matched_buffers.into_iter().flatten().collect())
})
} else if let Some(project_id) = self.remote_id() { } else if let Some(project_id) = self.remote_id() {
let (tx, rx) = smol::channel::unbounded();
let request = self.client.request(query.to_proto(project_id)); let request = self.client.request(query.to_proto(project_id));
cx.spawn(|this, mut cx| async move { cx.spawn(|this, mut cx| async move {
let response = request.await?; let response = request.await?;
@ -5226,13 +5144,303 @@ impl Project {
.or_insert(Vec::new()) .or_insert(Vec::new())
.push(start..end) .push(start..end)
} }
Ok(result) for (buffer, ranges) in result {
let _ = tx.send((buffer, ranges)).await;
}
Result::<(), anyhow::Error>::Ok(())
}) })
.detach_and_log_err(cx);
rx
} else { } else {
Task::ready(Ok(Default::default())) unimplemented!();
} }
} }
pub fn search_local(
&self,
query: SearchQuery,
cx: &mut ModelContext<Self>,
) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
// Local search is split into several phases.
// TL;DR: we do two passes - an initial pass to pick the files that contain at least one match,
// and a second pass that finds the positions of all matches within those candidate files.
// The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
//
// It gets a bit hairy though, because we must account for files that have no persistent representation
// on the FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan those too.
//
// 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
// Then we go through the worktrees and check for files that match the query's predicate. If a file has an opened version, we skip scanning
// its FS version altogether - after all, what we have in memory is more up-to-date than what's on the FS.
// 2. At this point, we have a list of all potentially matching buffers/files.
// We sort that list by buffer path - this list is retained for later use.
// We ensure that all buffers are now opened and available in the project.
// 3. We run a scan over all the candidate buffers on multiple background threads.
// We cannot assume that there will even be a match - while at least one match
// is guaranteed for files obtained from the FS, the buffers we got from memory (unsaved files / unnamed buffers) might not contain a match at all.
// There is also an auxiliary background thread responsible for result gathering.
// This is where the sorted list of buffers comes into play to maintain sorted order: whenever this background thread receives a notification (buffer has/doesn't have matches),
// it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
// As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
// entry - which might already be available thanks to out-of-order processing.
//
// We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
// That, however, would mean that project search (the main user of this function) would have to do the sorting itself, on the go.
// This isn't as straightforward as running an insertion sort, sadly, and would also mean that it would have to maintain the match index
// in the face of a constantly updating list of sorted matches.
// Meanwhile, this implementation offers index stability, since the matches are already reported in sorted order.
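As a rough illustration of the streaming contract described in the comment above - matches arrive over a channel, already sorted by buffer path, with pathless buffers first - here is a hedged, self-contained consumer sketch. It is not part of the change: the buffer and anchor types are replaced by stand-ins and the producer is a stub, but the consumption pattern (drain the receiver until it closes, with no re-sorting on the caller side) is the point.

    use std::ops::Range;

    // Stand-ins for ModelHandle<Buffer> and Vec<Range<Anchor>> in the real signature.
    type FakeBuffer = String;
    type FakeMatches = Vec<Range<usize>>;

    fn main() {
        let (tx, rx) = smol::channel::bounded::<(FakeBuffer, FakeMatches)>(1024);

        // Stub producer: in the real code this is the background result-gathering task,
        // which sends entries already sorted by buffer path (pathless buffers first).
        smol::spawn(async move {
            tx.send(("untitled".into(), vec![0..3])).await.ok();
            tx.send(("a/lib.rs".into(), vec![10..14, 40..44])).await.ok();
            tx.send(("b/main.rs".into(), vec![2..6])).await.ok();
            // Dropping `tx` closes the channel, which ends the consumer loop below.
        })
        .detach();

        // Consumer: drain the receiver; no further sorting is needed on this side.
        smol::block_on(async {
            while let Ok((buffer, ranges)) = rx.recv().await {
                println!("{buffer}: {} match(es)", ranges.len());
            }
        });
    }

Compared with the previous Task<Result<HashMap<...>>> shape removed above, this presumably lets project search start consuming matches before the whole scan completes, with ordering guaranteed by the sorted delivery.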
let snapshots = self
.visible_worktrees(cx)
.filter_map(|tree| {
let tree = tree.read(cx).as_local()?;
Some(tree.snapshot())
})
.collect::<Vec<_>>();
let background = cx.background().clone();
let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
if path_count == 0 {
let (_, rx) = smol::channel::bounded(1024);
return rx;
}
let workers = background.num_cpus().min(path_count);
let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
let mut unnamed_files = vec![];
let opened_buffers = self
.opened_buffers
.iter()
.filter_map(|(_, b)| {
let buffer = b.upgrade(cx)?;
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
if let Some(path) = snapshot.file().map(|file| file.path()) {
Some((path.clone(), (buffer, snapshot)))
} else {
unnamed_files.push(buffer);
None
}
})
.collect();
cx.background()
.spawn(Self::background_search(
unnamed_files,
opened_buffers,
cx.background().clone(),
self.fs.clone(),
workers,
query.clone(),
path_count,
snapshots,
matching_paths_tx,
))
.detach();
let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
let background = cx.background().clone();
let (result_tx, result_rx) = smol::channel::bounded(1024);
cx.background()
.spawn(async move {
let Ok(buffers) = buffers.await else {
return;
};
let buffers_len = buffers.len();
if buffers_len == 0 {
return;
}
let query = &query;
let (finished_tx, mut finished_rx) = smol::channel::unbounded();
background
.scoped(|scope| {
#[derive(Clone)]
struct FinishedStatus {
entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
buffer_index: SearchMatchCandidateIndex,
}
for _ in 0..workers {
let finished_tx = finished_tx.clone();
let mut buffers_rx = buffers_rx.clone();
scope.spawn(async move {
while let Some((entry, buffer_index)) = buffers_rx.next().await {
let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
{
if query.file_matches(
snapshot.file().map(|file| file.path().as_ref()),
) {
query
.search(&snapshot, None)
.await
.iter()
.map(|range| {
snapshot.anchor_before(range.start)
..snapshot.anchor_after(range.end)
})
.collect()
} else {
Vec::new()
}
} else {
Vec::new()
};
let status = if !buffer_matches.is_empty() {
let entry = if let Some((buffer, _)) = entry.as_ref() {
Some((buffer.clone(), buffer_matches))
} else {
None
};
FinishedStatus {
entry,
buffer_index,
}
} else {
FinishedStatus {
entry: None,
buffer_index,
}
};
if finished_tx.send(status).await.is_err() {
break;
}
}
});
}
// Report sorted matches
scope.spawn(async move {
let mut current_index = 0;
let mut scratch = vec![None; buffers_len];
while let Some(status) = finished_rx.next().await {
debug_assert!(
scratch[status.buffer_index].is_none(),
"Got match status of position {} twice",
status.buffer_index
);
let index = status.buffer_index;
scratch[index] = Some(status);
while current_index < buffers_len {
let Some(current_entry) = scratch[current_index].take() else {
// We intentionally **do not** increment `current_index` here. When the next element arrives
// from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
// this time.
break;
};
if let Some(entry) = current_entry.entry {
result_tx.send(entry).await.log_err();
}
current_index += 1;
}
if current_index == buffers_len {
break;
}
}
});
})
.await;
})
.detach();
result_rx
}
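
The sorted-order reporting described at the top of `search_local` (and implemented by the `// Report sorted matches` task) comes down to one small technique: park each completed status in a scratch slot keyed by its index in the sorted candidate list, and advance a cursor only across contiguous filled slots. Below is a minimal, self-contained sketch of that idea; `Status` and `emit_in_order` are illustrative names, not part of this crate.

// Illustrative sketch only: re-emits out-of-order completions in index order.
struct Status<T> {
    index: usize,
    value: Option<T>, // None means "finished, but nothing to report"
}

fn emit_in_order<T>(total: usize, completions: impl IntoIterator<Item = Status<T>>) -> Vec<T> {
    let mut scratch: Vec<Option<Option<T>>> = (0..total).map(|_| None).collect();
    let mut next = 0;
    let mut emitted = Vec::new();
    for status in completions {
        debug_assert!(scratch[status.index].is_none());
        scratch[status.index] = Some(status.value);
        // Drain every contiguous slot that is ready; stop at the first gap and wait
        // for a later completion to fill it before advancing the cursor again.
        while next < total {
            let Some(slot) = scratch[next].take() else { break };
            if let Some(value) = slot {
                emitted.push(value);
            }
            next += 1;
        }
    }
    emitted
}

With `total = 3` and completions arriving for indices 2, 0, 1, the values still come out in the order 0, 1, 2 - the index stability the comment above refers to.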
/// Pick paths that might potentially contain a match of a given search query.
async fn background_search(
unnamed_buffers: Vec<ModelHandle<Buffer>>,
opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
background: Arc<Background>,
fs: Arc<dyn Fs>,
workers: usize,
query: SearchQuery,
path_count: usize,
snapshots: Vec<LocalSnapshot>,
matching_paths_tx: Sender<SearchMatchCandidate>,
) {
let fs = &fs;
let query = &query;
let matching_paths_tx = &matching_paths_tx;
let snapshots = &snapshots;
let paths_per_worker = (path_count + workers - 1) / workers;
for buffer in unnamed_buffers {
matching_paths_tx
.send(SearchMatchCandidate::OpenBuffer {
buffer: buffer.clone(),
path: None,
})
.await
.log_err();
}
for (path, (buffer, _)) in opened_buffers.iter() {
matching_paths_tx
.send(SearchMatchCandidate::OpenBuffer {
buffer: buffer.clone(),
path: Some(path.clone()),
})
.await
.log_err();
}
background
.scoped(|scope| {
for worker_ix in 0..workers {
let worker_start_ix = worker_ix * paths_per_worker;
let worker_end_ix = worker_start_ix + paths_per_worker;
let opened_buffers = opened_buffers.clone();
scope.spawn(async move {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
for snapshot in snapshots {
let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
snapshot_start_ix = snapshot_end_ix;
continue;
} else {
let start_in_snapshot =
worker_start_ix.saturating_sub(snapshot_start_ix);
let end_in_snapshot =
cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
for entry in snapshot
.files(false, start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if matching_paths_tx.is_closed() {
break;
}
if opened_buffers.contains_key(&entry.path) {
continue;
}
let matches = if query.file_matches(Some(&entry.path)) {
abs_path.clear();
abs_path.push(&snapshot.abs_path());
abs_path.push(&entry.path);
if let Some(file) = fs.open_sync(&abs_path).await.log_err()
{
query.detect(file).unwrap_or(false)
} else {
false
}
} else {
false
};
if matches {
let project_path = SearchMatchCandidate::Path {
worktree_id: snapshot.id(),
path: entry.path.clone(),
};
if matching_paths_tx.send(project_path).await.is_err() {
break;
}
}
}
snapshot_start_ix = snapshot_end_ix;
}
}
});
}
})
.await;
}
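
The partitioning above assigns each worker a contiguous global range of file indices via ceiling division (`(path_count + workers - 1) / workers`) and then intersects that range with each snapshot's span of files. A hedged, standalone sketch of that arithmetic follows; `worker_slices` and its parameters are hypothetical names, not part of the crate.

use std::ops::Range;

// For one worker, return the (snapshot index, start..end within that snapshot) slices it scans.
fn worker_slices(snapshot_file_counts: &[usize], workers: usize, worker_ix: usize) -> Vec<(usize, Range<usize>)> {
    let path_count: usize = snapshot_file_counts.iter().sum();
    let paths_per_worker = (path_count + workers - 1) / workers; // ceiling division
    let worker_start = worker_ix * paths_per_worker;
    let worker_end = worker_start + paths_per_worker;
    let mut slices = Vec::new();
    let mut snapshot_start = 0;
    for (ix, &count) in snapshot_file_counts.iter().enumerate() {
        let snapshot_end = snapshot_start + count;
        if worker_end <= snapshot_start {
            break; // this worker's range ends before the current snapshot begins
        }
        if worker_start < snapshot_end {
            let start_in_snapshot = worker_start.saturating_sub(snapshot_start);
            let end_in_snapshot = worker_end.min(snapshot_end) - snapshot_start;
            slices.push((ix, start_in_snapshot..end_in_snapshot));
        }
        snapshot_start = snapshot_end;
    }
    slices
}

For example, with snapshot file counts `[5, 3]` and 2 workers, worker 0 scans `0..4` of the first snapshot, while worker 1 scans `4..5` of the first and `0..3` of the second.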
// TODO: Wire this up to allow selecting a server? // TODO: Wire this up to allow selecting a server?
fn request_lsp<R: LspCommand>( fn request_lsp<R: LspCommand>(
&self, &self,
@ -5307,6 +5515,61 @@ impl Project {
Task::ready(Ok(Default::default())) Task::ready(Ok(Default::default()))
} }
fn sort_candidates_and_open_buffers(
mut matching_paths_rx: Receiver<SearchMatchCandidate>,
cx: &mut ModelContext<Self>,
) -> (
futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
Receiver<(
Option<(ModelHandle<Buffer>, BufferSnapshot)>,
SearchMatchCandidateIndex,
)>,
) {
let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
cx.spawn(|this, cx| async move {
let mut buffers = vec![];
while let Some(entry) = matching_paths_rx.next().await {
buffers.push(entry);
}
buffers.sort_by_key(|candidate| candidate.path());
let matching_paths = buffers.clone();
let _ = sorted_buffers_tx.send(buffers);
for (index, candidate) in matching_paths.into_iter().enumerate() {
if buffers_tx.is_closed() {
break;
}
let this = this.clone();
let buffers_tx = buffers_tx.clone();
cx.spawn(|mut cx| async move {
let buffer = match candidate {
SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
SearchMatchCandidate::Path { worktree_id, path } => this
.update(&mut cx, |this, cx| {
this.open_buffer((worktree_id, path), cx)
})
.await
.log_err(),
};
if let Some(buffer) = buffer {
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
buffers_tx
.send((Some((buffer, snapshot)), index))
.await
.log_err();
} else {
buffers_tx.send((None, index)).await.log_err();
}
Ok::<_, anyhow::Error>(())
})
.detach();
}
})
.detach();
(sorted_buffers_rx, buffers_rx)
}
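
One detail worth spelling out: candidates are sorted by `candidate.path()`, which is an `Option`, and `None` orders before any `Some(_)` in Rust. That is what makes untitled buffers surface first, as `search_local`'s comment promises. A tiny runnable illustration with made-up paths:

use std::path::PathBuf;

fn main() {
    let mut candidate_paths: Vec<Option<PathBuf>> = vec![
        Some(PathBuf::from("src/lib.rs")),
        None, // an untitled buffer has no path
        Some(PathBuf::from("README.md")),
    ];
    candidate_paths.sort();
    // None sorts first, then the named paths in lexicographic order.
    assert_eq!(candidate_paths[0], None);
    assert_eq!(candidate_paths[1], Some(PathBuf::from("README.md")));
}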
pub fn find_or_create_local_worktree( pub fn find_or_create_local_worktree(
&mut self, &mut self,
abs_path: impl AsRef<Path>, abs_path: impl AsRef<Path>,
@ -5930,6 +6193,7 @@ impl Project {
let collaborator = Collaborator::from_proto(collaborator)?; let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| { this.update(&mut cx, |this, cx| {
this.shared_buffers.remove(&collaborator.peer_id); this.shared_buffers.remove(&collaborator.peer_id);
cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
this.collaborators this.collaborators
.insert(collaborator.peer_id, collaborator); .insert(collaborator.peer_id, collaborator);
cx.notify(); cx.notify();
@ -6813,6 +7077,40 @@ impl Project {
})) }))
} }
async fn handle_resolve_inlay_hint(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::ResolveInlayHint>,
_: Arc<Client>,
mut cx: AsyncAppContext,
) -> Result<proto::ResolveInlayHintResponse> {
let proto_hint = envelope
.payload
.hint
.expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
let hint = InlayHints::proto_to_project_hint(proto_hint)
.context("resolved proto inlay hint conversion")?;
let buffer = this.update(&mut cx, |this, cx| {
this.opened_buffers
.get(&envelope.payload.buffer_id)
.and_then(|buffer| buffer.upgrade(cx))
.ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
})?;
let response_hint = this
.update(&mut cx, |project, cx| {
project.resolve_inlay_hint(
hint,
buffer,
LanguageServerId(envelope.payload.language_server_id as usize),
cx,
)
})
.await
.context("inlay hints fetch")?;
Ok(proto::ResolveInlayHintResponse {
hint: Some(InlayHints::project_to_proto_hint(response_hint)),
})
}
async fn handle_refresh_inlay_hints( async fn handle_refresh_inlay_hints(
this: ModelHandle<Self>, this: ModelHandle<Self>,
_: TypedEnvelope<proto::RefreshInlayHints>, _: TypedEnvelope<proto::RefreshInlayHints>,
@ -6891,17 +7189,17 @@ impl Project {
) -> Result<proto::SearchProjectResponse> { ) -> Result<proto::SearchProjectResponse> {
let peer_id = envelope.original_sender_id()?; let peer_id = envelope.original_sender_id()?;
let query = SearchQuery::from_proto(envelope.payload)?; let query = SearchQuery::from_proto(envelope.payload)?;
let result = this let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
.update(&mut cx, |this, cx| this.search(query, cx))
.await?;
this.update(&mut cx, |this, cx| { cx.spawn(|mut cx| async move {
let mut locations = Vec::new(); let mut locations = Vec::new();
for (buffer, ranges) in result { while let Some((buffer, ranges)) = result.next().await {
for range in ranges { for range in ranges {
let start = serialize_anchor(&range.start); let start = serialize_anchor(&range.start);
let end = serialize_anchor(&range.end); let end = serialize_anchor(&range.end);
let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx); let buffer_id = this.update(&mut cx, |this, cx| {
this.create_buffer_for_peer(&buffer, peer_id, cx)
});
locations.push(proto::Location { locations.push(proto::Location {
buffer_id, buffer_id,
start: Some(start), start: Some(start),
@ -6911,6 +7209,7 @@ impl Project {
} }
Ok(proto::SearchProjectResponse { locations }) Ok(proto::SearchProjectResponse { locations })
}) })
.await
} }
async fn handle_open_buffer_for_symbol( async fn handle_open_buffer_for_symbol(
@ -7576,7 +7875,7 @@ impl Project {
self.language_servers_for_buffer(buffer, cx).next() self.language_servers_for_buffer(buffer, cx).next()
} }
fn language_server_for_buffer( pub fn language_server_for_buffer(
&self, &self,
buffer: &Buffer, buffer: &Buffer,
server_id: LanguageServerId, server_id: LanguageServerId,
@ -7756,6 +8055,7 @@ impl Collaborator {
Ok(Self { Ok(Self {
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?, peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
replica_id: message.replica_id as ReplicaId, replica_id: message.replica_id as ReplicaId,
user_id: message.user_id as UserId,
}) })
} }
} }

View file

@ -1,4 +1,4 @@
use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *}; use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs}; use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext}; use gpui::{executor::Deterministic, test::subscribe, AppContext};
@ -3953,11 +3953,12 @@ async fn search(
query: SearchQuery, query: SearchQuery,
cx: &mut gpui::TestAppContext, cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> { ) -> Result<HashMap<String, Vec<Range<usize>>>> {
let results = project let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
.update(cx, |project, cx| project.search(query, cx)) let mut result = HashMap::default();
.await?; while let Some((buffer, range)) = search_rx.next().await {
result.entry(buffer).or_insert(range);
Ok(results }
Ok(result
.into_iter() .into_iter()
.map(|(buffer, ranges)| { .map(|(buffer, ranges)| {
buffer.read_with(cx, |buffer, _| { buffer.read_with(cx, |buffer, _| {

View file

@ -1,7 +1,13 @@
use crate::Project; use crate::Project;
use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle}; use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
use std::path::PathBuf; use std::path::{Path, PathBuf};
use terminal::{Terminal, TerminalBuilder, TerminalSettings}; use terminal::{
terminal_settings::{self, TerminalSettings, VenvSettingsContent},
Terminal, TerminalBuilder,
};
#[cfg(target_os = "macos")]
use std::os::unix::ffi::OsStrExt;
pub struct Terminals { pub struct Terminals {
pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>, pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
@ -20,10 +26,12 @@ impl Project {
)); ));
} else { } else {
let settings = settings::get::<TerminalSettings>(cx); let settings = settings::get::<TerminalSettings>(cx);
let python_settings = settings.detect_venv.clone();
let shell = settings.shell.clone();
let terminal = TerminalBuilder::new( let terminal = TerminalBuilder::new(
working_directory.clone(), working_directory.clone(),
settings.shell.clone(), shell.clone(),
settings.env.clone(), settings.env.clone(),
Some(settings.blinking.clone()), Some(settings.blinking.clone()),
settings.alternate_scroll, settings.alternate_scroll,
@ -47,6 +55,15 @@ impl Project {
}) })
.detach(); .detach();
if let Some(python_settings) = &python_settings.as_option() {
let activate_script_path =
self.find_activate_script_path(&python_settings, working_directory);
self.activate_python_virtual_environment(
activate_script_path,
&terminal_handle,
cx,
);
}
terminal_handle terminal_handle
}); });
@ -54,6 +71,50 @@ impl Project {
} }
} }
pub fn find_activate_script_path(
&mut self,
settings: &VenvSettingsContent,
working_directory: Option<PathBuf>,
) -> Option<PathBuf> {
// When we are unable to resolve the working directory, the terminal builder
// defaults to '/'. We should probably encode this directly somewhere, but for
// now, let's just hard code it here.
let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
let activate_script_name = match settings.activate_script {
terminal_settings::ActivateScript::Default => "activate",
terminal_settings::ActivateScript::Csh => "activate.csh",
terminal_settings::ActivateScript::Fish => "activate.fish",
};
for virtual_environment_name in settings.directories {
let mut path = working_directory.join(virtual_environment_name);
path.push("bin/");
path.push(activate_script_name);
if path.exists() {
return Some(path);
}
}
None
}
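
Expressed on its own, the probe above just checks `<working directory>/<venv directory>/bin/<activate script>` for each configured directory name and returns the first path that exists. A hedged sketch; `probe_activate_script` and the example directory names are assumptions, not the actual settings defaults.

use std::path::{Path, PathBuf};

fn probe_activate_script(
    working_directory: &Path,
    venv_directories: &[&str], // e.g. &[".venv", "venv"] - hypothetical values
    activate_script_name: &str, // "activate", "activate.csh", or "activate.fish"
) -> Option<PathBuf> {
    for &dir in venv_directories {
        let path = working_directory.join(dir).join("bin").join(activate_script_name);
        if path.exists() {
            return Some(path);
        }
    }
    None
}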
fn activate_python_virtual_environment(
&mut self,
activate_script: Option<PathBuf>,
terminal_handle: &ModelHandle<Terminal>,
cx: &mut ModelContext<Project>,
) {
if let Some(activate_script) = activate_script {
// Paths are not strings so we need to jump through some hoops to format the command without `format!`
let mut command = Vec::from("source ".as_bytes());
command.extend_from_slice(activate_script.as_os_str().as_bytes());
command.push(b'\n');
terminal_handle.update(cx, |this, _| this.input_bytes(command));
}
}
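
Because the activate-script path is an OS string rather than guaranteed UTF-8, the command is assembled as raw bytes instead of going through `format!`. A hedged, Unix-only sketch of that construction (`source_command` is an illustrative name):

#[cfg(unix)]
fn source_command(activate_script: &std::path::Path) -> Vec<u8> {
    use std::os::unix::ffi::OsStrExt;

    // Build `source <path>\n` byte by byte so non-UTF-8 paths survive intact.
    let mut command = Vec::from("source ".as_bytes());
    command.extend_from_slice(activate_script.as_os_str().as_bytes());
    command.push(b'\n');
    command
}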
pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> { pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
&self.terminals.local_handles &self.terminals.local_handles
} }

View file

@ -2317,9 +2317,10 @@ impl BackgroundScannerState {
for changed_path in changed_paths { for changed_path in changed_paths {
let Some(dot_git_dir) = changed_path let Some(dot_git_dir) = changed_path
.ancestors() .ancestors()
.find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT)) else { .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
continue; else {
}; continue;
};
// Avoid processing the same repository multiple times, if multiple paths // Avoid processing the same repository multiple times, if multiple paths
// within it have changed. // within it have changed.
@ -2348,7 +2349,10 @@ impl BackgroundScannerState {
let Some(work_dir) = self let Some(work_dir) = self
.snapshot .snapshot
.entry_for_id(entry_id) .entry_for_id(entry_id)
.map(|entry| RepositoryWorkDirectory(entry.path.clone())) else { continue }; .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
else {
continue;
};
log::info!("reload git repository {:?}", dot_git_dir); log::info!("reload git repository {:?}", dot_git_dir);
let repository = repository.repo_ptr.lock(); let repository = repository.repo_ptr.lock();
@ -4026,7 +4030,7 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>, scan_queue: Sender<ScanJob>,
} }
pub trait WorktreeHandle { pub trait WorktreeModelHandle {
#[cfg(any(test, feature = "test-support"))] #[cfg(any(test, feature = "test-support"))]
fn flush_fs_events<'a>( fn flush_fs_events<'a>(
&self, &self,
@ -4034,7 +4038,7 @@ pub trait WorktreeHandle {
) -> futures::future::LocalBoxFuture<'a, ()>; ) -> futures::future::LocalBoxFuture<'a, ()>;
} }
impl WorktreeHandle for ModelHandle<Worktree> { impl WorktreeModelHandle for ModelHandle<Worktree> {
// When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
// occurred before the worktree was constructed. These events can cause the worktree to perform // occurred before the worktree was constructed. These events can cause the worktree to perform
// extra directory scans, and emit extra scan-state notifications. // extra directory scans, and emit extra scan-state notifications.

View file

@ -1,5 +1,5 @@
use crate::{ use crate::{
worktree::{Event, Snapshot, WorktreeHandle}, worktree::{Event, Snapshot, WorktreeModelHandle},
Entry, EntryKind, PathChange, Worktree, Entry, EntryKind, PathChange, Worktree,
}; };
use anyhow::Result; use anyhow::Result;

Some files were not shown because too many files have changed in this diff