Rejoin channel notes after brief connection loss (#2930)
* [x] Re-send operations that weren't sent while disconnected
* [x] Apply other clients' operations that were missed while disconnected
* [x] Update collaborators that joined / left while disconnected
* [x] Inform current collaborators that your peer id has changed
* [x] Refresh channel buffer collaborators on server restart
* [x] Randomized test
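The items above all come down to an epoch-plus-version-vector handshake: each channel buffer carries an `epoch` (bumped whenever the server snapshots/resets the buffer) and both sides track a version vector of observed operations. Below is a minimal, self-contained sketch of that decision, using hypothetical stand-in types (the real code in this commit uses `clock::Global`, `clock::Lamport`, and protobuf messages); it mirrors the logic of the server-side `rejoin_channel_buffers` query and the client-side `handle_connect` shown further down, but is not code from the commit itself.

```rust
// Sketch only: simplified stand-ins for clock::Lamport / clock::Global.
use std::collections::HashMap;

#[derive(Clone, Copy)]
struct Lamport {
    replica_id: u16,
    value: u32,
}

/// Greatest Lamport value seen per replica (stand-in for `clock::Global`).
#[derive(Default)]
struct Version(HashMap<u16, u32>);

impl Version {
    fn observe(&mut self, t: Lamport) {
        let slot = self.0.entry(t.replica_id).or_insert(0);
        *slot = (*slot).max(t.value);
    }
    fn observed(&self, t: Lamport) -> bool {
        self.0.get(&t.replica_id).map_or(false, |v| *v >= t.value)
    }
}

struct StoredOp {
    timestamp: Lamport,
    payload: Vec<u8>,
}

/// Server side: decide whether a client may rejoin a buffer and, if so, which
/// stored operations it is missing.
fn rejoin<'a>(
    server_epoch: u64,
    client_epoch: u64,
    client_version: &Version,
    server_ops: &'a [StoredOp],
) -> Option<Vec<&'a StoredOp>> {
    // A changed epoch means the buffer was snapshotted/reset while the client
    // was away, so an incremental catch-up is no longer possible.
    if server_epoch != client_epoch {
        return None;
    }
    Some(
        server_ops
            .iter()
            .filter(|op| !client_version.observed(op.timestamp))
            .collect(),
    )
}

fn main() {
    let mut client_version = Version::default();
    client_version.observe(Lamport { replica_id: 1, value: 3 });

    let server_ops = vec![
        StoredOp { timestamp: Lamport { replica_id: 1, value: 3 }, payload: vec![] },
        StoredOp { timestamp: Lamport { replica_id: 2, value: 1 }, payload: vec![] },
    ];

    // Same epoch: only the operation from replica 2 needs to be sent back.
    let missing = rejoin(7, 7, &client_version, &server_ops).unwrap();
    assert_eq!(missing.len(), 1);
}
```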
Commit d03a89ca19
24 changed files with 3960 additions and 2927 deletions
Cargo.lock (generated)
```diff
@@ -1456,6 +1456,7 @@ name = "collab"
 version = "0.20.0"
 dependencies = [
  "anyhow",
+ "async-trait",
  "async-tungstenite",
  "audio",
  "axum",
```
```diff
@@ -10,6 +10,7 @@ pub(crate) fn init(client: &Arc<Client>) {
     client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer);
     client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator);
     client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator);
+    client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer_collaborator);
 }
 
 pub struct ChannelBuffer {
@@ -17,6 +18,7 @@ pub struct ChannelBuffer {
     connected: bool,
     collaborators: Vec<proto::Collaborator>,
     buffer: ModelHandle<language::Buffer>,
+    buffer_epoch: u64,
     client: Arc<Client>,
     subscription: Option<client::Subscription>,
 }
@@ -73,6 +75,7 @@ impl ChannelBuffer {
 
             Self {
                 buffer,
+                buffer_epoch: response.epoch,
                 client,
                 connected: true,
                 collaborators,
@@ -82,6 +85,26 @@ impl ChannelBuffer {
         }))
     }
 
+    pub(crate) fn replace_collaborators(
+        &mut self,
+        collaborators: Vec<proto::Collaborator>,
+        cx: &mut ModelContext<Self>,
+    ) {
+        for old_collaborator in &self.collaborators {
+            if collaborators
+                .iter()
+                .any(|c| c.replica_id == old_collaborator.replica_id)
+            {
+                self.buffer.update(cx, |buffer, cx| {
+                    buffer.remove_peer(old_collaborator.replica_id as u16, cx)
+                });
+            }
+        }
+        self.collaborators = collaborators;
+        cx.emit(Event::CollaboratorsChanged);
+        cx.notify();
+    }
+
     async fn handle_update_channel_buffer(
         this: ModelHandle<Self>,
         update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
@@ -149,6 +172,26 @@ impl ChannelBuffer {
         Ok(())
     }
 
+    async fn handle_update_channel_buffer_collaborator(
+        this: ModelHandle<Self>,
+        message: TypedEnvelope<proto::UpdateChannelBufferCollaborator>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<()> {
+        this.update(&mut cx, |this, cx| {
+            for collaborator in &mut this.collaborators {
+                if collaborator.peer_id == message.payload.old_peer_id {
+                    collaborator.peer_id = message.payload.new_peer_id;
+                    break;
+                }
+            }
+            cx.emit(Event::CollaboratorsChanged);
+            cx.notify();
+        });
+
+        Ok(())
+    }
+
     fn on_buffer_update(
         &mut self,
         _: ModelHandle<language::Buffer>,
@@ -166,6 +209,10 @@ impl ChannelBuffer {
         }
     }
 
+    pub fn epoch(&self) -> u64 {
+        self.buffer_epoch
+    }
+
     pub fn buffer(&self) -> ModelHandle<language::Buffer> {
         self.buffer.clone()
     }
@@ -179,6 +226,7 @@ impl ChannelBuffer {
     }
 
     pub(crate) fn disconnect(&mut self, cx: &mut ModelContext<Self>) {
+        log::info!("channel buffer {} disconnected", self.channel.id);
        if self.connected {
            self.connected = false;
            self.subscription.take();
```
```diff
@@ -1,13 +1,15 @@
 use crate::channel_buffer::ChannelBuffer;
 use anyhow::{anyhow, Result};
-use client::{Client, Status, Subscription, User, UserId, UserStore};
+use client::{Client, Subscription, User, UserId, UserStore};
 use collections::{hash_map, HashMap, HashSet};
 use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt};
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
+use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle};
 use rpc::{proto, TypedEnvelope};
-use std::sync::Arc;
+use std::{mem, sync::Arc, time::Duration};
 use util::ResultExt;
 
+pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
+
 pub type ChannelId = u64;
 
 pub struct ChannelStore {
@@ -22,7 +24,8 @@ pub struct ChannelStore {
     client: Arc<Client>,
     user_store: ModelHandle<UserStore>,
     _rpc_subscription: Subscription,
-    _watch_connection_status: Task<()>,
+    _watch_connection_status: Task<Option<()>>,
+    disconnect_channel_buffers_task: Option<Task<()>>,
     _update_channels: Task<()>,
 }
@@ -67,24 +70,20 @@ impl ChannelStore {
         let rpc_subscription =
             client.add_message_handler(cx.handle(), Self::handle_update_channels);
 
-        let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
         let mut connection_status = client.status();
+        let (update_channels_tx, mut update_channels_rx) = mpsc::unbounded();
         let watch_connection_status = cx.spawn_weak(|this, mut cx| async move {
             while let Some(status) = connection_status.next().await {
-                if !status.is_connected() {
-                    if let Some(this) = this.upgrade(&cx) {
-                        this.update(&mut cx, |this, cx| {
-                            if matches!(status, Status::ConnectionLost | Status::SignedOut) {
-                                this.handle_disconnect(cx);
-                            } else {
-                                this.disconnect_buffers(cx);
-                            }
-                        });
-                    } else {
-                        break;
-                    }
+                let this = this.upgrade(&cx)?;
+                if status.is_connected() {
+                    this.update(&mut cx, |this, cx| this.handle_connect(cx))
+                        .await
+                        .log_err()?;
+                } else {
+                    this.update(&mut cx, |this, cx| this.handle_disconnect(cx));
                 }
             }
+            Some(())
         });
 
         Self {
@@ -100,6 +99,7 @@ impl ChannelStore {
             user_store,
             _rpc_subscription: rpc_subscription,
             _watch_connection_status: watch_connection_status,
+            disconnect_channel_buffers_task: None,
             _update_channels: cx.spawn_weak(|this, mut cx| async move {
                 while let Some(update_channels) = update_channels_rx.next().await {
                     if let Some(this) = this.upgrade(&cx) {
@@ -152,6 +152,15 @@ impl ChannelStore {
         self.channels_by_id.get(&channel_id)
     }
 
+    pub fn has_open_channel_buffer(&self, channel_id: ChannelId, cx: &AppContext) -> bool {
+        if let Some(buffer) = self.opened_buffers.get(&channel_id) {
+            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                return buffer.upgrade(cx).is_some();
+            }
+        }
+        false
+    }
+
     pub fn open_channel_buffer(
         &mut self,
         channel_id: ChannelId,
@@ -482,8 +491,106 @@ impl ChannelStore {
         Ok(())
     }
 
-    fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) {
-        self.disconnect_buffers(cx);
+    fn handle_connect(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+        self.disconnect_channel_buffers_task.take();
+
+        let mut buffer_versions = Vec::new();
+        for buffer in self.opened_buffers.values() {
+            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                if let Some(buffer) = buffer.upgrade(cx) {
+                    let channel_buffer = buffer.read(cx);
+                    let buffer = channel_buffer.buffer().read(cx);
+                    buffer_versions.push(proto::ChannelBufferVersion {
+                        channel_id: channel_buffer.channel().id,
+                        epoch: channel_buffer.epoch(),
+                        version: language::proto::serialize_version(&buffer.version()),
+                    });
+                }
+            }
+        }
+
+        if buffer_versions.is_empty() {
+            return Task::ready(Ok(()));
+        }
+
+        let response = self.client.request(proto::RejoinChannelBuffers {
+            buffers: buffer_versions,
+        });
+
+        cx.spawn(|this, mut cx| async move {
+            let mut response = response.await?;
+
+            this.update(&mut cx, |this, cx| {
+                this.opened_buffers.retain(|_, buffer| match buffer {
+                    OpenedChannelBuffer::Open(channel_buffer) => {
+                        let Some(channel_buffer) = channel_buffer.upgrade(cx) else {
+                            return false;
+                        };
+
+                        channel_buffer.update(cx, |channel_buffer, cx| {
+                            let channel_id = channel_buffer.channel().id;
+                            if let Some(remote_buffer) = response
+                                .buffers
+                                .iter_mut()
+                                .find(|buffer| buffer.channel_id == channel_id)
+                            {
+                                let channel_id = channel_buffer.channel().id;
+                                let remote_version =
+                                    language::proto::deserialize_version(&remote_buffer.version);
+
+                                channel_buffer.replace_collaborators(
+                                    mem::take(&mut remote_buffer.collaborators),
+                                    cx,
+                                );
+
+                                let operations = channel_buffer
+                                    .buffer()
+                                    .update(cx, |buffer, cx| {
+                                        let outgoing_operations =
+                                            buffer.serialize_ops(Some(remote_version), cx);
+                                        let incoming_operations =
+                                            mem::take(&mut remote_buffer.operations)
+                                                .into_iter()
+                                                .map(language::proto::deserialize_operation)
+                                                .collect::<Result<Vec<_>>>()?;
+                                        buffer.apply_ops(incoming_operations, cx)?;
+                                        anyhow::Ok(outgoing_operations)
+                                    })
+                                    .log_err();
+
+                                if let Some(operations) = operations {
+                                    let client = this.client.clone();
+                                    cx.background()
+                                        .spawn(async move {
+                                            let operations = operations.await;
+                                            for chunk in
+                                                language::proto::split_operations(operations)
+                                            {
+                                                client
+                                                    .send(proto::UpdateChannelBuffer {
+                                                        channel_id,
+                                                        operations: chunk,
+                                                    })
+                                                    .ok();
+                                            }
+                                        })
+                                        .detach();
+                                    return true;
+                                }
+                            }
+
+                            channel_buffer.disconnect(cx);
+                            false
+                        })
+                    }
+                    OpenedChannelBuffer::Loading(_) => true,
+                });
+            });
+            anyhow::Ok(())
+        })
+    }
+
+    fn handle_disconnect(&mut self, cx: &mut ModelContext<Self>) {
         self.channels_by_id.clear();
         self.channel_invitations.clear();
         self.channel_participants.clear();
@@ -491,16 +598,23 @@ impl ChannelStore {
         self.channel_paths.clear();
         self.outgoing_invites.clear();
         cx.notify();
-    }
 
-    fn disconnect_buffers(&mut self, cx: &mut ModelContext<ChannelStore>) {
-        for (_, buffer) in self.opened_buffers.drain() {
-            if let OpenedChannelBuffer::Open(buffer) = buffer {
-                if let Some(buffer) = buffer.upgrade(cx) {
-                    buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
-                }
-            }
-        }
+        self.disconnect_channel_buffers_task.get_or_insert_with(|| {
+            cx.spawn_weak(|this, mut cx| async move {
+                cx.background().timer(RECONNECT_TIMEOUT).await;
+                if let Some(this) = this.upgrade(&cx) {
+                    this.update(&mut cx, |this, cx| {
+                        for (_, buffer) in this.opened_buffers.drain() {
+                            if let OpenedChannelBuffer::Open(buffer) = buffer {
+                                if let Some(buffer) = buffer.upgrade(cx) {
+                                    buffer.update(cx, |buffer, cx| buffer.disconnect(cx));
+                                }
+                            }
+                        }
+                    });
+                }
+            })
+        });
     }
 
     pub(crate) fn update_channels(
```
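Aside on the `disconnect_channel_buffers_task` pattern above: on disconnect the store does not tear channel buffers down immediately; it schedules the teardown after `RECONNECT_TIMEOUT` and cancels it (by dropping the stored task in `handle_connect`) if the connection comes back first. The following is a rough, self-contained sketch of that cancel-on-drop debounce using a plain thread and an atomic flag instead of gpui tasks; names are hypothetical and it is not code from this commit.

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;

/// Dropping this guard before the timeout fires cancels the scheduled work,
/// mirroring how `disconnect_channel_buffers_task.take()` cancels the gpui task.
struct DelayedTeardown {
    cancelled: Arc<AtomicBool>,
}

impl Drop for DelayedTeardown {
    fn drop(&mut self) {
        self.cancelled.store(true, Ordering::SeqCst);
    }
}

fn schedule_teardown(
    timeout: Duration,
    teardown: impl FnOnce() + Send + 'static,
) -> DelayedTeardown {
    let cancelled = Arc::new(AtomicBool::new(false));
    let flag = cancelled.clone();
    thread::spawn(move || {
        thread::sleep(timeout);
        // Only disconnect if the connection did not come back in time.
        if !flag.load(Ordering::SeqCst) {
            teardown();
        }
    });
    DelayedTeardown { cancelled }
}

fn main() {
    // Connection lost: schedule buffer disconnection (timeout shortened here).
    let pending = schedule_teardown(Duration::from_millis(50), || {
        println!("disconnecting channel buffers");
    });

    // Connection restored before the timeout: dropping the guard cancels it.
    drop(pending);
    thread::sleep(Duration::from_millis(100));
}
```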
```diff
@@ -80,6 +80,7 @@ theme = { path = "../theme" }
 workspace = { path = "../workspace", features = ["test-support"] }
 collab_ui = { path = "../collab_ui", features = ["test-support"] }
 
+async-trait.workspace = true
 ctor.workspace = true
 env_logger.workspace = true
 indoc.workspace = true
```
```diff
@@ -435,6 +435,12 @@ pub struct ChannelsForUser {
     pub channels_with_admin_privileges: HashSet<ChannelId>,
 }
 
+#[derive(Debug)]
+pub struct RejoinedChannelBuffer {
+    pub buffer: proto::RejoinedChannelBuffer,
+    pub old_connection_id: ConnectionId,
+}
+
 #[derive(Clone)]
 pub struct JoinRoom {
     pub room: proto::Room,
@@ -498,6 +504,11 @@ pub struct RefreshedRoom {
     pub canceled_calls_to_user_ids: Vec<UserId>,
 }
 
+pub struct RefreshedChannelBuffer {
+    pub connection_ids: Vec<ConnectionId>,
+    pub removed_collaborators: Vec<proto::RemoveChannelBufferCollaborator>,
+}
+
 pub struct Project {
     pub collaborators: Vec<ProjectCollaborator>,
     pub worktrees: BTreeMap<u64, Worktree>,
```
```diff
@@ -10,8 +10,6 @@ impl Database {
         connection: ConnectionId,
     ) -> Result<proto::JoinChannelBufferResponse> {
         self.transaction(|tx| async move {
-            let tx = tx;
-
             self.check_user_is_channel_member(channel_id, user_id, &tx)
                 .await?;
 
@@ -70,7 +68,6 @@ impl Database {
                 .await?;
             collaborators.push(collaborator);
 
-            // Assemble the buffer state
             let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?;
 
             Ok(proto::JoinChannelBufferResponse {
@@ -78,6 +75,7 @@ impl Database {
                 replica_id: replica_id.to_proto() as u32,
                 base_text,
                 operations,
+                epoch: buffer.epoch as u64,
                 collaborators: collaborators
                     .into_iter()
                     .map(|collaborator| proto::Collaborator {
@@ -91,6 +89,154 @@ impl Database {
         .await
     }
 
+    pub async fn rejoin_channel_buffers(
+        &self,
+        buffers: &[proto::ChannelBufferVersion],
+        user_id: UserId,
+        connection_id: ConnectionId,
+    ) -> Result<Vec<RejoinedChannelBuffer>> {
+        self.transaction(|tx| async move {
+            let mut results = Vec::new();
+            for client_buffer in buffers {
+                let channel_id = ChannelId::from_proto(client_buffer.channel_id);
+                if self
+                    .check_user_is_channel_member(channel_id, user_id, &*tx)
+                    .await
+                    .is_err()
+                {
+                    log::info!("user is not a member of channel");
+                    continue;
+                }
+
+                let buffer = self.get_channel_buffer(channel_id, &*tx).await?;
+                let mut collaborators = channel_buffer_collaborator::Entity::find()
+                    .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+                    .all(&*tx)
+                    .await?;
+
+                // If the buffer epoch hasn't changed since the client lost
+                // connection, then the client's buffer can be synchronized with
+                // the server's buffer.
+                if buffer.epoch as u64 != client_buffer.epoch {
+                    log::info!("can't rejoin buffer, epoch has changed");
+                    continue;
+                }
+
+                // Find the collaborator record for this user's previous lost
+                // connection. Update it with the new connection id.
+                let server_id = ServerId(connection_id.owner_id as i32);
+                let Some(self_collaborator) = collaborators.iter_mut().find(|c| {
+                    c.user_id == user_id
+                        && (c.connection_lost || c.connection_server_id != server_id)
+                }) else {
+                    log::info!("can't rejoin buffer, no previous collaborator found");
+                    continue;
+                };
+                let old_connection_id = self_collaborator.connection();
+                *self_collaborator = channel_buffer_collaborator::ActiveModel {
+                    id: ActiveValue::Unchanged(self_collaborator.id),
+                    connection_id: ActiveValue::Set(connection_id.id as i32),
+                    connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)),
+                    connection_lost: ActiveValue::Set(false),
+                    ..Default::default()
+                }
+                .update(&*tx)
+                .await?;
+
+                let client_version = version_from_wire(&client_buffer.version);
+                let serialization_version = self
+                    .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
+                    .await?;
+
+                let mut rows = buffer_operation::Entity::find()
+                    .filter(
+                        buffer_operation::Column::BufferId
+                            .eq(buffer.id)
+                            .and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
+                    )
+                    .stream(&*tx)
+                    .await?;
+
+                // Find the server's version vector and any operations
+                // that the client has not seen.
+                let mut server_version = clock::Global::new();
+                let mut operations = Vec::new();
+                while let Some(row) = rows.next().await {
+                    let row = row?;
+                    let timestamp = clock::Lamport {
+                        replica_id: row.replica_id as u16,
+                        value: row.lamport_timestamp as u32,
+                    };
+                    server_version.observe(timestamp);
+                    if !client_version.observed(timestamp) {
+                        operations.push(proto::Operation {
+                            variant: Some(operation_from_storage(row, serialization_version)?),
+                        })
+                    }
+                }
+
+                results.push(RejoinedChannelBuffer {
+                    old_connection_id,
+                    buffer: proto::RejoinedChannelBuffer {
+                        channel_id: client_buffer.channel_id,
+                        version: version_to_wire(&server_version),
+                        operations,
+                        collaborators: collaborators
+                            .into_iter()
+                            .map(|collaborator| proto::Collaborator {
+                                peer_id: Some(collaborator.connection().into()),
+                                user_id: collaborator.user_id.to_proto(),
+                                replica_id: collaborator.replica_id.0 as u32,
+                            })
+                            .collect(),
+                    },
+                });
+            }
+
+            Ok(results)
+        })
+        .await
+    }
+
+    pub async fn clear_stale_channel_buffer_collaborators(
+        &self,
+        channel_id: ChannelId,
+        server_id: ServerId,
+    ) -> Result<RefreshedChannelBuffer> {
+        self.transaction(|tx| async move {
+            let collaborators = channel_buffer_collaborator::Entity::find()
+                .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
+                .all(&*tx)
+                .await?;
+
+            let mut connection_ids = Vec::new();
+            let mut removed_collaborators = Vec::new();
+            let mut collaborator_ids_to_remove = Vec::new();
+            for collaborator in &collaborators {
+                if !collaborator.connection_lost && collaborator.connection_server_id == server_id {
+                    connection_ids.push(collaborator.connection());
+                } else {
+                    removed_collaborators.push(proto::RemoveChannelBufferCollaborator {
+                        channel_id: channel_id.to_proto(),
+                        peer_id: Some(collaborator.connection().into()),
+                    });
+                    collaborator_ids_to_remove.push(collaborator.id);
+                }
+            }
+
+            channel_buffer_collaborator::Entity::delete_many()
+                .filter(channel_buffer_collaborator::Column::Id.is_in(collaborator_ids_to_remove))
+                .exec(&*tx)
+                .await?;
+
+            Ok(RefreshedChannelBuffer {
+                connection_ids,
+                removed_collaborators,
+            })
+        })
+        .await
+    }
+
     pub async fn leave_channel_buffer(
         &self,
         channel_id: ChannelId,
@@ -103,6 +249,39 @@ impl Database {
         .await
     }
 
+    pub async fn leave_channel_buffers(
+        &self,
+        connection: ConnectionId,
+    ) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
+        self.transaction(|tx| async move {
+            #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+            enum QueryChannelIds {
+                ChannelId,
+            }
+
+            let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
+                .select_only()
+                .column(channel_buffer_collaborator::Column::ChannelId)
+                .filter(Condition::all().add(
+                    channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
+                ))
+                .into_values::<_, QueryChannelIds>()
+                .all(&*tx)
+                .await?;
+
+            let mut result = Vec::new();
+            for channel_id in channel_ids {
+                let collaborators = self
+                    .leave_channel_buffer_internal(channel_id, connection, &*tx)
+                    .await?;
+                result.push((channel_id, collaborators));
+            }
+
+            Ok(result)
+        })
+        .await
+    }
+
     pub async fn leave_channel_buffer_internal(
         &self,
         channel_id: ChannelId,
@@ -143,45 +322,12 @@ impl Database {
         drop(rows);
 
         if connections.is_empty() {
-            self.snapshot_buffer(channel_id, &tx).await?;
+            self.snapshot_channel_buffer(channel_id, &tx).await?;
         }
 
         Ok(connections)
     }
 
-    pub async fn leave_channel_buffers(
-        &self,
-        connection: ConnectionId,
-    ) -> Result<Vec<(ChannelId, Vec<ConnectionId>)>> {
-        self.transaction(|tx| async move {
-            #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
-            enum QueryChannelIds {
-                ChannelId,
-            }
-
-            let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
-                .select_only()
-                .column(channel_buffer_collaborator::Column::ChannelId)
-                .filter(Condition::all().add(
-                    channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
-                ))
-                .into_values::<_, QueryChannelIds>()
-                .all(&*tx)
-                .await?;
-
-            let mut result = Vec::new();
-            for channel_id in channel_ids {
-                let collaborators = self
-                    .leave_channel_buffer_internal(channel_id, connection, &*tx)
-                    .await?;
-                result.push((channel_id, collaborators));
-            }
-
-            Ok(result)
-        })
-        .await
-    }
-
     pub async fn get_channel_buffer_collaborators(
         &self,
         channel_id: ChannelId,
@@ -224,20 +370,9 @@ impl Database {
                 .await?
                 .ok_or_else(|| anyhow!("no such buffer"))?;
 
-            #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
-            enum QueryVersion {
-                OperationSerializationVersion,
-            }
-
-            let serialization_version: i32 = buffer
-                .find_related(buffer_snapshot::Entity)
-                .select_only()
-                .column(buffer_snapshot::Column::OperationSerializationVersion)
-                .filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch))
-                .into_values::<_, QueryVersion>()
-                .one(&*tx)
-                .await?
-                .ok_or_else(|| anyhow!("missing buffer snapshot"))?;
+            let serialization_version = self
+                .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
+                .await?;
 
             let operations = operations
                 .iter()
@@ -245,6 +380,16 @@ impl Database {
                 .collect::<Vec<_>>();
             if !operations.is_empty() {
                 buffer_operation::Entity::insert_many(operations)
+                    .on_conflict(
+                        OnConflict::columns([
+                            buffer_operation::Column::BufferId,
+                            buffer_operation::Column::Epoch,
+                            buffer_operation::Column::LamportTimestamp,
+                            buffer_operation::Column::ReplicaId,
+                        ])
+                        .do_nothing()
+                        .to_owned(),
+                    )
                     .exec(&*tx)
                     .await?;
             }
@@ -270,6 +415,38 @@ impl Database {
         .await
     }
 
+    async fn get_buffer_operation_serialization_version(
+        &self,
+        buffer_id: BufferId,
+        epoch: i32,
+        tx: &DatabaseTransaction,
+    ) -> Result<i32> {
+        Ok(buffer_snapshot::Entity::find()
+            .filter(buffer_snapshot::Column::BufferId.eq(buffer_id))
+            .filter(buffer_snapshot::Column::Epoch.eq(epoch))
+            .select_only()
+            .column(buffer_snapshot::Column::OperationSerializationVersion)
+            .into_values::<_, QueryOperationSerializationVersion>()
+            .one(&*tx)
+            .await?
+            .ok_or_else(|| anyhow!("missing buffer snapshot"))?)
+    }
+
+    async fn get_channel_buffer(
+        &self,
+        channel_id: ChannelId,
+        tx: &DatabaseTransaction,
+    ) -> Result<buffer::Model> {
+        Ok(channel::Model {
+            id: channel_id,
+            ..Default::default()
+        }
+        .find_related(buffer::Entity)
+        .one(&*tx)
+        .await?
+        .ok_or_else(|| anyhow!("no such buffer"))?)
+    }
+
     async fn get_buffer_state(
         &self,
         buffer: &buffer::Model,
@@ -303,27 +480,20 @@ impl Database {
             .await?;
         let mut operations = Vec::new();
         while let Some(row) = rows.next().await {
-            let row = row?;
-
-            let operation = operation_from_storage(row, version)?;
             operations.push(proto::Operation {
-                variant: Some(operation),
+                variant: Some(operation_from_storage(row?, version)?),
            })
        }
 
        Ok((base_text, operations))
    }
 
-    async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> {
-        let buffer = channel::Model {
-            id: channel_id,
-            ..Default::default()
-        }
-        .find_related(buffer::Entity)
-        .one(&*tx)
-        .await?
-        .ok_or_else(|| anyhow!("no such buffer"))?;
+    async fn snapshot_channel_buffer(
+        &self,
+        channel_id: ChannelId,
+        tx: &DatabaseTransaction,
+    ) -> Result<()> {
+        let buffer = self.get_channel_buffer(channel_id, tx).await?;
 
        let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?;
        if operations.is_empty() {
            return Ok(());
@@ -527,6 +697,22 @@ fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
     version
 }
 
+fn version_to_wire(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
+    let mut message = Vec::new();
+    for entry in version.iter() {
+        message.push(proto::VectorClockEntry {
+            replica_id: entry.replica_id as u32,
+            timestamp: entry.value,
+        });
+    }
+    message
+}
+
+#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
+enum QueryOperationSerializationVersion {
+    OperationSerializationVersion,
+}
+
 mod storage {
     #![allow(non_snake_case)]
     use prost::Message;
```
```diff
@@ -1,6 +1,20 @@
 use super::*;
 
 impl Database {
+    #[cfg(test)]
+    pub async fn all_channels(&self) -> Result<Vec<(ChannelId, String)>> {
+        self.transaction(move |tx| async move {
+            let mut channels = Vec::new();
+            let mut rows = channel::Entity::find().stream(&*tx).await?;
+            while let Some(row) = rows.next().await {
+                let row = row?;
+                channels.push((row.id, row.name));
+            }
+            Ok(channels)
+        })
+        .await
+    }
+
     pub async fn create_root_channel(
         &self,
         name: &str,
```
```diff
@@ -1,7 +1,7 @@
 use super::*;
 
 impl Database {
-    pub async fn refresh_room(
+    pub async fn clear_stale_room_participants(
         &self,
         room_id: RoomId,
         new_server_id: ServerId,
@@ -14,31 +14,49 @@ impl Database {
         .await
     }
 
-    pub async fn stale_room_ids(
+    pub async fn stale_server_resource_ids(
         &self,
         environment: &str,
         new_server_id: ServerId,
-    ) -> Result<Vec<RoomId>> {
+    ) -> Result<(Vec<RoomId>, Vec<ChannelId>)> {
         self.transaction(|tx| async move {
             #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
-            enum QueryAs {
+            enum QueryRoomIds {
                 RoomId,
             }
 
+            #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
+            enum QueryChannelIds {
+                ChannelId,
+            }
+
             let stale_server_epochs = self
                 .stale_server_ids(environment, new_server_id, &tx)
                 .await?;
-            Ok(room_participant::Entity::find()
+            let room_ids = room_participant::Entity::find()
                 .select_only()
                 .column(room_participant::Column::RoomId)
                 .distinct()
                 .filter(
                     room_participant::Column::AnsweringConnectionServerId
-                        .is_in(stale_server_epochs),
+                        .is_in(stale_server_epochs.iter().copied()),
                 )
-                .into_values::<_, QueryAs>()
+                .into_values::<_, QueryRoomIds>()
                 .all(&*tx)
-                .await?)
+                .await?;
+            let channel_ids = channel_buffer_collaborator::Entity::find()
+                .select_only()
+                .column(channel_buffer_collaborator::Column::ChannelId)
+                .distinct()
+                .filter(
+                    channel_buffer_collaborator::Column::ConnectionServerId
+                        .is_in(stale_server_epochs.iter().copied()),
+                )
+                .into_values::<_, QueryChannelIds>()
+                .all(&*tx)
+                .await?;
+
+            Ok((room_ids, channel_ids))
         })
         .await
     }
```
```diff
@@ -251,6 +251,7 @@ impl Server {
             .add_request_handler(join_channel_buffer)
             .add_request_handler(leave_channel_buffer)
             .add_message_handler(update_channel_buffer)
+            .add_request_handler(rejoin_channel_buffers)
             .add_request_handler(get_channel_members)
             .add_request_handler(respond_to_channel_invite)
             .add_request_handler(join_channel)
@@ -277,13 +278,33 @@ impl Server {
             tracing::info!("waiting for cleanup timeout");
             timeout.await;
             tracing::info!("cleanup timeout expired, retrieving stale rooms");
-            if let Some(room_ids) = app_state
+            if let Some((room_ids, channel_ids)) = app_state
                 .db
-                .stale_room_ids(&app_state.config.zed_environment, server_id)
+                .stale_server_resource_ids(&app_state.config.zed_environment, server_id)
                 .await
                 .trace_err()
             {
                 tracing::info!(stale_room_count = room_ids.len(), "retrieved stale rooms");
+                tracing::info!(
+                    stale_channel_buffer_count = channel_ids.len(),
+                    "retrieved stale channel buffers"
+                );
+
+                for channel_id in channel_ids {
+                    if let Some(refreshed_channel_buffer) = app_state
+                        .db
+                        .clear_stale_channel_buffer_collaborators(channel_id, server_id)
+                        .await
+                        .trace_err()
+                    {
+                        for connection_id in refreshed_channel_buffer.connection_ids {
+                            for message in &refreshed_channel_buffer.removed_collaborators {
+                                peer.send(connection_id, message.clone()).trace_err();
+                            }
+                        }
+                    }
+                }
+
                 for room_id in room_ids {
                     let mut contacts_to_update = HashSet::default();
                     let mut canceled_calls_to_user_ids = Vec::new();
@@ -292,7 +313,7 @@ impl Server {
 
                     if let Some(mut refreshed_room) = app_state
                         .db
-                        .refresh_room(room_id, server_id)
+                        .clear_stale_room_participants(room_id, server_id)
                         .await
                         .trace_err()
                     {
@@ -854,14 +875,14 @@ async fn connection_lost(
         .await
         .trace_err();
 
-    leave_channel_buffers_for_session(&session)
-        .await
-        .trace_err();
-
     futures::select_biased! {
         _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
+            log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id, session.connection_id);
             leave_room_for_session(&session).await.trace_err();
+            leave_channel_buffers_for_session(&session)
+                .await
+                .trace_err();
 
             if !session
                 .connection_pool()
                 .await
@@ -2547,6 +2568,41 @@ async fn update_channel_buffer(
     Ok(())
 }
 
+async fn rejoin_channel_buffers(
+    request: proto::RejoinChannelBuffers,
+    response: Response<proto::RejoinChannelBuffers>,
+    session: Session,
+) -> Result<()> {
+    let db = session.db().await;
+    let buffers = db
+        .rejoin_channel_buffers(&request.buffers, session.user_id, session.connection_id)
+        .await?;
+
+    for buffer in &buffers {
+        let collaborators_to_notify = buffer
+            .buffer
+            .collaborators
+            .iter()
+            .filter_map(|c| Some(c.peer_id?.into()));
+        channel_buffer_updated(
+            session.connection_id,
+            collaborators_to_notify,
+            &proto::UpdateChannelBufferCollaborator {
+                channel_id: buffer.buffer.channel_id,
+                old_peer_id: Some(buffer.old_connection_id.into()),
+                new_peer_id: Some(session.connection_id.into()),
+            },
+            &session.peer,
+        );
+    }
+
+    response.send(proto::RejoinChannelBuffersResponse {
+        buffers: buffers.into_iter().map(|b| b.buffer).collect(),
+    })?;
+
+    Ok(())
+}
+
 async fn leave_channel_buffer(
     request: proto::LeaveChannelBuffer,
     response: Response<proto::LeaveChannelBuffer>,
```
```diff
@@ -1,555 +1,18 @@
-use crate::{
-    db::{tests::TestDb, NewUserParams, UserId},
-    executor::Executor,
-    rpc::{Server, CLEANUP_TIMEOUT},
-    AppState,
-};
-use anyhow::anyhow;
-use call::{ActiveCall, Room};
-use channel::ChannelStore;
-use client::{
-    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
-};
-use collections::{HashMap, HashSet};
-use fs::FakeFs;
-use futures::{channel::oneshot, StreamExt as _};
-use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
-use language::LanguageRegistry;
-use parking_lot::Mutex;
-use project::{Project, WorktreeId};
-use settings::SettingsStore;
-use std::{
-    cell::{Ref, RefCell, RefMut},
-    env,
-    ops::{Deref, DerefMut},
-    path::Path,
-    sync::{
-        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
-        Arc,
-    },
-};
-use util::http::FakeHttpClient;
-use workspace::Workspace;
+use call::Room;
+use gpui::{ModelHandle, TestAppContext};
 
 mod channel_buffer_tests;
 mod channel_tests;
 mod integration_tests;
-mod randomized_integration_tests;
+mod random_channel_buffer_tests;
+mod random_project_collaboration_tests;
+mod randomized_test_helpers;
+mod test_server;
 
+pub use randomized_test_helpers::{
+    run_randomized_test, save_randomized_test_plan, RandomizedTest, TestError, UserTestPlan,
+};
+pub use test_server::{TestClient, TestServer};
-struct TestServer {
-    app_state: Arc<AppState>,
-    server: Arc<Server>,
-    connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
-    forbid_connections: Arc<AtomicBool>,
-    _test_db: TestDb,
-    test_live_kit_server: Arc<live_kit_client::TestServer>,
-}
-
-impl TestServer {
-    async fn start(deterministic: &Arc<Deterministic>) -> Self {
-        static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
-
-        let use_postgres = env::var("USE_POSTGRES").ok();
-        let use_postgres = use_postgres.as_deref();
-        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
-            TestDb::postgres(deterministic.build_background())
-        } else {
-            TestDb::sqlite(deterministic.build_background())
-        };
-        let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
-        let live_kit_server = live_kit_client::TestServer::create(
-            format!("http://livekit.{}.test", live_kit_server_id),
-            format!("devkey-{}", live_kit_server_id),
-            format!("secret-{}", live_kit_server_id),
-            deterministic.build_background(),
-        )
-        .unwrap();
-        let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
-        let epoch = app_state
-            .db
-            .create_server(&app_state.config.zed_environment)
-            .await
-            .unwrap();
-        let server = Server::new(
-            epoch,
-            app_state.clone(),
-            Executor::Deterministic(deterministic.build_background()),
-        );
-        server.start().await.unwrap();
-        // Advance clock to ensure the server's cleanup task is finished.
-        deterministic.advance_clock(CLEANUP_TIMEOUT);
-        Self {
-            app_state,
-            server,
-            connection_killers: Default::default(),
-            forbid_connections: Default::default(),
-            _test_db: test_db,
-            test_live_kit_server: live_kit_server,
-        }
-    }
-
-    async fn reset(&self) {
-        self.app_state.db.reset();
-        let epoch = self
-            .app_state
-            .db
-            .create_server(&self.app_state.config.zed_environment)
-            .await
-            .unwrap();
-        self.server.reset(epoch);
-    }
-
-    async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
-        cx.update(|cx| {
-            if cx.has_global::<SettingsStore>() {
-                panic!("Same cx used to create two test clients")
-            }
-            cx.set_global(SettingsStore::test(cx));
-        });
-
-        let http = FakeHttpClient::with_404_response();
-        let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
-        {
-            user.id
-        } else {
-            self.app_state
-                .db
-                .create_user(
-                    &format!("{name}@example.com"),
-                    false,
-                    NewUserParams {
-                        github_login: name.into(),
-                        github_user_id: 0,
-                        invite_count: 0,
-                    },
-                )
-                .await
-                .expect("creating user failed")
-                .user_id
-        };
-        let client_name = name.to_string();
-        let mut client = cx.read(|cx| Client::new(http.clone(), cx));
-        let server = self.server.clone();
-        let db = self.app_state.db.clone();
-        let connection_killers = self.connection_killers.clone();
-        let forbid_connections = self.forbid_connections.clone();
-
-        Arc::get_mut(&mut client)
-            .unwrap()
-            .set_id(user_id.0 as usize)
-            .override_authenticate(move |cx| {
-                cx.spawn(|_| async move {
-                    let access_token = "the-token".to_string();
-                    Ok(Credentials {
-                        user_id: user_id.0 as u64,
-                        access_token,
-                    })
-                })
-            })
-            .override_establish_connection(move |credentials, cx| {
-                assert_eq!(credentials.user_id, user_id.0 as u64);
-                assert_eq!(credentials.access_token, "the-token");
-
-                let server = server.clone();
-                let db = db.clone();
-                let connection_killers = connection_killers.clone();
-                let forbid_connections = forbid_connections.clone();
-                let client_name = client_name.clone();
-                cx.spawn(move |cx| async move {
-                    if forbid_connections.load(SeqCst) {
-                        Err(EstablishConnectionError::other(anyhow!(
-                            "server is forbidding connections"
-                        )))
-                    } else {
-                        let (client_conn, server_conn, killed) =
-                            Connection::in_memory(cx.background());
-                        let (connection_id_tx, connection_id_rx) = oneshot::channel();
-                        let user = db
-                            .get_user_by_id(user_id)
-                            .await
-                            .expect("retrieving user failed")
-                            .unwrap();
-                        cx.background()
-                            .spawn(server.handle_connection(
-                                server_conn,
-                                client_name,
-                                user,
-                                Some(connection_id_tx),
-                                Executor::Deterministic(cx.background()),
-                            ))
-                            .detach();
-                        let connection_id = connection_id_rx.await.unwrap();
-                        connection_killers
-                            .lock()
-                            .insert(connection_id.into(), killed);
-                        Ok(client_conn)
-                    }
-                })
-            });
-
-        let fs = FakeFs::new(cx.background());
-        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
-        let channel_store =
-            cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
-        let app_state = Arc::new(workspace::AppState {
-            client: client.clone(),
-            user_store: user_store.clone(),
-            channel_store: channel_store.clone(),
-            languages: Arc::new(LanguageRegistry::test()),
-            fs: fs.clone(),
-            build_window_options: |_, _, _| Default::default(),
-            initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
-            background_actions: || &[],
-        });
-
-        cx.update(|cx| {
-            theme::init((), cx);
-            Project::init(&client, cx);
-            client::init(&client, cx);
-            language::init(cx);
-            editor::init_settings(cx);
-            workspace::init(app_state.clone(), cx);
-            audio::init((), cx);
-            call::init(client.clone(), user_store.clone(), cx);
-            channel::init(&client);
-        });
-
-        client
-            .authenticate_and_connect(false, &cx.to_async())
-            .await
-            .unwrap();
-
-        let client = TestClient {
-            app_state,
-            username: name.to_string(),
-            state: Default::default(),
-        };
-        client.wait_for_current_user(cx).await;
-        client
-    }
-
-    fn disconnect_client(&self, peer_id: PeerId) {
-        self.connection_killers
-            .lock()
-            .remove(&peer_id)
-            .unwrap()
-            .store(true, SeqCst);
-    }
-
-    fn forbid_connections(&self) {
-        self.forbid_connections.store(true, SeqCst);
-    }
-
-    fn allow_connections(&self) {
-        self.forbid_connections.store(false, SeqCst);
-    }
-
-    async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
-        for ix in 1..clients.len() {
-            let (left, right) = clients.split_at_mut(ix);
-            let (client_a, cx_a) = left.last_mut().unwrap();
-            for (client_b, cx_b) in right {
-                client_a
-                    .app_state
-                    .user_store
-                    .update(*cx_a, |store, cx| {
-                        store.request_contact(client_b.user_id().unwrap(), cx)
-                    })
-                    .await
-                    .unwrap();
-                cx_a.foreground().run_until_parked();
-                client_b
-                    .app_state
-                    .user_store
-                    .update(*cx_b, |store, cx| {
-                        store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
-                    })
-                    .await
-                    .unwrap();
-            }
-        }
-    }
-
-    async fn make_channel(
-        &self,
-        channel: &str,
-        admin: (&TestClient, &mut TestAppContext),
-        members: &mut [(&TestClient, &mut TestAppContext)],
-    ) -> u64 {
-        let (admin_client, admin_cx) = admin;
-        let channel_id = admin_client
-            .app_state
-            .channel_store
-            .update(admin_cx, |channel_store, cx| {
-                channel_store.create_channel(channel, None, cx)
-            })
-            .await
-            .unwrap();
-
-        for (member_client, member_cx) in members {
-            admin_client
-                .app_state
-                .channel_store
-                .update(admin_cx, |channel_store, cx| {
-                    channel_store.invite_member(
-                        channel_id,
-                        member_client.user_id().unwrap(),
-                        false,
-                        cx,
-                    )
-                })
-                .await
-                .unwrap();
-
-            admin_cx.foreground().run_until_parked();
-
-            member_client
-                .app_state
-                .channel_store
-                .update(*member_cx, |channels, _| {
-                    channels.respond_to_channel_invite(channel_id, true)
-                })
-                .await
-                .unwrap();
-        }
-
-        channel_id
-    }
-
-    async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
-        self.make_contacts(clients).await;
-
-        let (left, right) = clients.split_at_mut(1);
-        let (_client_a, cx_a) = &mut left[0];
-        let active_call_a = cx_a.read(ActiveCall::global);
-
-        for (client_b, cx_b) in right {
-            let user_id_b = client_b.current_user_id(*cx_b).to_proto();
-            active_call_a
-                .update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
-                .await
-                .unwrap();
-
-            cx_b.foreground().run_until_parked();
-            let active_call_b = cx_b.read(ActiveCall::global);
-            active_call_b
-                .update(*cx_b, |call, cx| call.accept_incoming(cx))
-                .await
-                .unwrap();
-        }
-    }
-
-    async fn build_app_state(
-        test_db: &TestDb,
-        fake_server: &live_kit_client::TestServer,
-    ) -> Arc<AppState> {
-        Arc::new(AppState {
-            db: test_db.db().clone(),
-            live_kit_client: Some(Arc::new(fake_server.create_api_client())),
-            config: Default::default(),
-        })
-    }
-}
-
-impl Deref for TestServer {
-    type Target = Server;
-
-    fn deref(&self) -> &Self::Target {
-        &self.server
-    }
-}
-
-impl Drop for TestServer {
-    fn drop(&mut self) {
-        self.server.teardown();
-        self.test_live_kit_server.teardown().unwrap();
-    }
-}
-
-struct TestClient {
-    username: String,
-    state: RefCell<TestClientState>,
-    app_state: Arc<workspace::AppState>,
-}
-
-#[derive(Default)]
-struct TestClientState {
-    local_projects: Vec<ModelHandle<Project>>,
-    remote_projects: Vec<ModelHandle<Project>>,
-    buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
-}
-
-impl Deref for TestClient {
-    type Target = Arc<Client>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.app_state.client
-    }
-}
-
-struct ContactsSummary {
-    pub current: Vec<String>,
-    pub outgoing_requests: Vec<String>,
-    pub incoming_requests: Vec<String>,
-}
-
-impl TestClient {
-    pub fn fs(&self) -> &FakeFs {
-        self.app_state.fs.as_fake()
-    }
-
-    pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
-        &self.app_state.channel_store
-    }
-
-    pub fn user_store(&self) -> &ModelHandle<UserStore> {
-        &self.app_state.user_store
-    }
-
-    pub fn language_registry(&self) -> &Arc<LanguageRegistry> {
-        &self.app_state.languages
-    }
-
-    pub fn client(&self) -> &Arc<Client> {
-        &self.app_state.client
-    }
-
-    pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
-        UserId::from_proto(
-            self.app_state
-                .user_store
-                .read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
-        )
-    }
-
-    async fn wait_for_current_user(&self, cx: &TestAppContext) {
-        let mut authed_user = self
-            .app_state
-            .user_store
-            .read_with(cx, |user_store, _| user_store.watch_current_user());
-        while authed_user.next().await.unwrap().is_none() {}
-    }
-
-    async fn clear_contacts(&self, cx: &mut TestAppContext) {
-        self.app_state
-            .user_store
-            .update(cx, |store, _| store.clear_contacts())
-            .await;
-    }
-
-    fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
-        Ref::map(self.state.borrow(), |state| &state.local_projects)
-    }
-
-    fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
-        Ref::map(self.state.borrow(), |state| &state.remote_projects)
-    }
-
-    fn local_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
-    }
-
-    fn remote_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
-    }
-
-    fn buffers_for_project<'a>(
-        &'a self,
-        project: &ModelHandle<Project>,
-    ) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
-        RefMut::map(self.state.borrow_mut(), |state| {
-            state.buffers.entry(project.clone()).or_default()
-        })
-    }
-
-    fn buffers<'a>(
-        &'a self,
-    ) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
-    {
-        RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
-    }
-
-    fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
-        self.app_state
-            .user_store
-            .read_with(cx, |store, _| ContactsSummary {
-                current: store
-                    .contacts()
-                    .iter()
-                    .map(|contact| contact.user.github_login.clone())
-                    .collect(),
-                outgoing_requests: store
-                    .outgoing_contact_requests()
-                    .iter()
-                    .map(|user| user.github_login.clone())
-                    .collect(),
-                incoming_requests: store
-                    .incoming_contact_requests()
-                    .iter()
-                    .map(|user| user.github_login.clone())
-                    .collect(),
-            })
-    }
-
-    async fn build_local_project(
-        &self,
-        root_path: impl AsRef<Path>,
-        cx: &mut TestAppContext,
-    ) -> (ModelHandle<Project>, WorktreeId) {
-        let project = cx.update(|cx| {
-            Project::local(
-                self.client().clone(),
-                self.app_state.user_store.clone(),
-                self.app_state.languages.clone(),
-                self.app_state.fs.clone(),
-                cx,
-            )
-        });
-        let (worktree, _) = project
-            .update(cx, |p, cx| {
-                p.find_or_create_local_worktree(root_path, true, cx)
-            })
-            .await
-            .unwrap();
-        worktree
-            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
-            .await;
-        (project, worktree.read_with(cx, |tree, _| tree.id()))
-    }
-
-    async fn build_remote_project(
-        &self,
-        host_project_id: u64,
-        guest_cx: &mut TestAppContext,
-    ) -> ModelHandle<Project> {
-        let active_call = guest_cx.read(ActiveCall::global);
-        let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
-        room.update(guest_cx, |room, cx| {
-            room.join_project(
-                host_project_id,
-                self.app_state.languages.clone(),
-                self.app_state.fs.clone(),
```
|
|
||||||
cx,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn build_workspace(
|
|
||||||
&self,
|
|
||||||
project: &ModelHandle<Project>,
|
|
||||||
cx: &mut TestAppContext,
|
|
||||||
) -> WindowHandle<Workspace> {
|
|
||||||
cx.add_window(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Drop for TestClient {
|
|
||||||
fn drop(&mut self) {
|
|
||||||
self.app_state.client.teardown();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Eq, PartialEq)]
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
struct RoomParticipants {
|
struct RoomParticipants {
|
||||||
|
|
|
@@ -1,4 +1,7 @@
use crate::{
    rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
    tests::TestServer,
};
use call::ActiveCall;
use channel::Channel;
use client::UserId;

@@ -21,20 +24,19 @@ async fn test_core_channel_buffers(
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;

    let channel_id = server
        .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)])
        .await;

    // Client A joins the channel buffer
    let channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

    // Client A edits the buffer
    let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
    buffer_a.update(cx_a, |buffer, cx| {
        buffer.edit([(0..0, "hello world")], None, cx)
    });

@@ -45,17 +47,15 @@ async fn test_core_channel_buffers(
        buffer.edit([(0..5, "goodbye")], None, cx)
    });
    buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
    assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
    deterministic.run_until_parked();

    // Client B joins the channel buffer
    let channel_buffer_b = client_b
        .channel_store()
        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

    channel_buffer_b.read_with(cx_b, |buffer, _| {
        assert_collaborators(
            buffer.collaborators(),

@@ -91,9 +91,7 @@ async fn test_core_channel_buffers(
    // Client A rejoins the channel buffer
    let _channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    deterministic.run_until_parked();

@@ -136,7 +134,7 @@ async fn test_channel_buffer_replica_ids(
    let channel_id = server
        .make_channel(
            "the-channel",
            (&client_a, cx_a),
            &mut [(&client_b, cx_b), (&client_c, cx_c)],
        )

@@ -160,23 +158,17 @@ async fn test_channel_buffer_replica_ids(
    // C first so that the replica IDs in the project and the channel buffer are different
    let channel_buffer_c = client_c
        .channel_store()
        .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    let channel_buffer_b = client_b
        .channel_store()
        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    let channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

@@ -286,28 +278,30 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext)
    let mut server = TestServer::start(&deterministic).await;
    let client_a = server.create_client(cx_a, "user_a").await;

    let channel_id = server
        .make_channel("the-channel", (&client_a, cx_a), &mut [])
        .await;

    let channel_buffer_1 = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
    let channel_buffer_2 = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
    let channel_buffer_3 = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));

    // All concurrent tasks for opening a channel buffer return the same model handle.
    let (channel_buffer, channel_buffer_2, channel_buffer_3) =
        future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
            .await
            .unwrap();
    let channel_buffer_model_id = channel_buffer.id();
    assert_eq!(channel_buffer, channel_buffer_2);
    assert_eq!(channel_buffer, channel_buffer_3);

    channel_buffer.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(0..0, "hello")], None, cx);
        })

@@ -315,7 +309,7 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext)
    deterministic.run_until_parked();

    cx_a.update(|_| {
        drop(channel_buffer);
        drop(channel_buffer_2);
        drop(channel_buffer_3);
    });

@@ -324,10 +318,10 @@ async fn test_reopen_channel_buffer(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext)
    // The channel buffer can be reopened after dropping it.
    let channel_buffer = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    assert_ne!(channel_buffer.id(), channel_buffer_model_id);
    channel_buffer.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, _| {
            assert_eq!(buffer.text(), "hello");

@@ -347,22 +341,17 @@ async fn test_channel_buffer_disconnect(
    let client_b = server.create_client(cx_b, "user_b").await;

    let channel_id = server
        .make_channel("the-channel", (&client_a, cx_a), &mut [(&client_b, cx_b)])
        .await;

    let channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

    let channel_buffer_b = client_b
        .channel_store()
        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

@@ -375,7 +364,7 @@ async fn test_channel_buffer_disconnect(
            buffer.channel().as_ref(),
            &Channel {
                id: channel_id,
                name: "the-channel".to_string()
            }
        );
        assert!(!buffer.is_connected());

@@ -403,13 +392,180 @@ async fn test_channel_buffer_disconnect(
            buffer.channel().as_ref(),
            &Channel {
                id: channel_id,
                name: "the-channel".to_string()
            }
        );
        assert!(!buffer.is_connected());
    });
}

#[gpui::test]
async fn test_rejoin_channel_buffer(
    deterministic: Arc<Deterministic>,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
) {
    deterministic.forbid_parking();
    let mut server = TestServer::start(&deterministic).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;

    let channel_id = server
        .make_channel("the-channel", (&client_a, cx_a), &mut [(&client_b, cx_b)])
        .await;

    let channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    let channel_buffer_b = client_b
        .channel_store()
        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

    channel_buffer_a.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(0..0, "1")], None, cx);
        })
    });
    deterministic.run_until_parked();

    // Client A disconnects.
    server.forbid_connections();
    server.disconnect_client(client_a.peer_id().unwrap());

    // Both clients make an edit.
    channel_buffer_a.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(1..1, "2")], None, cx);
        })
    });
    channel_buffer_b.update(cx_b, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(0..0, "0")], None, cx);
        })
    });

    // Both clients see their own edit.
    deterministic.run_until_parked();
    channel_buffer_a.read_with(cx_a, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "12");
    });
    channel_buffer_b.read_with(cx_b, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "01");
    });

    // Client A reconnects. Both clients see each other's edits, and see
    // the same collaborators.
    server.allow_connections();
    deterministic.advance_clock(RECEIVE_TIMEOUT);
    channel_buffer_a.read_with(cx_a, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "012");
    });
    channel_buffer_b.read_with(cx_b, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "012");
    });

    channel_buffer_a.read_with(cx_a, |buffer_a, _| {
        channel_buffer_b.read_with(cx_b, |buffer_b, _| {
            assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
        });
    });
}

#[gpui::test]
async fn test_channel_buffers_and_server_restarts(
    deterministic: Arc<Deterministic>,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
    cx_c: &mut TestAppContext,
) {
    deterministic.forbid_parking();
    let mut server = TestServer::start(&deterministic).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;
    let client_c = server.create_client(cx_c, "user_c").await;

    let channel_id = server
        .make_channel(
            "the-channel",
            (&client_a, cx_a),
            &mut [(&client_b, cx_b), (&client_c, cx_c)],
        )
        .await;

    let channel_buffer_a = client_a
        .channel_store()
        .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    let channel_buffer_b = client_b
        .channel_store()
        .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();
    let _channel_buffer_c = client_c
        .channel_store()
        .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx))
        .await
        .unwrap();

    channel_buffer_a.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(0..0, "1")], None, cx);
        })
    });
    deterministic.run_until_parked();

    // Client C can't reconnect.
    client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));

    // Server stops.
    server.reset().await;
    deterministic.advance_clock(RECEIVE_TIMEOUT);

    // While the server is down, both clients make an edit.
    channel_buffer_a.update(cx_a, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(1..1, "2")], None, cx);
        })
    });
    channel_buffer_b.update(cx_b, |buffer, cx| {
        buffer.buffer().update(cx, |buffer, cx| {
            buffer.edit([(0..0, "0")], None, cx);
        })
    });

    // Server restarts.
    server.start().await.unwrap();
    deterministic.advance_clock(CLEANUP_TIMEOUT);

    // Clients reconnect. Clients A and B see each other's edits, and see
    // that client C has disconnected.
    channel_buffer_a.read_with(cx_a, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "012");
    });
    channel_buffer_b.read_with(cx_b, |buffer, cx| {
        assert_eq!(buffer.buffer().read(cx).text(), "012");
    });

    channel_buffer_a.read_with(cx_a, |buffer_a, _| {
        channel_buffer_b.read_with(cx_b, |buffer_b, _| {
            assert_eq!(
                buffer_a
                    .collaborators()
                    .iter()
                    .map(|c| c.user_id)
                    .collect::<Vec<_>>(),
                vec![client_a.user_id().unwrap(), client_b.user_id().unwrap()]
            );
            assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
        });
    });
}

#[track_caller]
fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option<UserId>]) {
    assert_eq!(
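The two new tests above simulate a brief outage by hand with the same sequence of test-server calls. Purely as a sketch (not part of this change, and the helper name is made up), that disconnect/reconnect dance could be factored out on top of the TestServer and Deterministic APIs the tests already use:

use std::sync::Arc;

use client::proto::PeerId;
use gpui::executor::Deterministic;
use rpc::RECEIVE_TIMEOUT;

// Hypothetical helper: compresses the disconnect / reconnect steps used by
// test_rejoin_channel_buffer into one call. Every method invoked here appears
// in the tests above; only the wrapper itself is invented.
fn bounce_client(server: &mut TestServer, deterministic: &Arc<Deterministic>, peer_id: PeerId) {
    server.forbid_connections(); // keep the client offline for the moment
    server.disconnect_client(peer_id); // drop its current connection
    server.allow_connections(); // then let it reconnect
    deterministic.advance_clock(RECEIVE_TIMEOUT); // give the reconnect time to complete
}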
288
crates/collab/src/tests/random_channel_buffer_tests.rs
Normal file
@@ -0,0 +1,288 @@
use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
use anyhow::Result;
use async_trait::async_trait;
use gpui::{executor::Deterministic, TestAppContext};
use rand::prelude::*;
use serde_derive::{Deserialize, Serialize};
use std::{ops::Range, rc::Rc, sync::Arc};
use text::Bias;

#[gpui::test(
    iterations = 100,
    on_failure = "crate::tests::save_randomized_test_plan"
)]
async fn test_random_channel_buffers(
    cx: &mut TestAppContext,
    deterministic: Arc<Deterministic>,
    rng: StdRng,
) {
    run_randomized_test::<RandomChannelBufferTest>(cx, deterministic, rng).await;
}

struct RandomChannelBufferTest;

#[derive(Clone, Serialize, Deserialize)]
enum ChannelBufferOperation {
    JoinChannelNotes {
        channel_name: String,
    },
    LeaveChannelNotes {
        channel_name: String,
    },
    EditChannelNotes {
        channel_name: String,
        edits: Vec<(Range<usize>, Arc<str>)>,
    },
    Noop,
}

const CHANNEL_COUNT: usize = 3;

#[async_trait(?Send)]
impl RandomizedTest for RandomChannelBufferTest {
    type Operation = ChannelBufferOperation;

    async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) {
        let db = &server.app_state.db;
        for ix in 0..CHANNEL_COUNT {
            let id = db
                .create_channel(
                    &format!("channel-{ix}"),
                    None,
                    &format!("livekit-room-{ix}"),
                    users[0].user_id,
                )
                .await
                .unwrap();
            for user in &users[1..] {
                db.invite_channel_member(id, user.user_id, users[0].user_id, false)
                    .await
                    .unwrap();
                db.respond_to_channel_invite(id, user.user_id, true)
                    .await
                    .unwrap();
            }
        }
    }

    fn generate_operation(
        client: &TestClient,
        rng: &mut StdRng,
        _: &mut UserTestPlan,
        cx: &TestAppContext,
    ) -> ChannelBufferOperation {
        let channel_store = client.channel_store().clone();
        let channel_buffers = client.channel_buffers();

        // When signed out, we can't do anything unless a channel buffer is
        // already open.
        if channel_buffers.is_empty()
            && channel_store.read_with(cx, |store, _| store.channel_count() == 0)
        {
            return ChannelBufferOperation::Noop;
        }

        loop {
            match rng.gen_range(0..100_u32) {
                0..=29 => {
                    let channel_name = client.channel_store().read_with(cx, |store, cx| {
                        store.channels().find_map(|(_, channel)| {
                            if store.has_open_channel_buffer(channel.id, cx) {
                                None
                            } else {
                                Some(channel.name.clone())
                            }
                        })
                    });
                    if let Some(channel_name) = channel_name {
                        break ChannelBufferOperation::JoinChannelNotes { channel_name };
                    }
                }

                30..=40 => {
                    if let Some(buffer) = channel_buffers.iter().choose(rng) {
                        let channel_name = buffer.read_with(cx, |b, _| b.channel().name.clone());
                        break ChannelBufferOperation::LeaveChannelNotes { channel_name };
                    }
                }

                _ => {
                    if let Some(buffer) = channel_buffers.iter().choose(rng) {
                        break buffer.read_with(cx, |b, _| {
                            let channel_name = b.channel().name.clone();
                            let edits = b
                                .buffer()
                                .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3));
                            ChannelBufferOperation::EditChannelNotes {
                                channel_name,
                                edits,
                            }
                        });
                    }
                }
            }
        }
    }

    async fn apply_operation(
        client: &TestClient,
        operation: ChannelBufferOperation,
        cx: &mut TestAppContext,
    ) -> Result<(), TestError> {
        match operation {
            ChannelBufferOperation::JoinChannelNotes { channel_name } => {
                let buffer = client.channel_store().update(cx, |store, cx| {
                    let channel_id = store
                        .channels()
                        .find(|(_, c)| c.name == channel_name)
                        .unwrap()
                        .1
                        .id;
                    if store.has_open_channel_buffer(channel_id, cx) {
                        Err(TestError::Inapplicable)
                    } else {
                        Ok(store.open_channel_buffer(channel_id, cx))
                    }
                })?;

                log::info!(
                    "{}: opening notes for channel {channel_name}",
                    client.username
                );
                client.channel_buffers().insert(buffer.await?);
            }

            ChannelBufferOperation::LeaveChannelNotes { channel_name } => {
                let buffer = cx.update(|cx| {
                    let mut left_buffer = Err(TestError::Inapplicable);
                    client.channel_buffers().retain(|buffer| {
                        if buffer.read(cx).channel().name == channel_name {
                            left_buffer = Ok(buffer.clone());
                            false
                        } else {
                            true
                        }
                    });
                    left_buffer
                })?;

                log::info!(
                    "{}: closing notes for channel {channel_name}",
                    client.username
                );
                cx.update(|_| drop(buffer));
            }

            ChannelBufferOperation::EditChannelNotes {
                channel_name,
                edits,
            } => {
                let channel_buffer = cx
                    .read(|cx| {
                        client
                            .channel_buffers()
                            .iter()
                            .find(|buffer| buffer.read(cx).channel().name == channel_name)
                            .cloned()
                    })
                    .ok_or_else(|| TestError::Inapplicable)?;

                log::info!(
                    "{}: editing notes for channel {channel_name} with {:?}",
                    client.username,
                    edits
                );

                channel_buffer.update(cx, |buffer, cx| {
                    let buffer = buffer.buffer();
                    buffer.update(cx, |buffer, cx| {
                        let snapshot = buffer.snapshot();
                        buffer.edit(
                            edits.into_iter().map(|(range, text)| {
                                let start = snapshot.clip_offset(range.start, Bias::Left);
                                let end = snapshot.clip_offset(range.end, Bias::Right);
                                (start..end, text)
                            }),
                            None,
                            cx,
                        );
                    });
                });
            }

            ChannelBufferOperation::Noop => Err(TestError::Inapplicable)?,
        }
        Ok(())
    }

    async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext) {
        let channel_store = client.channel_store();
        while channel_store.read_with(cx, |store, _| store.channel_count() == 0) {
            channel_store.next_notification(cx).await;
        }
    }

    async fn on_quiesce(server: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
        let channels = server.app_state.db.all_channels().await.unwrap();

        for (client, client_cx) in clients.iter_mut() {
            client_cx.update(|cx| {
                client
                    .channel_buffers()
                    .retain(|b| b.read(cx).is_connected());
            });
        }

        for (channel_id, channel_name) in channels {
            let mut prev_text: Option<(u64, String)> = None;

            let mut collaborator_user_ids = server
                .app_state
                .db
                .get_channel_buffer_collaborators(channel_id)
                .await
                .unwrap()
                .into_iter()
                .map(|id| id.to_proto())
                .collect::<Vec<_>>();
            collaborator_user_ids.sort();

            for (client, client_cx) in clients.iter() {
                let user_id = client.user_id().unwrap();
                client_cx.read(|cx| {
                    if let Some(channel_buffer) = client
                        .channel_buffers()
                        .iter()
                        .find(|b| b.read(cx).channel().id == channel_id.to_proto())
                    {
                        let channel_buffer = channel_buffer.read(cx);

                        // Assert that channel buffer's text matches other clients' copies.
                        let text = channel_buffer.buffer().read(cx).text();
                        if let Some((prev_user_id, prev_text)) = &prev_text {
                            assert_eq!(
                                &text,
                                prev_text,
                                "client {user_id} has different text than client {prev_user_id} for channel {channel_name}",
                            );
                        } else {
                            prev_text = Some((user_id, text.clone()));
                        }

                        // Assert that all clients and the server agree about who is present in the
                        // channel buffer.
                        let collaborators = channel_buffer.collaborators();
                        let mut user_ids =
                            collaborators.iter().map(|c| c.user_id).collect::<Vec<_>>();
                        user_ids.sort();
                        assert_eq!(
                            user_ids,
                            collaborator_user_ids,
                            "client {user_id} has different user ids for channel {channel_name} than the server",
                        );
                    }
                });
            }
        }
    }
}
1585
crates/collab/src/tests/random_project_collaboration_tests.rs
Normal file
File diff suppressed because it is too large.
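The helpers added in the next file, randomized_test_helpers.rs, define the RandomizedTest trait and the TestPlan driver that both randomized suites plug into. As an illustration only, and assuming the trait exactly as defined in that file, a do-nothing suite would have this shape (NoopTest and NoopOperation are made-up names; the real suites are random_channel_buffer_tests.rs and random_project_collaboration_tests.rs):

use async_trait::async_trait;
use gpui::TestAppContext;
use rand::prelude::*;
use serde_derive::{Deserialize, Serialize};
use std::rc::Rc;

struct NoopTest;

// An operation type only needs to be Send + Clone + Serialize + DeserializeOwned.
#[derive(Clone, Serialize, Deserialize)]
struct NoopOperation;

#[async_trait(?Send)]
impl RandomizedTest for NoopTest {
    type Operation = NoopOperation;

    async fn initialize(_: &mut TestServer, _: &[UserTestPlan]) {}

    fn generate_operation(
        _: &TestClient,
        _: &mut StdRng,
        _: &mut UserTestPlan,
        _: &TestAppContext,
    ) -> NoopOperation {
        NoopOperation
    }

    async fn apply_operation(
        _: &TestClient,
        _: NoopOperation,
        _: &mut TestAppContext,
    ) -> Result<(), TestError> {
        // Nothing to do; reporting Inapplicable keeps the operation out of the saved plan.
        Err(TestError::Inapplicable)
    }

    async fn on_client_added(_: &Rc<TestClient>, _: &mut TestAppContext) {}

    async fn on_quiesce(_: &mut TestServer, _: &mut [(Rc<TestClient>, TestAppContext)]) {}
}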
689
crates/collab/src/tests/randomized_test_helpers.rs
Normal file
@@ -0,0 +1,689 @@
use crate::{
    db::{self, NewUserParams, UserId},
    rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
    tests::{TestClient, TestServer},
};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::{executor::Deterministic, Task, TestAppContext};
use parking_lot::Mutex;
use rand::prelude::*;
use rpc::RECEIVE_TIMEOUT;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use settings::SettingsStore;
use std::{
    env,
    path::PathBuf,
    rc::Rc,
    sync::{
        atomic::{AtomicBool, Ordering::SeqCst},
        Arc,
    },
};

lazy_static::lazy_static! {
    static ref PLAN_LOAD_PATH: Option<PathBuf> = path_env_var("LOAD_PLAN");
    static ref PLAN_SAVE_PATH: Option<PathBuf> = path_env_var("SAVE_PLAN");
    static ref MAX_PEERS: usize = env::var("MAX_PEERS")
        .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
        .unwrap_or(3);
    static ref MAX_OPERATIONS: usize = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(10);
}

static LOADED_PLAN_JSON: Mutex<Option<Vec<u8>>> = Mutex::new(None);
static LAST_PLAN: Mutex<Option<Box<dyn Send + FnOnce() -> Vec<u8>>>> = Mutex::new(None);

struct TestPlan<T: RandomizedTest> {
    rng: StdRng,
    replay: bool,
    stored_operations: Vec<(StoredOperation<T::Operation>, Arc<AtomicBool>)>,
    max_operations: usize,
    operation_ix: usize,
    users: Vec<UserTestPlan>,
    next_batch_id: usize,
    allow_server_restarts: bool,
    allow_client_reconnection: bool,
    allow_client_disconnection: bool,
}

pub struct UserTestPlan {
    pub user_id: UserId,
    pub username: String,
    pub allow_client_reconnection: bool,
    pub allow_client_disconnection: bool,
    next_root_id: usize,
    operation_ix: usize,
    online: bool,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum StoredOperation<T> {
    Server(ServerOperation),
    Client {
        user_id: UserId,
        batch_id: usize,
        operation: T,
    },
}

#[derive(Clone, Debug, Serialize, Deserialize)]
enum ServerOperation {
    AddConnection {
        user_id: UserId,
    },
    RemoveConnection {
        user_id: UserId,
    },
    BounceConnection {
        user_id: UserId,
    },
    RestartServer,
    MutateClients {
        batch_id: usize,
        #[serde(skip_serializing)]
        #[serde(skip_deserializing)]
        user_ids: Vec<UserId>,
        quiesce: bool,
    },
}

pub enum TestError {
    Inapplicable,
    Other(anyhow::Error),
}

#[async_trait(?Send)]
pub trait RandomizedTest: 'static + Sized {
    type Operation: Send + Clone + Serialize + DeserializeOwned;

    fn generate_operation(
        client: &TestClient,
        rng: &mut StdRng,
        plan: &mut UserTestPlan,
        cx: &TestAppContext,
    ) -> Self::Operation;

    async fn apply_operation(
        client: &TestClient,
        operation: Self::Operation,
        cx: &mut TestAppContext,
    ) -> Result<(), TestError>;

    async fn initialize(server: &mut TestServer, users: &[UserTestPlan]);

    async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext);

    async fn on_quiesce(server: &mut TestServer, client: &mut [(Rc<TestClient>, TestAppContext)]);
}

pub async fn run_randomized_test<T: RandomizedTest>(
    cx: &mut TestAppContext,
    deterministic: Arc<Deterministic>,
    rng: StdRng,
) {
    deterministic.forbid_parking();
    let mut server = TestServer::start(&deterministic).await;
    let plan = TestPlan::<T>::new(&mut server, rng).await;

    LAST_PLAN.lock().replace({
        let plan = plan.clone();
        Box::new(move || plan.lock().serialize())
    });

    let mut clients = Vec::new();
    let mut client_tasks = Vec::new();
    let mut operation_channels = Vec::new();
    loop {
        let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
            break;
        };
        applied.store(true, SeqCst);
        let did_apply = TestPlan::apply_server_operation(
            plan.clone(),
            deterministic.clone(),
            &mut server,
            &mut clients,
            &mut client_tasks,
            &mut operation_channels,
            next_operation,
            cx,
        )
        .await;
        if !did_apply {
            applied.store(false, SeqCst);
        }
    }

    drop(operation_channels);
    deterministic.start_waiting();
    futures::future::join_all(client_tasks).await;
    deterministic.finish_waiting();

    deterministic.run_until_parked();
    T::on_quiesce(&mut server, &mut clients).await;

    for (client, mut cx) in clients {
        cx.update(|cx| {
            let store = cx.remove_global::<SettingsStore>();
            cx.clear_globals();
            cx.set_global(store);
            drop(client);
        });
    }
    deterministic.run_until_parked();

    if let Some(path) = &*PLAN_SAVE_PATH {
        eprintln!("saved test plan to path {:?}", path);
        std::fs::write(path, plan.lock().serialize()).unwrap();
    }
}

pub fn save_randomized_test_plan() {
    if let Some(serialize_plan) = LAST_PLAN.lock().take() {
        if let Some(path) = &*PLAN_SAVE_PATH {
            eprintln!("saved test plan to path {:?}", path);
            std::fs::write(path, serialize_plan()).unwrap();
        }
    }
}

impl<T: RandomizedTest> TestPlan<T> {
    pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc<Mutex<Self>> {
        let allow_server_restarts = rng.gen_bool(0.7);
        let allow_client_reconnection = rng.gen_bool(0.7);
        let allow_client_disconnection = rng.gen_bool(0.1);

        let mut users = Vec::new();
        for ix in 0..*MAX_PEERS {
            let username = format!("user-{}", ix + 1);
            let user_id = server
                .app_state
                .db
                .create_user(
                    &format!("{username}@example.com"),
                    false,
                    NewUserParams {
                        github_login: username.clone(),
                        github_user_id: (ix + 1) as i32,
                        invite_count: 0,
                    },
                )
                .await
                .unwrap()
                .user_id;
            users.push(UserTestPlan {
                user_id,
                username,
                online: false,
                next_root_id: 0,
                operation_ix: 0,
                allow_client_disconnection,
                allow_client_reconnection,
            });
        }

        T::initialize(server, &users).await;

        let plan = Arc::new(Mutex::new(Self {
            replay: false,
            allow_server_restarts,
            allow_client_reconnection,
            allow_client_disconnection,
            stored_operations: Vec::new(),
            operation_ix: 0,
            next_batch_id: 0,
            max_operations: *MAX_OPERATIONS,
            users,
            rng,
        }));

        if let Some(path) = &*PLAN_LOAD_PATH {
            let json = LOADED_PLAN_JSON
                .lock()
                .get_or_insert_with(|| {
                    eprintln!("loaded test plan from path {:?}", path);
                    std::fs::read(path).unwrap()
                })
                .clone();
            plan.lock().deserialize(json);
        }

        plan
    }

    fn deserialize(&mut self, json: Vec<u8>) {
        let stored_operations: Vec<StoredOperation<T::Operation>> =
            serde_json::from_slice(&json).unwrap();
        self.replay = true;
        self.stored_operations = stored_operations
            .iter()
            .cloned()
            .enumerate()
            .map(|(i, mut operation)| {
                let did_apply = Arc::new(AtomicBool::new(false));
                if let StoredOperation::Server(ServerOperation::MutateClients {
                    batch_id: current_batch_id,
                    user_ids,
                    ..
                }) = &mut operation
                {
                    assert!(user_ids.is_empty());
                    user_ids.extend(stored_operations[i + 1..].iter().filter_map(|operation| {
                        if let StoredOperation::Client {
                            user_id, batch_id, ..
                        } = operation
                        {
                            if batch_id == current_batch_id {
                                return Some(user_id);
                            }
                        }
                        None
                    }));
                    user_ids.sort_unstable();
                }
                (operation, did_apply)
            })
            .collect()
    }

    fn serialize(&mut self) -> Vec<u8> {
        // Format each operation as one line
        let mut json = Vec::new();
        json.push(b'[');
        for (operation, applied) in &self.stored_operations {
            if !applied.load(SeqCst) {
                continue;
            }
            if json.len() > 1 {
                json.push(b',');
            }
            json.extend_from_slice(b"\n  ");
            serde_json::to_writer(&mut json, operation).unwrap();
        }
        json.extend_from_slice(b"\n]\n");
        json
    }

    fn next_server_operation(
        &mut self,
        clients: &[(Rc<TestClient>, TestAppContext)],
    ) -> Option<(ServerOperation, Arc<AtomicBool>)> {
        if self.replay {
            while let Some(stored_operation) = self.stored_operations.get(self.operation_ix) {
                self.operation_ix += 1;
                if let (StoredOperation::Server(operation), applied) = stored_operation {
                    return Some((operation.clone(), applied.clone()));
                }
            }
            None
        } else {
            let operation = self.generate_server_operation(clients)?;
            let applied = Arc::new(AtomicBool::new(false));
            self.stored_operations
                .push((StoredOperation::Server(operation.clone()), applied.clone()));
            Some((operation, applied))
        }
    }

    fn next_client_operation(
        &mut self,
        client: &TestClient,
        current_batch_id: usize,
        cx: &TestAppContext,
    ) -> Option<(T::Operation, Arc<AtomicBool>)> {
        let current_user_id = client.current_user_id(cx);
        let user_ix = self
            .users
            .iter()
            .position(|user| user.user_id == current_user_id)
            .unwrap();
        let user_plan = &mut self.users[user_ix];

        if self.replay {
            while let Some(stored_operation) = self.stored_operations.get(user_plan.operation_ix) {
                user_plan.operation_ix += 1;
                if let (
                    StoredOperation::Client {
                        user_id, operation, ..
                    },
                    applied,
                ) = stored_operation
                {
                    if user_id == &current_user_id {
                        return Some((operation.clone(), applied.clone()));
                    }
                }
            }
            None
        } else {
            if self.operation_ix == self.max_operations {
                return None;
            }
            self.operation_ix += 1;
            let operation = T::generate_operation(
                client,
                &mut self.rng,
                self.users
                    .iter_mut()
                    .find(|user| user.user_id == current_user_id)
                    .unwrap(),
                cx,
            );
            let applied = Arc::new(AtomicBool::new(false));
            self.stored_operations.push((
                StoredOperation::Client {
                    user_id: current_user_id,
                    batch_id: current_batch_id,
                    operation: operation.clone(),
                },
                applied.clone(),
            ));
            Some((operation, applied))
        }
    }

    fn generate_server_operation(
        &mut self,
        clients: &[(Rc<TestClient>, TestAppContext)],
    ) -> Option<ServerOperation> {
        if self.operation_ix == self.max_operations {
            return None;
        }

        Some(loop {
            break match self.rng.gen_range(0..100) {
                0..=29 if clients.len() < self.users.len() => {
                    let user = self
                        .users
                        .iter()
                        .filter(|u| !u.online)
                        .choose(&mut self.rng)
                        .unwrap();
                    self.operation_ix += 1;
                    ServerOperation::AddConnection {
                        user_id: user.user_id,
                    }
                }
                30..=34 if clients.len() > 1 && self.allow_client_disconnection => {
                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
                    let user_id = client.current_user_id(cx);
                    self.operation_ix += 1;
                    ServerOperation::RemoveConnection { user_id }
                }
                35..=39 if clients.len() > 1 && self.allow_client_reconnection => {
                    let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
                    let user_id = client.current_user_id(cx);
                    self.operation_ix += 1;
                    ServerOperation::BounceConnection { user_id }
                }
                40..=44 if self.allow_server_restarts && clients.len() > 1 => {
                    self.operation_ix += 1;
                    ServerOperation::RestartServer
                }
                _ if !clients.is_empty() => {
                    let count = self
                        .rng
                        .gen_range(1..10)
                        .min(self.max_operations - self.operation_ix);
                    let batch_id = util::post_inc(&mut self.next_batch_id);
                    let mut user_ids = (0..count)
                        .map(|_| {
                            let ix = self.rng.gen_range(0..clients.len());
                            let (client, cx) = &clients[ix];
                            client.current_user_id(cx)
                        })
                        .collect::<Vec<_>>();
                    user_ids.sort_unstable();
                    ServerOperation::MutateClients {
                        user_ids,
                        batch_id,
                        quiesce: self.rng.gen_bool(0.7),
                    }
                }
                _ => continue,
            };
        })
    }

    async fn apply_server_operation(
        plan: Arc<Mutex<Self>>,
        deterministic: Arc<Deterministic>,
        server: &mut TestServer,
        clients: &mut Vec<(Rc<TestClient>, TestAppContext)>,
        client_tasks: &mut Vec<Task<()>>,
        operation_channels: &mut Vec<futures::channel::mpsc::UnboundedSender<usize>>,
        operation: ServerOperation,
        cx: &mut TestAppContext,
    ) -> bool {
        match operation {
            ServerOperation::AddConnection { user_id } => {
                let username;
                {
                    let mut plan = plan.lock();
                    let user = plan.user(user_id);
                    if user.online {
                        return false;
                    }
                    user.online = true;
                    username = user.username.clone();
                };
                log::info!("adding new connection for {}", username);
                let next_entity_id = (user_id.0 * 10_000) as usize;
                let mut client_cx = TestAppContext::new(
                    cx.foreground_platform(),
                    cx.platform(),
                    deterministic.build_foreground(user_id.0 as usize),
                    deterministic.build_background(),
                    cx.font_cache(),
                    cx.leak_detector(),
                    next_entity_id,
                    cx.function_name.clone(),
                );

                let (operation_tx, operation_rx) = futures::channel::mpsc::unbounded();
                let client = Rc::new(server.create_client(&mut client_cx, &username).await);
                operation_channels.push(operation_tx);
                clients.push((client.clone(), client_cx.clone()));
                client_tasks.push(client_cx.foreground().spawn(Self::simulate_client(
                    plan.clone(),
                    client,
                    operation_rx,
                    client_cx,
                )));

                log::info!("added connection for {}", username);
            }

            ServerOperation::RemoveConnection {
                user_id: removed_user_id,
            } => {
                log::info!("simulating full disconnection of user {}", removed_user_id);
                let client_ix = clients
                    .iter()
                    .position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
                let Some(client_ix) = client_ix else {
                    return false;
                };
                let user_connection_ids = server
                    .connection_pool
                    .lock()
                    .user_connection_ids(removed_user_id)
                    .collect::<Vec<_>>();
                assert_eq!(user_connection_ids.len(), 1);
                let removed_peer_id = user_connection_ids[0].into();
                let (client, mut client_cx) = clients.remove(client_ix);
                let client_task = client_tasks.remove(client_ix);
                operation_channels.remove(client_ix);
                server.forbid_connections();
                server.disconnect_client(removed_peer_id);
                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
                deterministic.start_waiting();
                log::info!("waiting for user {} to exit...", removed_user_id);
                client_task.await;
                deterministic.finish_waiting();
                server.allow_connections();

                for project in client.remote_projects().iter() {
                    project.read_with(&client_cx, |project, _| {
                        assert!(
                            project.is_read_only(),
                            "project {:?} should be read only",
                            project.remote_id()
                        )
                    });
                }

                for (client, cx) in clients {
                    let contacts = server
                        .app_state
                        .db
                        .get_contacts(client.current_user_id(cx))
                        .await
                        .unwrap();
                    let pool = server.connection_pool.lock();
                    for contact in contacts {
                        if let db::Contact::Accepted { user_id, busy, .. } = contact {
                            if user_id == removed_user_id {
                                assert!(!pool.is_user_online(user_id));
                                assert!(!busy);
                            }
                        }
                    }
                }

                log::info!("{} removed", client.username);
                plan.lock().user(removed_user_id).online = false;
                client_cx.update(|cx| {
                    cx.clear_globals();
                    drop(client);
                });
            }

            ServerOperation::BounceConnection { user_id } => {
                log::info!("simulating temporary disconnection of user {}", user_id);
                let user_connection_ids = server
                    .connection_pool
                    .lock()
                    .user_connection_ids(user_id)
                    .collect::<Vec<_>>();
                if user_connection_ids.is_empty() {
                    return false;
                }
                assert_eq!(user_connection_ids.len(), 1);
                let peer_id = user_connection_ids[0].into();
                server.disconnect_client(peer_id);
                deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
            }

            ServerOperation::RestartServer => {
                log::info!("simulating server restart");
                server.reset().await;
                deterministic.advance_clock(RECEIVE_TIMEOUT);
                server.start().await.unwrap();
                deterministic.advance_clock(CLEANUP_TIMEOUT);
                let environment = &server.app_state.config.zed_environment;
                let (stale_room_ids, _) = server
                    .app_state
                    .db
                    .stale_server_resource_ids(environment, server.id())
                    .await
                    .unwrap();
                assert_eq!(stale_room_ids, vec![]);
            }

            ServerOperation::MutateClients {
                user_ids,
                batch_id,
                quiesce,
            } => {
                let mut applied = false;
                for user_id in user_ids {
                    let client_ix = clients
                        .iter()
                        .position(|(client, cx)| client.current_user_id(cx) == user_id);
                    let Some(client_ix) = client_ix else { continue };
                    applied = true;
                    if let Err(err) = operation_channels[client_ix].unbounded_send(batch_id) {
                        log::error!("error signaling user {user_id}: {err}");
                    }
                }

                if quiesce && applied {
                    deterministic.run_until_parked();
                    T::on_quiesce(server, clients).await;
                }

                return applied;
            }
        }
        true
    }

    async fn simulate_client(
        plan: Arc<Mutex<Self>>,
        client: Rc<TestClient>,
        mut operation_rx: futures::channel::mpsc::UnboundedReceiver<usize>,
        mut cx: TestAppContext,
    ) {
        T::on_client_added(&client, &mut cx).await;

        while let Some(batch_id) = operation_rx.next().await {
            let Some((operation, applied)) =
                plan.lock().next_client_operation(&client, batch_id, &cx)
            else {
                break;
            };
            applied.store(true, SeqCst);
            match T::apply_operation(&client, operation, &mut cx).await {
                Ok(()) => {}
                Err(TestError::Inapplicable) => {
                    applied.store(false, SeqCst);
                    log::info!("skipped operation");
                }
                Err(TestError::Other(error)) => {
                    log::error!("{} error: {}", client.username, error);
                }
            }
            cx.background().simulate_random_delay().await;
        }
        log::info!("{}: done", client.username);
    }

    fn user(&mut self, user_id: UserId) -> &mut UserTestPlan {
        self.users
            .iter_mut()
            .find(|user| user.user_id == user_id)
            .unwrap()
    }
}

impl UserTestPlan {
    pub fn next_root_dir_name(&mut self) -> String {
        let user_id = self.user_id;
        let root_id = util::post_inc(&mut self.next_root_id);
        format!("dir-{user_id}-{root_id}")
    }
}

impl From<anyhow::Error> for TestError {
    fn from(value: anyhow::Error) -> Self {
        Self::Other(value)
    }
}

fn path_env_var(name: &str) -> Option<PathBuf> {
    let value = env::var(name).ok()?;
    let mut path = PathBuf::from(value);
    if path.is_relative() {
        let mut abs_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        abs_path.pop();
        abs_path.pop();
        abs_path.push(path);
        path = abs_path
    }
    Some(path)
}
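A hedged note on how the plan files above are intended to be used: the paths and limits come from the environment variables read in the lazy_static block (LOAD_PLAN, SAVE_PLAN, MAX_PEERS, OPERATIONS), and the saved file contains the JSON emitted by TestPlan::serialize. A minimal in-process sketch of the replay configuration, with illustrative values only (the function name is made up):

fn configure_replay(plan_path: &str) {
    // Replay a previously captured plan instead of generating new operations.
    std::env::set_var("LOAD_PLAN", plan_path);
    // Keep the same bounds that produced the failure being reproduced.
    std::env::set_var("MAX_PEERS", "3");
    std::env::set_var("OPERATIONS", "10");
}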
558
crates/collab/src/tests/test_server.rs
Normal file
558
crates/collab/src/tests/test_server.rs
Normal file
|
@ -0,0 +1,558 @@
|
||||||
|
use crate::{
    db::{tests::TestDb, NewUserParams, UserId},
    executor::Executor,
    rpc::{Server, CLEANUP_TIMEOUT},
    AppState,
};
use anyhow::anyhow;
use call::ActiveCall;
use channel::{channel_buffer::ChannelBuffer, ChannelStore};
use client::{
    self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
};
use collections::{HashMap, HashSet};
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
use language::LanguageRegistry;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
use settings::SettingsStore;
use std::{
    cell::{Ref, RefCell, RefMut},
    env,
    ops::{Deref, DerefMut},
    path::Path,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
};
use util::http::FakeHttpClient;
use workspace::Workspace;

pub struct TestServer {
    pub app_state: Arc<AppState>,
    pub test_live_kit_server: Arc<live_kit_client::TestServer>,
    server: Arc<Server>,
    connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
    forbid_connections: Arc<AtomicBool>,
    _test_db: TestDb,
}

pub struct TestClient {
    pub username: String,
    pub app_state: Arc<workspace::AppState>,
    state: RefCell<TestClientState>,
}

#[derive(Default)]
struct TestClientState {
    local_projects: Vec<ModelHandle<Project>>,
    remote_projects: Vec<ModelHandle<Project>>,
    buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
    channel_buffers: HashSet<ModelHandle<ChannelBuffer>>,
}

pub struct ContactsSummary {
    pub current: Vec<String>,
    pub outgoing_requests: Vec<String>,
    pub incoming_requests: Vec<String>,
}

impl TestServer {
    pub async fn start(deterministic: &Arc<Deterministic>) -> Self {
        static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);

        let use_postgres = env::var("USE_POSTGRES").ok();
        let use_postgres = use_postgres.as_deref();
        let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
            TestDb::postgres(deterministic.build_background())
        } else {
            TestDb::sqlite(deterministic.build_background())
        };
        let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
        let live_kit_server = live_kit_client::TestServer::create(
            format!("http://livekit.{}.test", live_kit_server_id),
            format!("devkey-{}", live_kit_server_id),
            format!("secret-{}", live_kit_server_id),
            deterministic.build_background(),
        )
        .unwrap();
        let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
        let epoch = app_state
            .db
            .create_server(&app_state.config.zed_environment)
            .await
            .unwrap();
        let server = Server::new(
            epoch,
            app_state.clone(),
            Executor::Deterministic(deterministic.build_background()),
        );
        server.start().await.unwrap();
        // Advance clock to ensure the server's cleanup task is finished.
        deterministic.advance_clock(CLEANUP_TIMEOUT);
        Self {
            app_state,
            server,
            connection_killers: Default::default(),
            forbid_connections: Default::default(),
            _test_db: test_db,
            test_live_kit_server: live_kit_server,
        }
    }

    pub async fn reset(&self) {
        self.app_state.db.reset();
        let epoch = self
            .app_state
            .db
            .create_server(&self.app_state.config.zed_environment)
            .await
            .unwrap();
        self.server.reset(epoch);
    }

    pub async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
        cx.update(|cx| {
            if cx.has_global::<SettingsStore>() {
                panic!("Same cx used to create two test clients")
            }
            cx.set_global(SettingsStore::test(cx));
        });

        let http = FakeHttpClient::with_404_response();
        let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
        {
            user.id
        } else {
            self.app_state
                .db
                .create_user(
                    &format!("{name}@example.com"),
                    false,
                    NewUserParams {
                        github_login: name.into(),
                        github_user_id: 0,
                        invite_count: 0,
                    },
                )
                .await
                .expect("creating user failed")
                .user_id
        };
        let client_name = name.to_string();
        let mut client = cx.read(|cx| Client::new(http.clone(), cx));
        let server = self.server.clone();
        let db = self.app_state.db.clone();
        let connection_killers = self.connection_killers.clone();
        let forbid_connections = self.forbid_connections.clone();

        Arc::get_mut(&mut client)
            .unwrap()
            .set_id(user_id.0 as usize)
            .override_authenticate(move |cx| {
                cx.spawn(|_| async move {
                    let access_token = "the-token".to_string();
                    Ok(Credentials {
                        user_id: user_id.0 as u64,
                        access_token,
                    })
                })
            })
            .override_establish_connection(move |credentials, cx| {
                assert_eq!(credentials.user_id, user_id.0 as u64);
                assert_eq!(credentials.access_token, "the-token");

                let server = server.clone();
                let db = db.clone();
                let connection_killers = connection_killers.clone();
                let forbid_connections = forbid_connections.clone();
                let client_name = client_name.clone();
                cx.spawn(move |cx| async move {
                    if forbid_connections.load(SeqCst) {
                        Err(EstablishConnectionError::other(anyhow!(
                            "server is forbidding connections"
                        )))
                    } else {
                        let (client_conn, server_conn, killed) =
                            Connection::in_memory(cx.background());
                        let (connection_id_tx, connection_id_rx) = oneshot::channel();
                        let user = db
                            .get_user_by_id(user_id)
                            .await
                            .expect("retrieving user failed")
                            .unwrap();
                        cx.background()
                            .spawn(server.handle_connection(
                                server_conn,
                                client_name,
                                user,
                                Some(connection_id_tx),
                                Executor::Deterministic(cx.background()),
                            ))
                            .detach();
                        let connection_id = connection_id_rx.await.unwrap();
                        connection_killers
                            .lock()
                            .insert(connection_id.into(), killed);
                        Ok(client_conn)
                    }
                })
            });

        let fs = FakeFs::new(cx.background());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
        let channel_store =
            cx.add_model(|cx| ChannelStore::new(client.clone(), user_store.clone(), cx));
        let app_state = Arc::new(workspace::AppState {
            client: client.clone(),
            user_store: user_store.clone(),
            channel_store: channel_store.clone(),
            languages: Arc::new(LanguageRegistry::test()),
            fs: fs.clone(),
            build_window_options: |_, _, _| Default::default(),
            initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
            background_actions: || &[],
        });

        cx.update(|cx| {
            theme::init((), cx);
            Project::init(&client, cx);
            client::init(&client, cx);
            language::init(cx);
            editor::init_settings(cx);
            workspace::init(app_state.clone(), cx);
            audio::init((), cx);
            call::init(client.clone(), user_store.clone(), cx);
            channel::init(&client);
        });

        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .unwrap();

        let client = TestClient {
            app_state,
            username: name.to_string(),
            state: Default::default(),
        };
        client.wait_for_current_user(cx).await;
        client
    }

    pub fn disconnect_client(&self, peer_id: PeerId) {
        self.connection_killers
            .lock()
            .remove(&peer_id)
            .unwrap()
            .store(true, SeqCst);
    }

    pub fn forbid_connections(&self) {
        self.forbid_connections.store(true, SeqCst);
    }

    pub fn allow_connections(&self) {
        self.forbid_connections.store(false, SeqCst);
    }

    pub async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
        for ix in 1..clients.len() {
            let (left, right) = clients.split_at_mut(ix);
            let (client_a, cx_a) = left.last_mut().unwrap();
            for (client_b, cx_b) in right {
                client_a
                    .app_state
                    .user_store
                    .update(*cx_a, |store, cx| {
                        store.request_contact(client_b.user_id().unwrap(), cx)
                    })
                    .await
                    .unwrap();
                cx_a.foreground().run_until_parked();
                client_b
                    .app_state
                    .user_store
                    .update(*cx_b, |store, cx| {
                        store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
                    })
                    .await
                    .unwrap();
            }
        }
    }

    pub async fn make_channel(
        &self,
        channel: &str,
        admin: (&TestClient, &mut TestAppContext),
        members: &mut [(&TestClient, &mut TestAppContext)],
    ) -> u64 {
        let (admin_client, admin_cx) = admin;
        let channel_id = admin_client
            .app_state
            .channel_store
            .update(admin_cx, |channel_store, cx| {
                channel_store.create_channel(channel, None, cx)
            })
            .await
            .unwrap();

        for (member_client, member_cx) in members {
            admin_client
                .app_state
                .channel_store
                .update(admin_cx, |channel_store, cx| {
                    channel_store.invite_member(
                        channel_id,
                        member_client.user_id().unwrap(),
                        false,
                        cx,
                    )
                })
                .await
                .unwrap();

            admin_cx.foreground().run_until_parked();

            member_client
                .app_state
                .channel_store
                .update(*member_cx, |channels, _| {
                    channels.respond_to_channel_invite(channel_id, true)
                })
                .await
                .unwrap();
        }

        channel_id
    }

    pub async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
        self.make_contacts(clients).await;

        let (left, right) = clients.split_at_mut(1);
        let (_client_a, cx_a) = &mut left[0];
        let active_call_a = cx_a.read(ActiveCall::global);

        for (client_b, cx_b) in right {
            let user_id_b = client_b.current_user_id(*cx_b).to_proto();
            active_call_a
                .update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
                .await
                .unwrap();

            cx_b.foreground().run_until_parked();
            let active_call_b = cx_b.read(ActiveCall::global);
            active_call_b
                .update(*cx_b, |call, cx| call.accept_incoming(cx))
                .await
                .unwrap();
        }
    }

    pub async fn build_app_state(
        test_db: &TestDb,
        fake_server: &live_kit_client::TestServer,
    ) -> Arc<AppState> {
        Arc::new(AppState {
            db: test_db.db().clone(),
            live_kit_client: Some(Arc::new(fake_server.create_api_client())),
            config: Default::default(),
        })
    }
}

impl Deref for TestServer {
    type Target = Server;

    fn deref(&self) -> &Self::Target {
        &self.server
    }
}

impl Drop for TestServer {
    fn drop(&mut self) {
        self.server.teardown();
        self.test_live_kit_server.teardown().unwrap();
    }
}

impl Deref for TestClient {
    type Target = Arc<Client>;

    fn deref(&self) -> &Self::Target {
        &self.app_state.client
    }
}

impl TestClient {
    pub fn fs(&self) -> &FakeFs {
        self.app_state.fs.as_fake()
    }

    pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
        &self.app_state.channel_store
    }

    pub fn user_store(&self) -> &ModelHandle<UserStore> {
        &self.app_state.user_store
    }

    pub fn language_registry(&self) -> &Arc<LanguageRegistry> {
        &self.app_state.languages
    }

    pub fn client(&self) -> &Arc<Client> {
        &self.app_state.client
    }

    pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
        UserId::from_proto(
            self.app_state
                .user_store
                .read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
        )
    }

    pub async fn wait_for_current_user(&self, cx: &TestAppContext) {
        let mut authed_user = self
            .app_state
            .user_store
            .read_with(cx, |user_store, _| user_store.watch_current_user());
        while authed_user.next().await.unwrap().is_none() {}
    }

    pub async fn clear_contacts(&self, cx: &mut TestAppContext) {
        self.app_state
            .user_store
            .update(cx, |store, _| store.clear_contacts())
            .await;
    }

    pub fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
        Ref::map(self.state.borrow(), |state| &state.local_projects)
    }

    pub fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
        Ref::map(self.state.borrow(), |state| &state.remote_projects)
    }

    pub fn local_projects_mut<'a>(
        &'a self,
    ) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
        RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
    }

    pub fn remote_projects_mut<'a>(
        &'a self,
    ) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
        RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
    }

    pub fn buffers_for_project<'a>(
        &'a self,
        project: &ModelHandle<Project>,
    ) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
        RefMut::map(self.state.borrow_mut(), |state| {
            state.buffers.entry(project.clone()).or_default()
        })
    }

    pub fn buffers<'a>(
        &'a self,
    ) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
    {
        RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
    }

    pub fn channel_buffers<'a>(
        &'a self,
    ) -> impl DerefMut<Target = HashSet<ModelHandle<ChannelBuffer>>> + 'a {
        RefMut::map(self.state.borrow_mut(), |state| &mut state.channel_buffers)
    }

    pub fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
        self.app_state
            .user_store
            .read_with(cx, |store, _| ContactsSummary {
                current: store
                    .contacts()
                    .iter()
                    .map(|contact| contact.user.github_login.clone())
                    .collect(),
                outgoing_requests: store
                    .outgoing_contact_requests()
                    .iter()
                    .map(|user| user.github_login.clone())
                    .collect(),
                incoming_requests: store
                    .incoming_contact_requests()
                    .iter()
                    .map(|user| user.github_login.clone())
                    .collect(),
            })
    }

    pub async fn build_local_project(
        &self,
        root_path: impl AsRef<Path>,
        cx: &mut TestAppContext,
    ) -> (ModelHandle<Project>, WorktreeId) {
        let project = cx.update(|cx| {
            Project::local(
                self.client().clone(),
                self.app_state.user_store.clone(),
                self.app_state.languages.clone(),
                self.app_state.fs.clone(),
                cx,
            )
        });
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(root_path, true, cx)
            })
            .await
            .unwrap();
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
        (project, worktree.read_with(cx, |tree, _| tree.id()))
    }

    pub async fn build_remote_project(
        &self,
        host_project_id: u64,
        guest_cx: &mut TestAppContext,
    ) -> ModelHandle<Project> {
        let active_call = guest_cx.read(ActiveCall::global);
        let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
        room.update(guest_cx, |room, cx| {
            room.join_project(
                host_project_id,
                self.app_state.languages.clone(),
                self.app_state.fs.clone(),
                cx,
            )
        })
        .await
        .unwrap()
    }

    pub fn build_workspace(
        &self,
        project: &ModelHandle<Project>,
        cx: &mut TestAppContext,
    ) -> WindowHandle<Workspace> {
        cx.add_window(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
    }
}

impl Drop for TestClient {
    fn drop(&mut self) {
        self.app_state.client.teardown();
    }
}
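Taken together, `TestServer` and `TestClient` give the collaboration tests a deterministic harness: every client authenticates over an in-memory connection whose kill switch the server holds, so a brief connection loss can be simulated and then healed on demand. A rough usage sketch, assuming it runs under the `#[gpui::test]` macro with a `Deterministic` executor; the channel name is illustrative, and the clock advance reuses `CLEANUP_TIMEOUT` purely for illustration (real tests advance by the client's own reconnect timeouts):

#[gpui::test]
async fn test_rejoin_sketch(
    deterministic: Arc<Deterministic>,
    cx_a: &mut TestAppContext,
    cx_b: &mut TestAppContext,
) {
    deterministic.forbid_parking();
    let mut server = TestServer::start(&deterministic).await;
    let client_a = server.create_client(cx_a, "user_a").await;
    let client_b = server.create_client(cx_b, "user_b").await;

    // "notes" is an illustrative channel name.
    let channel_id = server
        .make_channel("notes", (&client_a, cx_a), &mut [(&client_b, cx_b)])
        .await;

    // Simulate a brief connection loss for client A, then let it reconnect.
    server.forbid_connections();
    server.disconnect_client(client_a.peer_id().unwrap());
    deterministic.advance_clock(CLEANUP_TIMEOUT); // illustrative duration
    server.allow_connections();
    deterministic.run_until_parked();

    let _ = channel_id; // real tests assert on the rejoined channel buffer state here
}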
@ -106,6 +106,7 @@ pub struct Deterministic {
     parker: parking_lot::Mutex<parking::Parker>,
 }
 
+#[must_use]
 pub enum Timer {
     Production(smol::Timer),
     #[cfg(any(test, feature = "test-support"))]
@ -37,8 +37,14 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
             Some("seed") => starting_seed = parse_int(&meta.lit)?,
             Some("on_failure") => {
                 if let Lit::Str(name) = meta.lit {
-                    let ident = Ident::new(&name.value(), name.span());
-                    on_failure_fn_name = quote!(Some(#ident));
+                    let mut path = syn::Path {
+                        leading_colon: None,
+                        segments: Default::default(),
+                    };
+                    for part in name.value().split("::") {
+                        path.segments.push(Ident::new(part, name.span()).into());
+                    }
+                    on_failure_fn_name = quote!(Some(#path));
                 } else {
                     return Err(TokenStream::from(
                         syn::Error::new(
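The macro change above lets `on_failure` name a fully qualified path instead of a bare identifier that must already be in scope. An illustrative attribute usage; the callback path and test name are hypothetical, not part of this change:

// Hypothetical callback path; the macro now parses the "::" segments.
#[gpui::test(seed = 42, on_failure = "crate::tests::save_failing_plan")]
async fn test_random_channel_buffers_sketch(cx: &mut TestAppContext) {
    // ...
}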
@ -127,6 +127,31 @@ pub fn serialize_undo_map_entry(
     }
 }
 
+pub fn split_operations(
+    mut operations: Vec<proto::Operation>,
+) -> impl Iterator<Item = Vec<proto::Operation>> {
+    #[cfg(any(test, feature = "test-support"))]
+    const CHUNK_SIZE: usize = 5;
+
+    #[cfg(not(any(test, feature = "test-support")))]
+    const CHUNK_SIZE: usize = 100;
+
+    let mut done = false;
+    std::iter::from_fn(move || {
+        if done {
+            return None;
+        }
+
+        let operations = operations
+            .drain(..std::cmp::min(CHUNK_SIZE, operations.len()))
+            .collect::<Vec<_>>();
+        if operations.is_empty() {
+            done = true;
+        }
+        Some(operations)
+    })
+}
+
 pub fn serialize_selections(selections: &Arc<[Selection<Anchor>]>) -> Vec<proto::Selection> {
     selections.iter().map(serialize_selection).collect()
 }
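`split_operations` now lives in `language::proto` so both project syncing and channel buffers can chunk large operation batches before sending them over RPC: 5 operations per chunk under test, 100 otherwise, and the iterator always yields at least one (possibly empty) chunk. A standalone, std-only sketch of the same chunking behavior, with `u32` standing in for `proto::Operation`:

// Sketch of the chunking strategy used by split_operations above.
fn split_into_chunks(mut items: Vec<u32>, chunk_size: usize) -> Vec<Vec<u32>> {
    let mut done = false;
    std::iter::from_fn(move || {
        if done {
            return None;
        }
        let chunk: Vec<u32> = items.drain(..chunk_size.min(items.len())).collect();
        if chunk.is_empty() {
            done = true; // emit one trailing empty chunk, then stop
        }
        Some(chunk)
    })
    .collect()
}

fn main() {
    let chunks = split_into_chunks((0..12).collect(), 5);
    // 12 items with a chunk size of 5 -> chunks of len 5, 5, 2, then an empty one.
    assert_eq!(
        chunks.iter().map(|c| c.len()).collect::<Vec<_>>(),
        vec![5, 5, 2, 0]
    );
}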
@ -35,7 +35,7 @@ use language::{
     point_to_lsp,
     proto::{
         deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
-        serialize_anchor, serialize_version,
+        serialize_anchor, serialize_version, split_operations,
     },
     range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
     CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
@ -8200,31 +8200,6 @@ impl LspAdapterDelegate for ProjectLspAdapterDelegate {
     }
 }
 
-fn split_operations(
-    mut operations: Vec<proto::Operation>,
-) -> impl Iterator<Item = Vec<proto::Operation>> {
-    #[cfg(any(test, feature = "test-support"))]
-    const CHUNK_SIZE: usize = 5;
-
-    #[cfg(not(any(test, feature = "test-support")))]
-    const CHUNK_SIZE: usize = 100;
-
-    let mut done = false;
-    std::iter::from_fn(move || {
-        if done {
-            return None;
-        }
-
-        let operations = operations
-            .drain(..cmp::min(CHUNK_SIZE, operations.len()))
-            .collect::<Vec<_>>();
-        if operations.is_empty() {
-            done = true;
-        }
-        Some(operations)
-    })
-}
-
 fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
     proto::Symbol {
         language_server_name: symbol.language_server_name.0.to_string(),
@ -1,6 +1,8 @@
 syntax = "proto3";
 package zed.messages;
 
+// Looking for a number? Search "// Current max"
+
 message PeerId {
     uint32 owner_id = 1;
     uint32 id = 2;
@ -151,6 +153,9 @@ message Envelope {
         LeaveChannelBuffer leave_channel_buffer = 134;
         AddChannelBufferCollaborator add_channel_buffer_collaborator = 135;
         RemoveChannelBufferCollaborator remove_channel_buffer_collaborator = 136;
+        UpdateChannelBufferCollaborator update_channel_buffer_collaborator = 139;
+        RejoinChannelBuffers rejoin_channel_buffers = 140;
+        RejoinChannelBuffersResponse rejoin_channel_buffers_response = 141; // Current max
     }
 }
 
@ -430,6 +435,12 @@ message RemoveChannelBufferCollaborator {
     PeerId peer_id = 2;
 }
 
+message UpdateChannelBufferCollaborator {
+    uint64 channel_id = 1;
+    PeerId old_peer_id = 2;
+    PeerId new_peer_id = 3;
+}
+
 message GetDefinition {
     uint64 project_id = 1;
     uint64 buffer_id = 2;
@ -616,6 +627,12 @@ message BufferVersion {
     repeated VectorClockEntry version = 2;
 }
 
+message ChannelBufferVersion {
+    uint64 channel_id = 1;
+    repeated VectorClockEntry version = 2;
+    uint64 epoch = 3;
+}
+
 enum FormatTrigger {
     Save = 0;
     Manual = 1;
@ -1008,12 +1025,28 @@ message JoinChannelBuffer {
     uint64 channel_id = 1;
 }
 
+message RejoinChannelBuffers {
+    repeated ChannelBufferVersion buffers = 1;
+}
+
+message RejoinChannelBuffersResponse {
+    repeated RejoinedChannelBuffer buffers = 1;
+}
+
 message JoinChannelBufferResponse {
     uint64 buffer_id = 1;
     uint32 replica_id = 2;
     string base_text = 3;
     repeated Operation operations = 4;
     repeated Collaborator collaborators = 5;
+    uint64 epoch = 6;
+}
+
+message RejoinedChannelBuffer {
+    uint64 channel_id = 1;
+    repeated VectorClockEntry version = 2;
+    repeated Operation operations = 3;
+    repeated Collaborator collaborators = 4;
 }
 
 message LeaveChannelBuffer {
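With `ChannelBufferVersion` and the rejoin messages in place, a reconnecting client can describe what it already has (per-channel epoch plus vector clock) and receive only the operations and collaborators it missed. A hedged sketch of building the request, assuming the generated Rust types in the `proto` crate mirror the field names above; the ids are illustrative and the vector clock entries are elided:

// Sketch only: illustrative ids, vector clock entries omitted.
fn rejoin_request_sketch() -> proto::RejoinChannelBuffers {
    proto::RejoinChannelBuffers {
        buffers: vec![proto::ChannelBufferVersion {
            channel_id: 42,
            epoch: 7,
            version: Vec::new(),
        }],
    }
}
// The response pairs each channel with the operations to replay and the
// current collaborator list (RejoinChannelBuffersResponse / RejoinedChannelBuffer).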
@ -229,6 +229,8 @@ messages!(
     (StartLanguageServer, Foreground),
     (SynchronizeBuffers, Foreground),
     (SynchronizeBuffersResponse, Foreground),
+    (RejoinChannelBuffers, Foreground),
+    (RejoinChannelBuffersResponse, Foreground),
     (Test, Foreground),
     (Unfollow, Foreground),
     (UnshareProject, Foreground),
@ -257,6 +259,7 @@ messages!(
     (UpdateChannelBuffer, Foreground),
     (RemoveChannelBufferCollaborator, Foreground),
     (AddChannelBufferCollaborator, Foreground),
+    (UpdateChannelBufferCollaborator, Foreground),
 );
 
 request_messages!(
@ -319,6 +322,7 @@ request_messages!(
     (SearchProject, SearchProjectResponse),
     (ShareProject, ShareProjectResponse),
     (SynchronizeBuffers, SynchronizeBuffersResponse),
+    (RejoinChannelBuffers, RejoinChannelBuffersResponse),
     (Test, Test),
     (UpdateBuffer, Ack),
     (UpdateParticipantLocation, Ack),
@ -386,7 +390,8 @@ entity_messages!(
     channel_id,
     UpdateChannelBuffer,
     RemoveChannelBufferCollaborator,
-    AddChannelBufferCollaborator
+    AddChannelBufferCollaborator,
+    UpdateChannelBufferCollaborator
 );
 
 const KIB: usize = 1024;
@ -9,7 +9,6 @@ const CARGO_TEST_ARGS = [
   '--release',
   '--lib',
   '--package', 'collab',
-  'random_collaboration',
 ]
 
 if (require.main === module) {
@ -99,7 +98,7 @@ function buildTests() {
 }
 
 function runTests(env) {
-  const {status, stdout} = spawnSync('cargo', ['test', ...CARGO_TEST_ARGS], {
+  const {status, stdout} = spawnSync('cargo', ['test', ...CARGO_TEST_ARGS, 'random_project_collaboration'], {
     stdio: 'pipe',
     encoding: 'utf8',
     env: {