Get collab2 green
parent c529343ba1
commit e1525e2b47
265 changed files with 64477 additions and 40 deletions
crates/collab2/src/db/ids.rs (new file, 199 lines)
@@ -0,0 +1,199 @@
use crate::Result;
use rpc::proto;
use sea_orm::{entity::prelude::*, DbErr};
use serde::{Deserialize, Serialize};

macro_rules! id_type {
    ($name:ident) => {
        #[derive(
            Clone,
            Copy,
            Debug,
            Default,
            PartialEq,
            Eq,
            PartialOrd,
            Ord,
            Hash,
            Serialize,
            Deserialize,
            DeriveValueType,
        )]
        #[serde(transparent)]
        pub struct $name(pub i32);

        impl $name {
            #[allow(unused)]
            pub const MAX: Self = Self(i32::MAX);

            #[allow(unused)]
            pub fn from_proto(value: u64) -> Self {
                Self(value as i32)
            }

            #[allow(unused)]
            pub fn to_proto(self) -> u64 {
                self.0 as u64
            }
        }

        impl std::fmt::Display for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
                self.0.fmt(f)
            }
        }

        impl sea_orm::TryFromU64 for $name {
            fn try_from_u64(n: u64) -> Result<Self, DbErr> {
                Ok(Self(n.try_into().map_err(|_| {
                    DbErr::ConvertFromU64(concat!(
                        "error converting ",
                        stringify!($name),
                        " to u64"
                    ))
                })?))
            }
        }

        impl sea_orm::sea_query::Nullable for $name {
            fn null() -> Value {
                Value::Int(None)
            }
        }
    };
}

id_type!(BufferId);
id_type!(AccessTokenId);
id_type!(ChannelChatParticipantId);
id_type!(ChannelId);
id_type!(ChannelMemberId);
id_type!(MessageId);
id_type!(ContactId);
id_type!(FollowerId);
id_type!(RoomId);
id_type!(RoomParticipantId);
id_type!(ProjectId);
id_type!(ProjectCollaboratorId);
id_type!(ReplicaId);
id_type!(ServerId);
id_type!(SignupId);
id_type!(UserId);
id_type!(ChannelBufferCollaboratorId);
id_type!(FlagId);
id_type!(NotificationId);
id_type!(NotificationKindId);

#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)]
#[sea_orm(rs_type = "String", db_type = "String(None)")]
pub enum ChannelRole {
    #[sea_orm(string_value = "admin")]
    Admin,
    #[sea_orm(string_value = "member")]
    #[default]
    Member,
    #[sea_orm(string_value = "guest")]
    Guest,
    #[sea_orm(string_value = "banned")]
    Banned,
}

impl ChannelRole {
    pub fn should_override(&self, other: Self) -> bool {
        use ChannelRole::*;
        match self {
            Admin => matches!(other, Member | Banned | Guest),
            Member => matches!(other, Banned | Guest),
            Banned => matches!(other, Guest),
            Guest => false,
        }
    }

    pub fn max(&self, other: Self) -> Self {
        if self.should_override(other) {
            *self
        } else {
            other
        }
    }

    pub fn can_see_all_descendants(&self) -> bool {
        use ChannelRole::*;
        match self {
            Admin | Member => true,
            Guest | Banned => false,
        }
    }

    pub fn can_only_see_public_descendants(&self) -> bool {
        use ChannelRole::*;
        match self {
            Guest => true,
            Admin | Member | Banned => false,
        }
    }
}

impl From<proto::ChannelRole> for ChannelRole {
    fn from(value: proto::ChannelRole) -> Self {
        match value {
            proto::ChannelRole::Admin => ChannelRole::Admin,
            proto::ChannelRole::Member => ChannelRole::Member,
            proto::ChannelRole::Guest => ChannelRole::Guest,
            proto::ChannelRole::Banned => ChannelRole::Banned,
        }
    }
}

impl Into<proto::ChannelRole> for ChannelRole {
    fn into(self) -> proto::ChannelRole {
        match self {
            ChannelRole::Admin => proto::ChannelRole::Admin,
            ChannelRole::Member => proto::ChannelRole::Member,
            ChannelRole::Guest => proto::ChannelRole::Guest,
            ChannelRole::Banned => proto::ChannelRole::Banned,
        }
    }
}

impl Into<i32> for ChannelRole {
    fn into(self) -> i32 {
        let proto: proto::ChannelRole = self.into();
        proto.into()
    }
}

#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)]
#[sea_orm(rs_type = "String", db_type = "String(None)")]
pub enum ChannelVisibility {
    #[sea_orm(string_value = "public")]
    Public,
    #[sea_orm(string_value = "members")]
    #[default]
    Members,
}

impl From<proto::ChannelVisibility> for ChannelVisibility {
    fn from(value: proto::ChannelVisibility) -> Self {
        match value {
            proto::ChannelVisibility::Public => ChannelVisibility::Public,
            proto::ChannelVisibility::Members => ChannelVisibility::Members,
        }
    }
}

impl Into<proto::ChannelVisibility> for ChannelVisibility {
    fn into(self) -> proto::ChannelVisibility {
        match self {
            ChannelVisibility::Public => proto::ChannelVisibility::Public,
            ChannelVisibility::Members => proto::ChannelVisibility::Members,
        }
    }
}

impl Into<i32> for ChannelVisibility {
    fn into(self) -> i32 {
        let proto: proto::ChannelVisibility = self.into();
        proto.into()
    }
}
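Illustrative note (not part of this commit): a minimal usage sketch of the id and role types above, assuming the items from ids.rs are in scope.

// Hypothetical usage sketch, not committed code.
fn ids_and_roles_example() {
    // Proto round-trip: wire values are u64, database values are i32.
    let user_id = UserId::from_proto(42);
    assert_eq!(user_id.to_proto(), 42);

    // Role precedence: Admin overrides Member/Guest/Banned, so `max` keeps it.
    assert!(ChannelRole::Admin.should_override(ChannelRole::Member));
    assert_eq!(ChannelRole::Admin.max(ChannelRole::Guest), ChannelRole::Admin);
}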
crates/collab2/src/db/queries.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
use super::*;

pub mod access_tokens;
pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod messages;
pub mod notifications;
pub mod projects;
pub mod rooms;
pub mod servers;
pub mod users;
crates/collab2/src/db/queries/access_tokens.rs (new file, 54 lines)
@@ -0,0 +1,54 @@
use super::*;
use sea_orm::sea_query::Query;

impl Database {
    pub async fn create_access_token(
        &self,
        user_id: UserId,
        access_token_hash: &str,
        max_access_token_count: usize,
    ) -> Result<AccessTokenId> {
        self.transaction(|tx| async {
            let tx = tx;

            let token = access_token::ActiveModel {
                user_id: ActiveValue::set(user_id),
                hash: ActiveValue::set(access_token_hash.into()),
                ..Default::default()
            }
            .insert(&*tx)
            .await?;

            access_token::Entity::delete_many()
                .filter(
                    access_token::Column::Id.in_subquery(
                        Query::select()
                            .column(access_token::Column::Id)
                            .from(access_token::Entity)
                            .and_where(access_token::Column::UserId.eq(user_id))
                            .order_by(access_token::Column::Id, sea_orm::Order::Desc)
                            .limit(10000)
                            .offset(max_access_token_count as u64)
                            .to_owned(),
                    ),
                )
                .exec(&*tx)
                .await?;
            Ok(token.id)
        })
        .await
    }

    pub async fn get_access_token(
        &self,
        access_token_id: AccessTokenId,
    ) -> Result<access_token::Model> {
        self.transaction(|tx| async move {
            Ok(access_token::Entity::find_by_id(access_token_id)
                .one(&*tx)
                .await?
                .ok_or_else(|| anyhow!("no such access token"))?)
        })
        .await
    }
}
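Illustrative note (not part of this commit): a hypothetical call site for the access-token queries above, assuming a `Database` handle and an already-hashed token string; the limit value is only an example.

// Hypothetical call site, not committed code.
async fn issue_token(db: &Database, user_id: UserId, hash: &str) -> Result<AccessTokenId> {
    // Keeps at most 8 tokens per user; older rows past that limit are deleted
    // by the subquery inside create_access_token.
    let id = db.create_access_token(user_id, hash, 8).await?;
    // The stored row can be fetched back by id, e.g. when verifying a request.
    let _token = db.get_access_token(id).await?;
    Ok(id)
}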
crates/collab2/src/db/queries/buffers.rs (new file, 1078 lines; diff suppressed because it is too large)
crates/collab2/src/db/queries/channels.rs (new file, 1312 lines; diff suppressed because it is too large)
crates/collab2/src/db/queries/contacts.rs (new file, 353 lines)
|
@ -0,0 +1,353 @@
|
|||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn get_contacts(&self, user_id: UserId) -> Result<Vec<Contact>> {
|
||||
#[derive(Debug, FromQueryResult)]
|
||||
struct ContactWithUserBusyStatuses {
|
||||
user_id_a: UserId,
|
||||
user_id_b: UserId,
|
||||
a_to_b: bool,
|
||||
accepted: bool,
|
||||
user_a_busy: bool,
|
||||
user_b_busy: bool,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
let user_a_participant = Alias::new("user_a_participant");
|
||||
let user_b_participant = Alias::new("user_b_participant");
|
||||
let mut db_contacts = contact::Entity::find()
|
||||
.column_as(
|
||||
Expr::col((user_a_participant.clone(), room_participant::Column::Id))
|
||||
.is_not_null(),
|
||||
"user_a_busy",
|
||||
)
|
||||
.column_as(
|
||||
Expr::col((user_b_participant.clone(), room_participant::Column::Id))
|
||||
.is_not_null(),
|
||||
"user_b_busy",
|
||||
)
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(user_id)
|
||||
.or(contact::Column::UserIdB.eq(user_id)),
|
||||
)
|
||||
.join_as(
|
||||
JoinType::LeftJoin,
|
||||
contact::Relation::UserARoomParticipant.def(),
|
||||
user_a_participant,
|
||||
)
|
||||
.join_as(
|
||||
JoinType::LeftJoin,
|
||||
contact::Relation::UserBRoomParticipant.def(),
|
||||
user_b_participant,
|
||||
)
|
||||
.into_model::<ContactWithUserBusyStatuses>()
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut contacts = Vec::new();
|
||||
while let Some(db_contact) = db_contacts.next().await {
|
||||
let db_contact = db_contact?;
|
||||
if db_contact.user_id_a == user_id {
|
||||
if db_contact.accepted {
|
||||
contacts.push(Contact::Accepted {
|
||||
user_id: db_contact.user_id_b,
|
||||
busy: db_contact.user_b_busy,
|
||||
});
|
||||
} else if db_contact.a_to_b {
|
||||
contacts.push(Contact::Outgoing {
|
||||
user_id: db_contact.user_id_b,
|
||||
})
|
||||
} else {
|
||||
contacts.push(Contact::Incoming {
|
||||
user_id: db_contact.user_id_b,
|
||||
});
|
||||
}
|
||||
} else if db_contact.accepted {
|
||||
contacts.push(Contact::Accepted {
|
||||
user_id: db_contact.user_id_a,
|
||||
busy: db_contact.user_a_busy,
|
||||
});
|
||||
} else if db_contact.a_to_b {
|
||||
contacts.push(Contact::Incoming {
|
||||
user_id: db_contact.user_id_a,
|
||||
});
|
||||
} else {
|
||||
contacts.push(Contact::Outgoing {
|
||||
user_id: db_contact.user_id_a,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
contacts.sort_unstable_by_key(|contact| contact.user_id());
|
||||
|
||||
Ok(contacts)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(room_participant::Column::UserId.eq(user_id))
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
Ok(participant.is_some())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn has_contact(&self, user_id_1: UserId, user_id_2: UserId) -> Result<bool> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b) = if user_id_1 < user_id_2 {
|
||||
(user_id_1, user_id_2)
|
||||
} else {
|
||||
(user_id_2, user_id_1)
|
||||
};
|
||||
|
||||
Ok(contact::Entity::find()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::Accepted.eq(true)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.is_some())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_contact_request(
|
||||
&self,
|
||||
sender_id: UserId,
|
||||
receiver_id: UserId,
|
||||
) -> Result<NotificationBatch> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if sender_id < receiver_id {
|
||||
(sender_id, receiver_id, true)
|
||||
} else {
|
||||
(receiver_id, sender_id, false)
|
||||
};
|
||||
|
||||
let rows_affected = contact::Entity::insert(contact::ActiveModel {
|
||||
user_id_a: ActiveValue::set(id_a),
|
||||
user_id_b: ActiveValue::set(id_b),
|
||||
a_to_b: ActiveValue::set(a_to_b),
|
||||
accepted: ActiveValue::set(false),
|
||||
should_notify: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([contact::Column::UserIdA, contact::Column::UserIdB])
|
||||
.values([
|
||||
(contact::Column::Accepted, true.into()),
|
||||
(contact::Column::ShouldNotify, false.into()),
|
||||
])
|
||||
.action_and_where(
|
||||
contact::Column::Accepted.eq(false).and(
|
||||
contact::Column::AToB
|
||||
.eq(a_to_b)
|
||||
.and(contact::Column::UserIdA.eq(id_b))
|
||||
.or(contact::Column::AToB
|
||||
.ne(a_to_b)
|
||||
.and(contact::Column::UserIdA.eq(id_a))),
|
||||
),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
if rows_affected == 0 {
|
||||
Err(anyhow!("contact already requested"))?;
|
||||
}
|
||||
|
||||
Ok(self
|
||||
.create_notification(
|
||||
receiver_id,
|
||||
rpc::Notification::ContactRequest {
|
||||
sender_id: sender_id.to_proto(),
|
||||
},
|
||||
true,
|
||||
&*tx,
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.collect())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Returns a bool indicating whether the removed contact had originally accepted or not
|
||||
///
|
||||
/// Deletes the contact identified by the requester and responder ids, and then returns
|
||||
/// whether the deleted contact had originally accepted or was a pending contact request.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `requester_id` - The user that initiates this request
|
||||
/// * `responder_id` - The user that will be removed
|
||||
pub async fn remove_contact(
|
||||
&self,
|
||||
requester_id: UserId,
|
||||
responder_id: UserId,
|
||||
) -> Result<(bool, Option<NotificationId>)> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b) = if responder_id < requester_id {
|
||||
(responder_id, requester_id)
|
||||
} else {
|
||||
(requester_id, responder_id)
|
||||
};
|
||||
|
||||
let contact = contact::Entity::find()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such contact"))?;
|
||||
|
||||
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
|
||||
|
||||
let mut deleted_notification_id = None;
|
||||
if !contact.accepted {
|
||||
deleted_notification_id = self
|
||||
.remove_notification(
|
||||
responder_id,
|
||||
rpc::Notification::ContactRequest {
|
||||
sender_id: requester_id.to_proto(),
|
||||
},
|
||||
&*tx,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok((contact.accepted, deleted_notification_id))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn dismiss_contact_notification(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
contact_user_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if user_id < contact_user_id {
|
||||
(user_id, contact_user_id, true)
|
||||
} else {
|
||||
(contact_user_id, user_id, false)
|
||||
};
|
||||
|
||||
let result = contact::Entity::update_many()
|
||||
.set(contact::ActiveModel {
|
||||
should_notify: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
})
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(
|
||||
contact::Column::AToB
|
||||
.eq(a_to_b)
|
||||
.and(contact::Column::Accepted.eq(true))
|
||||
.or(contact::Column::AToB
|
||||
.ne(a_to_b)
|
||||
.and(contact::Column::Accepted.eq(false))),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("no such contact request"))?
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn respond_to_contact_request(
|
||||
&self,
|
||||
responder_id: UserId,
|
||||
requester_id: UserId,
|
||||
accept: bool,
|
||||
) -> Result<NotificationBatch> {
|
||||
self.transaction(|tx| async move {
|
||||
let (id_a, id_b, a_to_b) = if responder_id < requester_id {
|
||||
(responder_id, requester_id, false)
|
||||
} else {
|
||||
(requester_id, responder_id, true)
|
||||
};
|
||||
let rows_affected = if accept {
|
||||
let result = contact::Entity::update_many()
|
||||
.set(contact::ActiveModel {
|
||||
accepted: ActiveValue::set(true),
|
||||
should_notify: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
})
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::AToB.eq(a_to_b)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
result.rows_affected
|
||||
} else {
|
||||
let result = contact::Entity::delete_many()
|
||||
.filter(
|
||||
contact::Column::UserIdA
|
||||
.eq(id_a)
|
||||
.and(contact::Column::UserIdB.eq(id_b))
|
||||
.and(contact::Column::AToB.eq(a_to_b))
|
||||
.and(contact::Column::Accepted.eq(false)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
result.rows_affected
|
||||
};
|
||||
|
||||
if rows_affected == 0 {
|
||||
Err(anyhow!("no such contact request"))?
|
||||
}
|
||||
|
||||
let mut notifications = Vec::new();
|
||||
notifications.extend(
|
||||
self.mark_notification_as_read_with_response(
|
||||
responder_id,
|
||||
&rpc::Notification::ContactRequest {
|
||||
sender_id: requester_id.to_proto(),
|
||||
},
|
||||
accept,
|
||||
&*tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
|
||||
if accept {
|
||||
notifications.extend(
|
||||
self.create_notification(
|
||||
requester_id,
|
||||
rpc::Notification::ContactRequestAccepted {
|
||||
responder_id: responder_id.to_proto(),
|
||||
},
|
||||
true,
|
||||
&*tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
|
||||
Ok(notifications)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
crates/collab2/src/db/queries/messages.rs (new file, 505 lines)
|
@ -0,0 +1,505 @@
|
|||
use super::*;
|
||||
use rpc::Notification;
|
||||
use sea_orm::TryInsertResult;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
impl Database {
|
||||
pub async fn join_channel_chat(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
connection_id: ConnectionId,
|
||||
user_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.await?;
|
||||
channel_chat_participant::ActiveModel {
|
||||
id: ActiveValue::NotSet,
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
connection_id: ActiveValue::Set(connection_id.id as i32),
|
||||
connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)),
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn channel_chat_connection_lost(
|
||||
&self,
|
||||
connection_id: ConnectionId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
channel_chat_participant::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
channel_chat_participant::Column::ConnectionServerId
|
||||
.eq(connection_id.owner_id),
|
||||
)
|
||||
.add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id)),
|
||||
)
|
||||
.exec(tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn leave_channel_chat(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
connection_id: ConnectionId,
|
||||
_user_id: UserId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
channel_chat_participant::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
channel_chat_participant::Column::ConnectionServerId
|
||||
.eq(connection_id.owner_id),
|
||||
)
|
||||
.add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id))
|
||||
.add(channel_chat_participant::Column::ChannelId.eq(channel_id)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_messages(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
count: usize,
|
||||
before_message_id: Option<MessageId>,
|
||||
) -> Result<Vec<proto::ChannelMessage>> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.await?;
|
||||
|
||||
let mut condition =
|
||||
Condition::all().add(channel_message::Column::ChannelId.eq(channel_id));
|
||||
|
||||
if let Some(before_message_id) = before_message_id {
|
||||
condition = condition.add(channel_message::Column::Id.lt(before_message_id));
|
||||
}
|
||||
|
||||
let rows = channel_message::Entity::find()
|
||||
.filter(condition)
|
||||
.order_by_desc(channel_message::Column::Id)
|
||||
.limit(count as u64)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
self.load_channel_messages(rows, &*tx).await
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_channel_messages_by_id(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
message_ids: &[MessageId],
|
||||
) -> Result<Vec<proto::ChannelMessage>> {
|
||||
self.transaction(|tx| async move {
|
||||
let rows = channel_message::Entity::find()
|
||||
.filter(channel_message::Column::Id.is_in(message_ids.iter().copied()))
|
||||
.order_by_desc(channel_message::Column::Id)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut channels = HashMap::<ChannelId, channel::Model>::default();
|
||||
for row in &rows {
|
||||
channels.insert(
|
||||
row.channel_id,
|
||||
self.get_channel_internal(row.channel_id, &*tx).await?,
|
||||
);
|
||||
}
|
||||
|
||||
for (_, channel) in channels {
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let messages = self.load_channel_messages(rows, &*tx).await?;
|
||||
Ok(messages)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn load_channel_messages(
|
||||
&self,
|
||||
rows: Vec<channel_message::Model>,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<proto::ChannelMessage>> {
|
||||
let mut messages = rows
|
||||
.into_iter()
|
||||
.map(|row| {
|
||||
let nonce = row.nonce.as_u64_pair();
|
||||
proto::ChannelMessage {
|
||||
id: row.id.to_proto(),
|
||||
sender_id: row.sender_id.to_proto(),
|
||||
body: row.body,
|
||||
timestamp: row.sent_at.assume_utc().unix_timestamp() as u64,
|
||||
mentions: vec![],
|
||||
nonce: Some(proto::Nonce {
|
||||
upper_half: nonce.0,
|
||||
lower_half: nonce.1,
|
||||
}),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
messages.reverse();
|
||||
|
||||
let mut mentions = channel_message_mention::Entity::find()
|
||||
.filter(channel_message_mention::Column::MessageId.is_in(messages.iter().map(|m| m.id)))
|
||||
.order_by_asc(channel_message_mention::Column::MessageId)
|
||||
.order_by_asc(channel_message_mention::Column::StartOffset)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut message_ix = 0;
|
||||
while let Some(mention) = mentions.next().await {
|
||||
let mention = mention?;
|
||||
let message_id = mention.message_id.to_proto();
|
||||
while let Some(message) = messages.get_mut(message_ix) {
|
||||
if message.id < message_id {
|
||||
message_ix += 1;
|
||||
} else {
|
||||
if message.id == message_id {
|
||||
message.mentions.push(proto::ChatMention {
|
||||
range: Some(proto::Range {
|
||||
start: mention.start_offset as u64,
|
||||
end: mention.end_offset as u64,
|
||||
}),
|
||||
user_id: mention.user_id.to_proto(),
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(messages)
|
||||
}
|
||||
|
||||
pub async fn create_channel_message(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
body: &str,
|
||||
mentions: &[proto::ChatMention],
|
||||
timestamp: OffsetDateTime,
|
||||
nonce: u128,
|
||||
) -> Result<CreatedChannelMessage> {
|
||||
self.transaction(|tx| async move {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
self.check_user_is_channel_participant(&channel, user_id, &*tx)
|
||||
.await?;
|
||||
|
||||
let mut rows = channel_chat_participant::Entity::find()
|
||||
.filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut is_participant = false;
|
||||
let mut participant_connection_ids = Vec::new();
|
||||
let mut participant_user_ids = Vec::new();
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
if row.user_id == user_id {
|
||||
is_participant = true;
|
||||
}
|
||||
participant_user_ids.push(row.user_id);
|
||||
participant_connection_ids.push(row.connection());
|
||||
}
|
||||
drop(rows);
|
||||
|
||||
if !is_participant {
|
||||
Err(anyhow!("not a chat participant"))?;
|
||||
}
|
||||
|
||||
let timestamp = timestamp.to_offset(time::UtcOffset::UTC);
|
||||
let timestamp = time::PrimitiveDateTime::new(timestamp.date(), timestamp.time());
|
||||
|
||||
let result = channel_message::Entity::insert(channel_message::ActiveModel {
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
sender_id: ActiveValue::Set(user_id),
|
||||
body: ActiveValue::Set(body.to_string()),
|
||||
sent_at: ActiveValue::Set(timestamp),
|
||||
nonce: ActiveValue::Set(Uuid::from_u128(nonce)),
|
||||
id: ActiveValue::NotSet,
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
channel_message::Column::SenderId,
|
||||
channel_message::Column::Nonce,
|
||||
])
|
||||
.do_nothing()
|
||||
.to_owned(),
|
||||
)
|
||||
.do_nothing()
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let message_id;
|
||||
let mut notifications = Vec::new();
|
||||
match result {
|
||||
TryInsertResult::Inserted(result) => {
|
||||
message_id = result.last_insert_id;
|
||||
let mentioned_user_ids =
|
||||
mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
|
||||
let mentions = mentions
|
||||
.iter()
|
||||
.filter_map(|mention| {
|
||||
let range = mention.range.as_ref()?;
|
||||
if !body.is_char_boundary(range.start as usize)
|
||||
|| !body.is_char_boundary(range.end as usize)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
Some(channel_message_mention::ActiveModel {
|
||||
message_id: ActiveValue::Set(message_id),
|
||||
start_offset: ActiveValue::Set(range.start as i32),
|
||||
end_offset: ActiveValue::Set(range.end as i32),
|
||||
user_id: ActiveValue::Set(UserId::from_proto(mention.user_id)),
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
if !mentions.is_empty() {
|
||||
channel_message_mention::Entity::insert_many(mentions)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
for mentioned_user in mentioned_user_ids {
|
||||
notifications.extend(
|
||||
self.create_notification(
|
||||
UserId::from_proto(mentioned_user),
|
||||
rpc::Notification::ChannelMessageMention {
|
||||
message_id: message_id.to_proto(),
|
||||
sender_id: user_id.to_proto(),
|
||||
channel_id: channel_id.to_proto(),
|
||||
},
|
||||
false,
|
||||
&*tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
}
|
||||
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
|
||||
.await?;
|
||||
}
|
||||
_ => {
|
||||
message_id = channel_message::Entity::find()
|
||||
.filter(channel_message::Column::Nonce.eq(Uuid::from_u128(nonce)))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to insert message"))?
|
||||
.id;
|
||||
}
|
||||
}
|
||||
|
||||
let mut channel_members = self.get_channel_participants(&channel, &*tx).await?;
|
||||
channel_members.retain(|member| !participant_user_ids.contains(member));
|
||||
|
||||
Ok(CreatedChannelMessage {
|
||||
message_id,
|
||||
participant_connection_ids,
|
||||
channel_members,
|
||||
notifications,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn observe_channel_message(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
message_id: MessageId,
|
||||
) -> Result<NotificationBatch> {
|
||||
self.transaction(|tx| async move {
|
||||
self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
|
||||
.await?;
|
||||
let mut batch = NotificationBatch::default();
|
||||
batch.extend(
|
||||
self.mark_notification_as_read(
|
||||
user_id,
|
||||
&Notification::ChannelMessageMention {
|
||||
message_id: message_id.to_proto(),
|
||||
sender_id: Default::default(),
|
||||
channel_id: Default::default(),
|
||||
},
|
||||
&*tx,
|
||||
)
|
||||
.await?,
|
||||
);
|
||||
Ok(batch)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn observe_channel_message_internal(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
message_id: MessageId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
observed_channel_messages::Entity::insert(observed_channel_messages::ActiveModel {
|
||||
user_id: ActiveValue::Set(user_id),
|
||||
channel_id: ActiveValue::Set(channel_id),
|
||||
channel_message_id: ActiveValue::Set(message_id),
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
observed_channel_messages::Column::ChannelId,
|
||||
observed_channel_messages::Column::UserId,
|
||||
])
|
||||
.update_column(observed_channel_messages::Column::ChannelMessageId)
|
||||
.action_cond_where(observed_channel_messages::Column::ChannelMessageId.lt(message_id))
|
||||
.to_owned(),
|
||||
)
|
||||
// TODO: Try to upgrade SeaORM so we don't have to do this hack around their bug
|
||||
.exec_without_returning(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn unseen_channel_messages(
|
||||
&self,
|
||||
user_id: UserId,
|
||||
channel_ids: &[ChannelId],
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<proto::UnseenChannelMessage>> {
|
||||
let mut observed_messages_by_channel_id = HashMap::default();
|
||||
let mut rows = observed_channel_messages::Entity::find()
|
||||
.filter(observed_channel_messages::Column::UserId.eq(user_id))
|
||||
.filter(observed_channel_messages::Column::ChannelId.is_in(channel_ids.iter().copied()))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
observed_messages_by_channel_id.insert(row.channel_id, row);
|
||||
}
|
||||
drop(rows);
|
||||
let mut values = String::new();
|
||||
for id in channel_ids {
|
||||
if !values.is_empty() {
|
||||
values.push_str(", ");
|
||||
}
|
||||
write!(&mut values, "({})", id).unwrap();
|
||||
}
|
||||
|
||||
if values.is_empty() {
|
||||
return Ok(Default::default());
|
||||
}
|
||||
|
||||
let sql = format!(
|
||||
r#"
|
||||
SELECT
|
||||
*
|
||||
FROM (
|
||||
SELECT
|
||||
*,
|
||||
row_number() OVER (
|
||||
PARTITION BY channel_id
|
||||
ORDER BY id DESC
|
||||
) as row_number
|
||||
FROM channel_messages
|
||||
WHERE
|
||||
channel_id in ({values})
|
||||
) AS messages
|
||||
WHERE
|
||||
row_number = 1
|
||||
"#,
|
||||
);
|
||||
|
||||
let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
|
||||
let last_messages = channel_message::Model::find_by_statement(stmt)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut changes = Vec::new();
|
||||
for last_message in last_messages {
|
||||
if let Some(observed_message) =
|
||||
observed_messages_by_channel_id.get(&last_message.channel_id)
|
||||
{
|
||||
if observed_message.channel_message_id == last_message.id {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
changes.push(proto::UnseenChannelMessage {
|
||||
channel_id: last_message.channel_id.to_proto(),
|
||||
message_id: last_message.id.to_proto(),
|
||||
});
|
||||
}
|
||||
|
||||
Ok(changes)
|
||||
}
|
||||
|
||||
pub async fn remove_channel_message(
|
||||
&self,
|
||||
channel_id: ChannelId,
|
||||
message_id: MessageId,
|
||||
user_id: UserId,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
self.transaction(|tx| async move {
|
||||
let mut rows = channel_chat_participant::Entity::find()
|
||||
.filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut is_participant = false;
|
||||
let mut participant_connection_ids = Vec::new();
|
||||
while let Some(row) = rows.next().await {
|
||||
let row = row?;
|
||||
if row.user_id == user_id {
|
||||
is_participant = true;
|
||||
}
|
||||
participant_connection_ids.push(row.connection());
|
||||
}
|
||||
drop(rows);
|
||||
|
||||
if !is_participant {
|
||||
Err(anyhow!("not a chat participant"))?;
|
||||
}
|
||||
|
||||
let result = channel_message::Entity::delete_by_id(message_id)
|
||||
.filter(channel_message::Column::SenderId.eq(user_id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected == 0 {
|
||||
let channel = self.get_channel_internal(channel_id, &*tx).await?;
|
||||
if self
|
||||
.check_user_is_channel_admin(&channel, user_id, &*tx)
|
||||
.await
|
||||
.is_ok()
|
||||
{
|
||||
let result = channel_message::Entity::delete_by_id(message_id)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("no such message"))?;
|
||||
}
|
||||
} else {
|
||||
Err(anyhow!("operation could not be completed"))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(participant_connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
crates/collab2/src/db/queries/notifications.rs (new file, 262 lines)
@@ -0,0 +1,262 @@
use super::*;
use rpc::Notification;

impl Database {
    pub async fn initialize_notification_kinds(&mut self) -> Result<()> {
        notification_kind::Entity::insert_many(Notification::all_variant_names().iter().map(
            |kind| notification_kind::ActiveModel {
                name: ActiveValue::Set(kind.to_string()),
                ..Default::default()
            },
        ))
        .on_conflict(OnConflict::new().do_nothing().to_owned())
        .exec_without_returning(&self.pool)
        .await?;

        let mut rows = notification_kind::Entity::find().stream(&self.pool).await?;
        while let Some(row) = rows.next().await {
            let row = row?;
            self.notification_kinds_by_name.insert(row.name, row.id);
        }

        for name in Notification::all_variant_names() {
            if let Some(id) = self.notification_kinds_by_name.get(*name).copied() {
                self.notification_kinds_by_id.insert(id, name);
            }
        }

        Ok(())
    }

    pub async fn get_notifications(
        &self,
        recipient_id: UserId,
        limit: usize,
        before_id: Option<NotificationId>,
    ) -> Result<Vec<proto::Notification>> {
        self.transaction(|tx| async move {
            let mut result = Vec::new();
            let mut condition =
                Condition::all().add(notification::Column::RecipientId.eq(recipient_id));

            if let Some(before_id) = before_id {
                condition = condition.add(notification::Column::Id.lt(before_id));
            }

            let mut rows = notification::Entity::find()
                .filter(condition)
                .order_by_desc(notification::Column::Id)
                .limit(limit as u64)
                .stream(&*tx)
                .await?;
            while let Some(row) = rows.next().await {
                let row = row?;
                let kind = row.kind;
                if let Some(proto) = model_to_proto(self, row) {
                    result.push(proto);
                } else {
                    log::warn!("unknown notification kind {:?}", kind);
                }
            }
            result.reverse();
            Ok(result)
        })
        .await
    }

    /// Create a notification. If `avoid_duplicates` is set to true, then avoid
    /// creating a new notification if the given recipient already has an
    /// unread notification with the given kind and entity id.
    pub async fn create_notification(
        &self,
        recipient_id: UserId,
        notification: Notification,
        avoid_duplicates: bool,
        tx: &DatabaseTransaction,
    ) -> Result<Option<(UserId, proto::Notification)>> {
        if avoid_duplicates {
            if self
                .find_notification(recipient_id, &notification, tx)
                .await?
                .is_some()
            {
                return Ok(None);
            }
        }

        let proto = notification.to_proto();
        let kind = notification_kind_from_proto(self, &proto)?;
        let model = notification::ActiveModel {
            recipient_id: ActiveValue::Set(recipient_id),
            kind: ActiveValue::Set(kind),
            entity_id: ActiveValue::Set(proto.entity_id.map(|id| id as i32)),
            content: ActiveValue::Set(proto.content.clone()),
            ..Default::default()
        }
        .save(&*tx)
        .await?;

        Ok(Some((
            recipient_id,
            proto::Notification {
                id: model.id.as_ref().to_proto(),
                kind: proto.kind,
                timestamp: model.created_at.as_ref().assume_utc().unix_timestamp() as u64,
                is_read: false,
                response: None,
                content: proto.content,
                entity_id: proto.entity_id,
            },
        )))
    }

    /// Remove an unread notification with the given recipient, kind and
    /// entity id.
    pub async fn remove_notification(
        &self,
        recipient_id: UserId,
        notification: Notification,
        tx: &DatabaseTransaction,
    ) -> Result<Option<NotificationId>> {
        let id = self
            .find_notification(recipient_id, &notification, tx)
            .await?;
        if let Some(id) = id {
            notification::Entity::delete_by_id(id).exec(tx).await?;
        }
        Ok(id)
    }

    /// Populate the response for the notification with the given kind and
    /// entity id.
    pub async fn mark_notification_as_read_with_response(
        &self,
        recipient_id: UserId,
        notification: &Notification,
        response: bool,
        tx: &DatabaseTransaction,
    ) -> Result<Option<(UserId, proto::Notification)>> {
        self.mark_notification_as_read_internal(recipient_id, notification, Some(response), tx)
            .await
    }

    pub async fn mark_notification_as_read(
        &self,
        recipient_id: UserId,
        notification: &Notification,
        tx: &DatabaseTransaction,
    ) -> Result<Option<(UserId, proto::Notification)>> {
        self.mark_notification_as_read_internal(recipient_id, notification, None, tx)
            .await
    }

    pub async fn mark_notification_as_read_by_id(
        &self,
        recipient_id: UserId,
        notification_id: NotificationId,
    ) -> Result<NotificationBatch> {
        self.transaction(|tx| async move {
            let row = notification::Entity::update(notification::ActiveModel {
                id: ActiveValue::Unchanged(notification_id),
                recipient_id: ActiveValue::Unchanged(recipient_id),
                is_read: ActiveValue::Set(true),
                ..Default::default()
            })
            .exec(&*tx)
            .await?;
            Ok(model_to_proto(self, row)
                .map(|notification| (recipient_id, notification))
                .into_iter()
                .collect())
        })
        .await
    }

    async fn mark_notification_as_read_internal(
        &self,
        recipient_id: UserId,
        notification: &Notification,
        response: Option<bool>,
        tx: &DatabaseTransaction,
    ) -> Result<Option<(UserId, proto::Notification)>> {
        if let Some(id) = self
            .find_notification(recipient_id, notification, &*tx)
            .await?
        {
            let row = notification::Entity::update(notification::ActiveModel {
                id: ActiveValue::Unchanged(id),
                recipient_id: ActiveValue::Unchanged(recipient_id),
                is_read: ActiveValue::Set(true),
                response: if let Some(response) = response {
                    ActiveValue::Set(Some(response))
                } else {
                    ActiveValue::NotSet
                },
                ..Default::default()
            })
            .exec(tx)
            .await?;
            Ok(model_to_proto(self, row).map(|notification| (recipient_id, notification)))
        } else {
            Ok(None)
        }
    }

    /// Find an unread notification by its recipient, kind and entity id.
    async fn find_notification(
        &self,
        recipient_id: UserId,
        notification: &Notification,
        tx: &DatabaseTransaction,
    ) -> Result<Option<NotificationId>> {
        let proto = notification.to_proto();
        let kind = notification_kind_from_proto(self, &proto)?;

        #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
        enum QueryIds {
            Id,
        }

        Ok(notification::Entity::find()
            .select_only()
            .column(notification::Column::Id)
            .filter(
                Condition::all()
                    .add(notification::Column::RecipientId.eq(recipient_id))
                    .add(notification::Column::IsRead.eq(false))
                    .add(notification::Column::Kind.eq(kind))
                    .add(if proto.entity_id.is_some() {
                        notification::Column::EntityId.eq(proto.entity_id)
                    } else {
                        notification::Column::EntityId.is_null()
                    }),
            )
            .into_values::<_, QueryIds>()
            .one(&*tx)
            .await?)
    }
}

fn model_to_proto(this: &Database, row: notification::Model) -> Option<proto::Notification> {
    let kind = this.notification_kinds_by_id.get(&row.kind)?;
    Some(proto::Notification {
        id: row.id.to_proto(),
        kind: kind.to_string(),
        timestamp: row.created_at.assume_utc().unix_timestamp() as u64,
        is_read: row.is_read,
        response: row.response,
        content: row.content,
        entity_id: row.entity_id.map(|id| id as u64),
    })
}

fn notification_kind_from_proto(
    this: &Database,
    proto: &proto::Notification,
) -> Result<NotificationKindId> {
    Ok(this
        .notification_kinds_by_name
        .get(&proto.kind)
        .copied()
        .ok_or_else(|| anyhow!("invalid notification kind {:?}", proto.kind))?)
}
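Illustrative note (not part of this commit): a hypothetical paging loop over get_notifications above, assuming a `Database` handle; the page size of 50 is only an example.

// Hypothetical usage sketch, not committed code.
async fn load_all_notifications(db: &Database, user: UserId) -> Result<Vec<proto::Notification>> {
    let mut all = Vec::new();
    let mut before: Option<NotificationId> = None;
    loop {
        // Each page returns up to 50 notifications older than `before`, oldest first.
        let page = db.get_notifications(user, 50, before).await?;
        if page.is_empty() {
            break;
        }
        // The next page starts before the oldest id seen so far.
        before = page.first().map(|n| NotificationId::from_proto(n.id));
        all.extend(page);
    }
    Ok(all)
}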
crates/collab2/src/db/queries/projects.rs (new file, 960 lines)
|
@ -0,0 +1,960 @@
|
|||
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn project_count_excluding_admins(&self) -> Result<usize> {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryAs {
|
||||
Count,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
Ok(project::Entity::find()
|
||||
.select_only()
|
||||
.column_as(project::Column::Id.count(), QueryAs::Count)
|
||||
.inner_join(user::Entity)
|
||||
.filter(user::Column::Admin.eq(false))
|
||||
.into_values::<_, QueryAs>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.unwrap_or(0i64) as usize)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn share_project(
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> Result<RoomGuard<(ProjectId, proto::Room)>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionId
|
||||
.eq(connection.id as i32),
|
||||
)
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find participant"))?;
|
||||
if participant.room_id != room_id {
|
||||
return Err(anyhow!("shared project on unexpected room"))?;
|
||||
}
|
||||
|
||||
let project = project::ActiveModel {
|
||||
room_id: ActiveValue::set(participant.room_id),
|
||||
host_user_id: ActiveValue::set(participant.user_id),
|
||||
host_connection_id: ActiveValue::set(Some(connection.id as i32)),
|
||||
host_connection_server_id: ActiveValue::set(Some(ServerId(
|
||||
connection.owner_id as i32,
|
||||
))),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
|
||||
worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project.id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}
|
||||
}))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project.id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(participant.user_id),
|
||||
replica_id: ActiveValue::set(ReplicaId(0)),
|
||||
is_host: ActiveValue::set(true),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((project.id, room))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn unshare_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project not found"))?;
|
||||
if project.host_connection()? == connection {
|
||||
project::Entity::delete(project.into_active_model())
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
} else {
|
||||
Err(anyhow!("cannot unshare a project hosted by another user"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
self.update_project_worktrees(project.id, worktrees, &tx)
|
||||
.await?;
|
||||
|
||||
let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
Ok((room, guest_connection_ids))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub(in crate::db) async fn update_project_worktrees(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
worktrees: &[proto::WorktreeMetadata],
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
if !worktrees.is_empty() {
|
||||
worktree::Entity::insert_many(worktrees.iter().map(|worktree| worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree.id as i64),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
abs_path: ActiveValue::set(worktree.abs_path.clone()),
|
||||
root_name: ActiveValue::set(worktree.root_name.clone()),
|
||||
visible: ActiveValue::set(worktree.visible),
|
||||
scan_id: ActiveValue::set(0),
|
||||
completed_scan_id: ActiveValue::set(0),
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
|
||||
.update_column(worktree::Column::RootName)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
worktree::Entity::delete_many()
|
||||
.filter(worktree::Column::ProjectId.eq(project_id).and(
|
||||
worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
|
||||
))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_worktree(
|
||||
&self,
|
||||
update: &proto::UpdateWorktree,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
// Ensure the update comes from the host.
|
||||
let _project = project::Entity::find_by_id(project_id)
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project::Column::HostConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
|
||||
// Update metadata.
|
||||
worktree::Entity::update(worktree::ActiveModel {
|
||||
id: ActiveValue::set(worktree_id),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
root_name: ActiveValue::set(update.root_name.clone()),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
completed_scan_id: if update.is_last_update {
|
||||
ActiveValue::set(update.scan_id as i64)
|
||||
} else {
|
||||
ActiveValue::default()
|
||||
},
|
||||
abs_path: ActiveValue::set(update.abs_path.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
if !update.updated_entries.is_empty() {
|
||||
worktree_entry::Entity::insert_many(update.updated_entries.iter().map(|entry| {
|
||||
let mtime = entry.mtime.clone().unwrap_or_default();
|
||||
worktree_entry::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
id: ActiveValue::set(entry.id as i64),
|
||||
is_dir: ActiveValue::set(entry.is_dir),
|
||||
path: ActiveValue::set(entry.path.clone()),
|
||||
inode: ActiveValue::set(entry.inode as i64),
|
||||
mtime_seconds: ActiveValue::set(mtime.seconds as i64),
|
||||
mtime_nanos: ActiveValue::set(mtime.nanos as i32),
|
||||
is_symlink: ActiveValue::set(entry.is_symlink),
|
||||
is_ignored: ActiveValue::set(entry.is_ignored),
|
||||
is_external: ActiveValue::set(entry.is_external),
|
||||
git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
}
|
||||
}))
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_entry::Column::ProjectId,
|
||||
worktree_entry::Column::WorktreeId,
|
||||
worktree_entry::Column::Id,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_entry::Column::IsDir,
|
||||
worktree_entry::Column::Path,
|
||||
worktree_entry::Column::Inode,
|
||||
worktree_entry::Column::MtimeSeconds,
|
||||
worktree_entry::Column::MtimeNanos,
|
||||
worktree_entry::Column::IsSymlink,
|
||||
worktree_entry::Column::IsIgnored,
|
||||
worktree_entry::Column::GitStatus,
|
||||
worktree_entry::Column::ScanId,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.removed_entries.is_empty() {
|
||||
worktree_entry::Entity::update_many()
|
||||
.filter(
|
||||
worktree_entry::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(worktree_entry::Column::WorktreeId.eq(worktree_id))
|
||||
.and(
|
||||
worktree_entry::Column::Id
|
||||
.is_in(update.removed_entries.iter().map(|id| *id as i64)),
|
||||
),
|
||||
)
|
||||
.set(worktree_entry::ActiveModel {
|
||||
is_deleted: ActiveValue::Set(true),
|
||||
scan_id: ActiveValue::Set(update.scan_id as i64),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.updated_repositories.is_empty() {
|
||||
worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
|
||||
|repository| worktree_repository::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
|
||||
scan_id: ActiveValue::set(update.scan_id as i64),
|
||||
branch: ActiveValue::set(repository.branch.clone()),
|
||||
is_deleted: ActiveValue::set(false),
|
||||
},
|
||||
))
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_repository::Column::ProjectId,
|
||||
worktree_repository::Column::WorktreeId,
|
||||
worktree_repository::Column::WorkDirectoryId,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_repository::Column::ScanId,
|
||||
worktree_repository::Column::Branch,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if !update.removed_repositories.is_empty() {
|
||||
worktree_repository::Entity::update_many()
|
||||
.filter(
|
||||
worktree_repository::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(worktree_repository::Column::WorktreeId.eq(worktree_id))
|
||||
.and(
|
||||
worktree_repository::Column::WorkDirectoryId
|
||||
.is_in(update.removed_repositories.iter().map(|id| *id as i64)),
|
||||
),
|
||||
)
|
||||
.set(worktree_repository::ActiveModel {
|
||||
is_deleted: ActiveValue::Set(true),
|
||||
scan_id: ActiveValue::Set(update.scan_id as i64),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_diagnostic_summary(
|
||||
&self,
|
||||
update: &proto::UpdateDiagnosticSummary,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let summary = update
|
||||
.summary
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid summary"))?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
// Update summary.
|
||||
worktree_diagnostic_summary::Entity::insert(worktree_diagnostic_summary::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
worktree_id: ActiveValue::set(worktree_id),
|
||||
path: ActiveValue::set(summary.path.clone()),
|
||||
language_server_id: ActiveValue::set(summary.language_server_id as i64),
|
||||
error_count: ActiveValue::set(summary.error_count as i32),
|
||||
warning_count: ActiveValue::set(summary.warning_count as i32),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_diagnostic_summary::Column::ProjectId,
|
||||
worktree_diagnostic_summary::Column::WorktreeId,
|
||||
worktree_diagnostic_summary::Column::Path,
|
||||
])
|
||||
.update_columns([
|
||||
worktree_diagnostic_summary::Column::LanguageServerId,
|
||||
worktree_diagnostic_summary::Column::ErrorCount,
|
||||
worktree_diagnostic_summary::Column::WarningCount,
|
||||
])
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn start_language_server(
|
||||
&self,
|
||||
update: &proto::StartLanguageServer,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let server = update
|
||||
.server
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid language server"))?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
// Add the newly-started language server.
|
||||
language_server::Entity::insert(language_server::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
id: ActiveValue::set(server.id as i64),
|
||||
name: ActiveValue::set(server.name.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
language_server::Column::ProjectId,
|
||||
language_server::Column::Id,
|
||||
])
|
||||
.update_column(language_server::Column::Name)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn update_worktree_settings(
|
||||
&self,
|
||||
update: &proto::UpdateWorktreeSettings,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
||||
if let Some(content) = &update.content {
|
||||
worktree_settings_file::Entity::insert(worktree_settings_file::ActiveModel {
|
||||
project_id: ActiveValue::Set(project_id),
|
||||
worktree_id: ActiveValue::Set(update.worktree_id as i64),
|
||||
path: ActiveValue::Set(update.path.clone()),
|
||||
content: ActiveValue::Set(content.clone()),
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
worktree_settings_file::Column::ProjectId,
|
||||
worktree_settings_file::Column::WorktreeId,
|
||||
worktree_settings_file::Column::Path,
|
||||
])
|
||||
.update_column(worktree_settings_file::Column::Content)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
} else {
|
||||
worktree_settings_file::Entity::delete(worktree_settings_file::ActiveModel {
|
||||
project_id: ActiveValue::Set(project_id),
|
||||
worktree_id: ActiveValue::Set(update.worktree_id as i64),
|
||||
path: ActiveValue::Set(update.path.clone()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
|
||||
Ok(connection_ids)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn join_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(Project, ReplicaId)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let participant = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionId
|
||||
.eq(connection.id as i32),
|
||||
)
|
||||
.add(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("must join a room first"))?;
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
if project.room_id != participant.room_id {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
|
||||
let mut collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let replica_ids = collaborators
|
||||
.iter()
|
||||
.map(|c| c.replica_id)
|
||||
.collect::<HashSet<_>>();
|
||||
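// Pick the lowest replica id, starting at 1, that no existing collaborator already holds
// (replica 0 is presumably reserved for the host).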
let mut replica_id = ReplicaId(1);
|
||||
while replica_ids.contains(&replica_id) {
|
||||
replica_id.0 += 1;
|
||||
}
|
||||
let new_collaborator = project_collaborator::ActiveModel {
|
||||
project_id: ActiveValue::set(project_id),
|
||||
connection_id: ActiveValue::set(connection.id as i32),
|
||||
connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
|
||||
user_id: ActiveValue::set(participant.user_id),
|
||||
replica_id: ActiveValue::set(replica_id),
|
||||
is_host: ActiveValue::set(false),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
collaborators.push(new_collaborator);
|
||||
|
||||
let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
|
||||
let mut worktrees = db_worktrees
|
||||
.into_iter()
|
||||
.map(|db_worktree| {
|
||||
(
|
||||
db_worktree.id as u64,
|
||||
Worktree {
|
||||
id: db_worktree.id as u64,
|
||||
abs_path: db_worktree.abs_path,
|
||||
root_name: db_worktree.root_name,
|
||||
visible: db_worktree.visible,
|
||||
entries: Default::default(),
|
||||
repository_entries: Default::default(),
|
||||
diagnostic_summaries: Default::default(),
|
||||
settings_files: Default::default(),
|
||||
scan_id: db_worktree.scan_id as u64,
|
||||
completed_scan_id: db_worktree.completed_scan_id as u64,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
|
||||
// Populate worktree entries.
|
||||
{
|
||||
let mut db_entries = worktree_entry::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_entry::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_entry::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_entry) = db_entries.next().await {
|
||||
let db_entry = db_entry?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) {
|
||||
worktree.entries.push(proto::Entry {
|
||||
id: db_entry.id as u64,
|
||||
is_dir: db_entry.is_dir,
|
||||
path: db_entry.path,
|
||||
inode: db_entry.inode as u64,
|
||||
mtime: Some(proto::Timestamp {
|
||||
seconds: db_entry.mtime_seconds as u64,
|
||||
nanos: db_entry.mtime_nanos as u32,
|
||||
}),
|
||||
is_symlink: db_entry.is_symlink,
|
||||
is_ignored: db_entry.is_ignored,
|
||||
is_external: db_entry.is_external,
|
||||
git_status: db_entry.git_status.map(|status| status as i32),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate repository entries.
|
||||
{
|
||||
let mut db_repository_entries = worktree_repository::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(worktree_repository::Column::ProjectId.eq(project_id))
|
||||
.add(worktree_repository::Column::IsDeleted.eq(false)),
|
||||
)
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_repository_entry) = db_repository_entries.next().await {
|
||||
let db_repository_entry = db_repository_entry?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
|
||||
{
|
||||
worktree.repository_entries.insert(
|
||||
db_repository_entry.work_directory_id as u64,
|
||||
proto::RepositoryEntry {
|
||||
work_directory_id: db_repository_entry.work_directory_id as u64,
|
||||
branch: db_repository_entry.branch,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree diagnostic summaries.
|
||||
{
|
||||
let mut db_summaries = worktree_diagnostic_summary::Entity::find()
|
||||
.filter(worktree_diagnostic_summary::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_summary) = db_summaries.next().await {
|
||||
let db_summary = db_summary?;
|
||||
if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) {
|
||||
worktree
|
||||
.diagnostic_summaries
|
||||
.push(proto::DiagnosticSummary {
|
||||
path: db_summary.path,
|
||||
language_server_id: db_summary.language_server_id as u64,
|
||||
error_count: db_summary.error_count as u32,
|
||||
warning_count: db_summary.warning_count as u32,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate worktree settings files
|
||||
{
|
||||
let mut db_settings_files = worktree_settings_file::Entity::find()
|
||||
.filter(worktree_settings_file::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
while let Some(db_settings_file) = db_settings_files.next().await {
|
||||
let db_settings_file = db_settings_file?;
|
||||
if let Some(worktree) =
|
||||
worktrees.get_mut(&(db_settings_file.worktree_id as u64))
|
||||
{
|
||||
worktree.settings_files.push(WorktreeSettingsFile {
|
||||
path: db_settings_file.path,
|
||||
content: db_settings_file.content,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Populate language servers.
|
||||
let language_servers = project
|
||||
.find_related(language_server::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
let project = Project {
|
||||
collaborators: collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect(),
|
||||
worktrees,
|
||||
language_servers: language_servers
|
||||
.into_iter()
|
||||
.map(|language_server| proto::LanguageServer {
|
||||
id: language_server.id as u64,
|
||||
name: language_server.name,
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
Ok((project, replica_id))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn leave_project(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection: ConnectionId,
|
||||
) -> Result<RoomGuard<(proto::Room, LeftProject)>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let result = project_collaborator::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
|
||||
.add(
|
||||
project_collaborator::Column::ConnectionServerId
|
||||
.eq(connection.owner_id as i32),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
if result.rows_affected == 0 {
|
||||
Err(anyhow!("not a collaborator on this project"))?;
|
||||
}
|
||||
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let connection_ids = collaborators
|
||||
.into_iter()
|
||||
.map(|collaborator| collaborator.connection())
|
||||
.collect();
|
||||
|
||||
follower::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::any()
|
||||
.add(
|
||||
Condition::all()
|
||||
.add(follower::Column::ProjectId.eq(Some(project_id)))
|
||||
.add(
|
||||
follower::Column::LeaderConnectionServerId
|
||||
.eq(connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::LeaderConnectionId.eq(connection.id)),
|
||||
)
|
||||
.add(
|
||||
Condition::all()
|
||||
.add(follower::Column::ProjectId.eq(Some(project_id)))
|
||||
.add(
|
||||
follower::Column::FollowerConnectionServerId
|
||||
.eq(connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::FollowerConnectionId.eq(connection.id)),
|
||||
),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(project.room_id, &tx).await?;
|
||||
let left_project = LeftProject {
|
||||
id: project_id,
|
||||
host_user_id: project.host_user_id,
|
||||
host_connection_id: project.host_connection()?,
|
||||
connection_ids,
|
||||
};
|
||||
Ok((room, left_project))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn project_collaborators(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|collaborator| ProjectCollaborator {
|
||||
connection_id: collaborator.connection(),
|
||||
user_id: collaborator.user_id,
|
||||
replica_id: collaborator.replica_id,
|
||||
is_host: collaborator.is_host,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if collaborators
|
||||
.iter()
|
||||
.any(|collaborator| collaborator.connection_id == connection_id)
|
||||
{
|
||||
Ok(collaborators)
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn project_connection_ids(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
connection_id: ConnectionId,
|
||||
) -> Result<RoomGuard<HashSet<ConnectionId>>> {
|
||||
let room_id = self.room_id_for_project(project_id).await?;
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
.stream(&*tx)
|
||||
.await?;
|
||||
|
||||
let mut connection_ids = HashSet::default();
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
connection_ids.insert(collaborator.connection());
|
||||
}
|
||||
|
||||
if connection_ids.contains(&connection_id) {
|
||||
Ok(connection_ids)
|
||||
} else {
|
||||
Err(anyhow!("no such project"))?
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn project_guest_connection_ids(
|
||||
&self,
|
||||
project_id: ProjectId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ConnectionId>> {
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(
|
||||
project_collaborator::Column::ProjectId
|
||||
.eq(project_id)
|
||||
.and(project_collaborator::Column::IsHost.eq(false)),
|
||||
)
|
||||
.stream(tx)
|
||||
.await?;
|
||||
|
||||
let mut guest_connection_ids = Vec::new();
|
||||
while let Some(collaborator) = collaborators.next().await {
|
||||
let collaborator = collaborator?;
|
||||
guest_connection_ids.push(collaborator.connection());
|
||||
}
|
||||
Ok(guest_connection_ids)
|
||||
}
|
||||
|
||||
pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<RoomId> {
|
||||
self.transaction(|tx| async move {
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project {} not found", project_id))?;
|
||||
Ok(project.room_id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn check_room_participants(
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
leader_id: ConnectionId,
|
||||
follower_id: ConnectionId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
use room_participant::Column;
|
||||
|
||||
let count = room_participant::Entity::find()
|
||||
.filter(
|
||||
Condition::all().add(Column::RoomId.eq(room_id)).add(
|
||||
Condition::any()
|
||||
.add(Column::AnsweringConnectionId.eq(leader_id.id as i32).and(
|
||||
Column::AnsweringConnectionServerId.eq(leader_id.owner_id as i32),
|
||||
))
|
||||
.add(Column::AnsweringConnectionId.eq(follower_id.id as i32).and(
|
||||
Column::AnsweringConnectionServerId.eq(follower_id.owner_id as i32),
|
||||
)),
|
||||
),
|
||||
)
|
||||
.count(&*tx)
|
||||
.await?;
|
||||
|
||||
if count < 2 {
|
||||
Err(anyhow!("not room participants"))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn follow(
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::ActiveModel {
|
||||
room_id: ActiveValue::set(room_id),
|
||||
project_id: ActiveValue::set(project_id),
|
||||
leader_connection_server_id: ActiveValue::set(ServerId(
|
||||
leader_connection.owner_id as i32,
|
||||
)),
|
||||
leader_connection_id: ActiveValue::set(leader_connection.id as i32),
|
||||
follower_connection_server_id: ActiveValue::set(ServerId(
|
||||
follower_connection.owner_id as i32,
|
||||
)),
|
||||
follower_connection_id: ActiveValue::set(follower_connection.id as i32),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn unfollow(
|
||||
&self,
|
||||
room_id: RoomId,
|
||||
project_id: ProjectId,
|
||||
leader_connection: ConnectionId,
|
||||
follower_connection: ConnectionId,
|
||||
) -> Result<RoomGuard<proto::Room>> {
|
||||
self.room_transaction(room_id, |tx| async move {
|
||||
follower::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(follower::Column::RoomId.eq(room_id))
|
||||
.add(follower::Column::ProjectId.eq(project_id))
|
||||
.add(
|
||||
follower::Column::LeaderConnectionServerId
|
||||
.eq(leader_connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::LeaderConnectionId.eq(leader_connection.id))
|
||||
.add(
|
||||
follower::Column::FollowerConnectionServerId
|
||||
.eq(follower_connection.owner_id),
|
||||
)
|
||||
.add(follower::Column::FollowerConnectionId.eq(follower_connection.id)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
let room = self.get_room(room_id, &*tx).await?;
|
||||
Ok(room)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
1203
crates/collab2/src/db/queries/rooms.rs
Normal file
File diff suppressed because it is too large
99
crates/collab2/src/db/queries/servers.rs
Normal file
@@ -0,0 +1,99 @@
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn create_server(&self, environment: &str) -> Result<ServerId> {
|
||||
self.transaction(|tx| async move {
|
||||
let server = server::ActiveModel {
|
||||
environment: ActiveValue::set(environment.into()),
|
||||
..Default::default()
|
||||
}
|
||||
.insert(&*tx)
|
||||
.await?;
|
||||
Ok(server.id)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn stale_server_resource_ids(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
) -> Result<(Vec<RoomId>, Vec<ChannelId>)> {
|
||||
self.transaction(|tx| async move {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryRoomIds {
|
||||
RoomId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryChannelIds {
|
||||
ChannelId,
|
||||
}
|
||||
|
||||
let stale_server_epochs = self
|
||||
.stale_server_ids(environment, new_server_id, &tx)
|
||||
.await?;
|
||||
let room_ids = room_participant::Entity::find()
|
||||
.select_only()
|
||||
.column(room_participant::Column::RoomId)
|
||||
.distinct()
|
||||
.filter(
|
||||
room_participant::Column::AnsweringConnectionServerId
|
||||
.is_in(stale_server_epochs.iter().copied()),
|
||||
)
|
||||
.into_values::<_, QueryRoomIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
let channel_ids = channel_buffer_collaborator::Entity::find()
|
||||
.select_only()
|
||||
.column(channel_buffer_collaborator::Column::ChannelId)
|
||||
.distinct()
|
||||
.filter(
|
||||
channel_buffer_collaborator::Column::ConnectionServerId
|
||||
.is_in(stale_server_epochs.iter().copied()),
|
||||
)
|
||||
.into_values::<_, QueryChannelIds>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok((room_ids, channel_ids))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn delete_stale_servers(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
server::Entity::delete_many()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(server::Column::Environment.eq(environment))
|
||||
.add(server::Column::Id.ne(new_server_id)),
|
||||
)
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
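// Collects the ids of every other server row registered for this environment; rooms and
// channel buffers still pointing at those servers are treated as stale.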
async fn stale_server_ids(
|
||||
&self,
|
||||
environment: &str,
|
||||
new_server_id: ServerId,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Vec<ServerId>> {
|
||||
let stale_servers = server::Entity::find()
|
||||
.filter(
|
||||
Condition::all()
|
||||
.add(server::Column::Environment.eq(environment))
|
||||
.add(server::Column::Id.ne(new_server_id)),
|
||||
)
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
Ok(stale_servers.into_iter().map(|server| server.id).collect())
|
||||
}
|
||||
}
|
259
crates/collab2/src/db/queries/users.rs
Normal file
@@ -0,0 +1,259 @@
use super::*;
|
||||
|
||||
impl Database {
|
||||
pub async fn create_user(
|
||||
&self,
|
||||
email_address: &str,
|
||||
admin: bool,
|
||||
params: NewUserParams,
|
||||
) -> Result<NewUserResult> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(Some(email_address.into())),
|
||||
github_login: ActiveValue::set(params.github_login.clone()),
|
||||
github_user_id: ActiveValue::set(Some(params.github_user_id)),
|
||||
admin: ActiveValue::set(admin),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.on_conflict(
|
||||
OnConflict::column(user::Column::GithubLogin)
|
||||
.update_column(user::Column::GithubLogin)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(NewUserResult {
|
||||
user_id: user.id,
|
||||
metrics_id: user.metrics_id.to_string(),
|
||||
signup_device_id: None,
|
||||
inviting_user_id: None,
|
||||
})
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_by_id(&self, id: UserId) -> Result<Option<user::Model>> {
|
||||
self.transaction(|tx| async move { Ok(user::Entity::find_by_id(id).one(&*tx).await?) })
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<user::Model>> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::Id.is_in(ids.iter().copied()))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_or_create_user_by_github_account(
|
||||
&self,
|
||||
github_login: &str,
|
||||
github_user_id: Option<i32>,
|
||||
github_email: Option<&str>,
|
||||
) -> Result<Option<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
let tx = &*tx;
|
||||
if let Some(github_user_id) = github_user_id {
|
||||
if let Some(user_by_github_user_id) = user::Entity::find()
|
||||
.filter(user::Column::GithubUserId.eq(github_user_id))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
|
||||
user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
|
||||
Ok(Some(user_by_github_user_id.update(tx).await?))
|
||||
} else if let Some(user_by_github_login) = user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?
|
||||
{
|
||||
let mut user_by_github_login = user_by_github_login.into_active_model();
|
||||
user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id));
|
||||
Ok(Some(user_by_github_login.update(tx).await?))
|
||||
} else {
|
||||
let user = user::Entity::insert(user::ActiveModel {
|
||||
email_address: ActiveValue::set(github_email.map(|email| email.into())),
|
||||
github_login: ActiveValue::set(github_login.into()),
|
||||
github_user_id: ActiveValue::set(Some(github_user_id)),
|
||||
admin: ActiveValue::set(false),
|
||||
invite_count: ActiveValue::set(0),
|
||||
invite_code: ActiveValue::set(None),
|
||||
metrics_id: ActiveValue::set(Uuid::new_v4()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec_with_returning(&*tx)
|
||||
.await?;
|
||||
Ok(Some(user))
|
||||
}
|
||||
} else {
|
||||
Ok(user::Entity::find()
|
||||
.filter(user::Column::GithubLogin.eq(github_login))
|
||||
.one(tx)
|
||||
.await?)
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>> {
|
||||
self.transaction(|tx| async move {
|
||||
Ok(user::Entity::find()
|
||||
.order_by_asc(user::Column::GithubLogin)
|
||||
.limit(limit as u64)
|
||||
.offset(page as u64 * limit as u64)
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryAs {
|
||||
MetricsId,
|
||||
}
|
||||
|
||||
self.transaction(|tx| async move {
|
||||
let metrics_id: Uuid = user::Entity::find_by_id(id)
|
||||
.select_only()
|
||||
.column(user::Column::MetricsId)
|
||||
.into_values::<_, QueryAs>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find user"))?;
|
||||
Ok(metrics_id.to_string())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
user::Entity::update_many()
|
||||
.filter(user::Column::Id.eq(id))
|
||||
.set(user::ActiveModel {
|
||||
connected_once: ActiveValue::set(connected_once),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn destroy_user(&self, id: UserId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
access_token::Entity::delete_many()
|
||||
.filter(access_token::Column::UserId.eq(id))
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
user::Entity::delete_by_id(id).exec(&*tx).await?;
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
|
||||
self.transaction(|tx| async {
|
||||
let tx = tx;
|
||||
let like_string = Self::fuzzy_like_string(name_query);
|
||||
let query = "
|
||||
SELECT users.*
|
||||
FROM users
|
||||
WHERE github_login ILIKE $1
|
||||
ORDER BY github_login <-> $2
|
||||
LIMIT $3
|
||||
";
|
||||
|
||||
Ok(user::Entity::find()
|
||||
.from_raw_sql(Statement::from_sql_and_values(
|
||||
self.pool.get_database_backend(),
|
||||
query,
|
||||
vec![like_string.into(), name_query.into(), limit.into()],
|
||||
))
|
||||
.all(&*tx)
|
||||
.await?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
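/// Builds the ILIKE pattern used by `fuzzy_search_users`: each alphanumeric character of the
/// query is preceded by a `%` wildcard and the pattern ends with `%`, so "max" becomes
/// "%m%a%x%" and matches any login containing those characters in order.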
pub fn fuzzy_like_string(string: &str) -> String {
|
||||
let mut result = String::with_capacity(string.len() * 2 + 1);
|
||||
for c in string.chars() {
|
||||
if c.is_alphanumeric() {
|
||||
result.push('%');
|
||||
result.push(c);
|
||||
}
|
||||
}
|
||||
result.push('%');
|
||||
result
|
||||
}
|
||||
|
||||
pub async fn create_user_flag(&self, flag: &str) -> Result<FlagId> {
|
||||
self.transaction(|tx| async move {
|
||||
let flag = feature_flag::Entity::insert(feature_flag::ActiveModel {
|
||||
flag: ActiveValue::set(flag.to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?
|
||||
.last_insert_id;
|
||||
|
||||
Ok(flag)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> {
|
||||
self.transaction(|tx| async move {
|
||||
user_feature::Entity::insert(user_feature::ActiveModel {
|
||||
user_id: ActiveValue::set(user),
|
||||
feature_id: ActiveValue::set(flag),
|
||||
})
|
||||
.exec(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
|
||||
self.transaction(|tx| async move {
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
|
||||
enum QueryAs {
|
||||
Flag,
|
||||
}
|
||||
|
||||
let flags = user::Model {
|
||||
id: user,
|
||||
..Default::default()
|
||||
}
|
||||
.find_linked(user::UserFlags)
|
||||
.select_only()
|
||||
.column(feature_flag::Column::Flag)
|
||||
.into_values::<_, QueryAs>()
|
||||
.all(&*tx)
|
||||
.await?;
|
||||
|
||||
Ok(flags)
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
32
crates/collab2/src/db/tables.rs
Normal file
@@ -0,0 +1,32 @@
pub mod access_token;
pub mod buffer;
pub mod buffer_operation;
pub mod buffer_snapshot;
pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_chat_participant;
pub mod channel_member;
pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod feature_flag;
pub mod follower;
pub mod language_server;
pub mod notification;
pub mod notification_kind;
pub mod observed_buffer_edits;
pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod room;
pub mod room_participant;
pub mod server;
pub mod signup;
pub mod user;
pub mod user_feature;
pub mod worktree;
pub mod worktree_diagnostic_summary;
pub mod worktree_entry;
pub mod worktree_repository;
pub mod worktree_repository_statuses;
pub mod worktree_settings_file;
29
crates/collab2/src/db/tables/access_token.rs
Normal file
@@ -0,0 +1,29 @@
use crate::db::{AccessTokenId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "access_tokens")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: AccessTokenId,
    pub user_id: UserId,
    pub hash: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
45
crates/collab2/src/db/tables/buffer.rs
Normal file
@@ -0,0 +1,45 @@
use crate::db::{BufferId, ChannelId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "buffers")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: BufferId,
|
||||
pub epoch: i32,
|
||||
pub channel_id: ChannelId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::buffer_operation::Entity")]
|
||||
Operations,
|
||||
#[sea_orm(has_many = "super::buffer_snapshot::Entity")]
|
||||
Snapshots,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
}
|
||||
|
||||
impl Related<super::buffer_operation::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Operations.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::buffer_snapshot::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Snapshots.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
34
crates/collab2/src/db/tables/buffer_operation.rs
Normal file
@@ -0,0 +1,34 @@
use crate::db::BufferId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "buffer_operations")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub buffer_id: BufferId,
|
||||
#[sea_orm(primary_key)]
|
||||
pub epoch: i32,
|
||||
#[sea_orm(primary_key)]
|
||||
pub lamport_timestamp: i32,
|
||||
#[sea_orm(primary_key)]
|
||||
pub replica_id: i32,
|
||||
pub value: Vec<u8>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::buffer::Entity",
|
||||
from = "Column::BufferId",
|
||||
to = "super::buffer::Column::Id"
|
||||
)]
|
||||
Buffer,
|
||||
}
|
||||
|
||||
impl Related<super::buffer::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Buffer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
31
crates/collab2/src/db/tables/buffer_snapshot.rs
Normal file
@@ -0,0 +1,31 @@
use crate::db::BufferId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "buffer_snapshots")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub buffer_id: BufferId,
    #[sea_orm(primary_key)]
    pub epoch: i32,
    pub text: String,
    pub operation_serialization_version: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::buffer::Entity",
        from = "Column::BufferId",
        to = "super::buffer::Column::Id"
    )]
    Buffer,
}

impl Related<super::buffer::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Buffer.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
79
crates/collab2/src/db/tables/channel.rs
Normal file
@@ -0,0 +1,79 @@
use crate::db::{ChannelId, ChannelVisibility};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channels")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ChannelId,
|
||||
pub name: String,
|
||||
pub visibility: ChannelVisibility,
|
||||
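/// Slash-terminated list of ancestor channel ids, ordered from the root down
/// (e.g. "1/2/" for a channel nested under channel 2, which is under channel 1);
/// an empty string marks a root channel.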
pub parent_path: String,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn parent_id(&self) -> Option<ChannelId> {
|
||||
self.ancestors().last()
|
||||
}
|
||||
|
||||
pub fn ancestors(&self) -> impl Iterator<Item = ChannelId> + '_ {
|
||||
self.parent_path
|
||||
.trim_end_matches('/')
|
||||
.split('/')
|
||||
.filter_map(|id| Some(ChannelId::from_proto(id.parse().ok()?)))
|
||||
}
|
||||
|
||||
pub fn ancestors_including_self(&self) -> impl Iterator<Item = ChannelId> + '_ {
|
||||
self.ancestors().chain(Some(self.id))
|
||||
}
|
||||
|
||||
pub fn path(&self) -> String {
|
||||
format!("{}{}/", self.parent_path, self.id)
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_one = "super::room::Entity")]
|
||||
Room,
|
||||
#[sea_orm(has_one = "super::buffer::Entity")]
|
||||
Buffer,
|
||||
#[sea_orm(has_many = "super::channel_member::Entity")]
|
||||
Member,
|
||||
#[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
|
||||
BufferCollaborators,
|
||||
#[sea_orm(has_many = "super::channel_chat_participant::Entity")]
|
||||
ChatParticipants,
|
||||
}
|
||||
|
||||
impl Related<super::channel_member::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Member.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::room::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Room.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::buffer::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Buffer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::channel_buffer_collaborator::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::BufferCollaborators.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::channel_chat_participant::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::ChatParticipants.def()
|
||||
}
|
||||
}
|
43
crates/collab2/src/db/tables/channel_buffer_collaborator.rs
Normal file
@@ -0,0 +1,43 @@
use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_buffer_collaborators")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ChannelBufferCollaboratorId,
|
||||
pub channel_id: ChannelId,
|
||||
pub connection_id: i32,
|
||||
pub connection_server_id: ServerId,
|
||||
pub connection_lost: bool,
|
||||
pub user_id: UserId,
|
||||
pub replica_id: ReplicaId,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.connection_server_id.0 as u32,
|
||||
id: self.connection_id as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
41
crates/collab2/src/db/tables/channel_chat_participant.rs
Normal file
@@ -0,0 +1,41 @@
use crate::db::{ChannelChatParticipantId, ChannelId, ServerId, UserId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_chat_participants")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ChannelChatParticipantId,
|
||||
pub channel_id: ChannelId,
|
||||
pub user_id: UserId,
|
||||
pub connection_id: i32,
|
||||
pub connection_server_id: ServerId,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.connection_server_id.0 as u32,
|
||||
id: self.connection_id as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
59
crates/collab2/src/db/tables/channel_member.rs
Normal file
@@ -0,0 +1,59 @@
use crate::db::{channel_member, ChannelId, ChannelMemberId, ChannelRole, UserId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_members")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ChannelMemberId,
|
||||
pub channel_id: ChannelId,
|
||||
pub user_id: UserId,
|
||||
pub accepted: bool,
|
||||
pub role: ChannelRole,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
User,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::User.def()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct UserToChannel;
|
||||
|
||||
impl Linked for UserToChannel {
|
||||
type FromEntity = super::user::Entity;
|
||||
|
||||
type ToEntity = super::channel::Entity;
|
||||
|
||||
fn link(&self) -> Vec<RelationDef> {
|
||||
vec![
|
||||
channel_member::Relation::User.def().rev(),
|
||||
channel_member::Relation::Channel.def(),
|
||||
]
|
||||
}
|
||||
}
|
45
crates/collab2/src/db/tables/channel_message.rs
Normal file
@@ -0,0 +1,45 @@
use crate::db::{ChannelId, MessageId, UserId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
use time::PrimitiveDateTime;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_messages")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: MessageId,
|
||||
pub channel_id: ChannelId,
|
||||
pub sender_id: UserId,
|
||||
pub body: String,
|
||||
pub sent_at: PrimitiveDateTime,
|
||||
pub nonce: Uuid,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::SenderId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
Sender,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Sender.def()
|
||||
}
|
||||
}
|
43
crates/collab2/src/db/tables/channel_message_mention.rs
Normal file
@@ -0,0 +1,43 @@
use crate::db::{MessageId, UserId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "channel_message_mentions")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub message_id: MessageId,
|
||||
#[sea_orm(primary_key)]
|
||||
pub start_offset: i32,
|
||||
pub end_offset: i32,
|
||||
pub user_id: UserId,
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel_message::Entity",
|
||||
from = "Column::MessageId",
|
||||
to = "super::channel_message::Column::Id"
|
||||
)]
|
||||
Message,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
MentionedUser,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Message.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::MentionedUser.def()
|
||||
}
|
||||
}
|
32
crates/collab2/src/db/tables/contact.rs
Normal file
@@ -0,0 +1,32 @@
use crate::db::{ContactId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "contacts")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ContactId,
    pub user_id_a: UserId,
    pub user_id_b: UserId,
    pub a_to_b: bool,
    pub should_notify: bool,
    pub accepted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdA",
        to = "super::room_participant::Column::UserId"
    )]
    UserARoomParticipant,
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdB",
        to = "super::room_participant::Column::UserId"
    )]
    UserBRoomParticipant,
}

impl ActiveModelBehavior for ActiveModel {}
40
crates/collab2/src/db/tables/feature_flag.rs
Normal file
@@ -0,0 +1,40 @@
use sea_orm::entity::prelude::*;
|
||||
|
||||
use crate::db::FlagId;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "feature_flags")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: FlagId,
|
||||
pub flag: String,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::user_feature::Entity")]
|
||||
UserFeature,
|
||||
}
|
||||
|
||||
impl Related<super::user_feature::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::UserFeature.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
pub struct FlaggedUsers;
|
||||
|
||||
impl Linked for FlaggedUsers {
|
||||
type FromEntity = Entity;
|
||||
|
||||
type ToEntity = super::user::Entity;
|
||||
|
||||
fn link(&self) -> Vec<RelationDef> {
|
||||
vec![
|
||||
super::user_feature::Relation::Flag.def().rev(),
|
||||
super::user_feature::Relation::User.def(),
|
||||
]
|
||||
}
|
||||
}
|
50
crates/collab2/src/db/tables/follower.rs
Normal file
@@ -0,0 +1,50 @@
use crate::db::{FollowerId, ProjectId, RoomId, ServerId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "followers")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: FollowerId,
|
||||
pub room_id: RoomId,
|
||||
pub project_id: ProjectId,
|
||||
pub leader_connection_server_id: ServerId,
|
||||
pub leader_connection_id: i32,
|
||||
pub follower_connection_server_id: ServerId,
|
||||
pub follower_connection_id: i32,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn leader_connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.leader_connection_server_id.0 as u32,
|
||||
id: self.leader_connection_id as u32,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn follower_connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.follower_connection_server_id.0 as u32,
|
||||
id: self.follower_connection_id as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::room::Entity",
|
||||
from = "Column::RoomId",
|
||||
to = "super::room::Column::Id"
|
||||
)]
|
||||
Room,
|
||||
}
|
||||
|
||||
impl Related<super::room::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Room.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
30
crates/collab2/src/db/tables/language_server.rs
Normal file
@@ -0,0 +1,30 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "language_servers")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
29
crates/collab2/src/db/tables/notification.rs
Normal file
@@ -0,0 +1,29 @@
use crate::db::{NotificationId, NotificationKindId, UserId};
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "notifications")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: NotificationId,
    pub created_at: PrimitiveDateTime,
    pub recipient_id: UserId,
    pub kind: NotificationKindId,
    pub entity_id: Option<i32>,
    pub content: String,
    pub is_read: bool,
    pub response: Option<bool>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::RecipientId",
        to = "super::user::Column::Id"
    )]
    Recipient,
}

impl ActiveModelBehavior for ActiveModel {}
15
crates/collab2/src/db/tables/notification_kind.rs
Normal file
@@ -0,0 +1,15 @@
use crate::db::NotificationKindId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "notification_kinds")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: NotificationKindId,
    pub name: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
43
crates/collab2/src/db/tables/observed_buffer_edits.rs
Normal file
@@ -0,0 +1,43 @@
use crate::db::{BufferId, UserId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "observed_buffer_edits")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub user_id: UserId,
|
||||
pub buffer_id: BufferId,
|
||||
pub epoch: i32,
|
||||
pub lamport_timestamp: i32,
|
||||
pub replica_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::buffer::Entity",
|
||||
from = "Column::BufferId",
|
||||
to = "super::buffer::Column::Id"
|
||||
)]
|
||||
Buffer,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
User,
|
||||
}
|
||||
|
||||
impl Related<super::buffer::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Buffer.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::User.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
41
crates/collab2/src/db/tables/observed_channel_messages.rs
Normal file
@@ -0,0 +1,41 @@
use crate::db::{ChannelId, MessageId, UserId};
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "observed_channel_messages")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub user_id: UserId,
|
||||
pub channel_id: ChannelId,
|
||||
pub channel_message_id: MessageId,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::channel::Entity",
|
||||
from = "Column::ChannelId",
|
||||
to = "super::channel::Column::Id"
|
||||
)]
|
||||
Channel,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
User,
|
||||
}
|
||||
|
||||
impl Related<super::channel::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Channel.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::User.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
84
crates/collab2/src/db/tables/project.rs
Normal file
@@ -0,0 +1,84 @@
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "projects")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ProjectId,
|
||||
pub room_id: RoomId,
|
||||
pub host_user_id: UserId,
|
||||
pub host_connection_id: Option<i32>,
|
||||
pub host_connection_server_id: Option<ServerId>,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn host_connection(&self) -> Result<ConnectionId> {
|
||||
let host_connection_server_id = self
|
||||
.host_connection_server_id
|
||||
.ok_or_else(|| anyhow!("empty host_connection_server_id"))?;
|
||||
let host_connection_id = self
|
||||
.host_connection_id
|
||||
.ok_or_else(|| anyhow!("empty host_connection_id"))?;
|
||||
Ok(ConnectionId {
|
||||
owner_id: host_connection_server_id.0 as u32,
|
||||
id: host_connection_id as u32,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user::Entity",
|
||||
from = "Column::HostUserId",
|
||||
to = "super::user::Column::Id"
|
||||
)]
|
||||
HostUser,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::room::Entity",
|
||||
from = "Column::RoomId",
|
||||
to = "super::room::Column::Id"
|
||||
)]
|
||||
Room,
|
||||
#[sea_orm(has_many = "super::worktree::Entity")]
|
||||
Worktrees,
|
||||
#[sea_orm(has_many = "super::project_collaborator::Entity")]
|
||||
Collaborators,
|
||||
#[sea_orm(has_many = "super::language_server::Entity")]
|
||||
LanguageServers,
|
||||
}
|
||||
|
||||
impl Related<super::user::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::HostUser.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::room::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Room.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::worktree::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Worktrees.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::project_collaborator::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Collaborators.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::language_server::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::LanguageServers.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
43
crates/collab2/src/db/tables/project_collaborator.rs
Normal file
@@ -0,0 +1,43 @@
use crate::db::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
|
||||
#[sea_orm(table_name = "project_collaborators")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: ProjectCollaboratorId,
|
||||
pub project_id: ProjectId,
|
||||
pub connection_id: i32,
|
||||
pub connection_server_id: ServerId,
|
||||
pub user_id: UserId,
|
||||
pub replica_id: ReplicaId,
|
||||
pub is_host: bool,
|
||||
}
|
||||
|
||||
impl Model {
|
||||
pub fn connection(&self) -> ConnectionId {
|
||||
ConnectionId {
|
||||
owner_id: self.connection_server_id.0 as u32,
|
||||
id: self.connection_id as u32,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::project::Entity",
|
||||
from = "Column::ProjectId",
|
||||
to = "super::project::Column::Id"
|
||||
)]
|
||||
Project,
|
||||
}
|
||||
|
||||
impl Related<super::project::Entity> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Project.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
54
crates/collab2/src/db/tables/room.rs
Normal file
@@ -0,0 +1,54 @@
use crate::db::{ChannelId, RoomId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Default, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "rooms")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomId,
    pub live_kit_room: String,
    pub channel_id: Option<ChannelId>,
    pub enviroment: Option<String>,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    Project,
    #[sea_orm(has_many = "super::follower::Entity")]
    Follower,
    #[sea_orm(
        belongs_to = "super::channel::Entity",
        from = "Column::ChannelId",
        to = "super::channel::Column::Id"
    )]
    Channel,
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl Related<super::follower::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Follower.def()
    }
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Channel.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
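One detail worth flagging in rooms: the enviroment field is misspelled, and because SeaORM derives column names from field names by default, that spelling presumably matches the underlying database column, so renaming only the Rust field would break queries. If a later cleanup wanted a correctly spelled field while keeping the existing column, a column_name override is the usual route; an illustrative sketch, not part of this diff:

    use sea_orm::entity::prelude::*;

    // Hypothetical cleanup: keep the misspelled "enviroment" database column
    // while exposing a correctly spelled Rust field. Unrelated fields omitted.
    #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
    #[sea_orm(table_name = "rooms")]
    pub struct Model {
        #[sea_orm(primary_key)]
        pub id: i32,
        pub live_kit_room: String,
        #[sea_orm(column_name = "enviroment")]
        pub environment: Option<String>,
    }

    #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
    pub enum Relation {}

    impl ActiveModelBehavior for ActiveModel {}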
61
crates/collab2/src/db/tables/room_participant.rs
Normal file
@@ -0,0 +1,61 @@
use crate::db::{ProjectId, RoomId, RoomParticipantId, ServerId, UserId};
use rpc::ConnectionId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "room_participants")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomParticipantId,
    pub room_id: RoomId,
    pub user_id: UserId,
    pub answering_connection_id: Option<i32>,
    pub answering_connection_server_id: Option<ServerId>,
    pub answering_connection_lost: bool,
    pub location_kind: Option<i32>,
    pub location_project_id: Option<ProjectId>,
    pub initial_project_id: Option<ProjectId>,
    pub calling_user_id: UserId,
    pub calling_connection_id: i32,
    pub calling_connection_server_id: Option<ServerId>,
    pub participant_index: Option<i32>,
}

impl Model {
    pub fn answering_connection(&self) -> Option<ConnectionId> {
        Some(ConnectionId {
            owner_id: self.answering_connection_server_id?.0 as u32,
            id: self.answering_connection_id? as u32,
        })
    }
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::RoomId",
        to = "super::room::Column::Id"
    )]
    Room,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
15
crates/collab2/src/db/tables/server.rs
Normal file
@@ -0,0 +1,15 @@
use crate::db::ServerId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "servers")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ServerId,
    pub environment: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
28
crates/collab2/src/db/tables/signup.rs
Normal file
@@ -0,0 +1,28 @@
use crate::db::{SignupId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: SignupId,
    pub email_address: String,
    pub email_confirmation_code: String,
    pub email_confirmation_sent: bool,
    pub created_at: DateTime,
    pub device_id: Option<String>,
    pub user_id: Option<UserId>,
    pub inviting_user_id: Option<UserId>,
    pub platform_mac: bool,
    pub platform_linux: bool,
    pub platform_windows: bool,
    pub platform_unknown: bool,
    pub editor_features: Option<Vec<String>>,
    pub programming_languages: Option<Vec<String>>,
    pub added_to_mailing_list: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
80
crates/collab2/src/db/tables/user.rs
Normal file
@@ -0,0 +1,80 @@
use crate::db::UserId;
use sea_orm::entity::prelude::*;
use serde::Serialize;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: UserId,
    pub github_login: String,
    pub github_user_id: Option<i32>,
    pub email_address: Option<String>,
    pub admin: bool,
    pub invite_code: Option<String>,
    pub invite_count: i32,
    pub inviter_id: Option<UserId>,
    pub connected_once: bool,
    pub metrics_id: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::access_token::Entity")]
    AccessToken,
    #[sea_orm(has_one = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    HostedProjects,
    #[sea_orm(has_many = "super::channel_member::Entity")]
    ChannelMemberships,
    #[sea_orm(has_many = "super::user_feature::Entity")]
    UserFeatures,
}

impl Related<super::access_token::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::AccessToken.def()
    }
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::HostedProjects.def()
    }
}

impl Related<super::channel_member::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ChannelMemberships.def()
    }
}

impl Related<super::user_feature::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::UserFeatures.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

pub struct UserFlags;

impl Linked for UserFlags {
    type FromEntity = Entity;

    type ToEntity = super::feature_flag::Entity;

    fn link(&self) -> Vec<RelationDef> {
        vec![
            super::user_feature::Relation::User.def().rev(),
            super::user_feature::Relation::Flag.def(),
        ]
    }
}
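The UserFlags link above chains the two user_feature relations into a user → feature_flag join. A hedged sketch of how it could be queried with SeaORM's find_linked; the helper name and placement are illustrative and not part of this diff:

    use sea_orm::{DatabaseConnection, DbErr, ModelTrait};

    // Illustrative helper: fetch the feature flags enabled for a user by
    // traversing the user_features join table via the UserFlags link above.
    pub async fn feature_flags_for(
        user: &Model,
        db: &DatabaseConnection,
    ) -> Result<Vec<super::feature_flag::Model>, DbErr> {
        user.find_linked(UserFlags).all(db).await
    }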
42
crates/collab2/src/db/tables/user_feature.rs
Normal file
@@ -0,0 +1,42 @@
use sea_orm::entity::prelude::*;

use crate::db::{FlagId, UserId};

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "user_features")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub user_id: UserId,
    #[sea_orm(primary_key)]
    pub feature_id: FlagId,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::feature_flag::Entity",
        from = "Column::FeatureId",
        to = "super::feature_flag::Column::Id"
    )]
    Flag,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
}

impl Related<super::feature_flag::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Flag.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
36
crates/collab2/src/db/tables/worktree.rs
Normal file
@@ -0,0 +1,36 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktrees")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    pub abs_path: String,
    pub root_name: String,
    pub visible: bool,
    /// The last scan for which we've observed entries. It may be in progress.
    pub scan_id: i64,
    /// The last scan that fully completed.
    pub completed_scan_id: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
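The worktree table above (and the worktree_* tables that follow) use composite primary keys. With SeaORM, such rows are addressed by passing a tuple to find_by_id in the same order the key columns are declared; an illustrative helper under that assumption, not part of this diff:

    use sea_orm::{DatabaseConnection, DbErr, EntityTrait};

    use crate::db::ProjectId;

    // Illustrative helper: look up one worktree row by its composite primary
    // key (id, project_id), matching the declaration order in worktree.rs.
    pub async fn find_worktree(
        db: &DatabaseConnection,
        id: i64,
        project_id: ProjectId,
    ) -> Result<Option<super::worktree::Model>, DbErr> {
        super::worktree::Entity::find_by_id((id, project_id))
            .one(db)
            .await
    }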
21
crates/collab2/src/db/tables/worktree_diagnostic_summary.rs
Normal file
@@ -0,0 +1,21 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_diagnostic_summaries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub path: String,
    pub language_server_id: i64,
    pub error_count: i32,
    pub warning_count: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
29
crates/collab2/src/db/tables/worktree_entry.rs
Normal file
@@ -0,0 +1,29 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_entries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub is_dir: bool,
    pub path: String,
    pub inode: i64,
    pub mtime_seconds: i64,
    pub mtime_nanos: i32,
    pub git_status: Option<i64>,
    pub is_symlink: bool,
    pub is_ignored: bool,
    pub is_external: bool,
    pub is_deleted: bool,
    pub scan_id: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
21
crates/collab2/src/db/tables/worktree_repository.rs
Normal file
@@ -0,0 +1,21 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_repositories")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub work_directory_id: i64,
    pub scan_id: i64,
    pub branch: Option<String>,
    pub is_deleted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
23
crates/collab2/src/db/tables/worktree_repository_statuses.rs
Normal file
@@ -0,0 +1,23 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_repository_statuses")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub work_directory_id: i64,
    #[sea_orm(primary_key)]
    pub repo_path: String,
    pub status: i64,
    pub scan_id: i64,
    pub is_deleted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
19
crates/collab2/src/db/tables/worktree_settings_file.rs
Normal file
@@ -0,0 +1,19 @@
use crate::db::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_settings_files")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub path: String,
    pub content: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
187
crates/collab2/src/db/tests.rs
Normal file
@@ -0,0 +1,187 @@
|
|||
mod buffer_tests;
|
||||
mod channel_tests;
|
||||
mod db_tests;
|
||||
mod feature_flag_tests;
|
||||
mod message_tests;
|
||||
|
||||
use super::*;
|
||||
use gpui::BackgroundExecutor;
|
||||
use parking_lot::Mutex;
|
||||
use sea_orm::ConnectionTrait;
|
||||
use sqlx::migrate::MigrateDatabase;
|
||||
use std::sync::{
|
||||
atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
|
||||
Arc,
|
||||
};
|
||||
|
||||
const TEST_RELEASE_CHANNEL: &str = "test";
|
||||
|
||||
pub struct TestDb {
|
||||
pub db: Option<Arc<Database>>,
|
||||
pub connection: Option<sqlx::AnyConnection>,
|
||||
}
|
||||
|
||||
impl TestDb {
|
||||
pub fn sqlite(background: BackgroundExecutor) -> Self {
|
||||
let url = "sqlite::memory:".to_string();
|
||||
let runtime = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_io()
|
||||
.enable_time()
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let mut db = runtime.block_on(async {
|
||||
let mut options = ConnectOptions::new(url);
|
||||
options.max_connections(5);
|
||||
let mut db = Database::new(options, Executor::Deterministic(background))
|
||||
.await
|
||||
.unwrap();
|
||||
let sql = include_str!(concat!(
|
||||
env!("CARGO_MANIFEST_DIR"),
|
||||
"/migrations.sqlite/20221109000000_test_schema.sql"
|
||||
));
|
||||
db.pool
|
||||
.execute(sea_orm::Statement::from_string(
|
||||
db.pool.get_database_backend(),
|
||||
sql,
|
||||
))
|
||||
.await
|
||||
.unwrap();
|
||||
db.initialize_notification_kinds().await.unwrap();
|
||||
db
|
||||
});
|
||||
|
||||
db.runtime = Some(runtime);
|
||||
|
||||
Self {
|
||||
db: Some(Arc::new(db)),
|
||||
connection: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn postgres(background: BackgroundExecutor) -> Self {
|
||||
static LOCK: Mutex<()> = Mutex::new(());
|
||||
|
||||
let _guard = LOCK.lock();
|
||||
let mut rng = StdRng::from_entropy();
|
||||
let url = format!(
|
||||
"postgres://postgres@localhost/zed-test-{}",
|
||||
rng.gen::<u128>()
|
||||
);
|
||||
let runtime = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_io()
|
||||
.enable_time()
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let mut db = runtime.block_on(async {
|
||||
sqlx::Postgres::create_database(&url)
|
||||
.await
|
||||
.expect("failed to create test db");
|
||||
let mut options = ConnectOptions::new(url);
|
||||
options
|
||||
.max_connections(5)
|
||||
.idle_timeout(Duration::from_secs(0));
|
||||
let mut db = Database::new(options, Executor::Deterministic(background))
|
||||
.await
|
||||
.unwrap();
|
||||
let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations");
|
||||
db.migrate(Path::new(migrations_path), false).await.unwrap();
|
||||
db.initialize_notification_kinds().await.unwrap();
|
||||
db
|
||||
});
|
||||
|
||||
db.runtime = Some(runtime);
|
||||
|
||||
Self {
|
||||
db: Some(Arc::new(db)),
|
||||
connection: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn db(&self) -> &Arc<Database> {
|
||||
self.db.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! test_both_dbs {
|
||||
($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
|
||||
#[gpui::test]
|
||||
async fn $postgres_test_name(cx: &mut gpui::TestAppContext) {
|
||||
let test_db = crate::db::TestDb::postgres(cx.executor().clone());
|
||||
$test_name(test_db.db()).await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn $sqlite_test_name(cx: &mut gpui::TestAppContext) {
|
||||
let test_db = crate::db::TestDb::sqlite(cx.executor().clone());
|
||||
$test_name(test_db.db()).await;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl Drop for TestDb {
|
||||
fn drop(&mut self) {
|
||||
let db = self.db.take().unwrap();
|
||||
if let sea_orm::DatabaseBackend::Postgres = db.pool.get_database_backend() {
|
||||
db.runtime.as_ref().unwrap().block_on(async {
|
||||
use util::ResultExt;
|
||||
let query = "
|
||||
SELECT pg_terminate_backend(pg_stat_activity.pid)
|
||||
FROM pg_stat_activity
|
||||
WHERE
|
||||
pg_stat_activity.datname = current_database() AND
|
||||
pid <> pg_backend_pid();
|
||||
";
|
||||
db.pool
|
||||
.execute(sea_orm::Statement::from_string(
|
||||
db.pool.get_database_backend(),
|
||||
query,
|
||||
))
|
||||
.await
|
||||
.log_err();
|
||||
sqlx::Postgres::drop_database(db.options.get_url())
|
||||
.await
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn channel_tree(channels: &[(ChannelId, &[ChannelId], &'static str, ChannelRole)]) -> Vec<Channel> {
|
||||
channels
|
||||
.iter()
|
||||
.map(|(id, parent_path, name, role)| Channel {
|
||||
id: *id,
|
||||
name: name.to_string(),
|
||||
visibility: ChannelVisibility::Members,
|
||||
role: *role,
|
||||
parent_path: parent_path.to_vec(),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
static GITHUB_USER_ID: AtomicI32 = AtomicI32::new(5);
|
||||
|
||||
async fn new_test_user(db: &Arc<Database>, email: &str) -> UserId {
|
||||
db.create_user(
|
||||
email,
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: email[0..email.find("@").unwrap()].to_string(),
|
||||
github_user_id: GITHUB_USER_ID.fetch_add(1, SeqCst),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id
|
||||
}
|
||||
|
||||
static TEST_CONNECTION_ID: AtomicU32 = AtomicU32::new(1);
|
||||
fn new_test_connection(server: ServerId) -> ConnectionId {
|
||||
ConnectionId {
|
||||
id: TEST_CONNECTION_ID.fetch_add(1, SeqCst),
|
||||
owner_id: server.0 as u32,
|
||||
}
|
||||
}
|
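The test_both_dbs! macro defined in tests.rs is how the test files below get both a Postgres and a SQLite variant of each logical test: one invocation generates two #[gpui::test] functions that build the matching TestDb and pass its database to the shared async test body. For readers skimming the diff, the expansion of the first invocation in buffer_tests.rs looks roughly like this (illustrative only, derived directly from the macro definition above):

    // Illustrative expansion of:
    // test_both_dbs!(test_channel_buffers, test_channel_buffers_postgres, test_channel_buffers_sqlite);
    #[gpui::test]
    async fn test_channel_buffers_postgres(cx: &mut gpui::TestAppContext) {
        let test_db = crate::db::TestDb::postgres(cx.executor().clone());
        test_channel_buffers(test_db.db()).await;
    }

    #[gpui::test]
    async fn test_channel_buffers_sqlite(cx: &mut gpui::TestAppContext) {
        let test_db = crate::db::TestDb::sqlite(cx.executor().clone());
        test_channel_buffers(test_db.db()).await;
    }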
506
crates/collab2/src/db/tests/buffer_tests.rs
Normal file
@@ -0,0 +1,506 @@
|
|||
use super::*;
|
||||
use crate::test_both_dbs;
|
||||
use language::proto::{self, serialize_version};
|
||||
use text::Buffer;
|
||||
|
||||
test_both_dbs!(
|
||||
test_channel_buffers,
|
||||
test_channel_buffers_postgres,
|
||||
test_channel_buffers_sqlite
|
||||
);
|
||||
|
||||
async fn test_channel_buffers(db: &Arc<Database>) {
|
||||
let a_id = db
|
||||
.create_user(
|
||||
"user_a@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_a".into(),
|
||||
github_user_id: 101,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
let b_id = db
|
||||
.create_user(
|
||||
"user_b@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_b".into(),
|
||||
github_user_id: 102,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
// This user will not be a part of the channel
|
||||
let c_id = db
|
||||
.create_user(
|
||||
"user_c@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_c".into(),
|
||||
github_user_id: 102,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let owner_id = db.create_server("production").await.unwrap().0 as u32;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.respond_to_channel_invite(zed_id, b_id, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let connection_id_a = ConnectionId { owner_id, id: 1 };
|
||||
let _ = db
|
||||
.join_channel_buffer(zed_id, a_id, connection_id_a)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut buffer_a = Buffer::new(0, 0, "".to_string());
|
||||
let mut operations = Vec::new();
|
||||
operations.push(buffer_a.edit([(0..0, "hello world")]));
|
||||
operations.push(buffer_a.edit([(5..5, ", cruel")]));
|
||||
operations.push(buffer_a.edit([(0..5, "goodbye")]));
|
||||
operations.push(buffer_a.undo().unwrap().1);
|
||||
assert_eq!(buffer_a.text(), "hello, cruel world");
|
||||
|
||||
let operations = operations
|
||||
.into_iter()
|
||||
.map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
db.update_channel_buffer(zed_id, a_id, &operations)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let connection_id_b = ConnectionId { owner_id, id: 2 };
|
||||
let buffer_response_b = db
|
||||
.join_channel_buffer(zed_id, b_id, connection_id_b)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
|
||||
buffer_b
|
||||
.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
|
||||
let operation = proto::deserialize_operation(operation).unwrap();
|
||||
if let language::Operation::Buffer(operation) = operation {
|
||||
operation
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(buffer_b.text(), "hello, cruel world");
|
||||
|
||||
// Ensure that C fails to open the buffer
|
||||
assert!(db
|
||||
.join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
|
||||
.await
|
||||
.is_err());
|
||||
|
||||
// Ensure that both collaborators have shown up
|
||||
assert_eq!(
|
||||
buffer_response_b.collaborators,
|
||||
&[
|
||||
rpc::proto::Collaborator {
|
||||
user_id: a_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
|
||||
replica_id: 0,
|
||||
},
|
||||
rpc::proto::Collaborator {
|
||||
user_id: b_id.to_proto(),
|
||||
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
|
||||
replica_id: 1,
|
||||
}
|
||||
]
|
||||
);
|
||||
|
||||
// Ensure that get_channel_buffer_collaborators works
|
||||
let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
|
||||
assert_eq!(zed_collaborators, &[a_id, b_id]);
|
||||
|
||||
let left_buffer = db
|
||||
.leave_channel_buffer(zed_id, connection_id_b)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(left_buffer.connections, &[connection_id_a],);
|
||||
|
||||
let cargo_id = db.create_root_channel("cargo", a_id).await.unwrap();
|
||||
let _ = db
|
||||
.join_channel_buffer(cargo_id, a_id, connection_id_a)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.leave_channel_buffers(connection_id_a).await.unwrap();
|
||||
|
||||
let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
|
||||
let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
|
||||
assert_eq!(zed_collaborators, &[]);
|
||||
assert_eq!(cargo_collaborators, &[]);
|
||||
|
||||
// When everyone has left the channel, the operations are collapsed into
|
||||
// a new base text.
|
||||
let buffer_response_b = db
|
||||
.join_channel_buffer(zed_id, b_id, connection_id_b)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(buffer_response_b.base_text, "hello, cruel world");
|
||||
assert_eq!(buffer_response_b.operations, &[]);
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_channel_buffers_last_operations,
|
||||
test_channel_buffers_last_operations_postgres,
|
||||
test_channel_buffers_last_operations_sqlite
|
||||
);
|
||||
|
||||
async fn test_channel_buffers_last_operations(db: &Database) {
|
||||
let user_id = db
|
||||
.create_user(
|
||||
"user_a@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_a".into(),
|
||||
github_user_id: 101,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
let observer_id = db
|
||||
.create_user(
|
||||
"user_b@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user_b".into(),
|
||||
github_user_id: 102,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
let owner_id = db.create_server("production").await.unwrap().0 as u32;
|
||||
let connection_id = ConnectionId {
|
||||
owner_id,
|
||||
id: user_id.0 as u32,
|
||||
};
|
||||
|
||||
let mut buffers = Vec::new();
|
||||
let mut text_buffers = Vec::new();
|
||||
for i in 0..3 {
|
||||
let channel = db
|
||||
.create_root_channel(&format!("channel-{i}"), user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.invite_channel_member(channel, observer_id, user_id, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
db.respond_to_channel_invite(channel, observer_id, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.join_channel_buffer(channel, user_id, connection_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
buffers.push(
|
||||
db.transaction(|tx| async move { db.get_channel_buffer(channel, &*tx).await })
|
||||
.await
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
text_buffers.push(Buffer::new(0, 0, "".to_string()));
|
||||
}
|
||||
|
||||
let operations = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.get_latest_operations_for_buffers([buffers[0].id, buffers[2].id], &*tx)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(operations.is_empty());
|
||||
|
||||
update_buffer(
|
||||
buffers[0].channel_id,
|
||||
user_id,
|
||||
db,
|
||||
vec![
|
||||
text_buffers[0].edit([(0..0, "a")]),
|
||||
text_buffers[0].edit([(0..0, "b")]),
|
||||
text_buffers[0].edit([(0..0, "c")]),
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
update_buffer(
|
||||
buffers[1].channel_id,
|
||||
user_id,
|
||||
db,
|
||||
vec![
|
||||
text_buffers[1].edit([(0..0, "d")]),
|
||||
text_buffers[1].edit([(1..1, "e")]),
|
||||
text_buffers[1].edit([(2..2, "f")]),
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
// cause buffer 1's epoch to increment.
|
||||
db.leave_channel_buffer(buffers[1].channel_id, connection_id)
|
||||
.await
|
||||
.unwrap();
|
||||
db.join_channel_buffer(buffers[1].channel_id, user_id, connection_id)
|
||||
.await
|
||||
.unwrap();
|
||||
text_buffers[1] = Buffer::new(1, 0, "def".to_string());
|
||||
update_buffer(
|
||||
buffers[1].channel_id,
|
||||
user_id,
|
||||
db,
|
||||
vec![
|
||||
text_buffers[1].edit([(0..0, "g")]),
|
||||
text_buffers[1].edit([(0..0, "h")]),
|
||||
],
|
||||
)
|
||||
.await;
|
||||
|
||||
update_buffer(
|
||||
buffers[2].channel_id,
|
||||
user_id,
|
||||
db,
|
||||
vec![text_buffers[2].edit([(0..0, "i")])],
|
||||
)
|
||||
.await;
|
||||
|
||||
let operations = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.get_latest_operations_for_buffers([buffers[1].id, buffers[2].id], &*tx)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_operations(
|
||||
&operations,
|
||||
&[
|
||||
(buffers[1].id, 1, &text_buffers[1]),
|
||||
(buffers[2].id, 0, &text_buffers[2]),
|
||||
],
|
||||
);
|
||||
|
||||
let operations = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.get_latest_operations_for_buffers([buffers[0].id, buffers[1].id], &*tx)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_operations(
|
||||
&operations,
|
||||
&[
|
||||
(buffers[0].id, 0, &text_buffers[0]),
|
||||
(buffers[1].id, 1, &text_buffers[1]),
|
||||
],
|
||||
);
|
||||
|
||||
let buffer_changes = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.unseen_channel_buffer_changes(
|
||||
observer_id,
|
||||
&[
|
||||
buffers[0].channel_id,
|
||||
buffers[1].channel_id,
|
||||
buffers[2].channel_id,
|
||||
],
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
buffer_changes,
|
||||
[
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[0].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[0].version()),
|
||||
},
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[1].channel_id.to_proto(),
|
||||
epoch: 1,
|
||||
version: serialize_version(&text_buffers[1].version())
|
||||
.into_iter()
|
||||
.filter(|vector| vector.replica_id
|
||||
== buffer_changes[1].version.first().unwrap().replica_id)
|
||||
.collect::<Vec<_>>(),
|
||||
},
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[2].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[2].version()),
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
db.observe_buffer_version(
|
||||
buffers[1].id,
|
||||
observer_id,
|
||||
1,
|
||||
serialize_version(&text_buffers[1].version()).as_slice(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let buffer_changes = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.unseen_channel_buffer_changes(
|
||||
observer_id,
|
||||
&[
|
||||
buffers[0].channel_id,
|
||||
buffers[1].channel_id,
|
||||
buffers[2].channel_id,
|
||||
],
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
buffer_changes,
|
||||
[
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[0].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[0].version()),
|
||||
},
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[2].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[2].version()),
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
// Observe an earlier version of the buffer.
|
||||
db.observe_buffer_version(
|
||||
buffers[1].id,
|
||||
observer_id,
|
||||
1,
|
||||
&[rpc::proto::VectorClockEntry {
|
||||
replica_id: 0,
|
||||
timestamp: 0,
|
||||
}],
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let buffer_changes = db
|
||||
.transaction(|tx| {
|
||||
let buffers = &buffers;
|
||||
async move {
|
||||
db.unseen_channel_buffer_changes(
|
||||
observer_id,
|
||||
&[
|
||||
buffers[0].channel_id,
|
||||
buffers[1].channel_id,
|
||||
buffers[2].channel_id,
|
||||
],
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
buffer_changes,
|
||||
[
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[0].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[0].version()),
|
||||
},
|
||||
rpc::proto::UnseenChannelBufferChange {
|
||||
channel_id: buffers[2].channel_id.to_proto(),
|
||||
epoch: 0,
|
||||
version: serialize_version(&text_buffers[2].version()),
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
async fn update_buffer(
|
||||
channel_id: ChannelId,
|
||||
user_id: UserId,
|
||||
db: &Database,
|
||||
operations: Vec<text::Operation>,
|
||||
) {
|
||||
let operations = operations
|
||||
.into_iter()
|
||||
.map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
|
||||
.collect::<Vec<_>>();
|
||||
db.update_channel_buffer(channel_id, user_id, &operations)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
fn assert_operations(
|
||||
operations: &[buffer_operation::Model],
|
||||
expected: &[(BufferId, i32, &text::Buffer)],
|
||||
) {
|
||||
let actual = operations
|
||||
.iter()
|
||||
.map(|op| buffer_operation::Model {
|
||||
buffer_id: op.buffer_id,
|
||||
epoch: op.epoch,
|
||||
lamport_timestamp: op.lamport_timestamp,
|
||||
replica_id: op.replica_id,
|
||||
value: vec![],
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let expected = expected
|
||||
.iter()
|
||||
.map(|(buffer_id, epoch, buffer)| buffer_operation::Model {
|
||||
buffer_id: *buffer_id,
|
||||
epoch: *epoch,
|
||||
lamport_timestamp: buffer.lamport_clock.value as i32 - 1,
|
||||
replica_id: buffer.replica_id() as i32,
|
||||
value: vec![],
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(actual, expected, "unexpected operations")
|
||||
}
|
819
crates/collab2/src/db/tests/channel_tests.rs
Normal file
@@ -0,0 +1,819 @@
|
|||
use crate::{
|
||||
db::{
|
||||
tests::{channel_tree, new_test_connection, new_test_user, TEST_RELEASE_CHANNEL},
|
||||
Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId,
|
||||
},
|
||||
test_both_dbs,
|
||||
};
|
||||
use rpc::{
|
||||
proto::{self},
|
||||
ConnectionId,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite);
|
||||
|
||||
async fn test_channels(db: &Arc<Database>) {
|
||||
let a_id = new_test_user(db, "user1@example.com").await;
|
||||
let b_id = new_test_user(db, "user2@example.com").await;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
// Make sure that people cannot read channels they haven't been invited to
|
||||
assert!(db.get_channel(zed_id, b_id).await.is_err());
|
||||
|
||||
db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.respond_to_channel_invite(zed_id, b_id, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap();
|
||||
let livestreaming_id = db
|
||||
.create_sub_channel("livestreaming", zed_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
let replace_id = db
|
||||
.create_sub_channel("replace", zed_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut members = db
|
||||
.transaction(|tx| async move {
|
||||
let channel = db.get_channel_internal(replace_id, &*tx).await?;
|
||||
Ok(db.get_channel_participants(&channel, &*tx).await?)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
members.sort();
|
||||
assert_eq!(members, &[a_id, b_id]);
|
||||
|
||||
let rust_id = db.create_root_channel("rust", a_id).await.unwrap();
|
||||
let cargo_id = db.create_sub_channel("cargo", rust_id, a_id).await.unwrap();
|
||||
|
||||
let cargo_ra_id = db
|
||||
.create_sub_channel("cargo-ra", cargo_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let result = db.get_channels_for_user(a_id).await.unwrap();
|
||||
assert_eq!(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed", ChannelRole::Admin),
|
||||
(crdb_id, &[zed_id], "crdb", ChannelRole::Admin),
|
||||
(
|
||||
livestreaming_id,
|
||||
&[zed_id],
|
||||
"livestreaming",
|
||||
ChannelRole::Admin
|
||||
),
|
||||
(replace_id, &[zed_id], "replace", ChannelRole::Admin),
|
||||
(rust_id, &[], "rust", ChannelRole::Admin),
|
||||
(cargo_id, &[rust_id], "cargo", ChannelRole::Admin),
|
||||
(
|
||||
cargo_ra_id,
|
||||
&[rust_id, cargo_id],
|
||||
"cargo-ra",
|
||||
ChannelRole::Admin
|
||||
)
|
||||
],)
|
||||
);
|
||||
|
||||
let result = db.get_channels_for_user(b_id).await.unwrap();
|
||||
assert_eq!(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed", ChannelRole::Member),
|
||||
(crdb_id, &[zed_id], "crdb", ChannelRole::Member),
|
||||
(
|
||||
livestreaming_id,
|
||||
&[zed_id],
|
||||
"livestreaming",
|
||||
ChannelRole::Member
|
||||
),
|
||||
(replace_id, &[zed_id], "replace", ChannelRole::Member)
|
||||
],)
|
||||
);
|
||||
|
||||
// Update member permissions
|
||||
let set_subchannel_admin = db
|
||||
.set_channel_member_role(crdb_id, a_id, b_id, ChannelRole::Admin)
|
||||
.await;
|
||||
assert!(set_subchannel_admin.is_err());
|
||||
let set_channel_admin = db
|
||||
.set_channel_member_role(zed_id, a_id, b_id, ChannelRole::Admin)
|
||||
.await;
|
||||
assert!(set_channel_admin.is_ok());
|
||||
|
||||
let result = db.get_channels_for_user(b_id).await.unwrap();
|
||||
assert_eq!(
|
||||
result.channels,
|
||||
channel_tree(&[
|
||||
(zed_id, &[], "zed", ChannelRole::Admin),
|
||||
(crdb_id, &[zed_id], "crdb", ChannelRole::Admin),
|
||||
(
|
||||
livestreaming_id,
|
||||
&[zed_id],
|
||||
"livestreaming",
|
||||
ChannelRole::Admin
|
||||
),
|
||||
(replace_id, &[zed_id], "replace", ChannelRole::Admin)
|
||||
],)
|
||||
);
|
||||
|
||||
// Remove a single channel
|
||||
db.delete_channel(crdb_id, a_id).await.unwrap();
|
||||
assert!(db.get_channel(crdb_id, a_id).await.is_err());
|
||||
|
||||
// Remove a channel tree
|
||||
let (mut channel_ids, user_ids) = db.delete_channel(rust_id, a_id).await.unwrap();
|
||||
channel_ids.sort();
|
||||
assert_eq!(channel_ids, &[rust_id, cargo_id, cargo_ra_id]);
|
||||
assert_eq!(user_ids, &[a_id]);
|
||||
|
||||
assert!(db.get_channel(rust_id, a_id).await.is_err());
|
||||
assert!(db.get_channel(cargo_id, a_id).await.is_err());
|
||||
assert!(db.get_channel(cargo_ra_id, a_id).await.is_err());
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_joining_channels,
|
||||
test_joining_channels_postgres,
|
||||
test_joining_channels_sqlite
|
||||
);
|
||||
|
||||
async fn test_joining_channels(db: &Arc<Database>) {
|
||||
let owner_id = db.create_server("test").await.unwrap().0 as u32;
|
||||
|
||||
let user_1 = new_test_user(db, "user1@example.com").await;
|
||||
let user_2 = new_test_user(db, "user2@example.com").await;
|
||||
|
||||
let channel_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
|
||||
|
||||
// can join a room with membership to its channel
|
||||
let (joined_room, _, _) = db
|
||||
.join_channel(
|
||||
channel_1,
|
||||
user_1,
|
||||
ConnectionId { owner_id, id: 1 },
|
||||
TEST_RELEASE_CHANNEL,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(joined_room.room.participants.len(), 1);
|
||||
|
||||
let room_id = RoomId::from_proto(joined_room.room.id);
|
||||
drop(joined_room);
|
||||
// cannot join a room without membership to its channel
|
||||
assert!(db
|
||||
.join_room(
|
||||
room_id,
|
||||
user_2,
|
||||
ConnectionId { owner_id, id: 1 },
|
||||
TEST_RELEASE_CHANNEL
|
||||
)
|
||||
.await
|
||||
.is_err());
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_channel_invites,
|
||||
test_channel_invites_postgres,
|
||||
test_channel_invites_sqlite
|
||||
);
|
||||
|
||||
async fn test_channel_invites(db: &Arc<Database>) {
|
||||
db.create_server("test").await.unwrap();
|
||||
|
||||
let user_1 = new_test_user(db, "user1@example.com").await;
|
||||
let user_2 = new_test_user(db, "user2@example.com").await;
|
||||
let user_3 = new_test_user(db, "user3@example.com").await;
|
||||
|
||||
let channel_1_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
|
||||
|
||||
let channel_1_2 = db.create_root_channel("channel_2", user_1).await.unwrap();
|
||||
|
||||
db.invite_channel_member(channel_1_1, user_2, user_1, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
db.invite_channel_member(channel_1_2, user_2, user_1, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
db.invite_channel_member(channel_1_1, user_3, user_1, ChannelRole::Admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let user_2_invites = db
|
||||
.get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2]
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|channel| channel.id)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]);
|
||||
|
||||
let user_3_invites = db
|
||||
.get_channel_invites_for_user(user_3) // -> [channel_1_1]
|
||||
.await
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|channel| channel.id)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(user_3_invites, &[channel_1_1]);
|
||||
|
||||
let mut members = db
|
||||
.get_channel_participant_details(channel_1_1, user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
members.sort_by_key(|member| member.user_id);
|
||||
assert_eq!(
|
||||
members,
|
||||
&[
|
||||
proto::ChannelMember {
|
||||
user_id: user_1.to_proto(),
|
||||
kind: proto::channel_member::Kind::Member.into(),
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: user_2.to_proto(),
|
||||
kind: proto::channel_member::Kind::Invitee.into(),
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: user_3.to_proto(),
|
||||
kind: proto::channel_member::Kind::Invitee.into(),
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
db.respond_to_channel_invite(channel_1_1, user_2, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let channel_1_3 = db
|
||||
.create_sub_channel("channel_3", channel_1_1, user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let members = db
|
||||
.get_channel_participant_details(channel_1_3, user_1)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
members,
|
||||
&[
|
||||
proto::ChannelMember {
|
||||
user_id: user_1.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: user_2.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_channel_renames,
|
||||
test_channel_renames_postgres,
|
||||
test_channel_renames_sqlite
|
||||
);
|
||||
|
||||
async fn test_channel_renames(db: &Arc<Database>) {
|
||||
db.create_server("test").await.unwrap();
|
||||
|
||||
let user_1 = db
|
||||
.create_user(
|
||||
"user1@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user1".into(),
|
||||
github_user_id: 5,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let user_2 = db
|
||||
.create_user(
|
||||
"user2@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user2".into(),
|
||||
github_user_id: 6,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", user_1).await.unwrap();
|
||||
|
||||
db.rename_channel(zed_id, user_1, "#zed-archive")
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let channel = db.get_channel(zed_id, user_1).await.unwrap();
|
||||
assert_eq!(channel.name, "zed-archive");
|
||||
|
||||
let non_permissioned_rename = db.rename_channel(zed_id, user_2, "hacked-lol").await;
|
||||
assert!(non_permissioned_rename.is_err());
|
||||
|
||||
let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await;
|
||||
assert!(bad_name_rename.is_err())
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_db_channel_moving,
|
||||
test_channels_moving_postgres,
|
||||
test_channels_moving_sqlite
|
||||
);
|
||||
|
||||
async fn test_db_channel_moving(db: &Arc<Database>) {
|
||||
let a_id = db
|
||||
.create_user(
|
||||
"user1@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user1".into(),
|
||||
github_user_id: 5,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
|
||||
|
||||
let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap();
|
||||
|
||||
let gpui2_id = db.create_sub_channel("gpui2", zed_id, a_id).await.unwrap();
|
||||
|
||||
let livestreaming_id = db
|
||||
.create_sub_channel("livestreaming", crdb_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_dag_id = db
|
||||
.create_sub_channel("livestreaming_dag", livestreaming_id, a_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// ========================================================================
|
||||
// sanity check
|
||||
// Initial DAG:
|
||||
// /- gpui2
|
||||
// zed -- crdb - livestreaming - livestreaming_dag
|
||||
let result = db.get_channels_for_user(a_id).await.unwrap();
|
||||
assert_channel_tree(
|
||||
result.channels,
|
||||
&[
|
||||
(zed_id, &[]),
|
||||
(crdb_id, &[zed_id]),
|
||||
(livestreaming_id, &[zed_id, crdb_id]),
|
||||
(livestreaming_dag_id, &[zed_id, crdb_id, livestreaming_id]),
|
||||
(gpui2_id, &[zed_id]),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_db_channel_moving_bugs,
|
||||
test_db_channel_moving_bugs_postgres,
|
||||
test_db_channel_moving_bugs_sqlite
|
||||
);
|
||||
|
||||
async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
|
||||
let user_id = db
|
||||
.create_user(
|
||||
"user1@example.com",
|
||||
false,
|
||||
NewUserParams {
|
||||
github_login: "user1".into(),
|
||||
github_user_id: 5,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.user_id;
|
||||
|
||||
let zed_id = db.create_root_channel("zed", user_id).await.unwrap();
|
||||
|
||||
let projects_id = db
|
||||
.create_sub_channel("projects", zed_id, user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let livestreaming_id = db
|
||||
.create_sub_channel("livestreaming", projects_id, user_id)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Dag is: zed - projects - livestreaming
|
||||
|
||||
// Move to same parent should be a no-op
|
||||
assert!(db
|
||||
.move_channel(projects_id, Some(zed_id), user_id)
|
||||
.await
|
||||
.unwrap()
|
||||
.is_none());
|
||||
|
||||
let result = db.get_channels_for_user(user_id).await.unwrap();
|
||||
assert_channel_tree(
|
||||
result.channels,
|
||||
&[
|
||||
(zed_id, &[]),
|
||||
(projects_id, &[zed_id]),
|
||||
(livestreaming_id, &[zed_id, projects_id]),
|
||||
],
|
||||
);
|
||||
|
||||
// Move the project channel to the root
|
||||
db.move_channel(projects_id, None, user_id).await.unwrap();
|
||||
let result = db.get_channels_for_user(user_id).await.unwrap();
|
||||
assert_channel_tree(
|
||||
result.channels,
|
||||
&[
|
||||
(zed_id, &[]),
|
||||
(projects_id, &[]),
|
||||
(livestreaming_id, &[projects_id]),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
test_both_dbs!(
|
||||
test_user_is_channel_participant,
|
||||
test_user_is_channel_participant_postgres,
|
||||
test_user_is_channel_participant_sqlite
|
||||
);
|
||||
|
||||
async fn test_user_is_channel_participant(db: &Arc<Database>) {
|
||||
let admin = new_test_user(db, "admin@example.com").await;
|
||||
let member = new_test_user(db, "member@example.com").await;
|
||||
let guest = new_test_user(db, "guest@example.com").await;
|
||||
|
||||
let zed_channel = db.create_root_channel("zed", admin).await.unwrap();
|
||||
let active_channel_id = db
|
||||
.create_sub_channel("active", zed_channel, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
let vim_channel_id = db
|
||||
.create_sub_channel("vim", active_channel_id, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.set_channel_visibility(vim_channel_id, crate::db::ChannelVisibility::Public, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
db.invite_channel_member(active_channel_id, member, admin, ChannelRole::Member)
|
||||
.await
|
||||
.unwrap();
|
||||
db.invite_channel_member(vim_channel_id, guest, admin, ChannelRole::Guest)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.respond_to_channel_invite(active_channel_id, member, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.transaction(|tx| async move {
|
||||
db.check_user_is_channel_participant(
|
||||
&db.get_channel_internal(vim_channel_id, &*tx).await?,
|
||||
admin,
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
db.transaction(|tx| async move {
|
||||
db.check_user_is_channel_participant(
|
||||
&db.get_channel_internal(vim_channel_id, &*tx).await?,
|
||||
member,
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut members = db
|
||||
.get_channel_participant_details(vim_channel_id, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
members.sort_by_key(|member| member.user_id);
|
||||
|
||||
assert_eq!(
|
||||
members,
|
||||
&[
|
||||
proto::ChannelMember {
|
||||
user_id: admin.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: member.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: guest.to_proto(),
|
||||
kind: proto::channel_member::Kind::Invitee.into(),
|
||||
role: proto::ChannelRole::Guest.into(),
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
db.respond_to_channel_invite(vim_channel_id, guest, true)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.transaction(|tx| async move {
|
||||
db.check_user_is_channel_participant(
|
||||
&db.get_channel_internal(vim_channel_id, &*tx).await?,
|
||||
guest,
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let channels = db.get_channels_for_user(guest).await.unwrap().channels;
|
||||
assert_channel_tree(channels, &[(vim_channel_id, &[])]);
|
||||
let channels = db.get_channels_for_user(member).await.unwrap().channels;
|
||||
assert_channel_tree(
|
||||
channels,
|
||||
&[
|
||||
(active_channel_id, &[]),
|
||||
(vim_channel_id, &[active_channel_id]),
|
||||
],
|
||||
);
|
||||
|
||||
db.set_channel_member_role(vim_channel_id, admin, guest, ChannelRole::Banned)
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(db
|
||||
.transaction(|tx| async move {
|
||||
db.check_user_is_channel_participant(
|
||||
&db.get_channel_internal(vim_channel_id, &*tx).await.unwrap(),
|
||||
guest,
|
||||
&*tx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.await
|
||||
.is_err());
|
||||
|
||||
let mut members = db
|
||||
.get_channel_participant_details(vim_channel_id, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
members.sort_by_key(|member| member.user_id);
|
||||
|
||||
assert_eq!(
|
||||
members,
|
||||
&[
|
||||
proto::ChannelMember {
|
||||
user_id: admin.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Admin.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: member.to_proto(),
|
||||
kind: proto::channel_member::Kind::AncestorMember.into(),
|
||||
role: proto::ChannelRole::Member.into(),
|
||||
},
|
||||
proto::ChannelMember {
|
||||
user_id: guest.to_proto(),
|
||||
kind: proto::channel_member::Kind::Member.into(),
|
||||
role: proto::ChannelRole::Banned.into(),
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
db.remove_channel_member(vim_channel_id, guest, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
db.invite_channel_member(zed_channel, guest, admin, ChannelRole::Guest)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// currently people invited to parent channels are not shown here
    let mut members = db
        .get_channel_participant_details(vim_channel_id, admin)
        .await
        .unwrap();

    members.sort_by_key(|member| member.user_id);

    assert_eq!(
        members,
        &[
            proto::ChannelMember {
                user_id: admin.to_proto(),
                kind: proto::channel_member::Kind::AncestorMember.into(),
                role: proto::ChannelRole::Admin.into(),
            },
            proto::ChannelMember {
                user_id: member.to_proto(),
                kind: proto::channel_member::Kind::AncestorMember.into(),
                role: proto::ChannelRole::Member.into(),
            },
        ]
    );

    db.respond_to_channel_invite(zed_channel, guest, true)
        .await
        .unwrap();

    db.transaction(|tx| async move {
        db.check_user_is_channel_participant(
            &db.get_channel_internal(zed_channel, &*tx).await.unwrap(),
            guest,
            &*tx,
        )
        .await
    })
    .await
    .unwrap();
    assert!(db
        .transaction(|tx| async move {
            db.check_user_is_channel_participant(
                &db.get_channel_internal(active_channel_id, &*tx)
                    .await
                    .unwrap(),
                guest,
                &*tx,
            )
            .await
        })
        .await
        .is_err(),);

    db.transaction(|tx| async move {
        db.check_user_is_channel_participant(
            &db.get_channel_internal(vim_channel_id, &*tx).await.unwrap(),
            guest,
            &*tx,
        )
        .await
    })
    .await
    .unwrap();

    let mut members = db
        .get_channel_participant_details(vim_channel_id, admin)
        .await
        .unwrap();

    members.sort_by_key(|member| member.user_id);

    assert_eq!(
        members,
        &[
            proto::ChannelMember {
                user_id: admin.to_proto(),
                kind: proto::channel_member::Kind::AncestorMember.into(),
                role: proto::ChannelRole::Admin.into(),
            },
            proto::ChannelMember {
                user_id: member.to_proto(),
                kind: proto::channel_member::Kind::AncestorMember.into(),
                role: proto::ChannelRole::Member.into(),
            },
            proto::ChannelMember {
                user_id: guest.to_proto(),
                kind: proto::channel_member::Kind::AncestorMember.into(),
                role: proto::ChannelRole::Guest.into(),
            },
        ]
    );

    let channels = db.get_channels_for_user(guest).await.unwrap().channels;
    assert_channel_tree(
        channels,
        &[(zed_channel, &[]), (vim_channel_id, &[zed_channel])],
    )
}

test_both_dbs!(
    test_user_joins_correct_channel,
    test_user_joins_correct_channel_postgres,
    test_user_joins_correct_channel_sqlite
);

async fn test_user_joins_correct_channel(db: &Arc<Database>) {
    let admin = new_test_user(db, "admin@example.com").await;

    let zed_channel = db.create_root_channel("zed", admin).await.unwrap();

    let active_channel = db
        .create_sub_channel("active", zed_channel, admin)
        .await
        .unwrap();

    let vim_channel = db
        .create_sub_channel("vim", active_channel, admin)
        .await
        .unwrap();

    let vim2_channel = db
        .create_sub_channel("vim2", vim_channel, admin)
        .await
        .unwrap();

    db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
        .await
        .unwrap();

    db.set_channel_visibility(vim_channel, crate::db::ChannelVisibility::Public, admin)
        .await
        .unwrap();

    db.set_channel_visibility(vim2_channel, crate::db::ChannelVisibility::Public, admin)
        .await
        .unwrap();

    let most_public = db
        .transaction(|tx| async move {
            Ok(db
                .public_ancestors_including_self(
                    &db.get_channel_internal(vim_channel, &*tx).await.unwrap(),
                    &tx,
                )
                .await?
                .first()
                .cloned())
        })
        .await
        .unwrap()
        .unwrap()
        .id;

    assert_eq!(most_public, zed_channel)
}

test_both_dbs!(
    test_guest_access,
    test_guest_access_postgres,
    test_guest_access_sqlite
);

async fn test_guest_access(db: &Arc<Database>) {
    let server = db.create_server("test").await.unwrap();

    let admin = new_test_user(db, "admin@example.com").await;
    let guest = new_test_user(db, "guest@example.com").await;
    let guest_connection = new_test_connection(server);

    let zed_channel = db.create_root_channel("zed", admin).await.unwrap();
    db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
        .await
        .unwrap();

    assert!(db
        .join_channel_chat(zed_channel, guest_connection, guest)
        .await
        .is_err());

    db.join_channel(zed_channel, guest, guest_connection, TEST_RELEASE_CHANNEL)
        .await
        .unwrap();

    assert!(db
        .join_channel_chat(zed_channel, guest_connection, guest)
        .await
        .is_ok())
}

#[track_caller]
fn assert_channel_tree(actual: Vec<Channel>, expected: &[(ChannelId, &[ChannelId])]) {
    let actual = actual
        .iter()
        .map(|channel| (channel.id, channel.parent_path.as_slice()))
        .collect::<Vec<_>>();
    pretty_assertions::assert_eq!(
        actual,
        expected.to_vec(),
        "wrong channel ids and parent paths"
    );
}
633
crates/collab2/src/db/tests/db_tests.rs
Normal file
@ -0,0 +1,633 @@
use super::*;
use crate::test_both_dbs;
use gpui::TestAppContext;
use pretty_assertions::{assert_eq, assert_ne};
use std::sync::Arc;
use tests::TestDb;

test_both_dbs!(
    test_get_users,
    test_get_users_by_ids_postgres,
    test_get_users_by_ids_sqlite
);

async fn test_get_users(db: &Arc<Database>) {
    let mut user_ids = Vec::new();
    let mut user_metric_ids = Vec::new();
    for i in 1..=4 {
        let user = db
            .create_user(
                &format!("user{i}@example.com"),
                false,
                NewUserParams {
                    github_login: format!("user{i}"),
                    github_user_id: i,
                },
            )
            .await
            .unwrap();
        user_ids.push(user.user_id);
        user_metric_ids.push(user.metrics_id);
    }

    assert_eq!(
        db.get_users_by_ids(user_ids.clone()).await.unwrap(),
        vec![
            User {
                id: user_ids[0],
                github_login: "user1".to_string(),
                github_user_id: Some(1),
                email_address: Some("user1@example.com".to_string()),
                admin: false,
                metrics_id: user_metric_ids[0].parse().unwrap(),
                ..Default::default()
            },
            User {
                id: user_ids[1],
                github_login: "user2".to_string(),
                github_user_id: Some(2),
                email_address: Some("user2@example.com".to_string()),
                admin: false,
                metrics_id: user_metric_ids[1].parse().unwrap(),
                ..Default::default()
            },
            User {
                id: user_ids[2],
                github_login: "user3".to_string(),
                github_user_id: Some(3),
                email_address: Some("user3@example.com".to_string()),
                admin: false,
                metrics_id: user_metric_ids[2].parse().unwrap(),
                ..Default::default()
            },
            User {
                id: user_ids[3],
                github_login: "user4".to_string(),
                github_user_id: Some(4),
                email_address: Some("user4@example.com".to_string()),
                admin: false,
                metrics_id: user_metric_ids[3].parse().unwrap(),
                ..Default::default()
            }
        ]
    );
}

test_both_dbs!(
    test_get_or_create_user_by_github_account,
    test_get_or_create_user_by_github_account_postgres,
    test_get_or_create_user_by_github_account_sqlite
);

async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
    let user_id1 = db
        .create_user(
            "user1@example.com",
            false,
            NewUserParams {
                github_login: "login1".into(),
                github_user_id: 101,
            },
        )
        .await
        .unwrap()
        .user_id;
    let user_id2 = db
        .create_user(
            "user2@example.com",
            false,
            NewUserParams {
                github_login: "login2".into(),
                github_user_id: 102,
            },
        )
        .await
        .unwrap()
        .user_id;

    let user = db
        .get_or_create_user_by_github_account("login1", None, None)
        .await
        .unwrap()
        .unwrap();
    assert_eq!(user.id, user_id1);
    assert_eq!(&user.github_login, "login1");
    assert_eq!(user.github_user_id, Some(101));

    assert!(db
        .get_or_create_user_by_github_account("non-existent-login", None, None)
        .await
        .unwrap()
        .is_none());

    let user = db
        .get_or_create_user_by_github_account("the-new-login2", Some(102), None)
        .await
        .unwrap()
        .unwrap();
    assert_eq!(user.id, user_id2);
    assert_eq!(&user.github_login, "the-new-login2");
    assert_eq!(user.github_user_id, Some(102));

    let user = db
        .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com"))
        .await
        .unwrap()
        .unwrap();
    assert_eq!(&user.github_login, "login3");
    assert_eq!(user.github_user_id, Some(103));
    assert_eq!(user.email_address, Some("user3@example.com".into()));
}

test_both_dbs!(
    test_create_access_tokens,
    test_create_access_tokens_postgres,
    test_create_access_tokens_sqlite
);

async fn test_create_access_tokens(db: &Arc<Database>) {
    let user = db
        .create_user(
            "u1@example.com",
            false,
            NewUserParams {
                github_login: "u1".into(),
                github_user_id: 1,
            },
        )
        .await
        .unwrap()
        .user_id;

    let token_1 = db.create_access_token(user, "h1", 2).await.unwrap();
    let token_2 = db.create_access_token(user, "h2", 2).await.unwrap();
    assert_eq!(
        db.get_access_token(token_1).await.unwrap(),
        access_token::Model {
            id: token_1,
            user_id: user,
            hash: "h1".into(),
        }
    );
    assert_eq!(
        db.get_access_token(token_2).await.unwrap(),
        access_token::Model {
            id: token_2,
            user_id: user,
            hash: "h2".into()
        }
    );

    let token_3 = db.create_access_token(user, "h3", 2).await.unwrap();
    assert_eq!(
        db.get_access_token(token_3).await.unwrap(),
        access_token::Model {
            id: token_3,
            user_id: user,
            hash: "h3".into()
        }
    );
    assert_eq!(
        db.get_access_token(token_2).await.unwrap(),
        access_token::Model {
            id: token_2,
            user_id: user,
            hash: "h2".into()
        }
    );
    assert!(db.get_access_token(token_1).await.is_err());

    let token_4 = db.create_access_token(user, "h4", 2).await.unwrap();
    assert_eq!(
        db.get_access_token(token_4).await.unwrap(),
        access_token::Model {
            id: token_4,
            user_id: user,
            hash: "h4".into()
        }
    );
    assert_eq!(
        db.get_access_token(token_3).await.unwrap(),
        access_token::Model {
            id: token_3,
            user_id: user,
            hash: "h3".into()
        }
    );
    assert!(db.get_access_token(token_2).await.is_err());
    assert!(db.get_access_token(token_1).await.is_err());
}

test_both_dbs!(
    test_add_contacts,
    test_add_contacts_postgres,
    test_add_contacts_sqlite
);

async fn test_add_contacts(db: &Arc<Database>) {
    let mut user_ids = Vec::new();
    for i in 0..3 {
        user_ids.push(
            db.create_user(
                &format!("user{i}@example.com"),
                false,
                NewUserParams {
                    github_login: format!("user{i}"),
                    github_user_id: i,
                },
            )
            .await
            .unwrap()
            .user_id,
        );
    }

    let user_1 = user_ids[0];
    let user_2 = user_ids[1];
    let user_3 = user_ids[2];

    // User starts with no contacts
    assert_eq!(db.get_contacts(user_1).await.unwrap(), &[]);

    // User requests a contact. Both users see the pending request.
    db.send_contact_request(user_1, user_2).await.unwrap();
    assert!(!db.has_contact(user_1, user_2).await.unwrap());
    assert!(!db.has_contact(user_2, user_1).await.unwrap());
    assert_eq!(
        db.get_contacts(user_1).await.unwrap(),
        &[Contact::Outgoing { user_id: user_2 }],
    );
    assert_eq!(
        db.get_contacts(user_2).await.unwrap(),
        &[Contact::Incoming { user_id: user_1 }]
    );

    // User 2 dismisses the contact request notification without accepting or rejecting.
    // We shouldn't notify them again.
    db.dismiss_contact_notification(user_1, user_2)
        .await
        .unwrap_err();
    db.dismiss_contact_notification(user_2, user_1)
        .await
        .unwrap();
    assert_eq!(
        db.get_contacts(user_2).await.unwrap(),
        &[Contact::Incoming { user_id: user_1 }]
    );

    // User can't accept their own contact request
    db.respond_to_contact_request(user_1, user_2, true)
        .await
        .unwrap_err();

    // User accepts a contact request. Both users see the contact.
    db.respond_to_contact_request(user_2, user_1, true)
        .await
        .unwrap();
    assert_eq!(
        db.get_contacts(user_1).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_2,
            busy: false,
        }],
    );
    assert!(db.has_contact(user_1, user_2).await.unwrap());
    assert!(db.has_contact(user_2, user_1).await.unwrap());
    assert_eq!(
        db.get_contacts(user_2).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_1,
            busy: false,
        }]
    );

    // Users cannot re-request existing contacts.
    db.send_contact_request(user_1, user_2).await.unwrap_err();
    db.send_contact_request(user_2, user_1).await.unwrap_err();

    // Users can't dismiss notifications of them accepting other users' requests.
    db.dismiss_contact_notification(user_2, user_1)
        .await
        .unwrap_err();
    assert_eq!(
        db.get_contacts(user_1).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_2,
            busy: false,
        }]
    );

    // Users can dismiss notifications of other users accepting their requests.
    db.dismiss_contact_notification(user_1, user_2)
        .await
        .unwrap();
    assert_eq!(
        db.get_contacts(user_1).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_2,
            busy: false,
        }]
    );

    // Users send each other concurrent contact requests and
    // see that they are immediately accepted.
    db.send_contact_request(user_1, user_3).await.unwrap();
    db.send_contact_request(user_3, user_1).await.unwrap();
    assert_eq!(
        db.get_contacts(user_1).await.unwrap(),
        &[
            Contact::Accepted {
                user_id: user_2,
                busy: false,
            },
            Contact::Accepted {
                user_id: user_3,
                busy: false,
            }
        ]
    );
    assert_eq!(
        db.get_contacts(user_3).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_1,
            busy: false,
        }],
    );

    // User declines a contact request. Both users see that it is gone.
    db.send_contact_request(user_2, user_3).await.unwrap();
    db.respond_to_contact_request(user_3, user_2, false)
        .await
        .unwrap();
    assert!(!db.has_contact(user_2, user_3).await.unwrap());
    assert!(!db.has_contact(user_3, user_2).await.unwrap());
    assert_eq!(
        db.get_contacts(user_2).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_1,
            busy: false,
        }]
    );
    assert_eq!(
        db.get_contacts(user_3).await.unwrap(),
        &[Contact::Accepted {
            user_id: user_1,
            busy: false,
        }],
    );
}

test_both_dbs!(
    test_metrics_id,
    test_metrics_id_postgres,
    test_metrics_id_sqlite
);

async fn test_metrics_id(db: &Arc<Database>) {
    let NewUserResult {
        user_id: user1,
        metrics_id: metrics_id1,
        ..
    } = db
        .create_user(
            "person1@example.com",
            false,
            NewUserParams {
                github_login: "person1".into(),
                github_user_id: 101,
            },
        )
        .await
        .unwrap();
    let NewUserResult {
        user_id: user2,
        metrics_id: metrics_id2,
        ..
    } = db
        .create_user(
            "person2@example.com",
            false,
            NewUserParams {
                github_login: "person2".into(),
                github_user_id: 102,
            },
        )
        .await
        .unwrap();

    assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1);
    assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2);
    assert_eq!(metrics_id1.len(), 36);
    assert_eq!(metrics_id2.len(), 36);
    assert_ne!(metrics_id1, metrics_id2);
}

test_both_dbs!(
    test_project_count,
    test_project_count_postgres,
    test_project_count_sqlite
);

async fn test_project_count(db: &Arc<Database>) {
    let owner_id = db.create_server("test").await.unwrap().0 as u32;

    let user1 = db
        .create_user(
            &format!("admin@example.com"),
            true,
            NewUserParams {
                github_login: "admin".into(),
                github_user_id: 0,
            },
        )
        .await
        .unwrap();
    let user2 = db
        .create_user(
            &format!("user@example.com"),
            false,
            NewUserParams {
                github_login: "user".into(),
                github_user_id: 1,
            },
        )
        .await
        .unwrap();

    let room_id = RoomId::from_proto(
        db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "", "dev")
            .await
            .unwrap()
            .id,
    );
    db.call(
        room_id,
        user1.user_id,
        ConnectionId { owner_id, id: 0 },
        user2.user_id,
        None,
    )
    .await
    .unwrap();
    db.join_room(
        room_id,
        user2.user_id,
        ConnectionId { owner_id, id: 1 },
        "dev",
    )
    .await
    .unwrap();
    assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);

    db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
        .await
        .unwrap();
    assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);

    db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
        .await
        .unwrap();
    assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);

    // Projects shared by admins aren't counted.
    db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
        .await
        .unwrap();
    assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);

    db.leave_room(ConnectionId { owner_id, id: 1 })
        .await
        .unwrap();
    assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
}

#[test]
fn test_fuzzy_like_string() {
    assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
    assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
    assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
}

#[gpui::test]
async fn test_fuzzy_search_users(cx: &mut TestAppContext) {
    let test_db = TestDb::postgres(cx.executor().clone());
    let db = test_db.db();
    for (i, github_login) in [
        "California",
        "colorado",
        "oregon",
        "washington",
        "florida",
        "delaware",
        "rhode-island",
    ]
    .into_iter()
    .enumerate()
    {
        db.create_user(
            &format!("{github_login}@example.com"),
            false,
            NewUserParams {
                github_login: github_login.into(),
                github_user_id: i as i32,
            },
        )
        .await
        .unwrap();
    }

    assert_eq!(
        fuzzy_search_user_names(db, "clr").await,
        &["colorado", "California"]
    );
    assert_eq!(
        fuzzy_search_user_names(db, "ro").await,
        &["rhode-island", "colorado", "oregon"],
    );

    async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
        db.fuzzy_search_users(query, 10)
            .await
            .unwrap()
            .into_iter()
            .map(|user| user.github_login)
            .collect::<Vec<_>>()
    }
}

test_both_dbs!(
    test_non_matching_release_channels,
    test_non_matching_release_channels_postgres,
    test_non_matching_release_channels_sqlite
);

async fn test_non_matching_release_channels(db: &Arc<Database>) {
    let owner_id = db.create_server("test").await.unwrap().0 as u32;

    let user1 = db
        .create_user(
            &format!("admin@example.com"),
            true,
            NewUserParams {
                github_login: "admin".into(),
                github_user_id: 0,
            },
        )
        .await
        .unwrap();
    let user2 = db
        .create_user(
            &format!("user@example.com"),
            false,
            NewUserParams {
                github_login: "user".into(),
                github_user_id: 1,
            },
        )
        .await
        .unwrap();

    let room = db
        .create_room(
            user1.user_id,
            ConnectionId { owner_id, id: 0 },
            "",
            "stable",
        )
        .await
        .unwrap();

    db.call(
        RoomId::from_proto(room.id),
        user1.user_id,
        ConnectionId { owner_id, id: 0 },
        user2.user_id,
        None,
    )
    .await
    .unwrap();

    // User attempts to join from preview
    let result = db
        .join_room(
            RoomId::from_proto(room.id),
            user2.user_id,
            ConnectionId { owner_id, id: 1 },
            "preview",
        )
        .await;

    assert!(result.is_err());

    // User switches to stable
    let result = db
        .join_room(
            RoomId::from_proto(room.id),
            user2.user_id,
            ConnectionId { owner_id, id: 1 },
            "stable",
        )
        .await;

    assert!(result.is_ok())
}
58
crates/collab2/src/db/tests/feature_flag_tests.rs
Normal file
@ -0,0 +1,58 @@
use crate::{
    db::{Database, NewUserParams},
    test_both_dbs,
};
use std::sync::Arc;

test_both_dbs!(
    test_get_user_flags,
    test_get_user_flags_postgres,
    test_get_user_flags_sqlite
);

async fn test_get_user_flags(db: &Arc<Database>) {
    let user_1 = db
        .create_user(
            &format!("user1@example.com"),
            false,
            NewUserParams {
                github_login: format!("user1"),
                github_user_id: 1,
            },
        )
        .await
        .unwrap()
        .user_id;

    let user_2 = db
        .create_user(
            &format!("user2@example.com"),
            false,
            NewUserParams {
                github_login: format!("user2"),
                github_user_id: 2,
            },
        )
        .await
        .unwrap()
        .user_id;

    const CHANNELS_ALPHA: &'static str = "channels-alpha";
    const NEW_SEARCH: &'static str = "new-search";

    let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap();
    let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap();

    db.add_user_flag(user_1, channels_flag).await.unwrap();
    db.add_user_flag(user_1, search_flag).await.unwrap();

    db.add_user_flag(user_2, channels_flag).await.unwrap();

    let mut user_1_flags = db.get_user_flags(user_1).await.unwrap();
    user_1_flags.sort();
    assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]);

    let mut user_2_flags = db.get_user_flags(user_2).await.unwrap();
    user_2_flags.sort();
    assert_eq!(user_2_flags, &[CHANNELS_ALPHA]);
}
454
crates/collab2/src/db/tests/message_tests.rs
Normal file
@ -0,0 +1,454 @@
use super::new_test_user;
use crate::{
    db::{ChannelRole, Database, MessageId},
    test_both_dbs,
};
use channel::mentions_to_proto;
use std::sync::Arc;
use time::OffsetDateTime;

test_both_dbs!(
    test_channel_message_retrieval,
    test_channel_message_retrieval_postgres,
    test_channel_message_retrieval_sqlite
);

async fn test_channel_message_retrieval(db: &Arc<Database>) {
    let user = new_test_user(db, "user@example.com").await;
    let result = db.create_channel("channel", None, user).await.unwrap();

    let owner_id = db.create_server("test").await.unwrap().0 as u32;
    db.join_channel_chat(
        result.channel.id,
        rpc::ConnectionId { owner_id, id: 0 },
        user,
    )
    .await
    .unwrap();

    let mut all_messages = Vec::new();
    for i in 0..10 {
        all_messages.push(
            db.create_channel_message(
                result.channel.id,
                user,
                &i.to_string(),
                &[],
                OffsetDateTime::now_utc(),
                i,
            )
            .await
            .unwrap()
            .message_id
            .to_proto(),
        );
    }

    let messages = db
        .get_channel_messages(result.channel.id, user, 3, None)
        .await
        .unwrap()
        .into_iter()
        .map(|message| message.id)
        .collect::<Vec<_>>();
    assert_eq!(messages, &all_messages[7..10]);

    let messages = db
        .get_channel_messages(
            result.channel.id,
            user,
            4,
            Some(MessageId::from_proto(all_messages[6])),
        )
        .await
        .unwrap()
        .into_iter()
        .map(|message| message.id)
        .collect::<Vec<_>>();
    assert_eq!(messages, &all_messages[2..6]);
}

test_both_dbs!(
    test_channel_message_nonces,
    test_channel_message_nonces_postgres,
    test_channel_message_nonces_sqlite
);

async fn test_channel_message_nonces(db: &Arc<Database>) {
    let user_a = new_test_user(db, "user_a@example.com").await;
    let user_b = new_test_user(db, "user_b@example.com").await;
    let user_c = new_test_user(db, "user_c@example.com").await;
    let channel = db.create_root_channel("channel", user_a).await.unwrap();
    db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member)
        .await
        .unwrap();
    db.invite_channel_member(channel, user_c, user_a, ChannelRole::Member)
        .await
        .unwrap();
    db.respond_to_channel_invite(channel, user_b, true)
        .await
        .unwrap();
    db.respond_to_channel_invite(channel, user_c, true)
        .await
        .unwrap();

    let owner_id = db.create_server("test").await.unwrap().0 as u32;
    db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 0 }, user_a)
        .await
        .unwrap();
    db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 1 }, user_b)
        .await
        .unwrap();

    // As user A, create messages that re-use the same nonces. The requests
    // succeed, but return the same ids.
    let id1 = db
        .create_channel_message(
            channel,
            user_a,
            "hi @user_b",
            &mentions_to_proto(&[(3..10, user_b.to_proto())]),
            OffsetDateTime::now_utc(),
            100,
        )
        .await
        .unwrap()
        .message_id;
    let id2 = db
        .create_channel_message(
            channel,
            user_a,
            "hello, fellow users",
            &mentions_to_proto(&[]),
            OffsetDateTime::now_utc(),
            200,
        )
        .await
        .unwrap()
        .message_id;
    let id3 = db
        .create_channel_message(
            channel,
            user_a,
            "bye @user_c (same nonce as first message)",
            &mentions_to_proto(&[(4..11, user_c.to_proto())]),
            OffsetDateTime::now_utc(),
            100,
        )
        .await
        .unwrap()
        .message_id;
    let id4 = db
        .create_channel_message(
            channel,
            user_a,
            "omg (same nonce as second message)",
            &mentions_to_proto(&[]),
            OffsetDateTime::now_utc(),
            200,
        )
        .await
        .unwrap()
        .message_id;

    // As a different user, reuse one of the same nonces. This request succeeds
    // and returns a different id.
    let id5 = db
        .create_channel_message(
            channel,
            user_b,
            "omg @user_a (same nonce as user_a's first message)",
            &mentions_to_proto(&[(4..11, user_a.to_proto())]),
            OffsetDateTime::now_utc(),
            100,
        )
        .await
        .unwrap()
        .message_id;

    assert_ne!(id1, id2);
    assert_eq!(id1, id3);
    assert_eq!(id2, id4);
    assert_ne!(id5, id1);

    let messages = db
        .get_channel_messages(channel, user_a, 5, None)
        .await
        .unwrap()
        .into_iter()
        .map(|m| (m.id, m.body, m.mentions))
        .collect::<Vec<_>>();
    assert_eq!(
        messages,
        &[
            (
                id1.to_proto(),
                "hi @user_b".into(),
                mentions_to_proto(&[(3..10, user_b.to_proto())]),
            ),
            (
                id2.to_proto(),
                "hello, fellow users".into(),
                mentions_to_proto(&[])
            ),
            (
                id5.to_proto(),
                "omg @user_a (same nonce as user_a's first message)".into(),
                mentions_to_proto(&[(4..11, user_a.to_proto())]),
            ),
        ]
    );
}

test_both_dbs!(
    test_unseen_channel_messages,
    test_unseen_channel_messages_postgres,
    test_unseen_channel_messages_sqlite
);

async fn test_unseen_channel_messages(db: &Arc<Database>) {
    let user = new_test_user(db, "user_a@example.com").await;
    let observer = new_test_user(db, "user_b@example.com").await;

    let channel_1 = db.create_root_channel("channel", user).await.unwrap();
    let channel_2 = db.create_root_channel("channel-2", user).await.unwrap();

    db.invite_channel_member(channel_1, observer, user, ChannelRole::Member)
        .await
        .unwrap();
    db.invite_channel_member(channel_2, observer, user, ChannelRole::Member)
        .await
        .unwrap();

    db.respond_to_channel_invite(channel_1, observer, true)
        .await
        .unwrap();
    db.respond_to_channel_invite(channel_2, observer, true)
        .await
        .unwrap();

    let owner_id = db.create_server("test").await.unwrap().0 as u32;
    let user_connection_id = rpc::ConnectionId { owner_id, id: 0 };

    db.join_channel_chat(channel_1, user_connection_id, user)
        .await
        .unwrap();

    let _ = db
        .create_channel_message(channel_1, user, "1_1", &[], OffsetDateTime::now_utc(), 1)
        .await
        .unwrap();

    let second_message = db
        .create_channel_message(channel_1, user, "1_2", &[], OffsetDateTime::now_utc(), 2)
        .await
        .unwrap()
        .message_id;

    let third_message = db
        .create_channel_message(channel_1, user, "1_3", &[], OffsetDateTime::now_utc(), 3)
        .await
        .unwrap()
        .message_id;

    db.join_channel_chat(channel_2, user_connection_id, user)
        .await
        .unwrap();

    let fourth_message = db
        .create_channel_message(channel_2, user, "2_1", &[], OffsetDateTime::now_utc(), 4)
        .await
        .unwrap()
        .message_id;

    // Check that observer has new messages
    let unseen_messages = db
        .transaction(|tx| async move {
            db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
                .await
        })
        .await
        .unwrap();

    assert_eq!(
        unseen_messages,
        [
            rpc::proto::UnseenChannelMessage {
                channel_id: channel_1.to_proto(),
                message_id: third_message.to_proto(),
            },
            rpc::proto::UnseenChannelMessage {
                channel_id: channel_2.to_proto(),
                message_id: fourth_message.to_proto(),
            },
        ]
    );

    // Observe the second message
    db.observe_channel_message(channel_1, observer, second_message)
        .await
        .unwrap();

    // Make sure the observer still has a new message
    let unseen_messages = db
        .transaction(|tx| async move {
            db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
                .await
        })
        .await
        .unwrap();
    assert_eq!(
        unseen_messages,
        [
            rpc::proto::UnseenChannelMessage {
                channel_id: channel_1.to_proto(),
                message_id: third_message.to_proto(),
            },
            rpc::proto::UnseenChannelMessage {
                channel_id: channel_2.to_proto(),
                message_id: fourth_message.to_proto(),
            },
        ]
    );

    // Observe the third message.
    db.observe_channel_message(channel_1, observer, third_message)
        .await
        .unwrap();

    // Make sure the observer does not have a new message
    let unseen_messages = db
        .transaction(|tx| async move {
            db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
                .await
        })
        .await
        .unwrap();

    assert_eq!(
        unseen_messages,
        [rpc::proto::UnseenChannelMessage {
            channel_id: channel_2.to_proto(),
            message_id: fourth_message.to_proto(),
        }]
    );

    // Observe the second message again, should not regress our observed state
    db.observe_channel_message(channel_1, observer, second_message)
        .await
        .unwrap();

    // Make sure the observer does not have a new message
    let unseen_messages = db
        .transaction(|tx| async move {
            db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
                .await
        })
        .await
        .unwrap();
    assert_eq!(
        unseen_messages,
        [rpc::proto::UnseenChannelMessage {
            channel_id: channel_2.to_proto(),
            message_id: fourth_message.to_proto(),
        }]
    );
}

test_both_dbs!(
    test_channel_message_mentions,
    test_channel_message_mentions_postgres,
    test_channel_message_mentions_sqlite
);

async fn test_channel_message_mentions(db: &Arc<Database>) {
    let user_a = new_test_user(db, "user_a@example.com").await;
    let user_b = new_test_user(db, "user_b@example.com").await;
    let user_c = new_test_user(db, "user_c@example.com").await;

    let channel = db
        .create_channel("channel", None, user_a)
        .await
        .unwrap()
        .channel
        .id;
    db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member)
        .await
        .unwrap();
    db.respond_to_channel_invite(channel, user_b, true)
        .await
        .unwrap();

    let owner_id = db.create_server("test").await.unwrap().0 as u32;
    let connection_id = rpc::ConnectionId { owner_id, id: 0 };
    db.join_channel_chat(channel, connection_id, user_a)
        .await
        .unwrap();

    db.create_channel_message(
        channel,
        user_a,
        "hi @user_b and @user_c",
        &mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]),
        OffsetDateTime::now_utc(),
        1,
    )
    .await
    .unwrap();
    db.create_channel_message(
        channel,
        user_a,
        "bye @user_c",
        &mentions_to_proto(&[(4..11, user_c.to_proto())]),
        OffsetDateTime::now_utc(),
        2,
    )
    .await
    .unwrap();
    db.create_channel_message(
        channel,
        user_a,
        "umm",
        &mentions_to_proto(&[]),
        OffsetDateTime::now_utc(),
        3,
    )
    .await
    .unwrap();
    db.create_channel_message(
        channel,
        user_a,
        "@user_b, stop.",
        &mentions_to_proto(&[(0..7, user_b.to_proto())]),
        OffsetDateTime::now_utc(),
        4,
    )
    .await
    .unwrap();

    let messages = db
        .get_channel_messages(channel, user_b, 5, None)
        .await
        .unwrap()
        .into_iter()
        .map(|m| (m.body, m.mentions))
        .collect::<Vec<_>>();
    assert_eq!(
        &messages,
        &[
            (
                "hi @user_b and @user_c".into(),
                mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]),
            ),
            (
                "bye @user_c".into(),
                mentions_to_proto(&[(4..11, user_c.to_proto())]),
            ),
            ("umm".into(), mentions_to_proto(&[]),),
            (
                "@user_b, stop.".into(),
                mentions_to_proto(&[(0..7, user_b.to_proto())]),
            ),
        ]
    );
}