From 29e43384f020f7e20c3012a1f46c53a717941297 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 20 Aug 2023 14:25:19 -0700 Subject: [PATCH 01/22] Simplify macro for running a test with both databases --- crates/collab/src/db/db_tests.rs | 1078 +++++++++++++++--------------- crates/collab/src/db/test_db.rs | 17 + 2 files changed, 558 insertions(+), 537 deletions(-) diff --git a/crates/collab/src/db/db_tests.rs b/crates/collab/src/db/db_tests.rs index 8e9a80dbab..8c5dab77bd 100644 --- a/crates/collab/src/db/db_tests.rs +++ b/crates/collab/src/db/db_tests.rs @@ -1,242 +1,234 @@ use super::*; +use crate::test_both_dbs; use gpui::executor::{Background, Deterministic}; use pretty_assertions::{assert_eq, assert_ne}; use std::sync::Arc; use test_db::TestDb; -macro_rules! test_both_dbs { - ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => { - #[gpui::test] - async fn $postgres_test_name() { - let test_db = TestDb::postgres(Deterministic::new(0).build_background()); - let $db = test_db.db(); - $body - } +test_both_dbs!( + test_get_users, + test_get_users_by_ids_postgres, + test_get_users_by_ids_sqlite +); - #[gpui::test] - async fn $sqlite_test_name() { - let test_db = TestDb::sqlite(Deterministic::new(0).build_background()); - let $db = test_db.db(); - $body - } - }; +async fn test_get_users(db: &Arc) { + let mut user_ids = Vec::new(); + let mut user_metric_ids = Vec::new(); + for i in 1..=4 { + let user = db + .create_user( + &format!("user{i}@example.com"), + false, + NewUserParams { + github_login: format!("user{i}"), + github_user_id: i, + invite_count: 0, + }, + ) + .await + .unwrap(); + user_ids.push(user.user_id); + user_metric_ids.push(user.metrics_id); + } + + assert_eq!( + db.get_users_by_ids(user_ids.clone()).await.unwrap(), + vec![ + User { + id: user_ids[0], + github_login: "user1".to_string(), + github_user_id: Some(1), + email_address: Some("user1@example.com".to_string()), + admin: false, + metrics_id: user_metric_ids[0].parse().unwrap(), + ..Default::default() + }, + User { + id: user_ids[1], + github_login: "user2".to_string(), + github_user_id: Some(2), + email_address: Some("user2@example.com".to_string()), + admin: false, + metrics_id: user_metric_ids[1].parse().unwrap(), + ..Default::default() + }, + User { + id: user_ids[2], + github_login: "user3".to_string(), + github_user_id: Some(3), + email_address: Some("user3@example.com".to_string()), + admin: false, + metrics_id: user_metric_ids[2].parse().unwrap(), + ..Default::default() + }, + User { + id: user_ids[3], + github_login: "user4".to_string(), + github_user_id: Some(4), + email_address: Some("user4@example.com".to_string()), + admin: false, + metrics_id: user_metric_ids[3].parse().unwrap(), + ..Default::default() + } + ] + ); } test_both_dbs!( - test_get_users_by_ids_postgres, - test_get_users_by_ids_sqlite, - db, - { - let mut user_ids = Vec::new(); - let mut user_metric_ids = Vec::new(); - for i in 1..=4 { - let user = db - .create_user( - &format!("user{i}@example.com"), - false, - NewUserParams { - github_login: format!("user{i}"), - github_user_id: i, - invite_count: 0, - }, - ) - .await - .unwrap(); - user_ids.push(user.user_id); - user_metric_ids.push(user.metrics_id); - } - - assert_eq!( - db.get_users_by_ids(user_ids.clone()).await.unwrap(), - vec![ - User { - id: user_ids[0], - github_login: "user1".to_string(), - github_user_id: Some(1), - email_address: Some("user1@example.com".to_string()), - admin: false, - metrics_id: user_metric_ids[0].parse().unwrap(), - 
..Default::default() - }, - User { - id: user_ids[1], - github_login: "user2".to_string(), - github_user_id: Some(2), - email_address: Some("user2@example.com".to_string()), - admin: false, - metrics_id: user_metric_ids[1].parse().unwrap(), - ..Default::default() - }, - User { - id: user_ids[2], - github_login: "user3".to_string(), - github_user_id: Some(3), - email_address: Some("user3@example.com".to_string()), - admin: false, - metrics_id: user_metric_ids[2].parse().unwrap(), - ..Default::default() - }, - User { - id: user_ids[3], - github_login: "user4".to_string(), - github_user_id: Some(4), - email_address: Some("user4@example.com".to_string()), - admin: false, - metrics_id: user_metric_ids[3].parse().unwrap(), - ..Default::default() - } - ] - ); - } -); - -test_both_dbs!( + test_get_or_create_user_by_github_account, test_get_or_create_user_by_github_account_postgres, - test_get_or_create_user_by_github_account_sqlite, - db, - { - let user_id1 = db - .create_user( - "user1@example.com", - false, - NewUserParams { - github_login: "login1".into(), - github_user_id: 101, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - let user_id2 = db - .create_user( - "user2@example.com", - false, - NewUserParams { - github_login: "login2".into(), - github_user_id: 102, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let user = db - .get_or_create_user_by_github_account("login1", None, None) - .await - .unwrap() - .unwrap(); - assert_eq!(user.id, user_id1); - assert_eq!(&user.github_login, "login1"); - assert_eq!(user.github_user_id, Some(101)); - - assert!(db - .get_or_create_user_by_github_account("non-existent-login", None, None) - .await - .unwrap() - .is_none()); - - let user = db - .get_or_create_user_by_github_account("the-new-login2", Some(102), None) - .await - .unwrap() - .unwrap(); - assert_eq!(user.id, user_id2); - assert_eq!(&user.github_login, "the-new-login2"); - assert_eq!(user.github_user_id, Some(102)); - - let user = db - .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com")) - .await - .unwrap() - .unwrap(); - assert_eq!(&user.github_login, "login3"); - assert_eq!(user.github_user_id, Some(103)); - assert_eq!(user.email_address, Some("user3@example.com".into())); - } + test_get_or_create_user_by_github_account_sqlite ); +async fn test_get_or_create_user_by_github_account(db: &Arc) { + let user_id1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "login1".into(), + github_user_id: 101, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + let user_id2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "login2".into(), + github_user_id: 102, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let user = db + .get_or_create_user_by_github_account("login1", None, None) + .await + .unwrap() + .unwrap(); + assert_eq!(user.id, user_id1); + assert_eq!(&user.github_login, "login1"); + assert_eq!(user.github_user_id, Some(101)); + + assert!(db + .get_or_create_user_by_github_account("non-existent-login", None, None) + .await + .unwrap() + .is_none()); + + let user = db + .get_or_create_user_by_github_account("the-new-login2", Some(102), None) + .await + .unwrap() + .unwrap(); + assert_eq!(user.id, user_id2); + assert_eq!(&user.github_login, "the-new-login2"); + assert_eq!(user.github_user_id, Some(102)); + + let user = db + .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com")) + .await + .unwrap() + 
.unwrap(); + assert_eq!(&user.github_login, "login3"); + assert_eq!(user.github_user_id, Some(103)); + assert_eq!(user.email_address, Some("user3@example.com".into())); +} + test_both_dbs!( + test_create_access_tokens, test_create_access_tokens_postgres, - test_create_access_tokens_sqlite, - db, - { - let user = db - .create_user( - "u1@example.com", - false, - NewUserParams { - github_login: "u1".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let token_1 = db.create_access_token(user, "h1", 2).await.unwrap(); - let token_2 = db.create_access_token(user, "h2", 2).await.unwrap(); - assert_eq!( - db.get_access_token(token_1).await.unwrap(), - access_token::Model { - id: token_1, - user_id: user, - hash: "h1".into(), - } - ); - assert_eq!( - db.get_access_token(token_2).await.unwrap(), - access_token::Model { - id: token_2, - user_id: user, - hash: "h2".into() - } - ); - - let token_3 = db.create_access_token(user, "h3", 2).await.unwrap(); - assert_eq!( - db.get_access_token(token_3).await.unwrap(), - access_token::Model { - id: token_3, - user_id: user, - hash: "h3".into() - } - ); - assert_eq!( - db.get_access_token(token_2).await.unwrap(), - access_token::Model { - id: token_2, - user_id: user, - hash: "h2".into() - } - ); - assert!(db.get_access_token(token_1).await.is_err()); - - let token_4 = db.create_access_token(user, "h4", 2).await.unwrap(); - assert_eq!( - db.get_access_token(token_4).await.unwrap(), - access_token::Model { - id: token_4, - user_id: user, - hash: "h4".into() - } - ); - assert_eq!( - db.get_access_token(token_3).await.unwrap(), - access_token::Model { - id: token_3, - user_id: user, - hash: "h3".into() - } - ); - assert!(db.get_access_token(token_2).await.is_err()); - assert!(db.get_access_token(token_1).await.is_err()); - } + test_create_access_tokens_sqlite ); -test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, { +async fn test_create_access_tokens(db: &Arc) { + let user = db + .create_user( + "u1@example.com", + false, + NewUserParams { + github_login: "u1".into(), + github_user_id: 1, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let token_1 = db.create_access_token(user, "h1", 2).await.unwrap(); + let token_2 = db.create_access_token(user, "h2", 2).await.unwrap(); + assert_eq!( + db.get_access_token(token_1).await.unwrap(), + access_token::Model { + id: token_1, + user_id: user, + hash: "h1".into(), + } + ); + assert_eq!( + db.get_access_token(token_2).await.unwrap(), + access_token::Model { + id: token_2, + user_id: user, + hash: "h2".into() + } + ); + + let token_3 = db.create_access_token(user, "h3", 2).await.unwrap(); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user, + hash: "h3".into() + } + ); + assert_eq!( + db.get_access_token(token_2).await.unwrap(), + access_token::Model { + id: token_2, + user_id: user, + hash: "h2".into() + } + ); + assert!(db.get_access_token(token_1).await.is_err()); + + let token_4 = db.create_access_token(user, "h4", 2).await.unwrap(); + assert_eq!( + db.get_access_token(token_4).await.unwrap(), + access_token::Model { + id: token_4, + user_id: user, + hash: "h4".into() + } + ); + assert_eq!( + db.get_access_token(token_3).await.unwrap(), + access_token::Model { + id: token_3, + user_id: user, + hash: "h3".into() + } + ); + assert!(db.get_access_token(token_2).await.is_err()); + assert!(db.get_access_token(token_1).await.is_err()); +} + +test_both_dbs!( + test_add_contacts, + 
test_add_contacts_postgres, + test_add_contacts_sqlite +); + +async fn test_add_contacts(db: &Arc) { let mut user_ids = Vec::new(); for i in 0..3 { user_ids.push( @@ -403,9 +395,15 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, { busy: false, }], ); -}); +} -test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, { +test_both_dbs!( + test_metrics_id, + test_metrics_id_postgres, + test_metrics_id_sqlite +); + +async fn test_metrics_id(db: &Arc) { let NewUserResult { user_id: user1, metrics_id: metrics_id1, @@ -444,82 +442,83 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, { assert_eq!(metrics_id1.len(), 36); assert_eq!(metrics_id2.len(), 36); assert_ne!(metrics_id1, metrics_id2); -}); +} test_both_dbs!( + test_project_count, test_project_count_postgres, - test_project_count_sqlite, - db, - { - let owner_id = db.create_server("test").await.unwrap().0 as u32; + test_project_count_sqlite +); - let user1 = db - .create_user( - &format!("admin@example.com"), - true, - NewUserParams { - github_login: "admin".into(), - github_user_id: 0, - invite_count: 0, - }, - ) - .await - .unwrap(); - let user2 = db - .create_user( - &format!("user@example.com"), - false, - NewUserParams { - github_login: "user".into(), - github_user_id: 1, - invite_count: 0, - }, - ) - .await - .unwrap(); +async fn test_project_count(db: &Arc) { + let owner_id = db.create_server("test").await.unwrap().0 as u32; - let room_id = RoomId::from_proto( - db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "") - .await - .unwrap() - .id, - ); - db.call( - room_id, - user1.user_id, - ConnectionId { owner_id, id: 0 }, - user2.user_id, - None, + let user1 = db + .create_user( + &format!("admin@example.com"), + true, + NewUserParams { + github_login: "admin".into(), + github_user_id: 0, + invite_count: 0, + }, + ) + .await + .unwrap(); + let user2 = db + .create_user( + &format!("user@example.com"), + false, + NewUserParams { + github_login: "user".into(), + github_user_id: 1, + invite_count: 0, + }, ) .await .unwrap(); - db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 }) - .await - .unwrap(); - assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[]) + let room_id = RoomId::from_proto( + db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "") .await - .unwrap(); - assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); + .unwrap() + .id, + ); + db.call( + room_id, + user1.user_id, + ConnectionId { owner_id, id: 0 }, + user2.user_id, + None, + ) + .await + .unwrap(); + db.join_room(room_id, user2.user_id, ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[]) - .await - .unwrap(); - assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[]) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1); - // Projects shared by admins aren't counted. 
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[]) - .await - .unwrap(); - assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); + db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[]) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); - db.leave_room(ConnectionId { owner_id, id: 1 }) - .await - .unwrap(); - assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); - } -); + // Projects shared by admins aren't counted. + db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[]) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2); + + db.leave_room(ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0); +} #[test] fn test_fuzzy_like_string() { @@ -878,7 +877,9 @@ async fn test_invite_codes() { assert!(db.has_contact(user5, user1).await.unwrap()); } -test_both_dbs!(test_channels_postgres, test_channels_sqlite, db, { +test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite); + +async fn test_channels(db: &Arc) { let a_id = db .create_user( "user1@example.com", @@ -1063,268 +1064,271 @@ test_both_dbs!(test_channels_postgres, test_channels_sqlite, db, { assert!(db.get_channel(rust_id, a_id).await.unwrap().is_none()); assert!(db.get_channel(cargo_id, a_id).await.unwrap().is_none()); assert!(db.get_channel(cargo_ra_id, a_id).await.unwrap().is_none()); -}); +} test_both_dbs!( + test_joining_channels, test_joining_channels_postgres, - test_joining_channels_sqlite, - db, - { - let owner_id = db.create_server("test").await.unwrap().0 as u32; - - let user_1 = db - .create_user( - "user1@example.com", - false, - NewUserParams { - github_login: "user1".into(), - github_user_id: 5, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - let user_2 = db - .create_user( - "user2@example.com", - false, - NewUserParams { - github_login: "user2".into(), - github_user_id: 6, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let channel_1 = db - .create_root_channel("channel_1", "1", user_1) - .await - .unwrap(); - let room_1 = db.room_id_for_channel(channel_1).await.unwrap(); - - // can join a room with membership to its channel - let joined_room = db - .join_room(room_1, user_1, ConnectionId { owner_id, id: 1 }) - .await - .unwrap(); - assert_eq!(joined_room.room.participants.len(), 1); - - drop(joined_room); - // cannot join a room without membership to its channel - assert!(db - .join_room(room_1, user_2, ConnectionId { owner_id, id: 1 }) - .await - .is_err()); - } + test_joining_channels_sqlite ); +async fn test_joining_channels(db: &Arc) { + let owner_id = db.create_server("test").await.unwrap().0 as u32; + + let user_1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + let user_2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "user2".into(), + github_user_id: 6, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let channel_1 = db + .create_root_channel("channel_1", "1", user_1) + .await + .unwrap(); + let room_1 = db.room_id_for_channel(channel_1).await.unwrap(); + + // can join a room with membership to its channel + let joined_room = db + .join_room(room_1, user_1, ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + assert_eq!(joined_room.room.participants.len(), 1); + 
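+ // Release the first `join_room` result before the next attempt; it appears to hold a guard on the room that would otherwise block the second call below (assumption based on the guard-style value returned above).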
+ drop(joined_room); + // cannot join a room without membership to its channel + assert!(db + .join_room(room_1, user_2, ConnectionId { owner_id, id: 1 }) + .await + .is_err()); +} + test_both_dbs!( + test_channel_invites, test_channel_invites_postgres, - test_channel_invites_sqlite, - db, - { - db.create_server("test").await.unwrap(); - - let user_1 = db - .create_user( - "user1@example.com", - false, - NewUserParams { - github_login: "user1".into(), - github_user_id: 5, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - let user_2 = db - .create_user( - "user2@example.com", - false, - NewUserParams { - github_login: "user2".into(), - github_user_id: 6, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let user_3 = db - .create_user( - "user3@example.com", - false, - NewUserParams { - github_login: "user3".into(), - github_user_id: 7, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let channel_1_1 = db - .create_root_channel("channel_1", "1", user_1) - .await - .unwrap(); - - let channel_1_2 = db - .create_root_channel("channel_2", "2", user_1) - .await - .unwrap(); - - db.invite_channel_member(channel_1_1, user_2, user_1, false) - .await - .unwrap(); - db.invite_channel_member(channel_1_2, user_2, user_1, false) - .await - .unwrap(); - db.invite_channel_member(channel_1_1, user_3, user_1, true) - .await - .unwrap(); - - let user_2_invites = db - .get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2] - .await - .unwrap() - .into_iter() - .map(|channel| channel.id) - .collect::>(); - - assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]); - - let user_3_invites = db - .get_channel_invites_for_user(user_3) // -> [channel_1_1] - .await - .unwrap() - .into_iter() - .map(|channel| channel.id) - .collect::>(); - - assert_eq!(user_3_invites, &[channel_1_1]); - - let members = db - .get_channel_member_details(channel_1_1, user_1) - .await - .unwrap(); - assert_eq!( - members, - &[ - proto::ChannelMember { - user_id: user_1.to_proto(), - kind: proto::channel_member::Kind::Member.into(), - admin: true, - }, - proto::ChannelMember { - user_id: user_2.to_proto(), - kind: proto::channel_member::Kind::Invitee.into(), - admin: false, - }, - proto::ChannelMember { - user_id: user_3.to_proto(), - kind: proto::channel_member::Kind::Invitee.into(), - admin: true, - }, - ] - ); - - db.respond_to_channel_invite(channel_1_1, user_2, true) - .await - .unwrap(); - - let channel_1_3 = db - .create_channel("channel_3", Some(channel_1_1), "1", user_1) - .await - .unwrap(); - - let members = db - .get_channel_member_details(channel_1_3, user_1) - .await - .unwrap(); - assert_eq!( - members, - &[ - proto::ChannelMember { - user_id: user_1.to_proto(), - kind: proto::channel_member::Kind::Member.into(), - admin: true, - }, - proto::ChannelMember { - user_id: user_2.to_proto(), - kind: proto::channel_member::Kind::AncestorMember.into(), - admin: false, - }, - ] - ); - } + test_channel_invites_sqlite ); +async fn test_channel_invites(db: &Arc) { + db.create_server("test").await.unwrap(); + + let user_1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + let user_2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "user2".into(), + github_user_id: 6, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let user_3 = db + .create_user( + "user3@example.com", + false, + 
NewUserParams { + github_login: "user3".into(), + github_user_id: 7, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let channel_1_1 = db + .create_root_channel("channel_1", "1", user_1) + .await + .unwrap(); + + let channel_1_2 = db + .create_root_channel("channel_2", "2", user_1) + .await + .unwrap(); + + db.invite_channel_member(channel_1_1, user_2, user_1, false) + .await + .unwrap(); + db.invite_channel_member(channel_1_2, user_2, user_1, false) + .await + .unwrap(); + db.invite_channel_member(channel_1_1, user_3, user_1, true) + .await + .unwrap(); + + let user_2_invites = db + .get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2] + .await + .unwrap() + .into_iter() + .map(|channel| channel.id) + .collect::>(); + + assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]); + + let user_3_invites = db + .get_channel_invites_for_user(user_3) // -> [channel_1_1] + .await + .unwrap() + .into_iter() + .map(|channel| channel.id) + .collect::>(); + + assert_eq!(user_3_invites, &[channel_1_1]); + + let members = db + .get_channel_member_details(channel_1_1, user_1) + .await + .unwrap(); + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: user_1.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + admin: true, + }, + proto::ChannelMember { + user_id: user_2.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + admin: false, + }, + proto::ChannelMember { + user_id: user_3.to_proto(), + kind: proto::channel_member::Kind::Invitee.into(), + admin: true, + }, + ] + ); + + db.respond_to_channel_invite(channel_1_1, user_2, true) + .await + .unwrap(); + + let channel_1_3 = db + .create_channel("channel_3", Some(channel_1_1), "1", user_1) + .await + .unwrap(); + + let members = db + .get_channel_member_details(channel_1_3, user_1) + .await + .unwrap(); + assert_eq!( + members, + &[ + proto::ChannelMember { + user_id: user_1.to_proto(), + kind: proto::channel_member::Kind::Member.into(), + admin: true, + }, + proto::ChannelMember { + user_id: user_2.to_proto(), + kind: proto::channel_member::Kind::AncestorMember.into(), + admin: false, + }, + ] + ); +} + test_both_dbs!( + test_channel_renames, test_channel_renames_postgres, - test_channel_renames_sqlite, - db, - { - db.create_server("test").await.unwrap(); - - let user_1 = db - .create_user( - "user1@example.com", - false, - NewUserParams { - github_login: "user1".into(), - github_user_id: 5, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let user_2 = db - .create_user( - "user2@example.com", - false, - NewUserParams { - github_login: "user2".into(), - github_user_id: 6, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let zed_id = db.create_root_channel("zed", "1", user_1).await.unwrap(); - - db.rename_channel(zed_id, user_1, "#zed-archive") - .await - .unwrap(); - - let zed_archive_id = zed_id; - - let (channel, _) = db - .get_channel(zed_archive_id, user_1) - .await - .unwrap() - .unwrap(); - assert_eq!(channel.name, "zed-archive"); - - let non_permissioned_rename = db - .rename_channel(zed_archive_id, user_2, "hacked-lol") - .await; - assert!(non_permissioned_rename.is_err()); - - let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await; - assert!(bad_name_rename.is_err()) - } + test_channel_renames_sqlite ); +async fn test_channel_renames(db: &Arc) { + db.create_server("test").await.unwrap(); + + let user_1 = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + 
invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let user_2 = db + .create_user( + "user2@example.com", + false, + NewUserParams { + github_login: "user2".into(), + github_user_id: 6, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", "1", user_1).await.unwrap(); + + db.rename_channel(zed_id, user_1, "#zed-archive") + .await + .unwrap(); + + let zed_archive_id = zed_id; + + let (channel, _) = db + .get_channel(zed_archive_id, user_1) + .await + .unwrap() + .unwrap(); + assert_eq!(channel.name, "zed-archive"); + + let non_permissioned_rename = db + .rename_channel(zed_archive_id, user_2, "hacked-lol") + .await; + assert!(non_permissioned_rename.is_err()); + + let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await; + assert!(bad_name_rename.is_err()) +} + #[gpui::test] async fn test_multiple_signup_overwrite() { let test_db = TestDb::postgres(build_background_executor()); diff --git a/crates/collab/src/db/test_db.rs b/crates/collab/src/db/test_db.rs index 064f85c700..40013d5b03 100644 --- a/crates/collab/src/db/test_db.rs +++ b/crates/collab/src/db/test_db.rs @@ -91,6 +91,23 @@ impl TestDb { } } +#[macro_export] +macro_rules! test_both_dbs { + ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => { + #[gpui::test] + async fn $postgres_test_name() { + let test_db = TestDb::postgres(Deterministic::new(0).build_background()); + $test_name(test_db.db()).await; + } + + #[gpui::test] + async fn $sqlite_test_name() { + let test_db = TestDb::sqlite(Deterministic::new(0).build_background()); + $test_name(test_db.db()).await; + } + }; +} + impl Drop for TestDb { fn drop(&mut self) { let db = self.db.take().unwrap(); From ff5035ea3761026deadc595e483fd1bd8057230c Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 21 Aug 2023 12:00:43 -0700 Subject: [PATCH 02/22] Start work on storing channel buffers --- Cargo.lock | 2 + Cargo.toml | 1 + crates/collab/Cargo.toml | 2 + .../20221109000000_test_schema.sql | 27 +- .../20230819154600_add_channel_buffers.sql | 25 ++ crates/collab/src/db.rs | 2 + crates/collab/src/db/ids.rs | 1 + crates/collab/src/db/queries.rs | 4 + crates/collab/src/db/queries/buffer_tests.rs | 41 +++ crates/collab/src/db/queries/buffers.rs | 271 ++++++++++++++++++ crates/collab/src/db/tables.rs | 3 + crates/collab/src/db/tables/buffer.rs | 32 +++ .../collab/src/db/tables/buffer_operation.rs | 37 +++ .../collab/src/db/tables/buffer_snapshot.rs | 30 ++ crates/collab/src/db/test_db.rs | 8 +- crates/rpc/Cargo.toml | 2 +- 16 files changed, 484 insertions(+), 4 deletions(-) create mode 100644 crates/collab/migrations/20230819154600_add_channel_buffers.sql create mode 100644 crates/collab/src/db/queries/buffer_tests.rs create mode 100644 crates/collab/src/db/queries/buffers.rs create mode 100644 crates/collab/src/db/tables/buffer.rs create mode 100644 crates/collab/src/db/tables/buffer_operation.rs create mode 100644 crates/collab/src/db/tables/buffer_snapshot.rs diff --git a/Cargo.lock b/Cargo.lock index 101a495b6e..b10d8730fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1444,6 +1444,7 @@ dependencies = [ "pretty_assertions", "project", "prometheus", + "prost 0.8.0", "rand 0.8.5", "reqwest", "rpc", @@ -1456,6 +1457,7 @@ dependencies = [ "settings", "sha-1 0.9.8", "sqlx", + "text", "theme", "time 0.3.24", "tokio", diff --git a/Cargo.toml b/Cargo.toml index cd15a72366..a35b3eea23 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -96,6 +96,7 @@ log = { version = "0.4.16", features = 
["kv_unstable_serde"] } ordered-float = { version = "2.1.1" } parking_lot = { version = "0.11.1" } postage = { version = "0.5", features = ["futures-traits"] } +prost = { version = "0.8" } rand = { version = "0.8.5" } refineable = { path = "./crates/refineable" } regex = { version = "1.5" } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index b8d0c26960..49d17bdc63 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -16,6 +16,7 @@ required-features = ["seed-support"] [dependencies] collections = { path = "../collections" } live_kit_server = { path = "../live_kit_server" } +text = { path = "../text" } rpc = { path = "../rpc" } util = { path = "../util" } @@ -35,6 +36,7 @@ log.workspace = true nanoid = "0.4" parking_lot.workspace = true prometheus = "0.13" +prost.workspace = true rand.workspace = true reqwest = { version = "0.11", features = ["json"], optional = true } scrypt = "0.7" diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 3dceaecef4..1e4663a6f6 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -189,7 +189,8 @@ CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id"); CREATE TABLE "channels" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL DEFAULT now + "created_at" TIMESTAMP NOT NULL DEFAULT now, + "main_buffer_id" INTEGER REFERENCES buffers (id) ); CREATE TABLE "channel_paths" ( @@ -208,3 +209,27 @@ CREATE TABLE "channel_members" ( ); CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id"); + +CREATE TABLE "buffers" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "epoch" INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE "buffer_operations" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "local_timestamp" INTEGER NOT NULL, + "version" BLOB NOT NULL, + "is_undo" BOOLEAN NOT NULL, + "value" BLOB NOT NULL, + PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) +); + +CREATE TABLE "buffer_snapshots" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "text" TEXT NOT NULL, + PRIMARY KEY(buffer_id, epoch) +); diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql new file mode 100644 index 0000000000..a4d936fd74 --- /dev/null +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -0,0 +1,25 @@ +CREATE TABLE "buffers" ( + "id" SERIAL PRIMARY KEY, + "epoch" INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE "buffer_operations" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "replica_id" INTEGER NOT NULL, + "local_timestamp" INTEGER NOT NULL, + "lamport_timestamp" INTEGER NOT NULL, + "version" BYTEA NOT NULL, + "is_undo" BOOLEAN NOT NULL, + "value" BYTEA NOT NULL, + PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) +); + +CREATE TABLE "buffer_snapshots" ( + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "epoch" INTEGER NOT NULL, + "text" TEXT NOT NULL, + PRIMARY KEY(buffer_id, epoch) +); + +ALTER TABLE "channels" ADD COLUMN "main_buffer_id" INTEGER REFERENCES 
buffers (id); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index d322b03589..19915777dc 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -52,6 +52,8 @@ pub struct Database { runtime: Option, } +// The `Database` type has so many methods that its impl blocks are split into +// separate files in the `queries` folder. impl Database { pub async fn new(options: ConnectOptions, executor: Executor) -> Result { Ok(Self { diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 514c973dad..54f9463cca 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -110,6 +110,7 @@ fn value_to_integer(v: Value) -> Result { } } +id_type!(BufferId); id_type!(AccessTokenId); id_type!(ChannelId); id_type!(ChannelMemberId); diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index f67bde30b8..c4a1d57eb4 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -1,6 +1,7 @@ use super::*; pub mod access_tokens; +pub mod buffers; pub mod channels; pub mod contacts; pub mod projects; @@ -8,3 +9,6 @@ pub mod rooms; pub mod servers; pub mod signups; pub mod users; + +#[cfg(test)] +pub mod buffer_tests; diff --git a/crates/collab/src/db/queries/buffer_tests.rs b/crates/collab/src/db/queries/buffer_tests.rs new file mode 100644 index 0000000000..f0e78e1fe4 --- /dev/null +++ b/crates/collab/src/db/queries/buffer_tests.rs @@ -0,0 +1,41 @@ +use super::*; +use crate::test_both_dbs; +use language::proto; +use text::Buffer; + +test_both_dbs!(test_buffers, test_buffers_postgres, test_buffers_sqlite); + +async fn test_buffers(db: &Arc) { + let buffer_id = db.create_buffer().await.unwrap(); + + let mut buffer = Buffer::new(0, 0, "".to_string()); + let mut operations = Vec::new(); + operations.push(buffer.edit([(0..0, "hello world")])); + operations.push(buffer.edit([(5..5, ", cruel")])); + operations.push(buffer.edit([(0..5, "goodbye")])); + operations.push(buffer.undo().unwrap().1); + assert_eq!(buffer.text(), "hello, cruel world"); + + let operations = operations + .into_iter() + .map(|op| proto::serialize_operation(&language::Operation::Buffer(op))) + .collect::>(); + + db.update_buffer(buffer_id, &operations).await.unwrap(); + + let buffer_data = db.get_buffer(buffer_id).await.unwrap(); + + let mut buffer_2 = Buffer::new(0, 0, buffer_data.base_text); + buffer_2 + .apply_ops(buffer_data.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + })) + .unwrap(); + + assert_eq!(buffer_2.text(), "hello, cruel world"); +} diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs new file mode 100644 index 0000000000..f5ff2e3367 --- /dev/null +++ b/crates/collab/src/db/queries/buffers.rs @@ -0,0 +1,271 @@ +use super::*; +use prost::Message; + +pub struct Buffer { + pub base_text: String, + pub operations: Vec, +} + +impl Database { + pub async fn create_buffer(&self) -> Result { + self.transaction(|tx| async move { + let buffer = buffer::ActiveModel::new().insert(&*tx).await?; + Ok(buffer.id) + }) + .await + } + + pub async fn update_buffer( + &self, + buffer_id: BufferId, + operations: &[proto::Operation], + ) -> Result<()> { + self.transaction(|tx| async move { + let buffer = buffer::Entity::find_by_id(buffer_id) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("no such buffer"))?; + buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { + match operation.variant.as_ref()? { + proto::operation::Variant::Edit(operation) => { + let value = + serialize_edit_operation(&operation.ranges, &operation.new_text); + let version = serialize_version(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer_id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(false), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::Undo(operation) => { + let value = serialize_undo_operation(&operation.counts); + let version = serialize_version(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer_id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(true), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::UpdateSelections(_) => None, + proto::operation::Variant::UpdateDiagnostics(_) => None, + proto::operation::Variant::UpdateCompletionTriggers(_) => None, + } + })) + .exec(&*tx) + .await?; + + Ok(()) + }) + .await + } + + pub async fn get_buffer(&self, id: BufferId) -> Result { + self.transaction(|tx| async move { + let buffer = buffer::Entity::find_by_id(id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such buffer"))?; + + let base_text = if buffer.epoch > 0 { + buffer_snapshot::Entity::find() + .filter( + buffer_snapshot::Column::BufferId + .eq(id) + .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such snapshot"))? 
+ .text + } else { + String::new() + }; + + let mut rows = buffer_operation::Entity::find() + .filter( + buffer_operation::Column::BufferId + .eq(id) + .and(buffer_operation::Column::Epoch.eq(buffer.epoch)), + ) + .stream(&*tx) + .await?; + let mut operations = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + let version = deserialize_version(&row.version)?; + let operation = if row.is_undo { + let counts = deserialize_undo_operation(&row.value)?; + proto::operation::Variant::Undo(proto::operation::Undo { + replica_id: row.replica_id as u32, + local_timestamp: row.local_timestamp as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + counts, + }) + } else { + let (ranges, new_text) = deserialize_edit_operation(&row.value)?; + proto::operation::Variant::Edit(proto::operation::Edit { + replica_id: row.replica_id as u32, + local_timestamp: row.local_timestamp as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + ranges, + new_text, + }) + }; + operations.push(proto::Operation { + variant: Some(operation), + }) + } + + Ok(Buffer { + base_text, + operations, + }) + }) + .await + } +} + +mod storage { + #![allow(non_snake_case)] + + use prost::Message; + + pub const VERSION: usize = 1; + + #[derive(Message)] + pub struct VectorClock { + #[prost(message, repeated, tag = "1")] + pub entries: Vec, + } + + #[derive(Message)] + pub struct VectorClockEntry { + #[prost(uint32, tag = "1")] + pub replica_id: u32, + #[prost(uint32, tag = "2")] + pub timestamp: u32, + } + + #[derive(Message)] + pub struct TextEdit { + #[prost(message, repeated, tag = "1")] + pub ranges: Vec, + #[prost(string, repeated, tag = "2")] + pub texts: Vec, + } + + #[derive(Message)] + pub struct Range { + #[prost(uint64, tag = "1")] + pub start: u64, + #[prost(uint64, tag = "2")] + pub end: u64, + } + + #[derive(Message)] + pub struct Undo { + #[prost(message, repeated, tag = "1")] + pub entries: Vec, + } + + #[derive(Message)] + pub struct UndoCount { + #[prost(uint32, tag = "1")] + pub replica_id: u32, + #[prost(uint32, tag = "2")] + pub local_timestamp: u32, + #[prost(uint32, tag = "3")] + pub count: u32, + } +} + +fn serialize_version(version: &Vec) -> Vec { + storage::VectorClock { + entries: version + .iter() + .map(|entry| storage::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect(), + } + .encode_to_vec() +} + +fn deserialize_version(bytes: &[u8]) -> Result> { + let clock = storage::VectorClock::decode(bytes).map_err(|error| anyhow!("{}", error))?; + Ok(clock + .entries + .into_iter() + .map(|entry| proto::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect()) +} + +fn serialize_edit_operation(ranges: &[proto::Range], texts: &[String]) -> Vec { + storage::TextEdit { + ranges: ranges + .iter() + .map(|range| storage::Range { + start: range.start, + end: range.end, + }) + .collect(), + texts: texts.to_vec(), + } + .encode_to_vec() +} + +fn deserialize_edit_operation(bytes: &[u8]) -> Result<(Vec, Vec)> { + let edit = storage::TextEdit::decode(bytes).map_err(|error| anyhow!("{}", error))?; + let ranges = edit + .ranges + .into_iter() + .map(|range| proto::Range { + start: range.start, + end: range.end, + }) + .collect(); + Ok((ranges, edit.texts)) +} + +fn serialize_undo_operation(counts: &Vec) -> Vec { + storage::Undo { + entries: counts + .iter() + .map(|entry| storage::UndoCount { + replica_id: entry.replica_id, + local_timestamp: entry.local_timestamp, + count: 
entry.count, + }) + .collect(), + } + .encode_to_vec() +} + +fn deserialize_undo_operation(bytes: &[u8]) -> Result> { + let undo = storage::Undo::decode(bytes).map_err(|error| anyhow!("{}", error))?; + Ok(undo + .entries + .iter() + .map(|entry| proto::UndoCount { + replica_id: entry.replica_id, + local_timestamp: entry.local_timestamp, + count: entry.count, + }) + .collect()) +} diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index c4c7e4f312..fbf4bff2a6 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -1,4 +1,7 @@ pub mod access_token; +pub mod buffer; +pub mod buffer_operation; +pub mod buffer_snapshot; pub mod channel; pub mod channel_member; pub mod channel_path; diff --git a/crates/collab/src/db/tables/buffer.rs b/crates/collab/src/db/tables/buffer.rs new file mode 100644 index 0000000000..84e62cc071 --- /dev/null +++ b/crates/collab/src/db/tables/buffer.rs @@ -0,0 +1,32 @@ +use crate::db::BufferId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffers")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: BufferId, + pub epoch: i32, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm(has_many = "super::buffer_operation::Entity")] + Operations, + #[sea_orm(has_many = "super::buffer_snapshot::Entity")] + Snapshots, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Operations.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Snapshots.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/buffer_operation.rs b/crates/collab/src/db/tables/buffer_operation.rs new file mode 100644 index 0000000000..59626c1e77 --- /dev/null +++ b/crates/collab/src/db/tables/buffer_operation.rs @@ -0,0 +1,37 @@ +use crate::db::BufferId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffer_operations")] +pub struct Model { + #[sea_orm(primary_key)] + pub buffer_id: BufferId, + #[sea_orm(primary_key)] + pub epoch: i32, + #[sea_orm(primary_key)] + pub lamport_timestamp: i32, + #[sea_orm(primary_key)] + pub replica_id: i32, + pub local_timestamp: i32, + pub version: Vec, + pub is_undo: bool, + pub value: Vec, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/buffer_snapshot.rs b/crates/collab/src/db/tables/buffer_snapshot.rs new file mode 100644 index 0000000000..ca8712a053 --- /dev/null +++ b/crates/collab/src/db/tables/buffer_snapshot.rs @@ -0,0 +1,30 @@ +use crate::db::BufferId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "buffer_snapshots")] +pub struct Model { + #[sea_orm(primary_key)] + pub buffer_id: BufferId, + #[sea_orm(primary_key)] + pub epoch: i32, + pub text: String, +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, +} + +impl Related for Entity { + fn to() -> RelationDef { + 
Relation::Buffer.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/test_db.rs b/crates/collab/src/db/test_db.rs index 40013d5b03..71e352eb86 100644 --- a/crates/collab/src/db/test_db.rs +++ b/crates/collab/src/db/test_db.rs @@ -96,13 +96,17 @@ macro_rules! test_both_dbs { ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => { #[gpui::test] async fn $postgres_test_name() { - let test_db = TestDb::postgres(Deterministic::new(0).build_background()); + let test_db = crate::db::test_db::TestDb::postgres( + gpui::executor::Deterministic::new(0).build_background(), + ); $test_name(test_db.db()).await; } #[gpui::test] async fn $sqlite_test_name() { - let test_db = TestDb::sqlite(Deterministic::new(0).build_background()); + let test_db = crate::db::test_db::TestDb::sqlite( + gpui::executor::Deterministic::new(0).build_background(), + ); $test_name(test_db.db()).await; } }; diff --git a/crates/rpc/Cargo.toml b/crates/rpc/Cargo.toml index 008fa9c316..3c307be4fb 100644 --- a/crates/rpc/Cargo.toml +++ b/crates/rpc/Cargo.toml @@ -23,7 +23,7 @@ async-tungstenite = "0.16" base64 = "0.13" futures.workspace = true parking_lot.workspace = true -prost = "0.8" +prost.workspace = true rand.workspace = true rsa = "0.4" serde.workspace = true From a7a4e2e3699659af9e8bbfd232faa3608aae97ea Mon Sep 17 00:00:00 2001 From: Mikayla Date: Mon, 21 Aug 2023 16:30:57 -0700 Subject: [PATCH 03/22] Add buffer integration test Rearrange channel crate structure Get channel buffer from database co-authored-by: Max --- Cargo.lock | 41 +++++++++ Cargo.toml | 1 + crates/call/Cargo.toml | 1 + crates/call/src/call.rs | 5 +- crates/channel/Cargo.toml | 51 +++++++++++ crates/channel/src/channel.rs | 7 ++ crates/channel/src/channel_buffer.rs | 80 ++++++++++++++++++ .../{client => channel}/src/channel_store.rs | 6 +- .../src/channel_store_tests.rs | 3 + crates/client/Cargo.toml | 1 + crates/client/src/client.rs | 5 -- crates/client/src/user.rs | 4 +- crates/collab/Cargo.toml | 1 + crates/collab/src/db.rs | 5 +- crates/collab/src/db/queries.rs | 3 - crates/collab/src/db/queries/channels.rs | 28 +++++++ crates/collab/src/db/tables/channel.rs | 3 +- crates/collab/src/db/{test_db.rs => tests.rs} | 10 ++- .../src/db/{queries => tests}/buffer_tests.rs | 0 crates/collab/src/db/{ => tests}/db_tests.rs | 31 ++++++- crates/collab/src/rpc.rs | 25 +++++- crates/collab/src/tests.rs | 7 +- .../collab/src/tests/channel_buffer_tests.rs | 84 +++++++++++++++++++ crates/collab/src/tests/channel_tests.rs | 3 +- crates/collab_ui/Cargo.toml | 1 + crates/collab_ui/src/collab_panel.rs | 6 +- .../src/collab_panel/channel_modal.rs | 3 +- crates/rpc/proto/zed.proto | 12 +++ crates/rpc/src/proto.rs | 5 +- crates/workspace/Cargo.toml | 1 + crates/workspace/src/workspace.rs | 3 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 5 +- 33 files changed, 403 insertions(+), 39 deletions(-) create mode 100644 crates/channel/Cargo.toml create mode 100644 crates/channel/src/channel.rs create mode 100644 crates/channel/src/channel_buffer.rs rename crates/{client => channel}/src/channel_store.rs (99%) rename crates/{client => channel}/src/channel_store_tests.rs (98%) rename crates/collab/src/db/{test_db.rs => tests.rs} (95%) rename crates/collab/src/db/{queries => tests}/buffer_tests.rs (100%) rename crates/collab/src/db/{ => tests}/db_tests.rs (98%) create mode 100644 crates/collab/src/tests/channel_buffer_tests.rs diff --git a/Cargo.lock b/Cargo.lock index b10d8730fb..a40aa7d89c 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -1063,6 +1063,7 @@ dependencies = [ "anyhow", "async-broadcast", "audio", + "channel", "client", "collections", "fs", @@ -1190,6 +1191,41 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "channel" +version = "0.1.0" +dependencies = [ + "anyhow", + "client", + "collections", + "db", + "futures 0.3.28", + "gpui", + "image", + "language", + "lazy_static", + "log", + "parking_lot 0.11.2", + "postage", + "rand 0.8.5", + "rpc", + "schemars", + "serde", + "serde_derive", + "settings", + "smol", + "staff_mode", + "sum_tree", + "tempfile", + "text", + "thiserror", + "time 0.3.24", + "tiny_http", + "url", + "util", + "uuid 1.4.1", +] + [[package]] name = "chrono" version = "0.4.26" @@ -1354,6 +1390,7 @@ dependencies = [ "staff_mode", "sum_tree", "tempfile", + "text", "thiserror", "time 0.3.24", "tiny_http", @@ -1418,6 +1455,7 @@ dependencies = [ "axum-extra", "base64 0.13.1", "call", + "channel", "clap 3.2.25", "client", "collections", @@ -1480,6 +1518,7 @@ dependencies = [ "anyhow", "auto_update", "call", + "channel", "client", "clock", "collections", @@ -9536,6 +9575,7 @@ dependencies = [ "async-recursion 1.0.4", "bincode", "call", + "channel", "client", "collections", "context_menu", @@ -9661,6 +9701,7 @@ dependencies = [ "backtrace", "breadcrumbs", "call", + "channel", "chrono", "cli", "client", diff --git a/Cargo.toml b/Cargo.toml index a35b3eea23..0fb8f0b6b7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "crates/auto_update", "crates/breadcrumbs", "crates/call", + "crates/channel", "crates/cli", "crates/client", "crates/clock", diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml index eb448d8d8d..b4e94fe56c 100644 --- a/crates/call/Cargo.toml +++ b/crates/call/Cargo.toml @@ -20,6 +20,7 @@ test-support = [ [dependencies] audio = { path = "../audio" } +channel = { path = "../channel" } client = { path = "../client" } collections = { path = "../collections" } gpui = { path = "../gpui" } diff --git a/crates/call/src/call.rs b/crates/call/src/call.rs index 5fef53fa81..5af094df05 100644 --- a/crates/call/src/call.rs +++ b/crates/call/src/call.rs @@ -7,9 +7,8 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use audio::Audio; use call_settings::CallSettings; -use client::{ - proto, ChannelId, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore, -}; +use channel::ChannelId; +use client::{proto, ClickhouseEvent, Client, TelemetrySettings, TypedEnvelope, User, UserStore}; use collections::HashSet; use futures::{future::Shared, FutureExt}; use postage::watch; diff --git a/crates/channel/Cargo.toml b/crates/channel/Cargo.toml new file mode 100644 index 0000000000..0978462a1a --- /dev/null +++ b/crates/channel/Cargo.toml @@ -0,0 +1,51 @@ +[package] +name = "channel" +version = "0.1.0" +edition = "2021" +publish = false + +[lib] +path = "src/channel.rs" +doctest = false + +[features] +test-support = ["collections/test-support", "gpui/test-support", "rpc/test-support"] + +[dependencies] +client = { path = "../client" } +collections = { path = "../collections" } +db = { path = "../db" } +gpui = { path = "../gpui" } +util = { path = "../util" } +rpc = { path = "../rpc" } +text = { path = "../text" } +language = { path = "../language" } +settings = { path = "../settings" } +staff_mode = { path = "../staff_mode" } +sum_tree = { path = "../sum_tree" } + +anyhow.workspace = true +futures.workspace = true 
+image = "0.23" +lazy_static.workspace = true +log.workspace = true +parking_lot.workspace = true +postage.workspace = true +rand.workspace = true +schemars.workspace = true +smol.workspace = true +thiserror.workspace = true +time.workspace = true +tiny_http = "0.8" +uuid = { version = "1.1.2", features = ["v4"] } +url = "2.2" +serde.workspace = true +serde_derive.workspace = true +tempfile = "3" + +[dev-dependencies] +collections = { path = "../collections", features = ["test-support"] } +gpui = { path = "../gpui", features = ["test-support"] } +rpc = { path = "../rpc", features = ["test-support"] } +settings = { path = "../settings", features = ["test-support"] } +util = { path = "../util", features = ["test-support"] } diff --git a/crates/channel/src/channel.rs b/crates/channel/src/channel.rs new file mode 100644 index 0000000000..67c560a1fc --- /dev/null +++ b/crates/channel/src/channel.rs @@ -0,0 +1,7 @@ +mod channel_store; + +pub mod channel_buffer; +pub use channel_store::*; + +#[cfg(test)] +mod channel_store_tests; diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs new file mode 100644 index 0000000000..10f59bce46 --- /dev/null +++ b/crates/channel/src/channel_buffer.rs @@ -0,0 +1,80 @@ +use crate::ChannelId; +use anyhow::Result; +use client::Client; +use gpui::{Entity, ModelContext, ModelHandle, Task}; +use rpc::proto::GetChannelBuffer; +use std::sync::Arc; + +// Open the channel document +// ChannelDocumentView { ChannelDocument, Editor } -> On clone, clones internal ChannelDocument handle, instantiates new editor +// Produces a view which is: (ChannelDocument, Editor), ChannelDocument manages subscriptions +// ChannelDocuments -> Buffers -> Editor with that buffer + +// ChannelDocuments { +// ChannleBuffers: HashMap> +// } + +pub struct ChannelBuffer { + channel_id: ChannelId, + buffer: Option>, + client: Arc, +} + +impl Entity for ChannelBuffer { + type Event = (); +} + +impl ChannelBuffer { + pub fn for_channel( + channel_id: ChannelId, + client: Arc, + cx: &mut ModelContext, + ) -> Self { + Self { + channel_id, + client, + buffer: None, + } + } + + fn on_buffer_update( + &mut self, + buffer: ModelHandle, + event: &language::Event, + cx: &mut ModelContext, + ) { + // + } + + pub fn buffer( + &mut self, + cx: &mut ModelContext, + ) -> Task>> { + if let Some(buffer) = &self.buffer { + Task::ready(Ok(buffer.clone())) + } else { + let channel_id = self.channel_id; + let client = self.client.clone(); + cx.spawn(|this, mut cx| async move { + let response = client.request(GetChannelBuffer { channel_id }).await?; + + let base_text = response.base_text; + let operations = response + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::, _>>()?; + + this.update(&mut cx, |this, cx| { + let buffer = cx.add_model(|cx| language::Buffer::new(0, base_text, cx)); + buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; + + cx.subscribe(&buffer, Self::on_buffer_update).detach(); + + this.buffer = Some(buffer.clone()); + anyhow::Ok(buffer) + }) + }) + } + } +} diff --git a/crates/client/src/channel_store.rs b/crates/channel/src/channel_store.rs similarity index 99% rename from crates/client/src/channel_store.rs rename to crates/channel/src/channel_store.rs index 6352ac791e..b9b2c98acd 100644 --- a/crates/client/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1,7 +1,8 @@ -use crate::Status; -use crate::{Client, Subscription, User, UserStore}; use anyhow::anyhow; use anyhow::Result; +use 
client::Status; +use client::UserId; +use client::{Client, Subscription, User, UserStore}; use collections::HashMap; use collections::HashSet; use futures::channel::mpsc; @@ -13,7 +14,6 @@ use std::sync::Arc; use util::ResultExt; pub type ChannelId = u64; -pub type UserId = u64; pub struct ChannelStore { channels_by_id: HashMap>, diff --git a/crates/client/src/channel_store_tests.rs b/crates/channel/src/channel_store_tests.rs similarity index 98% rename from crates/client/src/channel_store_tests.rs rename to crates/channel/src/channel_store_tests.rs index 51e819349e..18894b1f47 100644 --- a/crates/client/src/channel_store_tests.rs +++ b/crates/channel/src/channel_store_tests.rs @@ -1,4 +1,7 @@ use super::*; +use client::{Client, UserStore}; +use gpui::{AppContext, ModelHandle}; +use rpc::proto; use util::http::FakeHttpClient; #[gpui::test] diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml index 3ecc515986..64d8f02c8a 100644 --- a/crates/client/Cargo.toml +++ b/crates/client/Cargo.toml @@ -17,6 +17,7 @@ db = { path = "../db" } gpui = { path = "../gpui" } util = { path = "../util" } rpc = { path = "../rpc" } +text = { path = "../text" } settings = { path = "../settings" } staff_mode = { path = "../staff_mode" } sum_tree = { path = "../sum_tree" } diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 8ef3e32ea8..a32c415f7e 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1,10 +1,6 @@ #[cfg(any(test, feature = "test-support"))] pub mod test; -#[cfg(test)] -mod channel_store_tests; - -pub mod channel_store; pub mod telemetry; pub mod user; @@ -48,7 +44,6 @@ use util::channel::ReleaseChannel; use util::http::HttpClient; use util::{ResultExt, TryFutureExt}; -pub use channel_store::*; pub use rpc::*; pub use telemetry::ClickhouseEvent; pub use user::*; diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index be11d1fb44..1dc384da17 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -10,9 +10,11 @@ use std::sync::{Arc, Weak}; use util::http::HttpClient; use util::TryFutureExt as _; +pub type UserId = u64; + #[derive(Default, Debug)] pub struct User { - pub id: u64, + pub id: UserId, pub github_login: String, pub avatar: Option>, } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index 49d17bdc63..fc78a03f67 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -64,6 +64,7 @@ collections = { path = "../collections", features = ["test-support"] } gpui = { path = "../gpui", features = ["test-support"] } call = { path = "../call", features = ["test-support"] } client = { path = "../client", features = ["test-support"] } +channel = { path = "../channel" } editor = { path = "../editor", features = ["test-support"] } language = { path = "../language", features = ["test-support"] } fs = { path = "../fs", features = ["test-support"] } diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 19915777dc..9c759f79a8 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -1,7 +1,8 @@ #[cfg(test)] -mod db_tests; +pub mod tests; + #[cfg(test)] -pub mod test_db; +pub use tests::TestDb; mod ids; mod queries; diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index c4a1d57eb4..09a8f073b4 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -9,6 +9,3 @@ pub mod rooms; pub mod servers; pub mod signups; pub mod users; - -#[cfg(test)] -pub mod buffer_tests; diff --git 
a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs
index e3d3643a61..85a9304a2e 100644
--- a/crates/collab/src/db/queries/channels.rs
+++ b/crates/collab/src/db/queries/channels.rs
@@ -689,6 +689,34 @@ impl Database {
         })
         .await
     }
+
+    pub async fn get_or_create_buffer_for_channel(
+        &self,
+        channel_id: ChannelId,
+    ) -> Result<BufferId> {
+        self.transaction(|tx| async move {
+            let tx = tx;
+            let channel = channel::Entity::find_by_id(channel_id)
+                .one(&*tx)
+                .await?
+                .ok_or_else(|| anyhow!("invalid channel"))?;
+
+            if let Some(id) = channel.main_buffer_id {
+                return Ok(id);
+            } else {
+                let buffer = buffer::ActiveModel::new().insert(&*tx).await?;
+                channel::ActiveModel {
+                    id: ActiveValue::Unchanged(channel_id),
+                    main_buffer_id: ActiveValue::Set(Some(buffer.id)),
+                    ..Default::default()
+                }
+                .update(&*tx)
+                .await?;
+                Ok(buffer.id)
+            }
+        })
+        .await
+    }
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
diff --git a/crates/collab/src/db/tables/channel.rs b/crates/collab/src/db/tables/channel.rs
index f00b4ced62..444d5fa6d9 100644
--- a/crates/collab/src/db/tables/channel.rs
+++ b/crates/collab/src/db/tables/channel.rs
@@ -1,4 +1,4 @@
-use crate::db::ChannelId;
+use crate::db::{BufferId, ChannelId};
 use sea_orm::entity::prelude::*;
 
 #[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
@@ -7,6 +7,7 @@ pub struct Model {
     #[sea_orm(primary_key)]
     pub id: ChannelId,
     pub name: String,
+    pub main_buffer_id: Option<BufferId>,
 }
 
 impl ActiveModelBehavior for ActiveModel {}
diff --git a/crates/collab/src/db/test_db.rs b/crates/collab/src/db/tests.rs
similarity index 95%
rename from crates/collab/src/db/test_db.rs
rename to crates/collab/src/db/tests.rs
index 71e352eb86..36a0888a62 100644
--- a/crates/collab/src/db/test_db.rs
+++ b/crates/collab/src/db/tests.rs
@@ -1,3 +1,6 @@
+mod buffer_tests;
+mod db_tests;
+
 use super::*;
 use gpui::executor::Background;
 use parking_lot::Mutex;
@@ -96,7 +99,7 @@ macro_rules! test_both_dbs {
     ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
         #[gpui::test]
         async fn $postgres_test_name() {
-            let test_db = crate::db::test_db::TestDb::postgres(
+            let test_db = crate::db::TestDb::postgres(
                 gpui::executor::Deterministic::new(0).build_background(),
             );
             $test_name(test_db.db()).await;
@@ -104,9 +107,8 @@ macro_rules! 
test_both_dbs { #[gpui::test] async fn $sqlite_test_name() { - let test_db = crate::db::test_db::TestDb::sqlite( - gpui::executor::Deterministic::new(0).build_background(), - ); + let test_db = + crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background()); $test_name(test_db.db()).await; } }; diff --git a/crates/collab/src/db/queries/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs similarity index 100% rename from crates/collab/src/db/queries/buffer_tests.rs rename to crates/collab/src/db/tests/buffer_tests.rs diff --git a/crates/collab/src/db/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs similarity index 98% rename from crates/collab/src/db/db_tests.rs rename to crates/collab/src/db/tests/db_tests.rs index 8c5dab77bd..0fffabc7c4 100644 --- a/crates/collab/src/db/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -3,7 +3,7 @@ use crate::test_both_dbs; use gpui::executor::{Background, Deterministic}; use pretty_assertions::{assert_eq, assert_ne}; use std::sync::Arc; -use test_db::TestDb; +use tests::TestDb; test_both_dbs!( test_get_users, @@ -1329,6 +1329,35 @@ async fn test_channel_renames(db: &Arc) { assert!(bad_name_rename.is_err()) } +test_both_dbs!( + test_get_or_create_channel_buffer, + test_get_or_create_channel_buffer_postgres, + test_get_or_create_channel_buffer_sqlite +); + +async fn test_get_or_create_channel_buffer(db: &Arc) { + let a_id = db + .create_user( + "user1@example.com", + false, + NewUserParams { + github_login: "user1".into(), + github_user_id: 5, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap(); + + let first_buffer_id = db.get_or_create_buffer_for_channel(zed_id).await.unwrap(); + let second_buffer_id = db.get_or_create_buffer_for_channel(zed_id).await.unwrap(); + + assert_eq!(first_buffer_id, second_buffer_id); +} + #[gpui::test] async fn test_multiple_signup_overwrite() { let test_db = TestDb::postgres(build_background_executor()); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 521aa3e7b4..22eb23ce8e 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -35,8 +35,8 @@ use lazy_static::lazy_static; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ - self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo, - RequestMessage, + self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, GetChannelBufferResponse, + LiveKitConnectionInfo, RequestMessage, }, Connection, ConnectionId, Peer, Receipt, TypedEnvelope, }; @@ -248,6 +248,7 @@ impl Server { .add_request_handler(remove_channel_member) .add_request_handler(set_channel_member_admin) .add_request_handler(rename_channel) + .add_request_handler(get_channel_buffer) .add_request_handler(get_channel_members) .add_request_handler(respond_to_channel_invite) .add_request_handler(join_channel) @@ -2478,6 +2479,26 @@ async fn join_channel( Ok(()) } +async fn get_channel_buffer( + request: proto::GetChannelBuffer, + response: Response, + session: Session, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + + let buffer_id = db.get_or_create_buffer_for_channel(channel_id).await?; + + let buffer = db.get_buffer(buffer_id).await?; + + response.send(GetChannelBufferResponse { + base_text: buffer.base_text, + operations: buffer.operations, + })?; + + Ok(()) +} + async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> { let 
project_id = ProjectId::from_proto(request.project_id); let project_connection_ids = session diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs index c9f358ca5b..831bccbb72 100644 --- a/crates/collab/src/tests.rs +++ b/crates/collab/src/tests.rs @@ -1,14 +1,14 @@ use crate::{ - db::{test_db::TestDb, NewUserParams, UserId}, + db::{tests::TestDb, NewUserParams, UserId}, executor::Executor, rpc::{Server, CLEANUP_TIMEOUT}, AppState, }; use anyhow::anyhow; use call::{ActiveCall, Room}; +use channel::ChannelStore; use client::{ - self, proto::PeerId, ChannelStore, Client, Connection, Credentials, EstablishConnectionError, - UserStore, + self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore, }; use collections::{HashMap, HashSet}; use fs::FakeFs; @@ -31,6 +31,7 @@ use std::{ use util::http::FakeHttpClient; use workspace::Workspace; +mod channel_buffer_tests; mod channel_tests; mod integration_tests; mod randomized_integration_tests; diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs new file mode 100644 index 0000000000..e7f662523e --- /dev/null +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -0,0 +1,84 @@ +use crate::tests::TestServer; + +use channel::channel_buffer::ChannelBuffer; +use gpui::{executor::Deterministic, ModelHandle, TestAppContext}; +use std::{ops::Range, sync::Arc}; + +#[gpui::test] +async fn test_channel_buffers( + deterministic: Arc, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + deterministic.forbid_parking(); + let mut server = TestServer::start(&deterministic).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let zed_id = server + .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)]) + .await; + + let a_document = + cx_a.add_model(|cx| ChannelBuffer::for_channel(zed_id, client_a.client().to_owned(), cx)); + let channel_buffer_a = a_document + .update(cx_a, |doc, cx| doc.buffer(cx)) + .await + .unwrap(); + + edit_channel_buffer(&channel_buffer_a, cx_a, [(0..0, "hello world")]); + edit_channel_buffer(&channel_buffer_a, cx_a, [(5..5, ", cruel")]); + edit_channel_buffer(&channel_buffer_a, cx_a, [(0..5, "goodbye")]); + undo_channel_buffer(&channel_buffer_a, cx_a); + + assert_eq!( + channel_buffer_text(&channel_buffer_a, cx_a), + "hello, cruel world" + ); + + let b_document = + cx_b.add_model(|cx| ChannelBuffer::for_channel(zed_id, client_b.client().to_owned(), cx)); + let channel_buffer_b = b_document + .update(cx_b, |doc, cx| doc.buffer(cx)) + .await + .unwrap(); + + assert_eq!( + channel_buffer_text(&channel_buffer_b, cx_b), + "hello, cruel world" + ); + + edit_channel_buffer(&channel_buffer_b, cx_b, [(7..12, "beautiful")]); + + deterministic.run_until_parked(); + + assert_eq!( + channel_buffer_text(&channel_buffer_a, cx_a), + "hello, beautiful world" + ); + assert_eq!( + channel_buffer_text(&channel_buffer_b, cx_b), + "hello, beautiful world" + ); +} + +fn edit_channel_buffer( + channel_buffer: &ModelHandle, + cx: &mut TestAppContext, + edits: I, +) where + I: IntoIterator, &'static str)>, +{ + channel_buffer.update(cx, |buffer, cx| buffer.edit(edits, None, cx)); +} + +fn undo_channel_buffer(channel_buffer: &ModelHandle, cx: &mut TestAppContext) { + channel_buffer.update(cx, |buffer, cx| buffer.undo(cx)); +} + +fn channel_buffer_text( + channel_buffer: &ModelHandle, + cx: &mut TestAppContext, +) -> String { + channel_buffer.read_with(cx, 
|buffer, _| buffer.text()) +} diff --git a/crates/collab/src/tests/channel_tests.rs b/crates/collab/src/tests/channel_tests.rs index 06cf3607c0..41d2286772 100644 --- a/crates/collab/src/tests/channel_tests.rs +++ b/crates/collab/src/tests/channel_tests.rs @@ -3,7 +3,8 @@ use crate::{ tests::{room_participants, RoomParticipants, TestServer}, }; use call::ActiveCall; -use client::{ChannelId, ChannelMembership, ChannelStore, User}; +use channel::{ChannelId, ChannelMembership, ChannelStore}; +use client::User; use gpui::{executor::Deterministic, ModelHandle, TestAppContext}; use rpc::{proto, RECEIVE_TIMEOUT}; use std::sync::Arc; diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml index 471608c43e..e0177f6609 100644 --- a/crates/collab_ui/Cargo.toml +++ b/crates/collab_ui/Cargo.toml @@ -26,6 +26,7 @@ auto_update = { path = "../auto_update" } db = { path = "../db" } call = { path = "../call" } client = { path = "../client" } +channel = { path = "../channel" } clock = { path = "../clock" } collections = { path = "../collections" } context_menu = { path = "../context_menu" } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 5623ada42d..ab692dd166 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -4,10 +4,8 @@ mod panel_settings; use anyhow::Result; use call::ActiveCall; -use client::{ - proto::PeerId, Channel, ChannelEvent, ChannelId, ChannelStore, Client, Contact, User, UserStore, -}; - +use channel::{Channel, ChannelEvent, ChannelId, ChannelStore}; +use client::{proto::PeerId, Client, Contact, User, UserStore}; use context_menu::{ContextMenu, ContextMenuItem}; use db::kvp::KEY_VALUE_STORE; use editor::{Cancel, Editor}; diff --git a/crates/collab_ui/src/collab_panel/channel_modal.rs b/crates/collab_ui/src/collab_panel/channel_modal.rs index 75ab40be85..0adf2806d7 100644 --- a/crates/collab_ui/src/collab_panel/channel_modal.rs +++ b/crates/collab_ui/src/collab_panel/channel_modal.rs @@ -1,4 +1,5 @@ -use client::{proto, ChannelId, ChannelMembership, ChannelStore, User, UserId, UserStore}; +use channel::{ChannelId, ChannelMembership, ChannelStore}; +use client::{proto, User, UserId, UserStore}; use context_menu::{ContextMenu, ContextMenuItem}; use fuzzy::{match_strings, StringMatchCandidate}; use gpui::{ diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index caa5efd2cb..baeaae1876 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -142,6 +142,9 @@ message Envelope { GetChannelMembersResponse get_channel_members_response = 128; SetChannelMemberAdmin set_channel_member_admin = 129; RenameChannel rename_channel = 130; + + GetChannelBuffer get_channel_buffer = 131; + GetChannelBufferResponse get_channel_buffer_response = 132; } } @@ -948,6 +951,15 @@ message RenameChannel { string name = 2; } +message GetChannelBuffer { + uint64 channel_id = 1; +} + +message GetChannelBufferResponse { + string base_text = 1; + repeated Operation operations = 2; +} + message RespondToChannelInvite { uint64 channel_id = 1; bool accept = 2; diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 92732b00b5..21a491b934 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -248,7 +248,9 @@ messages!( (GetPrivateUserInfo, Foreground), (GetPrivateUserInfoResponse, Foreground), (GetChannelMembers, Foreground), - (GetChannelMembersResponse, Foreground) + (GetChannelMembersResponse, Foreground), + (GetChannelBuffer, Foreground), + 
(GetChannelBufferResponse, Foreground) ); request_messages!( @@ -315,6 +317,7 @@ request_messages!( (UpdateParticipantLocation, Ack), (UpdateProject, Ack), (UpdateWorktree, Ack), + (GetChannelBuffer, GetChannelBufferResponse) ); entity_messages!( diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 8606be4944..e2dae07b8c 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -22,6 +22,7 @@ test-support = [ db = { path = "../db" } call = { path = "../call" } client = { path = "../client" } +channel = { path = "../channel" } collections = { path = "../collections" } context_menu = { path = "../context_menu" } drag_and_drop = { path = "../drag_and_drop" } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 79b701e015..a8354472aa 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -12,9 +12,10 @@ mod workspace_settings; use anyhow::{anyhow, Context, Result}; use call::ActiveCall; +use channel::ChannelStore; use client::{ proto::{self, PeerId}, - ChannelStore, Client, TypedEnvelope, UserStore, + Client, TypedEnvelope, UserStore, }; use collections::{hash_map, HashMap, HashSet}; use drag_and_drop::DragAndDrop; diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index faa3ae6953..92900f84cb 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -21,6 +21,7 @@ activity_indicator = { path = "../activity_indicator" } auto_update = { path = "../auto_update" } breadcrumbs = { path = "../breadcrumbs" } call = { path = "../call" } +channel = { path = "../channel" } cli = { path = "../cli" } collab_ui = { path = "../collab_ui" } collections = { path = "../collections" } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index caeaeceded..b905c1d37b 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -3,13 +3,12 @@ use anyhow::{anyhow, Context, Result}; use backtrace::Backtrace; +use channel::ChannelStore; use cli::{ ipc::{self, IpcSender}, CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME, }; -use client::{ - self, ChannelStore, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN, -}; +use client::{self, TelemetrySettings, UserStore, ZED_APP_VERSION, ZED_SECRET_CLIENT_TOKEN}; use db::kvp::KEY_VALUE_STORE; use editor::{scroll::autoscroll::Autoscroll, Editor}; use futures::{ From 364ed1f840fc62e3dbb2da464d75cbfec2f100c0 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Mon, 21 Aug 2023 17:53:37 -0700 Subject: [PATCH 04/22] WIP: pass synchronize channel buffers integration test --- crates/channel/src/channel_buffer.rs | 113 +++++++++++------- .../20221109000000_test_schema.sql | 21 +++- .../20230819154600_add_channel_buffers.sql | 18 ++- crates/collab/src/db/ids.rs | 1 + crates/collab/src/db/queries/buffers.rs | 105 +++++++++++++--- crates/collab/src/db/queries/channels.rs | 28 ----- crates/collab/src/db/tables.rs | 1 + crates/collab/src/db/tables/buffer.rs | 23 +++- crates/collab/src/db/tables/channel.rs | 9 +- .../db/tables/channel_buffer_collaborator.rs | 42 +++++++ crates/collab/src/db/tests/buffer_tests.rs | 57 ++++++++- crates/collab/src/rpc.rs | 52 ++++++-- .../collab/src/tests/channel_buffer_tests.rs | 40 +++---- crates/rpc/proto/zed.proto | 25 ++-- crates/rpc/src/proto.rs | 11 +- 15 files changed, 411 insertions(+), 135 deletions(-) create mode 100644 crates/collab/src/db/tables/channel_buffer_collaborator.rs diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 
10f59bce46..372bd319a1 100644
--- a/crates/channel/src/channel_buffer.rs
+++ b/crates/channel/src/channel_buffer.rs
@@ -1,9 +1,10 @@
 use crate::ChannelId;
 use anyhow::Result;
 use client::Client;
-use gpui::{Entity, ModelContext, ModelHandle, Task};
-use rpc::proto::GetChannelBuffer;
+use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task};
+use rpc::{proto, TypedEnvelope};
 use std::sync::Arc;
+use util::ResultExt;
 
 // Open the channel document
 // ChannelDocumentView { ChannelDocument, Editor } -> On clone, clones internal ChannelDocument handle, instantiates new editor
@@ -14,9 +15,12 @@ use std::sync::Arc;
 //     ChannelBuffers: HashMap<ChannelId, ModelHandle<ChannelBuffer>>
 // }
 
+type BufferId = u64;
+
 pub struct ChannelBuffer {
     channel_id: ChannelId,
-    buffer: Option<ModelHandle<language::Buffer>>,
+    buffer_id: BufferId,
+    buffer: ModelHandle<language::Buffer>,
     client: Arc<Client>,
 }
 
@@ -28,53 +32,76 @@ impl ChannelBuffer {
     pub fn for_channel(
         channel_id: ChannelId,
         client: Arc<Client>,
-        cx: &mut ModelContext<Self>,
-    ) -> Self {
-        Self {
-            channel_id,
-            client,
-            buffer: None,
-        }
+        cx: &mut AppContext,
+    ) -> Task<Result<ModelHandle<Self>>> {
+        cx.spawn(|mut cx| async move {
+            let response = client
+                .request(proto::OpenChannelBuffer { channel_id })
+                .await?;
+
+            let base_text = response.base_text;
+            let operations = response
+                .operations
+                .into_iter()
+                .map(language::proto::deserialize_operation)
+                .collect::<Result<Vec<_>, _>>()?;
+            let buffer_id = response.buffer_id;
+
+            let buffer = cx.add_model(|cx| language::Buffer::new(0, base_text, cx));
+            buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+
+            anyhow::Ok(cx.add_model(|cx| {
+                cx.subscribe(&buffer, Self::on_buffer_update).detach();
+                client.add_model_message_handler(Self::handle_update_channel_buffer);
+                Self {
+                    buffer_id,
+                    buffer,
+                    client,
+                    channel_id,
+                }
+            }))
+        })
+    }
+
+    async fn handle_update_channel_buffer(
+        this: ModelHandle<Self>,
+        update_channel_buffer: TypedEnvelope<proto::UpdateChannelBuffer>,
+        _: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<()> {
+        let ops = update_channel_buffer
+            .payload
+            .operations
+            .into_iter()
+            .map(language::proto::deserialize_operation)
+            .collect::<Result<Vec<_>, _>>()?;
+
+        this.update(&mut cx, |this, cx| {
+            this.buffer
+                .update(cx, |buffer, cx| buffer.apply_ops(ops, cx))
+        })?;
+
+        Ok(())
     }
 
     fn on_buffer_update(
         &mut self,
-        buffer: ModelHandle<language::Buffer>,
+        _: ModelHandle<language::Buffer>,
         event: &language::Event,
-        cx: &mut ModelContext<Self>,
+        _: &mut ModelContext<Self>,
     ) {
-        //
-    }
-
-    pub fn buffer(
-        &mut self,
-        cx: &mut ModelContext<Self>,
-    ) -> Task<Result<ModelHandle<language::Buffer>>> {
-        if let Some(buffer) = &self.buffer {
-            Task::ready(Ok(buffer.clone()))
-        } else {
-            let channel_id = self.channel_id;
-            let client = self.client.clone();
-            cx.spawn(|this, mut cx| async move {
-                let response = client.request(GetChannelBuffer { channel_id }).await?;
-
-                let base_text = response.base_text;
-                let operations = response
-                    .operations
-                    .into_iter()
-                    .map(language::proto::deserialize_operation)
-                    .collect::<Result<Vec<_>, _>>()?;
-
-                this.update(&mut cx, |this, cx| {
-                    let buffer = cx.add_model(|cx| language::Buffer::new(0, base_text, cx));
-                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
-
-                    cx.subscribe(&buffer, Self::on_buffer_update).detach();
-
-                    this.buffer = Some(buffer.clone());
-                    anyhow::Ok(buffer)
+        if let language::Event::Operation(operation) = event {
+            let operation = language::proto::serialize_operation(operation);
+            self.client
+                .send(proto::UpdateChannelBuffer {
+                    buffer_id: self.buffer_id,
+                    operations: vec![operation],
                 })
-            })
+                .log_err();
         }
     }
+
+    pub fn buffer(&self) -> ModelHandle<language::Buffer> {
+        self.buffer.clone()
+    }
 }
diff --git 
a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 1e4663a6f6..12ff2caec5 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -189,8 +189,7 @@ CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id"); CREATE TABLE "channels" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" VARCHAR NOT NULL, - "created_at" TIMESTAMP NOT NULL DEFAULT now, - "main_buffer_id" INTEGER REFERENCES buffers (id) + "created_at" TIMESTAMP NOT NULL DEFAULT now ); CREATE TABLE "channel_paths" ( @@ -212,9 +211,12 @@ CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channe CREATE TABLE "buffers" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL DEFAULT 0 ); +CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id"); + CREATE TABLE "buffer_operations" ( "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL, @@ -233,3 +235,18 @@ CREATE TABLE "buffer_snapshots" ( "text" TEXT NOT NULL, PRIMARY KEY(buffer_id, epoch) ); + +CREATE TABLE "channel_buffer_collaborators" ( + "id" INTEGER PRIMARY KEY AUTOINCREMENT, + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, + "replica_id" INTEGER NOT NULL +); + +CREATE INDEX "index_channel_buffer_collaborators_on_buffer_id" ON "channel_buffer_collaborators" ("buffer_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_and_replica_id" ON "channel_buffer_collaborators" ("buffer_id", "replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("buffer_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql index a4d936fd74..8ccd7acadf 100644 --- a/crates/collab/migrations/20230819154600_add_channel_buffers.sql +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -1,8 +1,11 @@ CREATE TABLE "buffers" ( "id" SERIAL PRIMARY KEY, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL DEFAULT 0 ); +CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id"); + CREATE TABLE "buffer_operations" ( "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL, @@ -22,4 +25,17 @@ CREATE TABLE "buffer_snapshots" ( PRIMARY KEY(buffer_id, epoch) ); -ALTER TABLE "channels" ADD COLUMN "main_buffer_id" INTEGER REFERENCES buffers (id); +CREATE TABLE "channel_buffer_collaborators" ( + "id" SERIAL PRIMARY KEY, + "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "connection_id" INTEGER NOT NULL, + "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "user_id" INTEGER NOT NULL REFERENCES 
users (id) ON DELETE CASCADE, + "replica_id" INTEGER NOT NULL +); + +CREATE INDEX "index_channel_buffer_collaborators_on_buffer_id" ON "channel_buffer_collaborators" ("buffer_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_and_replica_id" ON "channel_buffer_collaborators" ("buffer_id", "replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("buffer_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/src/db/ids.rs b/crates/collab/src/db/ids.rs index 54f9463cca..8501083f83 100644 --- a/crates/collab/src/db/ids.rs +++ b/crates/collab/src/db/ids.rs @@ -124,3 +124,4 @@ id_type!(ReplicaId); id_type!(ServerId); id_type!(SignupId); id_type!(UserId); +id_type!(ChannelBufferCollaboratorId); diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index f5ff2e3367..ba88e95fb8 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -1,20 +1,12 @@ use super::*; use prost::Message; -pub struct Buffer { +pub struct ChannelBuffer { pub base_text: String, pub operations: Vec, } impl Database { - pub async fn create_buffer(&self) -> Result { - self.transaction(|tx| async move { - let buffer = buffer::ActiveModel::new().insert(&*tx).await?; - Ok(buffer.id) - }) - .await - } - pub async fn update_buffer( &self, buffer_id: BufferId, @@ -69,13 +61,65 @@ impl Database { .await } - pub async fn get_buffer(&self, id: BufferId) -> Result { + pub async fn join_buffer_for_channel( + &self, + channel_id: ChannelId, + user_id: UserId, + connection: ConnectionId, + ) -> Result { self.transaction(|tx| async move { - let buffer = buffer::Entity::find_by_id(id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no such buffer"))?; + let tx = tx; + // Get or create buffer from channel + self.check_user_is_channel_member(channel_id, user_id, &tx) + .await?; + + let buffer = channel::Model { + id: channel_id, + ..Default::default() + } + .find_related(buffer::Entity) + .one(&*tx) + .await?; + + let buffer = if let Some(buffer) = buffer { + buffer + } else { + let buffer = buffer::ActiveModel { + channel_id: ActiveValue::Set(channel_id), + ..Default::default() + } + .insert(&*tx) + .await?; + buffer + }; + + // Join the collaborators + let collaborators = buffer + .find_related(channel_buffer_collaborator::Entity) + .all(&*tx) + .await?; + let replica_ids = collaborators + .iter() + .map(|c| c.replica_id) + .collect::>(); + let mut replica_id = ReplicaId(0); + while replica_ids.contains(&replica_id) { + replica_id.0 += 1; + } + channel_buffer_collaborator::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + connection_id: ActiveValue::Set(connection.id as i32), + connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)), + user_id: ActiveValue::Set(user_id), + replica_id: ActiveValue::Set(replica_id), + ..Default::default() + } + .insert(&*tx) + .await?; + + // Assemble the buffer state + let id = buffer.id; let base_text = if buffer.epoch > 0 { buffer_snapshot::Entity::find() .filter( @@ -128,13 +172,44 @@ impl Database { }) } - Ok(Buffer { + Ok(ChannelBuffer { base_text, operations, }) }) .await } + + pub async fn get_buffer_collaborators(&self, buffer: BufferId) -> Result<()> { + todo!() + } + + pub async fn leave_buffer(&self, buffer: BufferId, user: UserId) -> Result<()> { + self.transaction(|tx| async move { + //TODO + // let tx = tx; + // let channel = channel::Entity::find_by_id(channel_id) + // .one(&*tx) + // .await? + // .ok_or_else(|| anyhow!("invalid channel"))?; + + // if let Some(id) = channel.main_buffer_id { + // return Ok(id); + // } else { + // let buffer = buffer::ActiveModel::new().insert(&*tx).await?; + // channel::ActiveModel { + // id: ActiveValue::Unchanged(channel_id), + // main_buffer_id: ActiveValue::Set(Some(buffer.id)), + // ..Default::default() + // } + // .update(&*tx) + // .await?; + // Ok(buffer.id) + // } + Ok(()) + }) + .await + } } mod storage { diff --git a/crates/collab/src/db/queries/channels.rs b/crates/collab/src/db/queries/channels.rs index 85a9304a2e..e3d3643a61 100644 --- a/crates/collab/src/db/queries/channels.rs +++ b/crates/collab/src/db/queries/channels.rs @@ -689,34 +689,6 @@ impl Database { }) .await } - - pub async fn get_or_create_buffer_for_channel( - &self, - channel_id: ChannelId, - ) -> Result { - self.transaction(|tx| async move { - let tx = tx; - let channel = channel::Entity::find_by_id(channel_id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("invalid channel"))?; - - if let Some(id) = channel.main_buffer_id { - return Ok(id); - } else { - let buffer = buffer::ActiveModel::new().insert(&*tx).await?; - channel::ActiveModel { - id: ActiveValue::Unchanged(channel_id), - main_buffer_id: ActiveValue::Set(Some(buffer.id)), - ..Default::default() - } - .update(&*tx) - .await?; - Ok(buffer.id) - } - }) - .await - } } #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)] diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index fbf4bff2a6..fe747e0d27 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -3,6 +3,7 @@ pub mod buffer; pub mod buffer_operation; pub mod buffer_snapshot; pub mod channel; +pub mod channel_buffer_collaborator; pub mod channel_member; pub mod channel_path; pub mod contact; diff --git a/crates/collab/src/db/tables/buffer.rs b/crates/collab/src/db/tables/buffer.rs index 84e62cc071..f0187ad278 100644 --- a/crates/collab/src/db/tables/buffer.rs +++ b/crates/collab/src/db/tables/buffer.rs @@ -1,4 +1,4 @@ -use crate::db::BufferId; +use crate::db::{BufferId, ChannelId}; use sea_orm::entity::prelude::*; #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] @@ -7,6 +7,7 @@ pub struct Model { #[sea_orm(primary_key)] pub id: BufferId, pub epoch: i32, + pub channel_id: ChannelId, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] @@ -15,6 +16,14 @@ pub enum Relation { Operations, #[sea_orm(has_many = "super::buffer_snapshot::Entity")] Snapshots, + #[sea_orm( + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" + )] + Channel, + #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")] + Collaborators, } impl Related for Entity { @@ -29,4 +38,16 @@ impl Related for Entity { } } +impl Related for Entity { + fn to() -> RelationDef { + Relation::Channel.def() + } +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Collaborators.def() + } +} + impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/channel.rs b/crates/collab/src/db/tables/channel.rs index 444d5fa6d9..7b33e3a1dd 100644 --- a/crates/collab/src/db/tables/channel.rs +++ b/crates/collab/src/db/tables/channel.rs @@ -7,7 +7,6 @@ pub struct Model { #[sea_orm(primary_key)] pub id: ChannelId, pub name: String, - pub main_buffer_id: Option, } impl ActiveModelBehavior for ActiveModel {} @@ -16,6 +15,8 @@ impl ActiveModelBehavior for ActiveModel {} pub enum Relation { #[sea_orm(has_one = "super::room::Entity")] Room, + #[sea_orm(has_one = "super::room::Entity")] + Buffer, #[sea_orm(has_many = "super::channel_member::Entity")] Member, } @@ -31,3 +32,9 @@ impl Related for Entity { Relation::Room.def() } } + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} diff --git a/crates/collab/src/db/tables/channel_buffer_collaborator.rs b/crates/collab/src/db/tables/channel_buffer_collaborator.rs new file mode 100644 index 0000000000..2e43e93e8e --- /dev/null +++ b/crates/collab/src/db/tables/channel_buffer_collaborator.rs @@ -0,0 +1,42 @@ +use crate::db::{BufferId, ChannelBufferCollaboratorId, ReplicaId, ServerId, UserId}; +use rpc::ConnectionId; +use sea_orm::entity::prelude::*; + +#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] +#[sea_orm(table_name = "channel_buffer_collaborators")] +pub struct Model { + #[sea_orm(primary_key)] + pub id: ChannelBufferCollaboratorId, + pub buffer_id: BufferId, + pub connection_id: i32, + pub 
connection_server_id: ServerId, + pub user_id: UserId, + pub replica_id: ReplicaId, +} + +impl Model { + pub fn connection(&self) -> ConnectionId { + ConnectionId { + owner_id: self.connection_server_id.0 as u32, + id: self.connection_id as u32, + } + } +} + +#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] +pub enum Relation { + #[sea_orm( + belongs_to = "super::buffer::Entity", + from = "Column::BufferId", + to = "super::buffer::Column::Id" + )] + Buffer, +} + +impl Related for Entity { + fn to() -> RelationDef { + Relation::Buffer.def() + } +} + +impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index f0e78e1fe4..bf7d7763e2 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -6,7 +6,60 @@ use text::Buffer; test_both_dbs!(test_buffers, test_buffers_postgres, test_buffers_sqlite); async fn test_buffers(db: &Arc) { - let buffer_id = db.create_buffer().await.unwrap(); + // Prep database test info + let a_id = db + .create_user( + "user_a@example.com", + false, + NewUserParams { + github_login: "user_a".into(), + github_user_id: 101, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + let b_id = db + .create_user( + "user_b@example.com", + false, + NewUserParams { + github_login: "user_b".into(), + github_user_id: 102, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + // This user will not be a part of the channel + let c_id = db + .create_user( + "user_b@example.com", + false, + NewUserParams { + github_login: "user_b".into(), + github_user_id: 102, + invite_count: 0, + }, + ) + .await + .unwrap() + .user_id; + + let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap(); + + db.invite_channel_member(zed_id, b_id, a_id, false) + .await + .unwrap(); + + db.respond_to_channel_invite(zed_id, b_id, true) + .await + .unwrap(); + + // TODO: Join buffer + let buffer_id = db.get_or_create_buffer_for_channel(zed_id); let mut buffer = Buffer::new(0, 0, "".to_string()); let mut operations = Vec::new(); @@ -23,7 +76,7 @@ async fn test_buffers(db: &Arc) { db.update_buffer(buffer_id, &operations).await.unwrap(); - let buffer_data = db.get_buffer(buffer_id).await.unwrap(); + let buffer_data = db.open_buffer(buffer_id).await.unwrap(); let mut buffer_2 = Buffer::new(0, 0, buffer_data.base_text); buffer_2 diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 22eb23ce8e..6e62b90473 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -2,7 +2,10 @@ mod connection_pool; use crate::{ auth, - db::{self, ChannelId, ChannelsForUser, Database, ProjectId, RoomId, ServerId, User, UserId}, + db::{ + self, BufferId, ChannelId, ChannelsForUser, Database, ProjectId, RoomId, ServerId, User, + UserId, + }, executor::Executor, AppState, Result, }; @@ -35,8 +38,8 @@ use lazy_static::lazy_static; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ - self, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, GetChannelBufferResponse, - LiveKitConnectionInfo, RequestMessage, + self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo, + OpenChannelBufferResponse, RequestMessage, }, Connection, ConnectionId, Peer, Receipt, TypedEnvelope, }; @@ -248,7 +251,9 @@ impl Server { .add_request_handler(remove_channel_member) .add_request_handler(set_channel_member_admin) .add_request_handler(rename_channel) - .add_request_handler(get_channel_buffer) + 
.add_request_handler(open_channel_buffer) + .add_request_handler(close_channel_buffer) + .add_message_handler(update_channel_buffer) .add_request_handler(get_channel_members) .add_request_handler(respond_to_channel_invite) .add_request_handler(join_channel) @@ -2479,9 +2484,9 @@ async fn join_channel( Ok(()) } -async fn get_channel_buffer( - request: proto::GetChannelBuffer, - response: Response, +async fn open_channel_buffer( + request: proto::OpenChannelBuffer, + response: Response, session: Session, ) -> Result<()> { let db = session.db().await; @@ -2489,9 +2494,12 @@ async fn get_channel_buffer( let buffer_id = db.get_or_create_buffer_for_channel(channel_id).await?; - let buffer = db.get_buffer(buffer_id).await?; + // TODO: join channel_buffer - response.send(GetChannelBufferResponse { + let buffer = db.open_buffer(buffer_id).await?; + + response.send(OpenChannelBufferResponse { + buffer_id: buffer_id.to_proto(), base_text: buffer.base_text, operations: buffer.operations, })?; @@ -2499,6 +2507,32 @@ async fn get_channel_buffer( Ok(()) } +async fn close_channel_buffer( + request: proto::CloseChannelBuffer, + response: Response, + session: Session, +) -> Result<()> { + let db = session.db().await; + let buffer_id = BufferId::from_proto(request.buffer_id); + + // TODO: close channel buffer here + // + response.send(Ack {})?; + + Ok(()) +} + +async fn update_channel_buffer( + request: proto::UpdateChannelBuffer, + session: Session, +) -> Result<()> { + let db = session.db().await; + + // TODO: Broadcast to buffer members + + Ok(()) +} + async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> { let project_id = ProjectId::from_proto(request.project_id); let project_connection_ids = session diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index e7f662523e..c41f5de803 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -19,45 +19,39 @@ async fn test_channel_buffers( .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)]) .await; - let a_document = - cx_a.add_model(|cx| ChannelBuffer::for_channel(zed_id, client_a.client().to_owned(), cx)); - let channel_buffer_a = a_document - .update(cx_a, |doc, cx| doc.buffer(cx)) + let channel_buffer_a = cx_a + .update(|cx| ChannelBuffer::for_channel(zed_id, client_a.client().to_owned(), cx)) .await .unwrap(); - edit_channel_buffer(&channel_buffer_a, cx_a, [(0..0, "hello world")]); - edit_channel_buffer(&channel_buffer_a, cx_a, [(5..5, ", cruel")]); - edit_channel_buffer(&channel_buffer_a, cx_a, [(0..5, "goodbye")]); - undo_channel_buffer(&channel_buffer_a, cx_a); + let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer()); - assert_eq!( - channel_buffer_text(&channel_buffer_a, cx_a), - "hello, cruel world" - ); + edit_channel_buffer(&buffer_a, cx_a, [(0..0, "hello world")]); + edit_channel_buffer(&buffer_a, cx_a, [(5..5, ", cruel")]); + edit_channel_buffer(&buffer_a, cx_a, [(0..5, "goodbye")]); + undo_channel_buffer(&buffer_a, cx_a); - let b_document = - cx_b.add_model(|cx| ChannelBuffer::for_channel(zed_id, client_b.client().to_owned(), cx)); - let channel_buffer_b = b_document - .update(cx_b, |doc, cx| doc.buffer(cx)) + assert_eq!(channel_buffer_text(&buffer_a, cx_a), "hello, cruel world"); + + let channel_buffer_b = cx_b + .update(|cx| ChannelBuffer::for_channel(zed_id, client_b.client().to_owned(), cx)) .await .unwrap(); - assert_eq!( - 
channel_buffer_text(&channel_buffer_b, cx_b), - "hello, cruel world" - ); + let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer()); - edit_channel_buffer(&channel_buffer_b, cx_b, [(7..12, "beautiful")]); + assert_eq!(channel_buffer_text(&buffer_b, cx_b), "hello, cruel world"); + + edit_channel_buffer(&buffer_b, cx_b, [(7..12, "beautiful")]); deterministic.run_until_parked(); assert_eq!( - channel_buffer_text(&channel_buffer_a, cx_a), + channel_buffer_text(&buffer_a, cx_a), "hello, beautiful world" ); assert_eq!( - channel_buffer_text(&channel_buffer_b, cx_b), + channel_buffer_text(&buffer_b, cx_b), "hello, beautiful world" ); } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index baeaae1876..7fb22577f3 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -143,8 +143,10 @@ message Envelope { SetChannelMemberAdmin set_channel_member_admin = 129; RenameChannel rename_channel = 130; - GetChannelBuffer get_channel_buffer = 131; - GetChannelBufferResponse get_channel_buffer_response = 132; + OpenChannelBuffer open_channel_buffer = 131; + OpenChannelBufferResponse open_channel_buffer_response = 132; + UpdateChannelBuffer update_channel_buffer = 133; + CloseChannelBuffer close_channel_buffer = 134; } } @@ -543,6 +545,11 @@ message UpdateBuffer { repeated Operation operations = 3; } +message UpdateChannelBuffer { + uint64 buffer_id = 2; + repeated Operation operations = 3; +} + message UpdateBufferFile { uint64 project_id = 1; uint64 buffer_id = 2; @@ -951,13 +958,18 @@ message RenameChannel { string name = 2; } -message GetChannelBuffer { +message OpenChannelBuffer { uint64 channel_id = 1; } -message GetChannelBufferResponse { - string base_text = 1; - repeated Operation operations = 2; +message OpenChannelBufferResponse { + uint64 buffer_id = 1; + string base_text = 2; + repeated Operation operations = 3; +} + +message CloseChannelBuffer { + uint64 buffer_id = 1; } message RespondToChannelInvite { @@ -1156,7 +1168,6 @@ enum GitStatus { Conflict = 2; } - message BufferState { uint64 id = 1; optional File file = 2; diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 21a491b934..9d71140aa0 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -249,8 +249,10 @@ messages!( (GetPrivateUserInfoResponse, Foreground), (GetChannelMembers, Foreground), (GetChannelMembersResponse, Foreground), - (GetChannelBuffer, Foreground), - (GetChannelBufferResponse, Foreground) + (OpenChannelBuffer, Foreground), + (OpenChannelBufferResponse, Foreground), + (CloseChannelBuffer, Background), + (UpdateChannelBuffer, Foreground) ); request_messages!( @@ -317,7 +319,8 @@ request_messages!( (UpdateParticipantLocation, Ack), (UpdateProject, Ack), (UpdateWorktree, Ack), - (GetChannelBuffer, GetChannelBufferResponse) + (OpenChannelBuffer, OpenChannelBufferResponse), + (CloseChannelBuffer, Ack) ); entity_messages!( @@ -373,6 +376,8 @@ entity_messages!( UpdateDiffBase ); +entity_messages!(buffer_id, UpdateChannelBuffer); + const KIB: usize = 1024; const MIB: usize = KIB * 1024; const MAX_BUFFER_LEN: usize = MIB; From 71611ee7a2ef52a785df74e79af2dd3ececdf706 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 22 Aug 2023 09:47:49 -0700 Subject: [PATCH 05/22] Get join_buffer_for_channel compiling Co-authored-by: Mikayla --- crates/collab/src/db/queries/buffers.rs | 23 +++++--- crates/collab/src/db/tables/channel.rs | 4 +- crates/collab/src/db/tests/buffer_tests.rs | 67 +++++++++++++++++----- crates/collab/src/db/tests/db_tests.rs 
| 29 ---------- crates/collab/src/rpc.rs | 14 ++--- crates/rpc/proto/zed.proto | 1 + 6 files changed, 73 insertions(+), 65 deletions(-) diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index ba88e95fb8..3f86f897d8 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -1,11 +1,6 @@ use super::*; use prost::Message; -pub struct ChannelBuffer { - pub base_text: String, - pub operations: Vec, -} - impl Database { pub async fn update_buffer( &self, @@ -66,7 +61,7 @@ impl Database { channel_id: ChannelId, user_id: UserId, connection: ConnectionId, - ) -> Result { + ) -> Result { self.transaction(|tx| async move { let tx = tx; @@ -95,7 +90,7 @@ impl Database { }; // Join the collaborators - let collaborators = buffer + let mut collaborators = buffer .find_related(channel_buffer_collaborator::Entity) .all(&*tx) .await?; @@ -107,7 +102,7 @@ impl Database { while replica_ids.contains(&replica_id) { replica_id.0 += 1; } - channel_buffer_collaborator::ActiveModel { + let collaborator = channel_buffer_collaborator::ActiveModel { buffer_id: ActiveValue::Set(buffer.id), connection_id: ActiveValue::Set(connection.id as i32), connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)), @@ -117,6 +112,7 @@ impl Database { } .insert(&*tx) .await?; + collaborators.push(collaborator); // Assemble the buffer state let id = buffer.id; @@ -172,9 +168,18 @@ impl Database { }) } - Ok(ChannelBuffer { + Ok(proto::OpenChannelBufferResponse { + buffer_id: buffer.id.to_proto(), base_text, operations, + collaborators: collaborators + .into_iter() + .map(|collaborator| proto::Collaborator { + peer_id: Some(collaborator.connection().into()), + user_id: collaborator.user_id.to_proto(), + replica_id: collaborator.replica_id.0 as u32, + }) + .collect(), }) }) .await diff --git a/crates/collab/src/db/tables/channel.rs b/crates/collab/src/db/tables/channel.rs index 7b33e3a1dd..7f59e8d65f 100644 --- a/crates/collab/src/db/tables/channel.rs +++ b/crates/collab/src/db/tables/channel.rs @@ -1,4 +1,4 @@ -use crate::db::{BufferId, ChannelId}; +use crate::db::ChannelId; use sea_orm::entity::prelude::*; #[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)] @@ -15,7 +15,7 @@ impl ActiveModelBehavior for ActiveModel {} pub enum Relation { #[sea_orm(has_one = "super::room::Entity")] Room, - #[sea_orm(has_one = "super::room::Entity")] + #[sea_orm(has_one = "super::buffer::Entity")] Buffer, #[sea_orm(has_many = "super::channel_member::Entity")] Member, diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index bf7d7763e2..fff9938573 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -3,9 +3,13 @@ use crate::test_both_dbs; use language::proto; use text::Buffer; -test_both_dbs!(test_buffers, test_buffers_postgres, test_buffers_sqlite); +test_both_dbs!( + test_channel_buffers, + test_channel_buffers_postgres, + test_channel_buffers_sqlite +); -async fn test_buffers(db: &Arc) { +async fn test_channel_buffers(db: &Arc) { // Prep database test info let a_id = db .create_user( @@ -48,6 +52,8 @@ async fn test_buffers(db: &Arc) { .unwrap() .user_id; + let owner_id = db.create_server("production").await.unwrap().0 as u32; + let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap(); db.invite_channel_member(zed_id, b_id, a_id, false) @@ -58,16 +64,19 @@ async fn test_buffers(db: &Arc) { .await .unwrap(); - // TODO: 
Join buffer - let buffer_id = db.get_or_create_buffer_for_channel(zed_id); + let buffer_response_a = db + .join_buffer_for_channel(zed_id, a_id, ConnectionId { owner_id, id: 1 }) + .await + .unwrap(); + let buffer_id = BufferId::from_proto(buffer_response_a.buffer_id); - let mut buffer = Buffer::new(0, 0, "".to_string()); + let mut buffer_a = Buffer::new(0, 0, "".to_string()); let mut operations = Vec::new(); - operations.push(buffer.edit([(0..0, "hello world")])); - operations.push(buffer.edit([(5..5, ", cruel")])); - operations.push(buffer.edit([(0..5, "goodbye")])); - operations.push(buffer.undo().unwrap().1); - assert_eq!(buffer.text(), "hello, cruel world"); + operations.push(buffer_a.edit([(0..0, "hello world")])); + operations.push(buffer_a.edit([(5..5, ", cruel")])); + operations.push(buffer_a.edit([(0..5, "goodbye")])); + operations.push(buffer_a.undo().unwrap().1); + assert_eq!(buffer_a.text(), "hello, cruel world"); let operations = operations .into_iter() @@ -76,11 +85,14 @@ async fn test_buffers(db: &Arc) { db.update_buffer(buffer_id, &operations).await.unwrap(); - let buffer_data = db.open_buffer(buffer_id).await.unwrap(); + let buffer_response_b = db + .join_buffer_for_channel(zed_id, b_id, ConnectionId { owner_id, id: 2 }) + .await + .unwrap(); - let mut buffer_2 = Buffer::new(0, 0, buffer_data.base_text); - buffer_2 - .apply_ops(buffer_data.operations.into_iter().map(|operation| { + let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text); + buffer_b + .apply_ops(buffer_response_b.operations.into_iter().map(|operation| { let operation = proto::deserialize_operation(operation).unwrap(); if let language::Operation::Buffer(operation) = operation { operation @@ -90,5 +102,30 @@ async fn test_buffers(db: &Arc) { })) .unwrap(); - assert_eq!(buffer_2.text(), "hello, cruel world"); + assert_eq!(buffer_b.text(), "hello, cruel world"); + + // Ensure that C fails to open the buffer + assert!(db + .join_buffer_for_channel(zed_id, c_id, ConnectionId { owner_id, id: 3 }) + .await + .is_err()); + + //Ensure that both collaborators have shown up + assert_eq!( + buffer_response_b.collaborators, + &[ + rpc::proto::Collaborator { + user_id: a_id.to_proto(), + peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }), + replica_id: 0, + }, + rpc::proto::Collaborator { + user_id: b_id.to_proto(), + peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }), + replica_id: 1, + } + ] + ); + + // Leave buffer } diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs index 0fffabc7c4..fc31ee7c4d 100644 --- a/crates/collab/src/db/tests/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -1329,35 +1329,6 @@ async fn test_channel_renames(db: &Arc) { assert!(bad_name_rename.is_err()) } -test_both_dbs!( - test_get_or_create_channel_buffer, - test_get_or_create_channel_buffer_postgres, - test_get_or_create_channel_buffer_sqlite -); - -async fn test_get_or_create_channel_buffer(db: &Arc) { - let a_id = db - .create_user( - "user1@example.com", - false, - NewUserParams { - github_login: "user1".into(), - github_user_id: 5, - invite_count: 0, - }, - ) - .await - .unwrap() - .user_id; - - let zed_id = db.create_root_channel("zed", "1", a_id).await.unwrap(); - - let first_buffer_id = db.get_or_create_buffer_for_channel(zed_id).await.unwrap(); - let second_buffer_id = db.get_or_create_buffer_for_channel(zed_id).await.unwrap(); - - assert_eq!(first_buffer_id, second_buffer_id); -} - #[gpui::test] async fn test_multiple_signup_overwrite() { let test_db = 
TestDb::postgres(build_background_executor()); diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 6e62b90473..95c6bdefc1 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -2492,17 +2492,11 @@ async fn open_channel_buffer( let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); - let buffer_id = db.get_or_create_buffer_for_channel(channel_id).await?; + let open_response = db + .join_buffer_for_channel(channel_id, session.user_id, session.connection_id) + .await?; - // TODO: join channel_buffer - - let buffer = db.open_buffer(buffer_id).await?; - - response.send(OpenChannelBufferResponse { - buffer_id: buffer_id.to_proto(), - base_text: buffer.base_text, - operations: buffer.operations, - })?; + response.send(open_response)?; Ok(()) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 7fb22577f3..6f19132dc5 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -966,6 +966,7 @@ message OpenChannelBufferResponse { uint64 buffer_id = 1; string base_text = 2; repeated Operation operations = 3; + repeated Collaborator collaborators = 4; } message CloseChannelBuffer { From 95ea6647259d812da38c5b473600dc22ec602a8d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 22 Aug 2023 11:02:13 -0700 Subject: [PATCH 06/22] WIP --- crates/channel/src/channel_buffer.rs | 2 +- .../20221109000000_test_schema.sql | 9 +- .../20230819154600_add_channel_buffers.sql | 9 +- crates/collab/src/db/queries/buffers.rs | 188 ++++++++++-------- crates/collab/src/db/queries/rooms.rs | 36 +++- crates/collab/src/db/tables/buffer.rs | 8 - crates/collab/src/db/tables/channel.rs | 8 + .../db/tables/channel_buffer_collaborator.rs | 17 +- crates/collab/src/db/tests/buffer_tests.rs | 28 ++- crates/collab/src/rpc.rs | 28 +-- crates/rpc/proto/zed.proto | 14 +- crates/rpc/src/proto.rs | 10 +- 12 files changed, 211 insertions(+), 146 deletions(-) diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 372bd319a1..d88810ff56 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -36,7 +36,7 @@ impl ChannelBuffer { ) -> Task>> { cx.spawn(|mut cx| async move { let response = client - .request(proto::OpenChannelBuffer { channel_id }) + .request(proto::JoinChannelBuffer { channel_id }) .await?; let base_text = response.base_text; diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 12ff2caec5..f39f0cca59 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -238,15 +238,16 @@ CREATE TABLE "buffer_snapshots" ( CREATE TABLE "channel_buffer_collaborators" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, - "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, "connection_id" INTEGER NOT NULL, "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "connection_lost" BOOLEAN NOT NULL DEFAULT false, "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, "replica_id" INTEGER NOT NULL ); -CREATE INDEX "index_channel_buffer_collaborators_on_buffer_id" ON "channel_buffer_collaborators" ("buffer_id"); -CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_and_replica_id" ON "channel_buffer_collaborators" ("buffer_id", 
"replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); -CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("buffer_id", "connection_id", "connection_server_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql index 8ccd7acadf..f6bd2879c6 100644 --- a/crates/collab/migrations/20230819154600_add_channel_buffers.sql +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -27,15 +27,16 @@ CREATE TABLE "buffer_snapshots" ( CREATE TABLE "channel_buffer_collaborators" ( "id" SERIAL PRIMARY KEY, - "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, + "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, "connection_id" INTEGER NOT NULL, "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE, + "connection_lost" BOOLEAN NOT NULL DEFAULT FALSE, "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, "replica_id" INTEGER NOT NULL ); -CREATE INDEX "index_channel_buffer_collaborators_on_buffer_id" ON "channel_buffer_collaborators" ("buffer_id"); -CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_and_replica_id" ON "channel_buffer_collaborators" ("buffer_id", "replica_id"); +CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id"); CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id"); -CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_buffer_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("buffer_id", "connection_id", "connection_server_id"); +CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id"); diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 3f86f897d8..473dd1afe9 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -2,66 +2,12 @@ use super::*; use prost::Message; impl Database { - pub async fn update_buffer( - &self, - buffer_id: BufferId, - operations: &[proto::Operation], - ) -> Result<()> { - self.transaction(|tx| async move { - let buffer = buffer::Entity::find_by_id(buffer_id) - .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("no such buffer"))?; - buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { - match operation.variant.as_ref()? { - proto::operation::Variant::Edit(operation) => { - let value = - serialize_edit_operation(&operation.ranges, &operation.new_text); - let version = serialize_version(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer_id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(false), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::Undo(operation) => { - let value = serialize_undo_operation(&operation.counts); - let version = serialize_version(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer_id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(true), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::UpdateSelections(_) => None, - proto::operation::Variant::UpdateDiagnostics(_) => None, - proto::operation::Variant::UpdateCompletionTriggers(_) => None, - } - })) - .exec(&*tx) - .await?; - - Ok(()) - }) - .await - } - - pub async fn join_buffer_for_channel( + pub async fn join_channel_buffer( &self, channel_id: ChannelId, user_id: UserId, connection: ConnectionId, - ) -> Result { + ) -> Result { self.transaction(|tx| async move { let tx = tx; @@ -90,8 +36,8 @@ impl Database { }; // Join the collaborators - let mut collaborators = buffer - .find_related(channel_buffer_collaborator::Entity) + let mut collaborators = channel_buffer_collaborator::Entity::find() + .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) .all(&*tx) .await?; let replica_ids = collaborators @@ -103,7 +49,7 @@ impl Database { replica_id.0 += 1; } let collaborator = channel_buffer_collaborator::ActiveModel { - buffer_id: ActiveValue::Set(buffer.id), + channel_id: ActiveValue::Set(channel_id), connection_id: ActiveValue::Set(connection.id as i32), connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)), user_id: ActiveValue::Set(user_id), @@ -168,7 +114,7 @@ impl Database { }) } - Ok(proto::OpenChannelBufferResponse { + Ok(proto::JoinChannelBufferResponse { buffer_id: buffer.id.to_proto(), base_text, operations, @@ -185,32 +131,112 @@ impl Database { .await } - pub async fn get_buffer_collaborators(&self, buffer: BufferId) -> Result<()> { + pub async fn leave_channel_buffer( + &self, + channel_id: ChannelId, + connection: ConnectionId, + ) -> Result> { + self.transaction(|tx| async move { + let result = channel_buffer_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) + .add( + channel_buffer_collaborator::Column::ConnectionId + .eq(connection.id as i32), + ) + .add( + channel_buffer_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("not a collaborator on this project"))?; + } + + 
let mut connections = Vec::new(); + let mut rows = channel_buffer_collaborator::Entity::find() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .stream(&*tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + connections.push(ConnectionId { + id: row.connection_id as u32, + owner_id: row.connection_server_id.0 as u32, + }); + } + + Ok(connections) + }) + .await + } + + pub async fn leave_channel_buffers( + &self, + connection: ConnectionId, + ) -> Result> { + // + } + + pub async fn get_channel_buffer_collaborators(&self, channel_id: ChannelId) -> Result<()> { todo!() } - pub async fn leave_buffer(&self, buffer: BufferId, user: UserId) -> Result<()> { + pub async fn update_channel_buffer( + &self, + buffer_id: BufferId, + operations: &[proto::Operation], + ) -> Result<()> { self.transaction(|tx| async move { - //TODO - // let tx = tx; - // let channel = channel::Entity::find_by_id(channel_id) - // .one(&*tx) - // .await? - // .ok_or_else(|| anyhow!("invalid channel"))?; + let buffer = buffer::Entity::find_by_id(buffer_id) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such buffer"))?; + buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { + match operation.variant.as_ref()? { + proto::operation::Variant::Edit(operation) => { + let value = + serialize_edit_operation(&operation.ranges, &operation.new_text); + let version = serialize_version(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer_id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(false), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::Undo(operation) => { + let value = serialize_undo_operation(&operation.counts); + let version = serialize_version(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer_id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(true), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::UpdateSelections(_) => None, + proto::operation::Variant::UpdateDiagnostics(_) => None, + proto::operation::Variant::UpdateCompletionTriggers(_) => None, + } + })) + .exec(&*tx) + .await?; - // if let Some(id) = channel.main_buffer_id { - // return Ok(id); - // } else { - // let buffer = buffer::ActiveModel::new().insert(&*tx).await?; - // channel::ActiveModel { - // id: ActiveValue::Unchanged(channel_id), - // main_buffer_id: ActiveValue::Set(Some(buffer.id)), - // ..Default::default() - // } - // .update(&*tx) - // .await?; - // Ok(buffer.id) - // } Ok(()) }) .await diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index ee79f2cb4f..a85d257187 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -903,15 +903,35 @@ impl Database { ), ) .one(&*tx) - .await? 
- .ok_or_else(|| anyhow!("not a participant in any room"))?; + .await?; - room_participant::Entity::update(room_participant::ActiveModel { - answering_connection_lost: ActiveValue::set(true), - ..participant.into_active_model() - }) - .exec(&*tx) - .await?; + if let Some(participant) = participant { + room_participant::Entity::update(room_participant::ActiveModel { + answering_connection_lost: ActiveValue::set(true), + ..participant.into_active_model() + }) + .exec(&*tx) + .await?; + } + + channel_buffer_collaborator::Entity::update_many() + .filter( + Condition::all() + .add( + channel_buffer_collaborator::Column::ConnectionId + .eq(connection.id as i32), + ) + .add( + channel_buffer_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .set(channel_buffer_collaborator::ActiveModel { + connection_lost: ActiveValue::set(true), + ..Default::default() + }) + .exec(&*tx) + .await?; Ok(()) }) diff --git a/crates/collab/src/db/tables/buffer.rs b/crates/collab/src/db/tables/buffer.rs index f0187ad278..ec2ffd4a68 100644 --- a/crates/collab/src/db/tables/buffer.rs +++ b/crates/collab/src/db/tables/buffer.rs @@ -22,8 +22,6 @@ pub enum Relation { to = "super::channel::Column::Id" )] Channel, - #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")] - Collaborators, } impl Related for Entity { @@ -44,10 +42,4 @@ impl Related for Entity { } } -impl Related for Entity { - fn to() -> RelationDef { - Relation::Collaborators.def() - } -} - impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tables/channel.rs b/crates/collab/src/db/tables/channel.rs index 7f59e8d65f..05895ede4c 100644 --- a/crates/collab/src/db/tables/channel.rs +++ b/crates/collab/src/db/tables/channel.rs @@ -19,6 +19,8 @@ pub enum Relation { Buffer, #[sea_orm(has_many = "super::channel_member::Entity")] Member, + #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")] + BufferCollaborators, } impl Related for Entity { @@ -38,3 +40,9 @@ impl Related for Entity { Relation::Buffer.def() } } + +impl Related for Entity { + fn to() -> RelationDef { + Relation::BufferCollaborators.def() + } +} diff --git a/crates/collab/src/db/tables/channel_buffer_collaborator.rs b/crates/collab/src/db/tables/channel_buffer_collaborator.rs index 2e43e93e8e..ac2637b36e 100644 --- a/crates/collab/src/db/tables/channel_buffer_collaborator.rs +++ b/crates/collab/src/db/tables/channel_buffer_collaborator.rs @@ -1,4 +1,4 @@ -use crate::db::{BufferId, ChannelBufferCollaboratorId, ReplicaId, ServerId, UserId}; +use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId}; use rpc::ConnectionId; use sea_orm::entity::prelude::*; @@ -7,9 +7,10 @@ use sea_orm::entity::prelude::*; pub struct Model { #[sea_orm(primary_key)] pub id: ChannelBufferCollaboratorId, - pub buffer_id: BufferId, + pub channel_id: ChannelId, pub connection_id: i32, pub connection_server_id: ServerId, + pub connection_lost: bool, pub user_id: UserId, pub replica_id: ReplicaId, } @@ -26,16 +27,16 @@ impl Model { #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] pub enum Relation { #[sea_orm( - belongs_to = "super::buffer::Entity", - from = "Column::BufferId", - to = "super::buffer::Column::Id" + belongs_to = "super::channel::Entity", + from = "Column::ChannelId", + to = "super::channel::Column::Id" )] - Buffer, + Channel, } -impl Related for Entity { +impl Related for Entity { fn to() -> RelationDef { - Relation::Buffer.def() + Relation::Channel.def() } } diff --git 
a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index fff9938573..c25071e1a2 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -37,13 +37,14 @@ async fn test_channel_buffers(db: &Arc) { .await .unwrap() .user_id; + // This user will not be a part of the channel let c_id = db .create_user( - "user_b@example.com", + "user_c@example.com", false, NewUserParams { - github_login: "user_b".into(), + github_login: "user_c".into(), github_user_id: 102, invite_count: 0, }, @@ -64,8 +65,9 @@ async fn test_channel_buffers(db: &Arc) { .await .unwrap(); + let connection_id_a = ConnectionId { owner_id, id: 1 }; let buffer_response_a = db - .join_buffer_for_channel(zed_id, a_id, ConnectionId { owner_id, id: 1 }) + .join_channel_buffer(zed_id, a_id, connection_id_a) .await .unwrap(); let buffer_id = BufferId::from_proto(buffer_response_a.buffer_id); @@ -83,10 +85,13 @@ async fn test_channel_buffers(db: &Arc) { .map(|op| proto::serialize_operation(&language::Operation::Buffer(op))) .collect::>(); - db.update_buffer(buffer_id, &operations).await.unwrap(); + db.update_channel_buffer(buffer_id, &operations) + .await + .unwrap(); + let connection_id_b = ConnectionId { owner_id, id: 2 }; let buffer_response_b = db - .join_buffer_for_channel(zed_id, b_id, ConnectionId { owner_id, id: 2 }) + .join_channel_buffer(zed_id, b_id, connection_id_b) .await .unwrap(); @@ -106,7 +111,7 @@ async fn test_channel_buffers(db: &Arc) { // Ensure that C fails to open the buffer assert!(db - .join_buffer_for_channel(zed_id, c_id, ConnectionId { owner_id, id: 3 }) + .join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 }) .await .is_err()); @@ -127,5 +132,14 @@ async fn test_channel_buffers(db: &Arc) { ] ); - // Leave buffer + let collaborators = db + .leave_channel_buffer(zed_id, connection_id_b) + .await + .unwrap(); + + assert_eq!(collaborators, &[connection_id_a],); + + db.connection_lost(connection_id_a).await.unwrap(); + // assert!() + // Test buffer epoch incrementing? 
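// (Editor's sketch, not part of this patch.) The "buffer epoch incrementing"
// check left open above could look roughly like this fragment of the same
// test. It assumes the snapshot-on-leave behavior added later in this series:
// once every collaborator has left, a rejoining client should receive the
// merged text as `base_text`, with no operations left to replay.
db.leave_channel_buffer(zed_id, connection_id_a)
    .await
    .unwrap();

let rejoin_response = db
    .join_channel_buffer(zed_id, a_id, connection_id_a)
    .await
    .unwrap();
assert_eq!(rejoin_response.base_text, "hello, cruel world");
assert!(rejoin_response.operations.is_empty());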
} diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 95c6bdefc1..da5e7e6398 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -39,7 +39,7 @@ use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo, - OpenChannelBufferResponse, RequestMessage, + RequestMessage, }, Connection, ConnectionId, Peer, Receipt, TypedEnvelope, }; @@ -251,8 +251,8 @@ impl Server { .add_request_handler(remove_channel_member) .add_request_handler(set_channel_member_admin) .add_request_handler(rename_channel) - .add_request_handler(open_channel_buffer) - .add_request_handler(close_channel_buffer) + .add_request_handler(join_channel_buffer) + .add_request_handler(leave_channel_buffer) .add_message_handler(update_channel_buffer) .add_request_handler(get_channel_members) .add_request_handler(respond_to_channel_invite) @@ -2484,16 +2484,16 @@ async fn join_channel( Ok(()) } -async fn open_channel_buffer( - request: proto::OpenChannelBuffer, - response: Response, +async fn join_channel_buffer( + request: proto::JoinChannelBuffer, + response: Response, session: Session, ) -> Result<()> { let db = session.db().await; let channel_id = ChannelId::from_proto(request.channel_id); let open_response = db - .join_buffer_for_channel(channel_id, session.user_id, session.connection_id) + .join_channel_buffer(channel_id, session.user_id, session.connection_id) .await?; response.send(open_response)?; @@ -2501,16 +2501,18 @@ async fn open_channel_buffer( Ok(()) } -async fn close_channel_buffer( - request: proto::CloseChannelBuffer, - response: Response, +async fn leave_channel_buffer( + request: proto::LeaveChannelBuffer, + response: Response, session: Session, ) -> Result<()> { let db = session.db().await; - let buffer_id = BufferId::from_proto(request.buffer_id); + let channel_id = ChannelId::from_proto(request.channel_id); + + let collaborators_to_notify = db + .leave_channel_buffer(channel_id, session.connection_id) + .await?; - // TODO: close channel buffer here - // response.send(Ack {})?; Ok(()) diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 6f19132dc5..88ad46abc7 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -143,10 +143,10 @@ message Envelope { SetChannelMemberAdmin set_channel_member_admin = 129; RenameChannel rename_channel = 130; - OpenChannelBuffer open_channel_buffer = 131; - OpenChannelBufferResponse open_channel_buffer_response = 132; + JoinChannelBuffer join_channel_buffer = 131; + JoinChannelBufferResponse join_channel_buffer_response = 132; UpdateChannelBuffer update_channel_buffer = 133; - CloseChannelBuffer close_channel_buffer = 134; + LeaveChannelBuffer leave_channel_buffer = 134; } } @@ -958,19 +958,19 @@ message RenameChannel { string name = 2; } -message OpenChannelBuffer { +message JoinChannelBuffer { uint64 channel_id = 1; } -message OpenChannelBufferResponse { +message JoinChannelBufferResponse { uint64 buffer_id = 1; string base_text = 2; repeated Operation operations = 3; repeated Collaborator collaborators = 4; } -message CloseChannelBuffer { - uint64 buffer_id = 1; +message LeaveChannelBuffer { + uint64 channel_id = 1; } message RespondToChannelInvite { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 9d71140aa0..68219d3ad8 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -249,9 +249,9 @@ messages!( (GetPrivateUserInfoResponse, Foreground), (GetChannelMembers, Foreground), 
(GetChannelMembersResponse, Foreground), - (OpenChannelBuffer, Foreground), - (OpenChannelBufferResponse, Foreground), - (CloseChannelBuffer, Background), + (JoinChannelBuffer, Foreground), + (JoinChannelBufferResponse, Foreground), + (LeaveChannelBuffer, Background), (UpdateChannelBuffer, Foreground) ); @@ -319,8 +319,8 @@ request_messages!( (UpdateParticipantLocation, Ack), (UpdateProject, Ack), (UpdateWorktree, Ack), - (OpenChannelBuffer, OpenChannelBufferResponse), - (CloseChannelBuffer, Ack) + (JoinChannelBuffer, JoinChannelBufferResponse), + (LeaveChannelBuffer, Ack) ); entity_messages!( From 5a0315c4d5016bcc59116219dce3a1f09c687ba9 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Tue, 22 Aug 2023 13:25:31 -0700 Subject: [PATCH 07/22] Achieve end to end channel buffer synchronization co-authored-by: max --- crates/channel/src/channel.rs | 7 + crates/channel/src/channel_buffer.rs | 91 ++++++++-- crates/collab/src/db/queries/buffers.rs | 167 +++++++++++++----- crates/collab/src/db/tests/buffer_tests.rs | 28 ++- crates/collab/src/rpc.rs | 105 +++++++++-- crates/collab/src/tests.rs | 1 + .../collab/src/tests/channel_buffer_tests.rs | 119 +++++++++---- crates/rpc/proto/zed.proto | 23 ++- crates/rpc/src/proto.rs | 11 +- crates/zed/src/main.rs | 1 + 10 files changed, 425 insertions(+), 128 deletions(-) diff --git a/crates/channel/src/channel.rs b/crates/channel/src/channel.rs index 67c560a1fc..15631b7dd3 100644 --- a/crates/channel/src/channel.rs +++ b/crates/channel/src/channel.rs @@ -1,7 +1,14 @@ mod channel_store; pub mod channel_buffer; +use std::sync::Arc; + pub use channel_store::*; +use client::Client; #[cfg(test)] mod channel_store_tests; + +pub fn init(client: &Arc) { + channel_buffer::init(client); +} diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index d88810ff56..a59fec1553 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -6,30 +6,34 @@ use rpc::{proto, TypedEnvelope}; use std::sync::Arc; use util::ResultExt; -// Open the channel document -// ChannelDocumentView { ChannelDocument, Editor } -> On clone, clones internal ChannelDocument handle, instantiates new editor -// Produces a view which is: (ChannelDocument, Editor), ChannelDocument manages subscriptions -// ChannelDocuments -> Buffers -> Editor with that buffer - -// ChannelDocuments { -// ChannleBuffers: HashMap> -// } - -type BufferId = u64; +pub(crate) fn init(client: &Arc) { + client.add_model_message_handler(ChannelBuffer::handle_update_channel_buffer); + client.add_model_message_handler(ChannelBuffer::handle_add_channel_buffer_collaborator); + client.add_model_message_handler(ChannelBuffer::handle_remove_channel_buffer_collaborator); +} pub struct ChannelBuffer { channel_id: ChannelId, - buffer_id: BufferId, + collaborators: Vec, buffer: ModelHandle, client: Arc, + _subscription: client::Subscription, } impl Entity for ChannelBuffer { type Event = (); + + fn release(&mut self, _: &mut AppContext) { + self.client + .send(proto::LeaveChannelBuffer { + channel_id: self.channel_id, + }) + .log_err(); + } } impl ChannelBuffer { - pub fn for_channel( + pub fn join_channel( channel_id: ChannelId, client: Arc, cx: &mut AppContext, @@ -45,19 +49,24 @@ impl ChannelBuffer { .into_iter() .map(language::proto::deserialize_operation) .collect::, _>>()?; - let buffer_id = response.buffer_id; - let buffer = cx.add_model(|cx| language::Buffer::new(0, base_text, cx)); + let collaborators = response.collaborators; + + let buffer = + cx.add_model(|cx| 
language::Buffer::new(response.replica_id as u16, base_text, cx)); buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; + let subscription = client.subscribe_to_entity(channel_id)?; + anyhow::Ok(cx.add_model(|cx| { cx.subscribe(&buffer, Self::on_buffer_update).detach(); - client.add_model_message_handler(Self::handle_update_channel_buffer); + Self { - buffer_id, buffer, client, channel_id, + collaborators, + _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), } })) }) @@ -77,6 +86,7 @@ impl ChannelBuffer { .collect::, _>>()?; this.update(&mut cx, |this, cx| { + cx.notify(); this.buffer .update(cx, |buffer, cx| buffer.apply_ops(ops, cx)) })?; @@ -84,6 +94,49 @@ impl ChannelBuffer { Ok(()) } + async fn handle_add_channel_buffer_collaborator( + this: ModelHandle, + envelope: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + let collaborator = envelope.payload.collaborator.ok_or_else(|| { + anyhow::anyhow!( + "Should have gotten a collaborator in the AddChannelBufferCollaborator message" + ) + })?; + + this.update(&mut cx, |this, cx| { + this.collaborators.push(collaborator); + cx.notify(); + }); + + Ok(()) + } + + async fn handle_remove_channel_buffer_collaborator( + this: ModelHandle, + message: TypedEnvelope, + _: Arc, + mut cx: AsyncAppContext, + ) -> Result<()> { + this.update(&mut cx, |this, cx| { + this.collaborators.retain(|collaborator| { + if collaborator.peer_id == message.payload.peer_id { + this.buffer.update(cx, |buffer, cx| { + buffer.remove_peer(collaborator.replica_id as u16, cx) + }); + false + } else { + true + } + }); + cx.notify(); + }); + + Ok(()) + } + fn on_buffer_update( &mut self, _: ModelHandle, @@ -94,7 +147,7 @@ impl ChannelBuffer { let operation = language::proto::serialize_operation(operation); self.client .send(proto::UpdateChannelBuffer { - buffer_id: self.buffer_id, + channel_id: self.channel_id, operations: vec![operation], }) .log_err(); @@ -104,4 +157,8 @@ impl ChannelBuffer { pub fn buffer(&self) -> ModelHandle { self.buffer.clone() } + + pub fn collaborators(&self) -> &[proto::Collaborator] { + &self.collaborators + } } diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 473dd1afe9..7f0e5a75f0 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -11,7 +11,6 @@ impl Database { self.transaction(|tx| async move { let tx = tx; - // Get or create buffer from channel self.check_user_is_channel_member(channel_id, user_id, &tx) .await?; @@ -116,6 +115,7 @@ impl Database { Ok(proto::JoinChannelBufferResponse { buffer_id: buffer.id.to_proto(), + replica_id: replica_id.to_proto() as u32, base_text, operations, collaborators: collaborators @@ -137,67 +137,128 @@ impl Database { connection: ConnectionId, ) -> Result> { self.transaction(|tx| async move { - let result = channel_buffer_collaborator::Entity::delete_many() - .filter( - Condition::all() - .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) - .add( - channel_buffer_collaborator::Column::ConnectionId - .eq(connection.id as i32), - ) - .add( - channel_buffer_collaborator::Column::ConnectionServerId - .eq(connection.owner_id as i32), - ), - ) - .exec(&*tx) - .await?; - if result.rows_affected == 0 { - Err(anyhow!("not a collaborator on this project"))?; - } - - let mut connections = Vec::new(); - let mut rows = channel_buffer_collaborator::Entity::find() - .filter( - Condition::all() - 
.add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), - ) - .stream(&*tx) - .await?; - while let Some(row) = rows.next().await { - let row = row?; - connections.push(ConnectionId { - id: row.connection_id as u32, - owner_id: row.connection_server_id.0 as u32, - }); - } - - Ok(connections) + self.leave_channel_buffer_internal(channel_id, connection, &*tx) + .await }) .await } + pub async fn leave_channel_buffer_internal( + &self, + channel_id: ChannelId, + connection: ConnectionId, + tx: &DatabaseTransaction, + ) -> Result> { + let result = channel_buffer_collaborator::Entity::delete_many() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)) + .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32)) + .add( + channel_buffer_collaborator::Column::ConnectionServerId + .eq(connection.owner_id as i32), + ), + ) + .exec(&*tx) + .await?; + if result.rows_affected == 0 { + Err(anyhow!("not a collaborator on this project"))?; + } + + let mut connections = Vec::new(); + let mut rows = channel_buffer_collaborator::Entity::find() + .filter( + Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .stream(&*tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + connections.push(ConnectionId { + id: row.connection_id as u32, + owner_id: row.connection_server_id.0 as u32, + }); + } + + Ok(connections) + } + pub async fn leave_channel_buffers( &self, connection: ConnectionId, - ) -> Result> { - // + ) -> Result)>> { + self.transaction(|tx| async move { + #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] + enum QueryChannelIds { + ChannelId, + } + + let channel_ids: Vec = channel_buffer_collaborator::Entity::find() + .select_only() + .column(channel_buffer_collaborator::Column::ChannelId) + .filter(Condition::all().add( + channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32), + )) + .into_values::<_, QueryChannelIds>() + .all(&*tx) + .await?; + + let mut result = Vec::new(); + for channel_id in channel_ids { + let collaborators = self + .leave_channel_buffer_internal(channel_id, connection, &*tx) + .await?; + result.push((channel_id, collaborators)); + } + + Ok(result) + }) + .await } - pub async fn get_channel_buffer_collaborators(&self, channel_id: ChannelId) -> Result<()> { - todo!() + #[cfg(debug_assertions)] + pub async fn get_channel_buffer_collaborators( + &self, + channel_id: ChannelId, + ) -> Result> { + self.transaction(|tx| async move { + #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] + enum QueryUserIds { + UserId, + } + + let users: Vec = channel_buffer_collaborator::Entity::find() + .select_only() + .column(channel_buffer_collaborator::Column::UserId) + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .into_values::<_, QueryUserIds>() + .all(&*tx) + .await?; + + Ok(users) + }) + .await } pub async fn update_channel_buffer( &self, - buffer_id: BufferId, + channel_id: ChannelId, + user: UserId, operations: &[proto::Operation], - ) -> Result<()> { + ) -> Result> { self.transaction(|tx| async move { - let buffer = buffer::Entity::find_by_id(buffer_id) + self.check_user_is_channel_member(channel_id, user, &*tx) + .await?; + + let buffer = buffer::Entity::find() + .filter(buffer::Column::ChannelId.eq(channel_id)) .one(&*tx) .await? 
.ok_or_else(|| anyhow!("no such buffer"))?; + let buffer_id = buffer.id; buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { match operation.variant.as_ref()? { proto::operation::Variant::Edit(operation) => { @@ -237,7 +298,23 @@ impl Database { .exec(&*tx) .await?; - Ok(()) + let mut connections = Vec::new(); + let mut rows = channel_buffer_collaborator::Entity::find() + .filter( + Condition::all() + .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)), + ) + .stream(&*tx) + .await?; + while let Some(row) = rows.next().await { + let row = row?; + connections.push(ConnectionId { + id: row.connection_id as u32, + owner_id: row.connection_server_id.0 as u32, + }); + } + + Ok(connections) }) .await } diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index c25071e1a2..08252e382e 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -66,11 +66,10 @@ async fn test_channel_buffers(db: &Arc) { .unwrap(); let connection_id_a = ConnectionId { owner_id, id: 1 }; - let buffer_response_a = db + let _ = db .join_channel_buffer(zed_id, a_id, connection_id_a) .await .unwrap(); - let buffer_id = BufferId::from_proto(buffer_response_a.buffer_id); let mut buffer_a = Buffer::new(0, 0, "".to_string()); let mut operations = Vec::new(); @@ -85,7 +84,7 @@ async fn test_channel_buffers(db: &Arc) { .map(|op| proto::serialize_operation(&language::Operation::Buffer(op))) .collect::>(); - db.update_channel_buffer(buffer_id, &operations) + db.update_channel_buffer(zed_id, a_id, &operations) .await .unwrap(); @@ -115,7 +114,7 @@ async fn test_channel_buffers(db: &Arc) { .await .is_err()); - //Ensure that both collaborators have shown up + // Ensure that both collaborators have shown up assert_eq!( buffer_response_b.collaborators, &[ @@ -132,6 +131,10 @@ async fn test_channel_buffers(db: &Arc) { ] ); + // Ensure that get_channel_buffer_collaborators works + let zed_collaborats = db.get_channel_buffer_collaborators(zed_id).await.unwrap(); + assert_eq!(zed_collaborats, &[a_id, b_id]); + let collaborators = db .leave_channel_buffer(zed_id, connection_id_b) .await @@ -139,7 +142,18 @@ async fn test_channel_buffers(db: &Arc) { assert_eq!(collaborators, &[connection_id_a],); - db.connection_lost(connection_id_a).await.unwrap(); - // assert!() - // Test buffer epoch incrementing? 
+ let cargo_id = db.create_root_channel("cargo", "2", a_id).await.unwrap(); + let _ = db + .join_channel_buffer(cargo_id, a_id, connection_id_a) + .await + .unwrap(); + + db.leave_channel_buffers(connection_id_a).await.unwrap(); + + let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap(); + let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap(); + assert_eq!(zed_collaborators, &[]); + assert_eq!(cargo_collaborators, &[]); + + // TODO: test buffer epoch incrementing } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index da5e7e6398..2bd39c861d 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -2,10 +2,7 @@ mod connection_pool; use crate::{ auth, - db::{ - self, BufferId, ChannelId, ChannelsForUser, Database, ProjectId, RoomId, ServerId, User, - UserId, - }, + db::{self, ChannelId, ChannelsForUser, Database, ProjectId, RoomId, ServerId, User, UserId}, executor::Executor, AppState, Result, }; @@ -38,8 +35,8 @@ use lazy_static::lazy_static; use prometheus::{register_int_gauge, IntGauge}; use rpc::{ proto::{ - self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo, - RequestMessage, + self, Ack, AddChannelBufferCollaborator, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, + LiveKitConnectionInfo, RequestMessage, }, Connection, ConnectionId, Peer, Receipt, TypedEnvelope, }; @@ -860,6 +857,7 @@ async fn connection_lost( futures::select_biased! { _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => { leave_room_for_session(&session).await.trace_err(); + leave_channel_buffers_for_session(&session).await.trace_err(); if !session .connection_pool() @@ -872,6 +870,8 @@ async fn connection_lost( } } update_user_contacts(session.user_id, &session).await?; + + } _ = teardown.changed().fuse() => {} } @@ -2496,8 +2496,51 @@ async fn join_channel_buffer( .join_channel_buffer(channel_id, session.user_id, session.connection_id) .await?; + let replica_id = open_response.replica_id; + let collaborators = open_response.collaborators.clone(); + response.send(open_response)?; + let update = AddChannelBufferCollaborator { + channel_id: channel_id.to_proto(), + collaborator: Some(proto::Collaborator { + user_id: session.user_id.to_proto(), + peer_id: Some(session.connection_id.into()), + replica_id, + }), + }; + channel_buffer_updated( + session.connection_id, + collaborators + .iter() + .filter_map(|collaborator| Some(collaborator.peer_id?.into())), + &update, + &session.peer, + ); + + Ok(()) +} + +async fn update_channel_buffer( + request: proto::UpdateChannelBuffer, + session: Session, +) -> Result<()> { + let db = session.db().await; + let channel_id = ChannelId::from_proto(request.channel_id); + + let collaborators = db + .update_channel_buffer(channel_id, session.user_id, &request.operations) + .await?; + + channel_buffer_updated( + session.connection_id, + collaborators, + &proto::UpdateChannelBuffer { + channel_id: channel_id.to_proto(), + operations: request.operations, + }, + &session.peer, + ); Ok(()) } @@ -2515,18 +2558,28 @@ async fn leave_channel_buffer( response.send(Ack {})?; + channel_buffer_updated( + session.connection_id, + collaborators_to_notify, + &proto::RemoveChannelBufferCollaborator { + channel_id: channel_id.to_proto(), + peer_id: Some(session.connection_id.into()), + }, + &session.peer, + ); + Ok(()) } -async fn update_channel_buffer( - request: proto::UpdateChannelBuffer, - session: Session, -) -> Result<()> { - let db = session.db().await; - - // TODO: Broadcast 
to buffer members - - Ok(()) +fn channel_buffer_updated( + sender_id: ConnectionId, + collaborators: impl IntoIterator, + message: &T, + peer: &Peer, +) { + broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| { + peer.send(peer_id.into(), message.clone()) + }); } async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> { @@ -2854,6 +2907,28 @@ async fn leave_room_for_session(session: &Session) -> Result<()> { Ok(()) } +async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> { + let left_channel_buffers = session + .db() + .await + .leave_channel_buffers(session.connection_id) + .await?; + + for (channel_id, connections) in left_channel_buffers { + channel_buffer_updated( + session.connection_id, + connections, + &proto::RemoveChannelBufferCollaborator { + channel_id: channel_id.to_proto(), + peer_id: Some(session.connection_id.into()), + }, + &session.peer, + ); + } + + Ok(()) +} + fn project_left(project: &db::LeftProject, session: &Session) { for connection_id in &project.connection_ids { if project.host_user_id == session.user_id { diff --git a/crates/collab/src/tests.rs b/crates/collab/src/tests.rs index 831bccbb72..25f059c0aa 100644 --- a/crates/collab/src/tests.rs +++ b/crates/collab/src/tests.rs @@ -211,6 +211,7 @@ impl TestServer { workspace::init(app_state.clone(), cx); audio::init((), cx); call::init(client.clone(), user_store.clone(), cx); + channel::init(&client); }); client diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index c41f5de803..d9880496f6 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -1,11 +1,13 @@ -use crate::tests::TestServer; +use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; use channel::channel_buffer::ChannelBuffer; +use client::UserId; use gpui::{executor::Deterministic, ModelHandle, TestAppContext}; -use std::{ops::Range, sync::Arc}; +use rpc::{proto, RECEIVE_TIMEOUT}; +use std::sync::Arc; #[gpui::test] -async fn test_channel_buffers( +async fn test_core_channel_buffers( deterministic: Arc, cx_a: &mut TestAppContext, cx_b: &mut TestAppContext, @@ -19,60 +21,103 @@ async fn test_channel_buffers( .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)]) .await; + // Client A joins the channel buffer let channel_buffer_a = cx_a - .update(|cx| ChannelBuffer::for_channel(zed_id, client_a.client().to_owned(), cx)) + .update(|cx| ChannelBuffer::join_channel(zed_id, client_a.client().to_owned(), cx)) .await .unwrap(); + // Client A edits the buffer let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer()); - edit_channel_buffer(&buffer_a, cx_a, [(0..0, "hello world")]); - edit_channel_buffer(&buffer_a, cx_a, [(5..5, ", cruel")]); - edit_channel_buffer(&buffer_a, cx_a, [(0..5, "goodbye")]); - undo_channel_buffer(&buffer_a, cx_a); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(0..0, "hello world")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(5..5, ", cruel")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| { + buffer.edit([(0..5, "goodbye")], None, cx) + }); + buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx)); + deterministic.run_until_parked(); - assert_eq!(channel_buffer_text(&buffer_a, cx_a), "hello, cruel world"); + assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world"); + // Client B joins the channel buffer let channel_buffer_b = cx_b - .update(|cx| 
ChannelBuffer::for_channel(zed_id, client_b.client().to_owned(), cx)) + .update(|cx| ChannelBuffer::join_channel(zed_id, client_b.client().to_owned(), cx)) .await .unwrap(); + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators( + buffer.collaborators(), + &[client_a.user_id(), client_b.user_id()], + ); + }); + + // Client B sees the correct text, and then edits it let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer()); + assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world"); + buffer_b.update(cx_b, |buffer, cx| { + buffer.edit([(7..12, "beautiful")], None, cx) + }); - assert_eq!(channel_buffer_text(&buffer_b, cx_b), "hello, cruel world"); - - edit_channel_buffer(&buffer_b, cx_b, [(7..12, "beautiful")]); + // Both A and B see the new edit + deterministic.run_until_parked(); + assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world"); + assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world"); + // Client A closes the channel buffer. + cx_a.update(|_| drop(channel_buffer_a)); deterministic.run_until_parked(); + // Client B sees that client A is gone from the channel buffer. + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]); + }); + + // Client A rejoins the channel buffer + let _channel_buffer_a = cx_a + .update(|cx| ChannelBuffer::join_channel(zed_id, client_a.client().to_owned(), cx)) + .await + .unwrap(); + deterministic.run_until_parked(); + + // Sanity test, make sure we saw A rejoining + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators( + &buffer.collaborators(), + &[client_b.user_id(), client_a.user_id()], + ); + }); + + // Client A loses connection. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + // Client B observes A disconnect + channel_buffer_b.read_with(cx_b, |buffer, _| { + assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]); + }); + + // TODO: + // - Test synchronizing offline updates, what happens to A's channel buffer? 
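// (Editor's sketch, not part of this patch.) One shape the offline-update
// question above could take, assuming the test server exposes an
// `allow_connections()` counterpart to `forbid_connections()` (not shown in
// this change) and that edits made while a client is offline are replayed to
// it when it rejoins:

// B keeps editing while A is disconnected.
buffer_b.update(cx_b, |buffer, cx| {
    buffer.edit([(0..5, "goodbye")], None, cx)
});
deterministic.run_until_parked();

// A is allowed to reconnect and joins the channel buffer again.
server.allow_connections();
deterministic.advance_clock(RECEIVE_TIMEOUT);
let channel_buffer_a = cx_a
    .update(|cx| ChannelBuffer::join_channel(zed_id, client_a.client().to_owned(), cx))
    .await
    .unwrap();
let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
deterministic.run_until_parked();

// If the server kept B's offline edits, A should converge on them.
assert_eq!(buffer_text(&buffer_a, cx_a), "goodbye, beautiful world");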
+} + +#[track_caller] +fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option]) { assert_eq!( - channel_buffer_text(&buffer_a, cx_a), - "hello, beautiful world" - ); - assert_eq!( - channel_buffer_text(&buffer_b, cx_b), - "hello, beautiful world" + collaborators + .into_iter() + .map(|collaborator| collaborator.user_id) + .collect::>(), + ids.into_iter().map(|id| id.unwrap()).collect::>() ); } -fn edit_channel_buffer( - channel_buffer: &ModelHandle, - cx: &mut TestAppContext, - edits: I, -) where - I: IntoIterator, &'static str)>, -{ - channel_buffer.update(cx, |buffer, cx| buffer.edit(edits, None, cx)); -} - -fn undo_channel_buffer(channel_buffer: &ModelHandle, cx: &mut TestAppContext) { - channel_buffer.update(cx, |buffer, cx| buffer.undo(cx)); -} - -fn channel_buffer_text( - channel_buffer: &ModelHandle, - cx: &mut TestAppContext, -) -> String { +fn buffer_text(channel_buffer: &ModelHandle, cx: &mut TestAppContext) -> String { channel_buffer.read_with(cx, |buffer, _| buffer.text()) } diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 88ad46abc7..b97feff06b 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -147,6 +147,8 @@ message Envelope { JoinChannelBufferResponse join_channel_buffer_response = 132; UpdateChannelBuffer update_channel_buffer = 133; LeaveChannelBuffer leave_channel_buffer = 134; + AddChannelBufferCollaborator add_channel_buffer_collaborator = 135; + RemoveChannelBufferCollaborator remove_channel_buffer_collaborator = 136; } } @@ -416,6 +418,16 @@ message RemoveProjectCollaborator { PeerId peer_id = 2; } +message AddChannelBufferCollaborator { + uint64 channel_id = 1; + Collaborator collaborator = 2; +} + +message RemoveChannelBufferCollaborator { + uint64 channel_id = 1; + PeerId peer_id = 2; +} + message GetDefinition { uint64 project_id = 1; uint64 buffer_id = 2; @@ -546,8 +558,8 @@ message UpdateBuffer { } message UpdateChannelBuffer { - uint64 buffer_id = 2; - repeated Operation operations = 3; + uint64 channel_id = 1; + repeated Operation operations = 2; } message UpdateBufferFile { @@ -964,9 +976,10 @@ message JoinChannelBuffer { message JoinChannelBufferResponse { uint64 buffer_id = 1; - string base_text = 2; - repeated Operation operations = 3; - repeated Collaborator collaborators = 4; + uint32 replica_id = 2; + string base_text = 3; + repeated Operation operations = 4; + repeated Collaborator collaborators = 5; } message LeaveChannelBuffer { diff --git a/crates/rpc/src/proto.rs b/crates/rpc/src/proto.rs index 68219d3ad8..f0f49c6230 100644 --- a/crates/rpc/src/proto.rs +++ b/crates/rpc/src/proto.rs @@ -252,7 +252,9 @@ messages!( (JoinChannelBuffer, Foreground), (JoinChannelBufferResponse, Foreground), (LeaveChannelBuffer, Background), - (UpdateChannelBuffer, Foreground) + (UpdateChannelBuffer, Foreground), + (RemoveChannelBufferCollaborator, Foreground), + (AddChannelBufferCollaborator, Foreground), ); request_messages!( @@ -376,7 +378,12 @@ entity_messages!( UpdateDiffBase ); -entity_messages!(buffer_id, UpdateChannelBuffer); +entity_messages!( + channel_id, + UpdateChannelBuffer, + RemoveChannelBufferCollaborator, + AddChannelBufferCollaborator +); const KIB: usize = 1024; const MIB: usize = KIB * 1024; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index b905c1d37b..3b1fccb927 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -158,6 +158,7 @@ fn main() { outline::init(cx); project_symbols::init(cx); project_panel::init(Assets, cx); + 
channel::init(&client); diagnostics::init(cx); search::init(cx); semantic_index::init(fs.clone(), http.clone(), languages.clone(), cx); From 4eff8ad18692a505f22e552422f443cfd583d012 Mon Sep 17 00:00:00 2001 From: Mikayla Date: Tue, 22 Aug 2023 14:18:32 -0700 Subject: [PATCH 08/22] Add channel notes view co-authored-by: Max --- Cargo.lock | 1 + crates/channel/src/channel_buffer.rs | 14 +++- crates/channel/src/channel_store.rs | 10 +++ .../collab/src/tests/channel_buffer_tests.rs | 19 ++--- crates/collab_ui/Cargo.toml | 1 + crates/collab_ui/src/channel_view.rs | 69 +++++++++++++++++++ crates/collab_ui/src/collab_panel.rs | 46 ++++++++++++- crates/collab_ui/src/collab_ui.rs | 1 + crates/gpui/src/app.rs | 3 +- 9 files changed, 150 insertions(+), 14 deletions(-) create mode 100644 crates/collab_ui/src/channel_view.rs diff --git a/Cargo.lock b/Cargo.lock index a40aa7d89c..deed9a176e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1529,6 +1529,7 @@ dependencies = [ "futures 0.3.28", "fuzzy", "gpui", + "language", "log", "menu", "picker", diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index a59fec1553..aa99d5c10b 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -1,4 +1,4 @@ -use crate::ChannelId; +use crate::{Channel, ChannelId, ChannelStore}; use anyhow::Result; use client::Client; use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; @@ -16,6 +16,7 @@ pub struct ChannelBuffer { channel_id: ChannelId, collaborators: Vec, buffer: ModelHandle, + channel_store: ModelHandle, client: Arc, _subscription: client::Subscription, } @@ -33,7 +34,8 @@ impl Entity for ChannelBuffer { } impl ChannelBuffer { - pub fn join_channel( + pub(crate) fn new( + channel_store: ModelHandle, channel_id: ChannelId, client: Arc, cx: &mut AppContext, @@ -65,6 +67,7 @@ impl ChannelBuffer { buffer, client, channel_id, + channel_store, collaborators, _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), } @@ -161,4 +164,11 @@ impl ChannelBuffer { pub fn collaborators(&self) -> &[proto::Collaborator] { &self.collaborators } + + pub fn channel(&self, cx: &AppContext) -> Option> { + self.channel_store + .read(cx) + .channel_for_id(self.channel_id) + .cloned() + } } diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index b9b2c98acd..a6aad19d03 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -13,6 +13,8 @@ use rpc::{proto, TypedEnvelope}; use std::sync::Arc; use util::ResultExt; +use crate::channel_buffer::ChannelBuffer; + pub type ChannelId = u64; pub struct ChannelStore { @@ -151,6 +153,14 @@ impl ChannelStore { self.channels_by_id.get(&channel_id) } + pub fn open_channel_buffer( + &self, + channel_id: ChannelId, + cx: &mut ModelContext, + ) -> Task>> { + ChannelBuffer::new(cx.handle(), channel_id, self.client.clone(), cx) + } + pub fn is_user_admin(&self, channel_id: ChannelId) -> bool { self.channel_paths.iter().any(|path| { if let Some(ix) = path.iter().position(|id| *id == channel_id) { diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index d9880496f6..db98c6abdc 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -1,6 +1,5 @@ use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; -use channel::channel_buffer::ChannelBuffer; use client::UserId; use gpui::{executor::Deterministic, 
ModelHandle, TestAppContext}; use rpc::{proto, RECEIVE_TIMEOUT}; @@ -22,8 +21,9 @@ async fn test_core_channel_buffers( .await; // Client A joins the channel buffer - let channel_buffer_a = cx_a - .update(|cx| ChannelBuffer::join_channel(zed_id, client_a.client().to_owned(), cx)) + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx)) .await .unwrap(); @@ -45,8 +45,9 @@ async fn test_core_channel_buffers( assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world"); // Client B joins the channel buffer - let channel_buffer_b = cx_b - .update(|cx| ChannelBuffer::join_channel(zed_id, client_b.client().to_owned(), cx)) + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |channel, cx| channel.open_channel_buffer(zed_id, cx)) .await .unwrap(); @@ -79,8 +80,9 @@ async fn test_core_channel_buffers( }); // Client A rejoins the channel buffer - let _channel_buffer_a = cx_a - .update(|cx| ChannelBuffer::join_channel(zed_id, client_a.client().to_owned(), cx)) + let _channel_buffer_a = client_a + .channel_store() + .update(cx_a, |channels, cx| channels.open_channel_buffer(zed_id, cx)) .await .unwrap(); deterministic.run_until_parked(); @@ -104,7 +106,8 @@ async fn test_core_channel_buffers( }); // TODO: - // - Test synchronizing offline updates, what happens to A's channel buffer? + // - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects + // - Test interaction with channel deletion while buffer is open } #[track_caller] diff --git a/crates/collab_ui/Cargo.toml b/crates/collab_ui/Cargo.toml index e0177f6609..1ecb4b8422 100644 --- a/crates/collab_ui/Cargo.toml +++ b/crates/collab_ui/Cargo.toml @@ -34,6 +34,7 @@ editor = { path = "../editor" } feedback = { path = "../feedback" } fuzzy = { path = "../fuzzy" } gpui = { path = "../gpui" } +language = { path = "../language" } menu = { path = "../menu" } picker = { path = "../picker" } project = { path = "../project" } diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs new file mode 100644 index 0000000000..27a2d678f5 --- /dev/null +++ b/crates/collab_ui/src/channel_view.rs @@ -0,0 +1,69 @@ +use channel::channel_buffer::ChannelBuffer; +use editor::Editor; +use gpui::{ + actions, + elements::{ChildView, Label}, + AnyElement, AppContext, Element, Entity, ModelHandle, View, ViewContext, ViewHandle, +}; +use language::Language; +use std::sync::Arc; +use workspace::item::{Item, ItemHandle}; + +actions!(channel_view, [Deploy]); + +pub(crate) fn init(cx: &mut AppContext) { + // TODO +} + +pub struct ChannelView { + editor: ViewHandle, + channel_buffer: ModelHandle, +} + +impl ChannelView { + pub fn new( + channel_buffer: ModelHandle, + language: Arc, + cx: &mut ViewContext, + ) -> Self { + let buffer = channel_buffer.read(cx).buffer(); + buffer.update(cx, |buffer, cx| buffer.set_language(Some(language), cx)); + let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx)); + Self { + editor, + channel_buffer, + } + } +} + +impl Entity for ChannelView { + type Event = editor::Event; +} + +impl View for ChannelView { + fn ui_name() -> &'static str { + "ChannelView" + } + + fn render(&mut self, cx: &mut ViewContext<'_, '_, Self>) -> AnyElement { + ChildView::new(self.editor.as_any(), cx).into_any() + } +} + +impl Item for ChannelView { + fn tab_content( + &self, + _: Option, + style: &theme::Tab, + cx: &gpui::AppContext, + ) -> AnyElement { + let channel_name = self + .channel_buffer + .read(cx) + 
.channel(cx) + .map_or("[Deleted channel]".to_string(), |channel| { + format!("#{}", channel.name) + }); + Label::new(channel_name, style.label.to_owned()).into_any() + } +} diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index ab692dd166..0eb6a65984 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -42,7 +42,10 @@ use workspace::{ Workspace, }; -use crate::face_pile::FacePile; +use crate::{ + channel_view::{self, ChannelView}, + face_pile::FacePile, +}; use channel_modal::ChannelModal; use self::contact_finder::ContactFinder; @@ -77,6 +80,11 @@ struct RenameChannel { channel_id: u64, } +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +struct OpenChannelBuffer { + channel_id: u64, +} + actions!( collab_panel, [ @@ -96,7 +104,8 @@ impl_actions!( InviteMembers, ManageMembers, RenameChannel, - ToggleCollapse + ToggleCollapse, + OpenChannelBuffer ] ); @@ -106,6 +115,7 @@ pub fn init(_client: Arc, cx: &mut AppContext) { settings::register::(cx); contact_finder::init(cx); channel_modal::init(cx); + channel_view::init(cx); cx.add_action(CollabPanel::cancel); cx.add_action(CollabPanel::select_next); @@ -121,7 +131,8 @@ pub fn init(_client: Arc, cx: &mut AppContext) { cx.add_action(CollabPanel::rename_channel); cx.add_action(CollabPanel::toggle_channel_collapsed); cx.add_action(CollabPanel::collapse_selected_channel); - cx.add_action(CollabPanel::expand_selected_channel) + cx.add_action(CollabPanel::expand_selected_channel); + cx.add_action(CollabPanel::open_channel_buffer); } #[derive(Debug)] @@ -1888,6 +1899,7 @@ impl CollabPanel { vec![ ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }), ContextMenuItem::action("New Subchannel", NewChannel { channel_id }), + ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }), ContextMenuItem::Separator, ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }), ContextMenuItem::Separator, @@ -2207,6 +2219,34 @@ impl CollabPanel { } } + fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext) { + let workspace = self.workspace; + let open = self.channel_store.update(cx, |channel_store, cx| { + channel_store.open_channel_buffer(action.channel_id, cx) + }); + + cx.spawn(|_, mut cx| async move { + let channel_buffer = open.await?; + + let markdown = workspace + .read_with(&cx, |workspace, _| { + workspace + .app_state() + .languages + .language_for_name("Markdown") + })? 
+ .await?; + + workspace.update(&mut cx, |workspace, cx| { + let channel_view = cx.add_view(|cx| ChannelView::new(channel_buffer, markdown, cx)); + workspace.add_item(Box::new(channel_view), cx); + })?; + + anyhow::Ok(()) + }) + .detach(); + } + fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext) { let Some(channel) = self.selected_channel() else { return; diff --git a/crates/collab_ui/src/collab_ui.rs b/crates/collab_ui/src/collab_ui.rs index 5420dd1db5..04644b62d9 100644 --- a/crates/collab_ui/src/collab_ui.rs +++ b/crates/collab_ui/src/collab_ui.rs @@ -1,3 +1,4 @@ +pub mod channel_view; pub mod collab_panel; mod collab_titlebar_item; mod contact_notification; diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 03625c80e7..890bd55a7f 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -4687,12 +4687,13 @@ impl AnyWeakModelHandle { } } -#[derive(Copy)] pub struct WeakViewHandle { any_handle: AnyWeakViewHandle, view_type: PhantomData, } +impl Copy for WeakViewHandle {} + impl Debug for WeakViewHandle { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct(&format!("WeakViewHandle<{}>", type_name::())) From 1d08f44e702024e26753388c538b93d5fafcd8fd Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 22 Aug 2023 15:33:37 -0700 Subject: [PATCH 09/22] Snapshot channel notes buffers when everyone leaves Co-authored-by: Mikayla --- Cargo.lock | 1 + crates/collab/Cargo.toml | 1 + .../20221109000000_test_schema.sql | 1 + .../20230819154600_add_channel_buffers.sql | 1 + crates/collab/src/db/queries/buffers.rs | 352 +++++++++++++----- .../collab/src/db/tables/buffer_snapshot.rs | 1 + crates/collab/src/db/tests/buffer_tests.rs | 10 +- crates/language/src/proto.rs | 1 + crates/text/src/text.rs | 2 +- 9 files changed, 273 insertions(+), 97 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index deed9a176e..0ec2f34185 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1458,6 +1458,7 @@ dependencies = [ "channel", "clap 3.2.25", "client", + "clock", "collections", "ctor", "dashmap", diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index fc78a03f67..cc1970266d 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -14,6 +14,7 @@ name = "seed" required-features = ["seed-support"] [dependencies] +clock = { path = "../clock" } collections = { path = "../collections" } live_kit_server = { path = "../live_kit_server" } text = { path = "../text" } diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index f39f0cca59..fdae4f2339 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -233,6 +233,7 @@ CREATE TABLE "buffer_snapshots" ( "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL, "text" TEXT NOT NULL, + "operation_serialization_version" INTEGER NOT NULL, PRIMARY KEY(buffer_id, epoch) ); diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql index f6bd2879c6..fec18ddb8d 100644 --- a/crates/collab/migrations/20230819154600_add_channel_buffers.sql +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -22,6 +22,7 @@ CREATE TABLE "buffer_snapshots" ( "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL, "text" TEXT NOT NULL, + 
"operation_serialization_version" INTEGER NOT NULL, PRIMARY KEY(buffer_id, epoch) ); diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 7f0e5a75f0..b0df905ecb 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -1,5 +1,7 @@ use super::*; use prost::Message; +use std::ops::Range; +use text::{EditOperation, InsertionTimestamp, UndoOperation}; impl Database { pub async fn join_channel_buffer( @@ -31,6 +33,16 @@ impl Database { } .insert(&*tx) .await?; + buffer_snapshot::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(0), + text: ActiveValue::Set(String::new()), + operation_serialization_version: ActiveValue::Set( + storage::SERIALIZATION_VERSION, + ), + } + .insert(&*tx) + .await?; buffer }; @@ -60,58 +72,7 @@ impl Database { collaborators.push(collaborator); // Assemble the buffer state - let id = buffer.id; - let base_text = if buffer.epoch > 0 { - buffer_snapshot::Entity::find() - .filter( - buffer_snapshot::Column::BufferId - .eq(id) - .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)), - ) - .one(&*tx) - .await? - .ok_or_else(|| anyhow!("no such snapshot"))? - .text - } else { - String::new() - }; - - let mut rows = buffer_operation::Entity::find() - .filter( - buffer_operation::Column::BufferId - .eq(id) - .and(buffer_operation::Column::Epoch.eq(buffer.epoch)), - ) - .stream(&*tx) - .await?; - let mut operations = Vec::new(); - while let Some(row) = rows.next().await { - let row = row?; - let version = deserialize_version(&row.version)?; - let operation = if row.is_undo { - let counts = deserialize_undo_operation(&row.value)?; - proto::operation::Variant::Undo(proto::operation::Undo { - replica_id: row.replica_id as u32, - local_timestamp: row.local_timestamp as u32, - lamport_timestamp: row.lamport_timestamp as u32, - version, - counts, - }) - } else { - let (ranges, new_text) = deserialize_edit_operation(&row.value)?; - proto::operation::Variant::Edit(proto::operation::Edit { - replica_id: row.replica_id as u32, - local_timestamp: row.local_timestamp as u32, - lamport_timestamp: row.lamport_timestamp as u32, - version, - ranges, - new_text, - }) - }; - operations.push(proto::Operation { - variant: Some(operation), - }) - } + let (base_text, operations) = self.get_buffer_state(&buffer, &tx).await?; Ok(proto::JoinChannelBufferResponse { buffer_id: buffer.id.to_proto(), @@ -180,6 +141,12 @@ impl Database { }); } + drop(rows); + + if connections.is_empty() { + self.snapshot_buffer(channel_id, &tx).await?; + } + Ok(connections) } @@ -258,42 +225,23 @@ impl Database { .one(&*tx) .await? .ok_or_else(|| anyhow!("no such buffer"))?; - let buffer_id = buffer.id; + + #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)] + enum QueryVersion { + OperationSerializationVersion, + } + + let serialization_version: i32 = buffer + .find_related(buffer_snapshot::Entity) + .select_only() + .filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch)) + .into_values::<_, QueryVersion>() + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("missing buffer snapshot"))?; + buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { - match operation.variant.as_ref()? 
{ - proto::operation::Variant::Edit(operation) => { - let value = - serialize_edit_operation(&operation.ranges, &operation.new_text); - let version = serialize_version(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer_id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(false), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::Undo(operation) => { - let value = serialize_undo_operation(&operation.counts); - let version = serialize_version(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer_id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(true), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::UpdateSelections(_) => None, - proto::operation::Variant::UpdateDiagnostics(_) => None, - proto::operation::Variant::UpdateCompletionTriggers(_) => None, - } + operation_to_storage(operation, &buffer, serialization_version) })) .exec(&*tx) .await?; @@ -318,6 +266,222 @@ impl Database { }) .await } + + async fn get_buffer_state( + &self, + buffer: &buffer::Model, + tx: &DatabaseTransaction, + ) -> Result<(String, Vec)> { + let id = buffer.id; + let (base_text, version) = if buffer.epoch > 0 { + let snapshot = buffer_snapshot::Entity::find() + .filter( + buffer_snapshot::Column::BufferId + .eq(id) + .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)), + ) + .one(&*tx) + .await? + .ok_or_else(|| anyhow!("no such snapshot"))?; + + let version = snapshot.operation_serialization_version; + (snapshot.text, version) + } else { + (String::new(), storage::SERIALIZATION_VERSION) + }; + + let mut rows = buffer_operation::Entity::find() + .filter( + buffer_operation::Column::BufferId + .eq(id) + .and(buffer_operation::Column::Epoch.eq(buffer.epoch)), + ) + .stream(&*tx) + .await?; + let mut operations = Vec::new(); + while let Some(row) = rows.next().await { + let row = row?; + + let operation = operation_from_storage(row, version)?; + operations.push(proto::Operation { + variant: Some(operation), + }) + } + + Ok((base_text, operations)) + } + + async fn snapshot_buffer(&self, channel_id: ChannelId, tx: &DatabaseTransaction) -> Result<()> { + let buffer = channel::Model { + id: channel_id, + ..Default::default() + } + .find_related(buffer::Entity) + .one(&*tx) + .await? 
+ .ok_or_else(|| anyhow!("no such buffer"))?; + + let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?; + + let mut text_buffer = text::Buffer::new(0, 0, base_text); + + text_buffer + .apply_ops( + operations + .into_iter() + .filter_map(deserialize_wire_operation), + ) + .unwrap(); + + let base_text = text_buffer.text(); + let epoch = buffer.epoch + 1; + + buffer_snapshot::Model { + buffer_id: buffer.id, + epoch, + text: base_text, + operation_serialization_version: storage::SERIALIZATION_VERSION, + } + .into_active_model() + .insert(tx) + .await?; + + buffer::ActiveModel { + id: ActiveValue::Unchanged(buffer.id), + epoch: ActiveValue::Set(epoch), + ..Default::default() + } + .save(tx) + .await?; + + Ok(()) + } +} + +fn operation_to_storage( + operation: &proto::Operation, + buffer: &buffer::Model, + _format: i32, +) -> Option { + match operation.variant.as_ref()? { + proto::operation::Variant::Edit(operation) => { + let value = edit_operation_to_storage(&operation.ranges, &operation.new_text); + let version = version_to_storage(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(false), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::Undo(operation) => { + let value = undo_operation_to_storage(&operation.counts); + let version = version_to_storage(&operation.version); + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(operation.replica_id as i32), + lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), + local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), + is_undo: ActiveValue::Set(true), + version: ActiveValue::Set(version), + value: ActiveValue::Set(value), + }) + } + proto::operation::Variant::UpdateSelections(_) => None, + proto::operation::Variant::UpdateDiagnostics(_) => None, + proto::operation::Variant::UpdateCompletionTriggers(_) => None, + } +} + +fn operation_from_storage( + row: buffer_operation::Model, + _format_version: i32, +) -> Result { + let version = version_from_storage(&row.version)?; + let operation = if row.is_undo { + let counts = undo_operation_from_storage(&row.value)?; + proto::operation::Variant::Undo(proto::operation::Undo { + replica_id: row.replica_id as u32, + local_timestamp: row.local_timestamp as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + counts, + }) + } else { + let (ranges, new_text) = edit_operation_from_storage(&row.value)?; + proto::operation::Variant::Edit(proto::operation::Edit { + replica_id: row.replica_id as u32, + local_timestamp: row.local_timestamp as u32, + lamport_timestamp: row.lamport_timestamp as u32, + version, + ranges, + new_text, + }) + }; + Ok(operation) +} + +// This is currently a manual copy of the deserialization code in the client's langauge crate +pub fn deserialize_wire_operation(operation: proto::Operation) -> Option { + match operation.variant? 
{ + proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation { + timestamp: InsertionTimestamp { + replica_id: edit.replica_id as text::ReplicaId, + local: edit.local_timestamp, + lamport: edit.lamport_timestamp, + }, + version: deserialize_wire_version(&edit.version), + ranges: edit.ranges.into_iter().map(deserialize_range).collect(), + new_text: edit.new_text.into_iter().map(Arc::from).collect(), + })), + proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo { + lamport_timestamp: clock::Lamport { + replica_id: undo.replica_id as text::ReplicaId, + value: undo.lamport_timestamp, + }, + undo: UndoOperation { + id: clock::Local { + replica_id: undo.replica_id as text::ReplicaId, + value: undo.local_timestamp, + }, + version: deserialize_wire_version(&undo.version), + counts: undo + .counts + .into_iter() + .map(|c| { + ( + clock::Local { + replica_id: c.replica_id as text::ReplicaId, + value: c.local_timestamp, + }, + c.count, + ) + }) + .collect(), + }, + }), + _ => None, + } +} + +pub fn deserialize_range(range: proto::Range) -> Range { + text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize) +} + +fn deserialize_wire_version(message: &[proto::VectorClockEntry]) -> clock::Global { + let mut version = clock::Global::new(); + for entry in message { + version.observe(clock::Local { + replica_id: entry.replica_id as text::ReplicaId, + value: entry.timestamp, + }); + } + version } mod storage { @@ -325,7 +489,7 @@ mod storage { use prost::Message; - pub const VERSION: usize = 1; + pub const SERIALIZATION_VERSION: i32 = 1; #[derive(Message)] pub struct VectorClock { @@ -374,7 +538,7 @@ mod storage { } } -fn serialize_version(version: &Vec) -> Vec { +fn version_to_storage(version: &Vec) -> Vec { storage::VectorClock { entries: version .iter() @@ -387,7 +551,7 @@ fn serialize_version(version: &Vec) -> Vec { .encode_to_vec() } -fn deserialize_version(bytes: &[u8]) -> Result> { +fn version_from_storage(bytes: &[u8]) -> Result> { let clock = storage::VectorClock::decode(bytes).map_err(|error| anyhow!("{}", error))?; Ok(clock .entries @@ -399,7 +563,7 @@ fn deserialize_version(bytes: &[u8]) -> Result> { .collect()) } -fn serialize_edit_operation(ranges: &[proto::Range], texts: &[String]) -> Vec { +fn edit_operation_to_storage(ranges: &[proto::Range], texts: &[String]) -> Vec { storage::TextEdit { ranges: ranges .iter() @@ -413,7 +577,7 @@ fn serialize_edit_operation(ranges: &[proto::Range], texts: &[String]) -> Vec Result<(Vec, Vec)> { +fn edit_operation_from_storage(bytes: &[u8]) -> Result<(Vec, Vec)> { let edit = storage::TextEdit::decode(bytes).map_err(|error| anyhow!("{}", error))?; let ranges = edit .ranges @@ -426,7 +590,7 @@ fn deserialize_edit_operation(bytes: &[u8]) -> Result<(Vec, Vec) -> Vec { +fn undo_operation_to_storage(counts: &Vec) -> Vec { storage::Undo { entries: counts .iter() @@ -440,7 +604,7 @@ fn serialize_undo_operation(counts: &Vec) -> Vec { .encode_to_vec() } -fn deserialize_undo_operation(bytes: &[u8]) -> Result> { +fn undo_operation_from_storage(bytes: &[u8]) -> Result> { let undo = storage::Undo::decode(bytes).map_err(|error| anyhow!("{}", error))?; Ok(undo .entries diff --git a/crates/collab/src/db/tables/buffer_snapshot.rs b/crates/collab/src/db/tables/buffer_snapshot.rs index ca8712a053..c9de665e43 100644 --- a/crates/collab/src/db/tables/buffer_snapshot.rs +++ b/crates/collab/src/db/tables/buffer_snapshot.rs @@ -9,6 +9,7 @@ pub struct Model { #[sea_orm(primary_key)] pub epoch: i32, pub text: String, + pub 
operation_serialization_version: i32, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 08252e382e..e71748b88b 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -10,7 +10,6 @@ test_both_dbs!( ); async fn test_channel_buffers(db: &Arc) { - // Prep database test info let a_id = db .create_user( "user_a@example.com", @@ -155,5 +154,12 @@ async fn test_channel_buffers(db: &Arc) { assert_eq!(zed_collaborators, &[]); assert_eq!(cargo_collaborators, &[]); - // TODO: test buffer epoch incrementing + // When everyone has left the channel, the operations are collapsed into + // a new base text. + let buffer_response_b = db + .join_channel_buffer(zed_id, b_id, connection_id_b) + .await + .unwrap(); + assert_eq!(buffer_response_b.base_text, "hello, cruel world"); + assert_eq!(buffer_response_b.operations, &[]); } diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 0de3f704c7..09c5ec7fc3 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -207,6 +207,7 @@ pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor { } } +// This behavior is currently copied in the collab database, for snapshotting channel notes pub fn deserialize_operation(message: proto::Operation) -> Result { Ok( match message diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 7c94f25e1e..4a97faf015 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -12,7 +12,7 @@ mod undo_map; pub use anchor::*; use anyhow::{anyhow, Result}; -use clock::ReplicaId; +pub use clock::ReplicaId; use collections::{HashMap, HashSet}; use fs::LineEnding; use locator::Locator; From 11ef5e27407703e312107d353bb933cbd833d44d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 22 Aug 2023 16:42:08 -0700 Subject: [PATCH 10/22] Simplify buffer_operations schema Co-authored-by: Mikayla --- .../20221109000000_test_schema.sql | 3 - .../20230819154600_add_channel_buffers.sql | 3 - crates/collab/src/db/queries/buffers.rs | 278 ++++++++---------- .../collab/src/db/tables/buffer_operation.rs | 3 - 4 files changed, 121 insertions(+), 166 deletions(-) diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index fdae4f2339..7a4cd9fd23 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -222,9 +222,6 @@ CREATE TABLE "buffer_operations" ( "epoch" INTEGER NOT NULL, "replica_id" INTEGER NOT NULL, "lamport_timestamp" INTEGER NOT NULL, - "local_timestamp" INTEGER NOT NULL, - "version" BLOB NOT NULL, - "is_undo" BOOLEAN NOT NULL, "value" BLOB NOT NULL, PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) ); diff --git a/crates/collab/migrations/20230819154600_add_channel_buffers.sql b/crates/collab/migrations/20230819154600_add_channel_buffers.sql index fec18ddb8d..5e6e7ce339 100644 --- a/crates/collab/migrations/20230819154600_add_channel_buffers.sql +++ b/crates/collab/migrations/20230819154600_add_channel_buffers.sql @@ -10,10 +10,7 @@ CREATE TABLE "buffer_operations" ( "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE, "epoch" INTEGER NOT NULL, "replica_id" INTEGER NOT NULL, - "local_timestamp" INTEGER NOT NULL, "lamport_timestamp" INTEGER NOT NULL, - "version" BYTEA NOT NULL, - "is_undo" BOOLEAN NOT NULL, 
"value" BYTEA NOT NULL, PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id) ); diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index b0df905ecb..83f5b87079 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -1,6 +1,5 @@ use super::*; use prost::Message; -use std::ops::Range; use text::{EditOperation, InsertionTimestamp, UndoOperation}; impl Database { @@ -234,6 +233,7 @@ impl Database { let serialization_version: i32 = buffer .find_related(buffer_snapshot::Entity) .select_only() + .column(buffer_snapshot::Column::OperationSerializationVersion) .filter(buffer_snapshot::Column::Epoch.eq(buffer.epoch)) .into_values::<_, QueryVersion>() .one(&*tx) @@ -326,11 +326,7 @@ impl Database { let mut text_buffer = text::Buffer::new(0, 0, base_text); text_buffer - .apply_ops( - operations - .into_iter() - .filter_map(deserialize_wire_operation), - ) + .apply_ops(operations.into_iter().filter_map(operation_from_wire)) .unwrap(); let base_text = text_buffer.text(); @@ -363,71 +359,122 @@ fn operation_to_storage( buffer: &buffer::Model, _format: i32, ) -> Option { - match operation.variant.as_ref()? { - proto::operation::Variant::Edit(operation) => { - let value = edit_operation_to_storage(&operation.ranges, &operation.new_text); - let version = version_to_storage(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer.id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(false), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::Undo(operation) => { - let value = undo_operation_to_storage(&operation.counts); - let version = version_to_storage(&operation.version); - Some(buffer_operation::ActiveModel { - buffer_id: ActiveValue::Set(buffer.id), - epoch: ActiveValue::Set(buffer.epoch), - replica_id: ActiveValue::Set(operation.replica_id as i32), - lamport_timestamp: ActiveValue::Set(operation.lamport_timestamp as i32), - local_timestamp: ActiveValue::Set(operation.local_timestamp as i32), - is_undo: ActiveValue::Set(true), - version: ActiveValue::Set(version), - value: ActiveValue::Set(value), - }) - } - proto::operation::Variant::UpdateSelections(_) => None, - proto::operation::Variant::UpdateDiagnostics(_) => None, - proto::operation::Variant::UpdateCompletionTriggers(_) => None, - } + let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? 
{ + proto::operation::Variant::Edit(operation) => ( + operation.replica_id, + operation.lamport_timestamp, + storage::Operation { + local_timestamp: operation.local_timestamp, + version: version_to_storage(&operation.version), + is_undo: false, + edit_ranges: operation + .ranges + .iter() + .map(|range| storage::Range { + start: range.start, + end: range.end, + }) + .collect(), + edit_texts: operation.new_text.clone(), + undo_counts: Vec::new(), + }, + ), + proto::operation::Variant::Undo(operation) => ( + operation.replica_id, + operation.lamport_timestamp, + storage::Operation { + local_timestamp: operation.local_timestamp, + version: version_to_storage(&operation.version), + is_undo: true, + edit_ranges: Vec::new(), + edit_texts: Vec::new(), + undo_counts: operation + .counts + .iter() + .map(|entry| storage::UndoCount { + replica_id: entry.replica_id, + local_timestamp: entry.local_timestamp, + count: entry.count, + }) + .collect(), + }, + ), + _ => None?, + }; + + Some(buffer_operation::ActiveModel { + buffer_id: ActiveValue::Set(buffer.id), + epoch: ActiveValue::Set(buffer.epoch), + replica_id: ActiveValue::Set(replica_id as i32), + lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32), + value: ActiveValue::Set(value.encode_to_vec()), + }) } fn operation_from_storage( row: buffer_operation::Model, _format_version: i32, ) -> Result { - let version = version_from_storage(&row.version)?; - let operation = if row.is_undo { - let counts = undo_operation_from_storage(&row.value)?; + let operation = + storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?; + let version = version_from_storage(&operation.version); + Ok(if operation.is_undo { proto::operation::Variant::Undo(proto::operation::Undo { replica_id: row.replica_id as u32, - local_timestamp: row.local_timestamp as u32, + local_timestamp: operation.local_timestamp as u32, lamport_timestamp: row.lamport_timestamp as u32, version, - counts, + counts: operation + .undo_counts + .iter() + .map(|entry| proto::UndoCount { + replica_id: entry.replica_id, + local_timestamp: entry.local_timestamp, + count: entry.count, + }) + .collect(), }) } else { - let (ranges, new_text) = edit_operation_from_storage(&row.value)?; proto::operation::Variant::Edit(proto::operation::Edit { replica_id: row.replica_id as u32, - local_timestamp: row.local_timestamp as u32, + local_timestamp: operation.local_timestamp as u32, lamport_timestamp: row.lamport_timestamp as u32, version, - ranges, - new_text, + ranges: operation + .edit_ranges + .into_iter() + .map(|range| proto::Range { + start: range.start, + end: range.end, + }) + .collect(), + new_text: operation.edit_texts, }) - }; - Ok(operation) + }) +} + +fn version_to_storage(version: &Vec) -> Vec { + version + .iter() + .map(|entry| storage::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect() +} + +fn version_from_storage(version: &Vec) -> Vec { + version + .iter() + .map(|entry| proto::VectorClockEntry { + replica_id: entry.replica_id, + timestamp: entry.timestamp, + }) + .collect() } // This is currently a manual copy of the deserialization code in the client's langauge crate -pub fn deserialize_wire_operation(operation: proto::Operation) -> Option { +pub fn operation_from_wire(operation: proto::Operation) -> Option { match operation.variant? 
{ proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation { timestamp: InsertionTimestamp { @@ -435,8 +482,14 @@ pub fn deserialize_wire_operation(operation: proto::Operation) -> Option Some(text::Operation::Undo { @@ -449,7 +502,7 @@ pub fn deserialize_wire_operation(operation: proto::Operation) -> Option Option Range { - text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize) -} - -fn deserialize_wire_version(message: &[proto::VectorClockEntry]) -> clock::Global { +fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global { let mut version = clock::Global::new(); for entry in message { version.observe(clock::Local { @@ -486,15 +535,23 @@ fn deserialize_wire_version(message: &[proto::VectorClockEntry]) -> clock::Globa mod storage { #![allow(non_snake_case)] - use prost::Message; - pub const SERIALIZATION_VERSION: i32 = 1; #[derive(Message)] - pub struct VectorClock { - #[prost(message, repeated, tag = "1")] - pub entries: Vec, + pub struct Operation { + #[prost(uint32, tag = "1")] + pub local_timestamp: u32, + #[prost(message, repeated, tag = "2")] + pub version: Vec, + #[prost(bool, tag = "3")] + pub is_undo: bool, + #[prost(message, repeated, tag = "4")] + pub edit_ranges: Vec, + #[prost(string, repeated, tag = "5")] + pub edit_texts: Vec, + #[prost(message, repeated, tag = "6")] + pub undo_counts: Vec, } #[derive(Message)] @@ -505,14 +562,6 @@ mod storage { pub timestamp: u32, } - #[derive(Message)] - pub struct TextEdit { - #[prost(message, repeated, tag = "1")] - pub ranges: Vec, - #[prost(string, repeated, tag = "2")] - pub texts: Vec, - } - #[derive(Message)] pub struct Range { #[prost(uint64, tag = "1")] @@ -521,12 +570,6 @@ mod storage { pub end: u64, } - #[derive(Message)] - pub struct Undo { - #[prost(message, repeated, tag = "1")] - pub entries: Vec, - } - #[derive(Message)] pub struct UndoCount { #[prost(uint32, tag = "1")] @@ -537,82 +580,3 @@ mod storage { pub count: u32, } } - -fn version_to_storage(version: &Vec) -> Vec { - storage::VectorClock { - entries: version - .iter() - .map(|entry| storage::VectorClockEntry { - replica_id: entry.replica_id, - timestamp: entry.timestamp, - }) - .collect(), - } - .encode_to_vec() -} - -fn version_from_storage(bytes: &[u8]) -> Result> { - let clock = storage::VectorClock::decode(bytes).map_err(|error| anyhow!("{}", error))?; - Ok(clock - .entries - .into_iter() - .map(|entry| proto::VectorClockEntry { - replica_id: entry.replica_id, - timestamp: entry.timestamp, - }) - .collect()) -} - -fn edit_operation_to_storage(ranges: &[proto::Range], texts: &[String]) -> Vec { - storage::TextEdit { - ranges: ranges - .iter() - .map(|range| storage::Range { - start: range.start, - end: range.end, - }) - .collect(), - texts: texts.to_vec(), - } - .encode_to_vec() -} - -fn edit_operation_from_storage(bytes: &[u8]) -> Result<(Vec, Vec)> { - let edit = storage::TextEdit::decode(bytes).map_err(|error| anyhow!("{}", error))?; - let ranges = edit - .ranges - .into_iter() - .map(|range| proto::Range { - start: range.start, - end: range.end, - }) - .collect(); - Ok((ranges, edit.texts)) -} - -fn undo_operation_to_storage(counts: &Vec) -> Vec { - storage::Undo { - entries: counts - .iter() - .map(|entry| storage::UndoCount { - replica_id: entry.replica_id, - local_timestamp: entry.local_timestamp, - count: entry.count, - }) - .collect(), - } - .encode_to_vec() -} - -fn undo_operation_from_storage(bytes: &[u8]) -> Result> { - let undo = storage::Undo::decode(bytes).map_err(|error| 
anyhow!("{}", error))?; - Ok(undo - .entries - .iter() - .map(|entry| proto::UndoCount { - replica_id: entry.replica_id, - local_timestamp: entry.local_timestamp, - count: entry.count, - }) - .collect()) -} diff --git a/crates/collab/src/db/tables/buffer_operation.rs b/crates/collab/src/db/tables/buffer_operation.rs index 59626c1e77..37bd4bedfe 100644 --- a/crates/collab/src/db/tables/buffer_operation.rs +++ b/crates/collab/src/db/tables/buffer_operation.rs @@ -12,9 +12,6 @@ pub struct Model { pub lamport_timestamp: i32, #[sea_orm(primary_key)] pub replica_id: i32, - pub local_timestamp: i32, - pub version: Vec, - pub is_undo: bool, pub value: Vec, } From 7e831388050d2e4b026421b3100bf71fa9ef22cc Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 22 Aug 2023 18:08:03 -0700 Subject: [PATCH 11/22] Start work on showing consistent replica ids for channel buffers Co-authored-by: Mikayla --- Cargo.lock | 1 + crates/channel/src/channel_buffer.rs | 4 + crates/collab/Cargo.toml | 1 + .../collab/src/tests/channel_buffer_tests.rs | 136 +++++++++++++++++- crates/collab_ui/src/channel_view.rs | 43 +++++- crates/collab_ui/src/collab_panel.rs | 9 +- crates/editor/src/editor.rs | 11 ++ crates/project/src/project.rs | 4 +- 8 files changed, 202 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0ec2f34185..2bda9fda46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1459,6 +1459,7 @@ dependencies = [ "clap 3.2.25", "client", "clock", + "collab_ui", "collections", "ctor", "dashmap", diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index aa99d5c10b..5ee3fd6c84 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -171,4 +171,8 @@ impl ChannelBuffer { .channel_for_id(self.channel_id) .cloned() } + + pub fn replica_id(&self, cx: &AppContext) -> u16 { + self.buffer.read(cx).replica_id() + } } diff --git a/crates/collab/Cargo.toml b/crates/collab/Cargo.toml index cc1970266d..8adc38615c 100644 --- a/crates/collab/Cargo.toml +++ b/crates/collab/Cargo.toml @@ -78,6 +78,7 @@ rpc = { path = "../rpc", features = ["test-support"] } settings = { path = "../settings", features = ["test-support"] } theme = { path = "../theme" } workspace = { path = "../workspace", features = ["test-support"] } +collab_ui = { path = "../collab_ui", features = ["test-support"] } ctor.workspace = true env_logger.workspace = true diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index db98c6abdc..8fb50055f5 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -1,8 +1,11 @@ use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; - +use call::ActiveCall; use client::UserId; +use collab_ui::channel_view::ChannelView; +use collections::HashMap; use gpui::{executor::Deterministic, ModelHandle, TestAppContext}; use rpc::{proto, RECEIVE_TIMEOUT}; +use serde_json::json; use std::sync::Arc; #[gpui::test] @@ -82,7 +85,9 @@ async fn test_core_channel_buffers( // Client A rejoins the channel buffer let _channel_buffer_a = client_a .channel_store() - .update(cx_a, |channels, cx| channels.open_channel_buffer(zed_id, cx)) + .update(cx_a, |channels, cx| { + channels.open_channel_buffer(zed_id, cx) + }) .await .unwrap(); deterministic.run_until_parked(); @@ -110,6 +115,133 @@ async fn test_core_channel_buffers( // - Test interaction with channel deletion while buffer is open } +#[gpui::test] +async fn test_channel_buffer_replica_ids( 
+ deterministic: Arc, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + deterministic.forbid_parking(); + let mut server = TestServer::start(&deterministic).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + let channel_id = server + .make_channel( + "zed", + (&client_a, cx_a), + &mut [(&client_b, cx_b), (&client_c, cx_c)], + ) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let active_call_b = cx_b.read(ActiveCall::global); + + // Clients A and B join a channel. + active_call_a + .update(cx_a, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + active_call_b + .update(cx_b, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + + // Clients A, B, and C join a channel buffer + // C first so that the replica IDs in the project and the channel buffer are different + let channel_buffer_c = client_c + .channel_store() + .update(cx_c, |channel, cx| { + channel.open_channel_buffer(channel_id, cx) + }) + .await + .unwrap(); + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |channel, cx| { + channel.open_channel_buffer(channel_id, cx) + }) + .await + .unwrap(); + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |channel, cx| { + channel.open_channel_buffer(channel_id, cx) + }) + .await + .unwrap(); + + // Client B shares a project + client_b + .fs() + .insert_tree("/dir", json!({ "file.txt": "contents" })) + .await; + let (project_b, _) = client_b.build_local_project("/dir", cx_b).await; + let shared_project_id = active_call_b + .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx)) + .await + .unwrap(); + + // Client A joins the project + let project_a = client_a.build_remote_project(shared_project_id, cx_a).await; + deterministic.run_until_parked(); + + // Client C is in a separate project. + client_c.fs().insert_tree("/dir", json!({})).await; + let (project_c, _) = client_c.build_local_project("/dir", cx_c).await; + + // Note that each user has a different replica id in the projects vs the + // channel buffer. + channel_buffer_a.read_with(cx_a, |channel_buffer, cx| { + assert_eq!(project_a.read(cx).replica_id(), 1); + assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 2); + }); + channel_buffer_b.read_with(cx_b, |channel_buffer, cx| { + assert_eq!(project_b.read(cx).replica_id(), 0); + assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 1); + }); + channel_buffer_c.read_with(cx_c, |channel_buffer, cx| { + // C is not in the project + assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0); + }); + + let channel_window_a = cx_a + .add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), None, cx)); + let channel_window_b = cx_b + .add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), None, cx)); + let channel_window_c = cx_c + .add_window(|cx| ChannelView::new(project_c.clone(), channel_buffer_c.clone(), None, cx)); + + let channel_view_a = channel_window_a.root(cx_a); + let channel_view_b = channel_window_b.root(cx_b); + let channel_view_c = channel_window_c.root(cx_c); + + // For clients A and B, the replica ids in the channel buffer are mapped + // so that they match the same users' replica ids in their shared project. 
+ channel_view_a.read_with(cx_a, |view, cx| { + assert_eq!( + view.project_replica_ids_by_channel_buffer_replica_id(cx), + [(1, 0), (2, 1)].into_iter().collect::>() + ); + }); + channel_view_b.read_with(cx_b, |view, cx| { + assert_eq!( + view.project_replica_ids_by_channel_buffer_replica_id(cx), + [(1, 0), (2, 1)].into_iter().collect::>(), + ) + }); + + // Client C only sees themself, as they're not part of any shared project + channel_view_c.read_with(cx_c, |view, cx| { + assert_eq!( + view.project_replica_ids_by_channel_buffer_replica_id(cx), + [(0, 0)].into_iter().collect::>(), + ); + }); +} + #[track_caller] fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option]) { assert_eq!( diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index 27a2d678f5..af45eabe69 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -1,4 +1,6 @@ use channel::channel_buffer::ChannelBuffer; +use clock::ReplicaId; +use collections::HashMap; use editor::Editor; use gpui::{ actions, @@ -6,6 +8,7 @@ use gpui::{ AnyElement, AppContext, Element, Entity, ModelHandle, View, ViewContext, ViewHandle, }; use language::Language; +use project::Project; use std::sync::Arc; use workspace::item::{Item, ItemHandle}; @@ -17,22 +20,56 @@ pub(crate) fn init(cx: &mut AppContext) { pub struct ChannelView { editor: ViewHandle, + project: ModelHandle, channel_buffer: ModelHandle, } impl ChannelView { pub fn new( + project: ModelHandle, channel_buffer: ModelHandle, - language: Arc, + language: Option>, cx: &mut ViewContext, ) -> Self { let buffer = channel_buffer.read(cx).buffer(); - buffer.update(cx, |buffer, cx| buffer.set_language(Some(language), cx)); + buffer.update(cx, |buffer, cx| buffer.set_language(language, cx)); let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx)); - Self { + let this = Self { editor, + project, channel_buffer, + }; + let mapping = this.project_replica_ids_by_channel_buffer_replica_id(cx); + this.editor + .update(cx, |editor, cx| editor.set_replica_id_mapping(mapping, cx)); + this + } + + /// Channel Buffer Replica ID -> Project Replica ID + pub fn project_replica_ids_by_channel_buffer_replica_id( + &self, + cx: &AppContext, + ) -> HashMap { + let project = self.project.read(cx); + let mut result = HashMap::default(); + result.insert( + self.channel_buffer.read(cx).replica_id(cx), + project.replica_id(), + ); + for collaborator in self.channel_buffer.read(cx).collaborators() { + let project_replica_id = + project + .collaborators() + .values() + .find_map(|project_collaborator| { + (project_collaborator.user_id == collaborator.user_id) + .then_some(project_collaborator.replica_id) + }); + if let Some(project_replica_id) = project_replica_id { + result.insert(collaborator.replica_id as ReplicaId, project_replica_id); + } } + result } } diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 0eb6a65984..a6bd09e43b 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2238,7 +2238,14 @@ impl CollabPanel { .await?; workspace.update(&mut cx, |workspace, cx| { - let channel_view = cx.add_view(|cx| ChannelView::new(channel_buffer, markdown, cx)); + let channel_view = cx.add_view(|cx| { + ChannelView::new( + workspace.project().to_owned(), + channel_buffer, + Some(markdown), + cx, + ) + }); workspace.add_item(Box::new(channel_view), cx); })?; diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 
67279b1ba6..e7197d98c5 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -559,6 +559,7 @@ pub struct Editor { blink_manager: ModelHandle, show_local_selections: bool, mode: EditorMode, + replica_id_mapping: Option>, show_gutter: bool, show_wrap_guides: Option, placeholder_text: Option>, @@ -1394,6 +1395,7 @@ impl Editor { blink_manager: blink_manager.clone(), show_local_selections: true, mode, + replica_id_mapping: None, show_gutter: mode == EditorMode::Full, show_wrap_guides: None, placeholder_text: None, @@ -1604,6 +1606,15 @@ impl Editor { self.read_only = read_only; } + pub fn set_replica_id_mapping( + &mut self, + mapping: HashMap, + cx: &mut ViewContext, + ) { + self.replica_id_mapping = Some(mapping); + cx.notify(); + } + fn selections_did_change( &mut self, local: bool, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index b120baa951..bc4fa587cb 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -11,7 +11,7 @@ mod project_tests; mod worktree_tests; use anyhow::{anyhow, Context, Result}; -use client::{proto, Client, TypedEnvelope, UserStore}; +use client::{proto, Client, TypedEnvelope, UserId, UserStore}; use clock::ReplicaId; use collections::{hash_map, BTreeMap, HashMap, HashSet}; use copilot::Copilot; @@ -250,6 +250,7 @@ enum ProjectClientState { pub struct Collaborator { pub peer_id: proto::PeerId, pub replica_id: ReplicaId, + pub user_id: UserId, } #[derive(Clone, Debug, PartialEq)] @@ -7756,6 +7757,7 @@ impl Collaborator { Ok(Self { peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?, replica_id: message.replica_id as ReplicaId, + user_id: message.user_id as UserId, }) } } From 199be8241c5ab47d7d41b0b36402ca3133e1944d Mon Sep 17 00:00:00 2001 From: Mikayla Date: Thu, 24 Aug 2023 11:25:20 -0700 Subject: [PATCH 12/22] Add following into channel notes co-authored-by: max --- crates/collab_ui/src/channel_view.rs | 108 ++++++++++++++++++++++++++- crates/editor/src/editor_tests.rs | 4 +- crates/editor/src/items.rs | 3 +- crates/rpc/proto/zed.proto | 5 ++ crates/workspace/src/item.rs | 2 +- crates/workspace/src/workspace.rs | 23 +++--- 6 files changed, 128 insertions(+), 17 deletions(-) diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index af45eabe69..c13711b29c 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -1,27 +1,34 @@ use channel::channel_buffer::ChannelBuffer; +use client::proto; use clock::ReplicaId; use collections::HashMap; use editor::Editor; use gpui::{ actions, elements::{ChildView, Label}, - AnyElement, AppContext, Element, Entity, ModelHandle, View, ViewContext, ViewHandle, + AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, View, + ViewContext, ViewHandle, }; use language::Language; use project::Project; use std::sync::Arc; -use workspace::item::{Item, ItemHandle}; +use workspace::{ + item::{FollowableItem, Item, ItemHandle}, + register_followable_item, ViewId, +}; actions!(channel_view, [Deploy]); pub(crate) fn init(cx: &mut AppContext) { - // TODO + register_followable_item::(cx) } pub struct ChannelView { editor: ViewHandle, project: ModelHandle, channel_buffer: ModelHandle, + remote_id: Option, + _editor_event_subscription: Subscription, } impl ChannelView { @@ -34,14 +41,19 @@ impl ChannelView { let buffer = channel_buffer.read(cx).buffer(); buffer.update(cx, |buffer, cx| buffer.set_language(language, cx)); let editor = cx.add_view(|cx| 
Editor::for_buffer(buffer, None, cx)); + let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone())); + let this = Self { editor, project, channel_buffer, + remote_id: None, + _editor_event_subscription, }; let mapping = this.project_replica_ids_by_channel_buffer_replica_id(cx); this.editor .update(cx, |editor, cx| editor.set_replica_id_mapping(mapping, cx)); + this } @@ -82,9 +94,15 @@ impl View for ChannelView { "ChannelView" } - fn render(&mut self, cx: &mut ViewContext<'_, '_, Self>) -> AnyElement { + fn render(&mut self, cx: &mut ViewContext) -> AnyElement { ChildView::new(self.editor.as_any(), cx).into_any() } + + fn focus_in(&mut self, _: AnyViewHandle, cx: &mut ViewContext) { + if cx.is_self_focused() { + cx.focus(self.editor.as_any()) + } + } } impl Item for ChannelView { @@ -104,3 +122,85 @@ impl Item for ChannelView { Label::new(channel_name, style.label.to_owned()).into_any() } } + +impl FollowableItem for ChannelView { + fn remote_id(&self) -> Option { + self.remote_id + } + + fn to_state_proto(&self, cx: &AppContext) -> Option { + self.channel_buffer.read(cx).channel(cx).map(|channel| { + proto::view::Variant::ChannelView(proto::view::ChannelView { + channel_id: channel.id, + }) + }) + } + + fn from_state_proto( + _: ViewHandle, + workspace: ViewHandle, + remote_id: workspace::ViewId, + state_proto: &mut Option, + cx: &mut AppContext, + ) -> Option>>> { + let Some(proto::view::Variant::ChannelView(_)) = state_proto else { return None }; + let Some(proto::view::Variant::ChannelView(state)) = state_proto.take() else { unreachable!() }; + + let channel_store = &workspace.read(cx).app_state().channel_store.clone(); + let open_channel_buffer = channel_store.update(cx, |store, cx| { + store.open_channel_buffer(state.channel_id, cx) + }); + let project = workspace.read(cx).project().to_owned(); + let language = workspace.read(cx).app_state().languages.clone(); + let get_markdown = language.language_for_name("Markdown"); + + Some(cx.spawn(|mut cx| async move { + let channel_buffer = open_channel_buffer.await?; + let markdown = get_markdown.await?; + + let this = workspace + .update(&mut cx, move |_, cx| { + cx.add_view(|cx| { + let mut this = Self::new(project, channel_buffer, Some(markdown), cx); + this.remote_id = Some(remote_id); + this + }) + }) + .ok_or_else(|| anyhow::anyhow!("workspace droppped"))?; + + Ok(this) + })) + } + + fn add_event_to_update_proto( + &self, + _: &Self::Event, + _: &mut Option, + _: &AppContext, + ) -> bool { + false + } + + fn apply_update_proto( + &mut self, + _: &ModelHandle, + _: proto::update_view::Variant, + _: &mut ViewContext, + ) -> gpui::Task> { + gpui::Task::ready(Ok(())) + } + + fn set_leader_replica_id( + &mut self, + leader_replica_id: Option, + cx: &mut ViewContext, + ) { + self.editor.update(cx, |editor, cx| { + editor.set_leader_replica_id(leader_replica_id, cx) + }) + } + + fn should_unfollow_on_event(event: &Self::Event, cx: &AppContext) -> bool { + Editor::should_unfollow_on_event(event, cx) + } +} diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index e031edf538..a2a561402f 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6384,7 +6384,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) { .update(|cx| { Editor::from_state_proto( pane.clone(), - project.clone(), + workspace.clone(), ViewId { creator: Default::default(), id: 0, @@ -6479,7 +6479,7 @@ async fn test_following_with_multiple_excerpts(cx: 
&mut gpui::TestAppContext) { .update(|cx| { Editor::from_state_proto( pane.clone(), - project.clone(), + workspace.clone(), ViewId { creator: Default::default(), id: 0, diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 668ea48203..657aae5ff9 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -49,11 +49,12 @@ impl FollowableItem for Editor { fn from_state_proto( pane: ViewHandle, - project: ModelHandle, + workspace: ViewHandle, remote_id: ViewId, state: &mut Option, cx: &mut AppContext, ) -> Option>>> { + let project = workspace.read(cx).project().to_owned(); let Some(proto::view::Variant::Editor(_)) = state else { return None }; let Some(proto::view::Variant::Editor(state)) = state.take() else { unreachable!() }; diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index b97feff06b..827468b280 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -1120,6 +1120,7 @@ message View { oneof variant { Editor editor = 3; + ChannelView channel_view = 4; } message Editor { @@ -1132,6 +1133,10 @@ message View { float scroll_x = 7; float scroll_y = 8; } + + message ChannelView { + uint64 channel_id = 1; + } } message Collaborator { diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index a115e0f473..4b5b7a7931 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -674,7 +674,7 @@ pub trait FollowableItem: Item { fn to_state_proto(&self, cx: &AppContext) -> Option; fn from_state_proto( pane: ViewHandle, - project: ModelHandle, + project: ViewHandle, id: ViewId, state: &mut Option, cx: &mut AppContext, diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a8354472aa..62bb7a82a2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -345,7 +345,7 @@ pub fn register_project_item(cx: &mut AppContext) { type FollowableItemBuilder = fn( ViewHandle, - ModelHandle, + ViewHandle, ViewId, &mut Option, &mut AppContext, @@ -362,8 +362,8 @@ pub fn register_followable_item(cx: &mut AppContext) { builders.insert( TypeId::of::(), ( - |pane, project, id, state, cx| { - I::from_state_proto(pane, project, id, state, cx).map(|task| { + |pane, workspace, id, state, cx| { + I::from_state_proto(pane, workspace, id, state, cx).map(|task| { cx.foreground() .spawn(async move { Ok(Box::new(task.await?) 
as Box<_>) }) }) @@ -2848,7 +2848,13 @@ impl Workspace { views: Vec, cx: &mut AsyncAppContext, ) -> Result<()> { - let project = this.read_with(cx, |this, _| this.project.clone())?; + let this = this + .upgrade(cx) + .ok_or_else(|| anyhow!("workspace dropped"))?; + let project = this + .read_with(cx, |this, _| this.project.clone()) + .ok_or_else(|| anyhow!("window dropped"))?; + let replica_id = project .read_with(cx, |project, _| { project @@ -2874,12 +2880,11 @@ impl Workspace { let id = ViewId::from_proto(id.clone())?; let mut variant = view.variant.clone(); if variant.is_none() { - Err(anyhow!("missing variant"))?; + Err(anyhow!("missing view variant"))?; } for build_item in &item_builders { - let task = cx.update(|cx| { - build_item(pane.clone(), project.clone(), id, &mut variant, cx) - }); + let task = cx + .update(|cx| build_item(pane.clone(), this.clone(), id, &mut variant, cx)); if let Some(task) = task { item_tasks.push(task); leader_view_ids.push(id); @@ -2907,7 +2912,7 @@ impl Workspace { } Some(()) - })?; + }); } Ok(()) } From 3268cce41a948ea422f0fe0389cbd8d2e260c047 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 23 Aug 2023 11:30:43 -0700 Subject: [PATCH 13/22] Fix error in update_channel_buffer when there are no operations to store Co-authored-by: Mikayla --- crates/collab/src/db/queries/buffers.rs | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 83f5b87079..a38693bace 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -215,7 +215,7 @@ impl Database { user: UserId, operations: &[proto::Operation], ) -> Result> { - self.transaction(|tx| async move { + self.transaction(move |tx| async move { self.check_user_is_channel_member(channel_id, user, &*tx) .await?; @@ -240,11 +240,15 @@ impl Database { .await? 
.ok_or_else(|| anyhow!("missing buffer snapshot"))?; - buffer_operation::Entity::insert_many(operations.iter().filter_map(|operation| { - operation_to_storage(operation, &buffer, serialization_version) - })) - .exec(&*tx) - .await?; + let operations = operations + .iter() + .filter_map(|op| operation_to_storage(op, &buffer, serialization_version)) + .collect::>(); + if !operations.is_empty() { + buffer_operation::Entity::insert_many(operations) + .exec(&*tx) + .await?; + } let mut connections = Vec::new(); let mut rows = channel_buffer_collaborator::Entity::find() From 24141c2f16261c25ebedfa01414ce4b2a62055c0 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 23 Aug 2023 13:32:16 -0700 Subject: [PATCH 14/22] Ensure collaborators cursor colors are the same in channel buffers as in projects Co-authored-by: Mikayla --- crates/channel/src/channel_buffer.rs | 13 ++- .../collab/src/tests/channel_buffer_tests.rs | 59 ++++++++++-- crates/collab_ui/src/channel_view.rs | 96 +++++++++++++------ crates/editor/src/editor.rs | 10 +- crates/editor/src/element.rs | 50 ++++++---- crates/language/src/buffer.rs | 8 ++ crates/project/src/project.rs | 2 + crates/sum_tree/src/tree_map.rs | 12 ++- crates/theme/src/theme.rs | 1 + styles/src/style_tree/editor.ts | 1 + 10 files changed, 190 insertions(+), 62 deletions(-) diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index 5ee3fd6c84..cad3c4f58f 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -21,8 +21,12 @@ pub struct ChannelBuffer { _subscription: client::Subscription, } +pub enum Event { + CollaboratorsChanged, +} + impl Entity for ChannelBuffer { - type Event = (); + type Event = Event; fn release(&mut self, _: &mut AppContext) { self.client @@ -54,8 +58,9 @@ impl ChannelBuffer { let collaborators = response.collaborators; - let buffer = - cx.add_model(|cx| language::Buffer::new(response.replica_id as u16, base_text, cx)); + let buffer = cx.add_model(|_| { + language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text) + }); buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; let subscription = client.subscribe_to_entity(channel_id)?; @@ -111,6 +116,7 @@ impl ChannelBuffer { this.update(&mut cx, |this, cx| { this.collaborators.push(collaborator); + cx.emit(Event::CollaboratorsChanged); cx.notify(); }); @@ -134,6 +140,7 @@ impl ChannelBuffer { true } }); + cx.emit(Event::CollaboratorsChanged); cx.notify(); }); diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 8fb50055f5..6a9ef3fc13 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -63,6 +63,10 @@ async fn test_core_channel_buffers( // Client B sees the correct text, and then edits it let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer()); + assert_eq!( + buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()), + buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id()) + ); assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world"); buffer_b.update(cx_b, |buffer, cx| { buffer.edit([(7..12, "beautiful")], None, cx) @@ -138,6 +142,7 @@ async fn test_channel_buffer_replica_ids( let active_call_a = cx_a.read(ActiveCall::global); let active_call_b = cx_b.read(ActiveCall::global); + let active_call_c = cx_c.read(ActiveCall::global); // Clients A and B join a channel. 
active_call_a @@ -190,7 +195,7 @@ async fn test_channel_buffer_replica_ids( // Client C is in a separate project. client_c.fs().insert_tree("/dir", json!({})).await; - let (project_c, _) = client_c.build_local_project("/dir", cx_c).await; + let (separate_project_c, _) = client_c.build_local_project("/dir", cx_c).await; // Note that each user has a different replica id in the projects vs the // channel buffer. @@ -211,8 +216,14 @@ async fn test_channel_buffer_replica_ids( .add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), None, cx)); let channel_window_b = cx_b .add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), None, cx)); - let channel_window_c = cx_c - .add_window(|cx| ChannelView::new(project_c.clone(), channel_buffer_c.clone(), None, cx)); + let channel_window_c = cx_c.add_window(|cx| { + ChannelView::new( + separate_project_c.clone(), + channel_buffer_c.clone(), + None, + cx, + ) + }); let channel_view_a = channel_window_a.root(cx_a); let channel_view_b = channel_window_b.root(cx_b); @@ -222,24 +233,54 @@ async fn test_channel_buffer_replica_ids( // so that they match the same users' replica ids in their shared project. channel_view_a.read_with(cx_a, |view, cx| { assert_eq!( - view.project_replica_ids_by_channel_buffer_replica_id(cx), - [(1, 0), (2, 1)].into_iter().collect::>() + view.editor.read(cx).replica_id_map().unwrap(), + &[(1, 0), (2, 1)].into_iter().collect::>() ); }); channel_view_b.read_with(cx_b, |view, cx| { assert_eq!( - view.project_replica_ids_by_channel_buffer_replica_id(cx), - [(1, 0), (2, 1)].into_iter().collect::>(), + view.editor.read(cx).replica_id_map().unwrap(), + &[(1, 0), (2, 1)].into_iter().collect::>(), ) }); // Client C only sees themself, as they're not part of any shared project channel_view_c.read_with(cx_c, |view, cx| { assert_eq!( - view.project_replica_ids_by_channel_buffer_replica_id(cx), - [(0, 0)].into_iter().collect::>(), + view.editor.read(cx).replica_id_map().unwrap(), + &[(0, 0)].into_iter().collect::>(), ); }); + + // Client C joins the project that clients A and B are in. + active_call_c + .update(cx_c, |call, cx| call.join_channel(channel_id, cx)) + .await + .unwrap(); + let project_c = client_c.build_remote_project(shared_project_id, cx_c).await; + deterministic.run_until_parked(); + project_c.read_with(cx_c, |project, _| { + assert_eq!(project.replica_id(), 2); + }); + + // For clients A and B, client C's replica id in the channel buffer is + // now mapped to their replica id in the shared project. 
+ channel_view_a.read_with(cx_a, |view, cx| { + assert_eq!( + view.editor.read(cx).replica_id_map().unwrap(), + &[(1, 0), (2, 1), (0, 2)] + .into_iter() + .collect::>() + ); + }); + channel_view_b.read_with(cx_b, |view, cx| { + assert_eq!( + view.editor.read(cx).replica_id_map().unwrap(), + &[(1, 0), (2, 1), (0, 2)] + .into_iter() + .collect::>(), + ) + }); } #[track_caller] diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index c13711b29c..dd3969d351 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -1,4 +1,4 @@ -use channel::channel_buffer::ChannelBuffer; +use channel::channel_buffer::{self, ChannelBuffer}; use client::proto; use clock::ReplicaId; use collections::HashMap; @@ -24,7 +24,7 @@ pub(crate) fn init(cx: &mut AppContext) { } pub struct ChannelView { - editor: ViewHandle, + pub editor: ViewHandle, project: ModelHandle, channel_buffer: ModelHandle, remote_id: Option, @@ -43,6 +43,10 @@ impl ChannelView { let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx)); let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone())); + cx.subscribe(&project, Self::handle_project_event).detach(); + cx.subscribe(&channel_buffer, Self::handle_channel_buffer_event) + .detach(); + let this = Self { editor, project, @@ -50,38 +54,70 @@ impl ChannelView { remote_id: None, _editor_event_subscription, }; - let mapping = this.project_replica_ids_by_channel_buffer_replica_id(cx); - this.editor - .update(cx, |editor, cx| editor.set_replica_id_mapping(mapping, cx)); - + this.refresh_replica_id_map(cx); this } - /// Channel Buffer Replica ID -> Project Replica ID - pub fn project_replica_ids_by_channel_buffer_replica_id( - &self, - cx: &AppContext, - ) -> HashMap { - let project = self.project.read(cx); - let mut result = HashMap::default(); - result.insert( - self.channel_buffer.read(cx).replica_id(cx), - project.replica_id(), - ); - for collaborator in self.channel_buffer.read(cx).collaborators() { - let project_replica_id = - project - .collaborators() - .values() - .find_map(|project_collaborator| { - (project_collaborator.user_id == collaborator.user_id) - .then_some(project_collaborator.replica_id) - }); - if let Some(project_replica_id) = project_replica_id { - result.insert(collaborator.replica_id as ReplicaId, project_replica_id); - } + fn handle_project_event( + &mut self, + _: ModelHandle, + event: &project::Event, + cx: &mut ViewContext, + ) { + match event { + project::Event::RemoteIdChanged(_) => {} + project::Event::DisconnectedFromHost => {} + project::Event::Closed => {} + project::Event::CollaboratorUpdated { .. } => {} + project::Event::CollaboratorLeft(_) => {} + project::Event::CollaboratorJoined(_) => {} + _ => return, } - result + self.refresh_replica_id_map(cx); + } + + fn handle_channel_buffer_event( + &mut self, + _: ModelHandle, + _: &channel_buffer::Event, + cx: &mut ViewContext, + ) { + self.refresh_replica_id_map(cx); + } + + /// Build a mapping of channel buffer replica ids to the corresponding + /// replica ids in the current project. + /// + /// Using this mapping, a given user can be displayed with the same color + /// in the channel buffer as in other files in the project. Users who are + /// in the channel buffer but not the project will not have a color. 
+ fn refresh_replica_id_map(&self, cx: &mut ViewContext) { + let mut project_replica_ids_by_channel_buffer_replica_id = HashMap::default(); + let project = self.project.read(cx); + let channel_buffer = self.channel_buffer.read(cx); + project_replica_ids_by_channel_buffer_replica_id + .insert(channel_buffer.replica_id(cx), project.replica_id()); + project_replica_ids_by_channel_buffer_replica_id.extend( + channel_buffer + .collaborators() + .iter() + .filter_map(|channel_buffer_collaborator| { + project + .collaborators() + .values() + .find_map(|project_collaborator| { + (project_collaborator.user_id == channel_buffer_collaborator.user_id) + .then_some(( + channel_buffer_collaborator.replica_id as ReplicaId, + project_collaborator.replica_id, + )) + }) + }), + ); + + self.editor.update(cx, |editor, cx| { + editor.set_replica_id_map(Some(project_replica_ids_by_channel_buffer_replica_id), cx) + }); } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index e7197d98c5..775f3c07ec 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1606,12 +1606,16 @@ impl Editor { self.read_only = read_only; } - pub fn set_replica_id_mapping( + pub fn replica_id_map(&self) -> Option<&HashMap> { + self.replica_id_mapping.as_ref() + } + + pub fn set_replica_id_map( &mut self, - mapping: HashMap, + mapping: Option>, cx: &mut ViewContext, ) { - self.replica_id_mapping = Some(mapping); + self.replica_id_mapping = mapping; cx.notify(); } diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 0f26e5819c..9f74eed790 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -62,6 +62,7 @@ struct SelectionLayout { head: DisplayPoint, cursor_shape: CursorShape, is_newest: bool, + is_local: bool, range: Range, active_rows: Range, } @@ -73,6 +74,7 @@ impl SelectionLayout { cursor_shape: CursorShape, map: &DisplaySnapshot, is_newest: bool, + is_local: bool, ) -> Self { let point_selection = selection.map(|p| p.to_point(&map.buffer_snapshot)); let display_selection = point_selection.map(|p| p.to_display_point(map)); @@ -109,6 +111,7 @@ impl SelectionLayout { head, cursor_shape, is_newest, + is_local, range, active_rows, } @@ -763,7 +766,6 @@ impl EditorElement { cx: &mut PaintContext, ) { let style = &self.style; - let local_replica_id = editor.replica_id(cx); let scroll_position = layout.position_map.snapshot.scroll_position(); let start_row = layout.visible_display_row_range.start; let scroll_top = scroll_position.y() * layout.position_map.line_height; @@ -852,15 +854,13 @@ impl EditorElement { for (replica_id, selections) in &layout.selections { let replica_id = *replica_id; - let selection_style = style.replica_selection_style(replica_id); + let selection_style = if let Some(replica_id) = replica_id { + style.replica_selection_style(replica_id) + } else { + &style.absent_selection + }; for selection in selections { - if !selection.range.is_empty() - && (replica_id == local_replica_id - || Some(replica_id) == editor.leader_replica_id) - { - invisible_display_ranges.push(selection.range.clone()); - } self.paint_highlighted_range( scene, selection.range.clone(), @@ -874,7 +874,10 @@ impl EditorElement { bounds, ); - if editor.show_local_cursors(cx) || replica_id != local_replica_id { + if selection.is_local && !selection.range.is_empty() { + invisible_display_ranges.push(selection.range.clone()); + } + if !selection.is_local || editor.show_local_cursors(cx) { let cursor_position = selection.head; if layout 
.visible_display_row_range @@ -2124,7 +2127,7 @@ impl Element for EditorElement { .anchor_before(DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right)) }; - let mut selections: Vec<(ReplicaId, Vec)> = Vec::new(); + let mut selections: Vec<(Option, Vec)> = Vec::new(); let mut active_rows = BTreeMap::new(); let mut fold_ranges = Vec::new(); let is_singleton = editor.is_singleton(cx); @@ -2155,8 +2158,14 @@ impl Element for EditorElement { .buffer_snapshot .remote_selections_in_range(&(start_anchor..end_anchor)) { + let replica_id = if let Some(mapping) = &editor.replica_id_mapping { + mapping.get(&replica_id).copied() + } else { + None + }; + // The local selections match the leader's selections. - if Some(replica_id) == editor.leader_replica_id { + if replica_id.is_some() && replica_id == editor.leader_replica_id { continue; } remote_selections @@ -2168,6 +2177,7 @@ impl Element for EditorElement { cursor_shape, &snapshot.display_snapshot, false, + false, )); } selections.extend(remote_selections); @@ -2191,6 +2201,7 @@ impl Element for EditorElement { editor.cursor_shape, &snapshot.display_snapshot, is_newest, + true, ); if is_newest { newest_selection_head = Some(layout.head); @@ -2206,11 +2217,18 @@ impl Element for EditorElement { } // Render the local selections in the leader's color when following. - let local_replica_id = editor - .leader_replica_id - .unwrap_or_else(|| editor.replica_id(cx)); + let local_replica_id = if let Some(leader_replica_id) = editor.leader_replica_id { + leader_replica_id + } else { + let replica_id = editor.replica_id(cx); + if let Some(mapping) = &editor.replica_id_mapping { + mapping.get(&replica_id).copied().unwrap_or(replica_id) + } else { + replica_id + } + }; - selections.push((local_replica_id, layouts)); + selections.push((Some(local_replica_id), layouts)); } let scrollbar_settings = &settings::get::(cx).scrollbar; @@ -2591,7 +2609,7 @@ pub struct LayoutState { blocks: Vec, highlighted_ranges: Vec<(Range, Color)>, fold_ranges: Vec<(BufferRow, Range, Color)>, - selections: Vec<(ReplicaId, Vec)>, + selections: Vec<(Option, Vec)>, scrollbar_row_range: Range, show_scrollbars: bool, is_singleton: bool, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d032e8e025..1b83ca5964 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -359,6 +359,14 @@ impl Buffer { ) } + pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self { + Self::build( + TextBuffer::new(replica_id, remote_id, base_text), + None, + None, + ) + } + pub fn from_proto( replica_id: ReplicaId, message: proto::BufferState, diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index bc4fa587cb..49074268f2 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -282,6 +282,7 @@ pub enum Event { old_peer_id: proto::PeerId, new_peer_id: proto::PeerId, }, + CollaboratorJoined(proto::PeerId), CollaboratorLeft(proto::PeerId), RefreshInlayHints, } @@ -5931,6 +5932,7 @@ impl Project { let collaborator = Collaborator::from_proto(collaborator)?; this.update(&mut cx, |this, cx| { this.shared_buffers.remove(&collaborator.peer_id); + cx.emit(Event::CollaboratorJoined(collaborator.peer_id)); this.collaborators .insert(collaborator.peer_id, collaborator); cx.notify(); diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index 4bb98d2ac8..edb9010e50 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -2,7 +2,7 @@ 
use std::{cmp::Ordering, fmt::Debug}; use crate::{Bias, Dimension, Edit, Item, KeyedItem, SeekTarget, SumTree, Summary}; -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq)] pub struct TreeMap(SumTree>) where K: Clone + Debug + Default + Ord, @@ -162,6 +162,16 @@ impl TreeMap { } } +impl Debug for TreeMap +where + K: Clone + Debug + Default + Ord, + V: Clone + Debug, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_map().entries(self.iter()).finish() + } +} + #[derive(Debug)] struct MapSeekTargetAdaptor<'a, T>(&'a T); diff --git a/crates/theme/src/theme.rs b/crates/theme/src/theme.rs index 0f34963708..9005fc9757 100644 --- a/crates/theme/src/theme.rs +++ b/crates/theme/src/theme.rs @@ -756,6 +756,7 @@ pub struct Editor { pub line_number: Color, pub line_number_active: Color, pub guest_selections: Vec, + pub absent_selection: SelectionStyle, pub syntax: Arc, pub hint: HighlightStyle, pub suggestion: HighlightStyle, diff --git a/styles/src/style_tree/editor.ts b/styles/src/style_tree/editor.ts index 9ad008f38d..9277a2e7a1 100644 --- a/styles/src/style_tree/editor.ts +++ b/styles/src/style_tree/editor.ts @@ -184,6 +184,7 @@ export default function editor(): any { theme.players[6], theme.players[7], ], + absent_selection: theme.players[7], autocomplete: { background: background(theme.middle), corner_radius: 8, From 90f22cb0d24abb5d73dc6801990e5ce741e6a18d Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 12:36:01 -0700 Subject: [PATCH 15/22] Replicate editor state when following into channel notes Co-authored-by: Mikayla --- crates/collab_ui/src/channel_view.rs | 57 ++++++++++++++++++++++------ crates/rpc/proto/zed.proto | 1 + 2 files changed, 46 insertions(+), 12 deletions(-) diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index dd3969d351..be186fc2e2 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -168,6 +168,13 @@ impl FollowableItem for ChannelView { self.channel_buffer.read(cx).channel(cx).map(|channel| { proto::view::Variant::ChannelView(proto::view::ChannelView { channel_id: channel.id, + editor: if let Some(proto::view::Variant::Editor(proto)) = + self.editor.read(cx).to_state_proto(cx) + { + Some(proto) + } else { + None + }, }) }) } @@ -176,11 +183,11 @@ impl FollowableItem for ChannelView { _: ViewHandle, workspace: ViewHandle, remote_id: workspace::ViewId, - state_proto: &mut Option, + state: &mut Option, cx: &mut AppContext, ) -> Option>>> { - let Some(proto::view::Variant::ChannelView(_)) = state_proto else { return None }; - let Some(proto::view::Variant::ChannelView(state)) = state_proto.take() else { unreachable!() }; + let Some(proto::view::Variant::ChannelView(_)) = state else { return None }; + let Some(proto::view::Variant::ChannelView(state)) = state.take() else { unreachable!() }; let channel_store = &workspace.read(cx).app_state().channel_store.clone(); let open_channel_buffer = channel_store.update(cx, |store, cx| { @@ -202,7 +209,29 @@ impl FollowableItem for ChannelView { this }) }) - .ok_or_else(|| anyhow::anyhow!("workspace droppped"))?; + .ok_or_else(|| anyhow::anyhow!("workspace dropped"))?; + + if let Some(state) = state.editor { + let task = this.update(&mut cx, |this, cx| { + this.editor.update(cx, |editor, cx| { + editor.apply_update_proto( + &this.project, + proto::update_view::Variant::Editor(proto::update_view::Editor { + selections: state.selections, + pending_selection: state.pending_selection, 
+ scroll_top_anchor: state.scroll_top_anchor, + scroll_x: state.scroll_x, + scroll_y: state.scroll_y, + ..Default::default() + }), + cx, + ) + }) + }); + if let Some(task) = task { + task.await?; + } + } Ok(this) })) @@ -210,20 +239,24 @@ impl FollowableItem for ChannelView { fn add_event_to_update_proto( &self, - _: &Self::Event, - _: &mut Option, - _: &AppContext, + event: &Self::Event, + update: &mut Option, + cx: &AppContext, ) -> bool { - false + self.editor + .read(cx) + .add_event_to_update_proto(event, update, cx) } fn apply_update_proto( &mut self, - _: &ModelHandle, - _: proto::update_view::Variant, - _: &mut ViewContext, + project: &ModelHandle, + message: proto::update_view::Variant, + cx: &mut ViewContext, ) -> gpui::Task> { - gpui::Task::ready(Ok(())) + self.editor.update(cx, |editor, cx| { + editor.apply_update_proto(project, message, cx) + }) } fn set_leader_replica_id( diff --git a/crates/rpc/proto/zed.proto b/crates/rpc/proto/zed.proto index 827468b280..f032ccce51 100644 --- a/crates/rpc/proto/zed.proto +++ b/crates/rpc/proto/zed.proto @@ -1136,6 +1136,7 @@ message View { message ChannelView { uint64 channel_id = 1; + Editor editor = 2; } } From 5888e7b214685aa1d8dd24e657d84c7b015aa08f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 13:40:44 -0700 Subject: [PATCH 16/22] Dedup channel buffers --- crates/channel/src/channel_buffer.rs | 62 +++++++------- crates/channel/src/channel_store.rs | 81 +++++++++++++++---- .../collab/src/tests/channel_buffer_tests.rs | 56 +++++++++++++ 3 files changed, 152 insertions(+), 47 deletions(-) diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index cad3c4f58f..c19899501a 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -1,7 +1,7 @@ use crate::{Channel, ChannelId, ChannelStore}; use anyhow::Result; use client::Client; -use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; +use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle}; use rpc::{proto, TypedEnvelope}; use std::sync::Arc; use util::ResultExt; @@ -38,46 +38,44 @@ impl Entity for ChannelBuffer { } impl ChannelBuffer { - pub(crate) fn new( + pub(crate) async fn new( channel_store: ModelHandle, channel_id: ChannelId, client: Arc, - cx: &mut AppContext, - ) -> Task>> { - cx.spawn(|mut cx| async move { - let response = client - .request(proto::JoinChannelBuffer { channel_id }) - .await?; + mut cx: AsyncAppContext, + ) -> Result> { + let response = client + .request(proto::JoinChannelBuffer { channel_id }) + .await?; - let base_text = response.base_text; - let operations = response - .operations - .into_iter() - .map(language::proto::deserialize_operation) - .collect::, _>>()?; + let base_text = response.base_text; + let operations = response + .operations + .into_iter() + .map(language::proto::deserialize_operation) + .collect::, _>>()?; - let collaborators = response.collaborators; + let collaborators = response.collaborators; - let buffer = cx.add_model(|_| { - language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text) - }); - buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; + let buffer = cx.add_model(|_| { + language::Buffer::remote(response.buffer_id, response.replica_id as u16, base_text) + }); + buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; - let subscription = client.subscribe_to_entity(channel_id)?; + let subscription = 
client.subscribe_to_entity(channel_id)?; - anyhow::Ok(cx.add_model(|cx| { - cx.subscribe(&buffer, Self::on_buffer_update).detach(); + anyhow::Ok(cx.add_model(|cx| { + cx.subscribe(&buffer, Self::on_buffer_update).detach(); - Self { - buffer, - client, - channel_id, - channel_store, - collaborators, - _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), - } - })) - }) + Self { + buffer, + client, + channel_id, + channel_store, + collaborators, + _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), + } + })) } async fn handle_update_channel_buffer( diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index a6aad19d03..1d83bd1d7f 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -1,20 +1,13 @@ -use anyhow::anyhow; -use anyhow::Result; -use client::Status; -use client::UserId; -use client::{Client, Subscription, User, UserStore}; -use collections::HashMap; -use collections::HashSet; -use futures::channel::mpsc; -use futures::Future; -use futures::StreamExt; -use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task}; +use crate::channel_buffer::ChannelBuffer; +use anyhow::{anyhow, Result}; +use client::{Client, Status, Subscription, User, UserId, UserStore}; +use collections::{hash_map, HashMap, HashSet}; +use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt, TryFutureExt}; +use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle}; use rpc::{proto, TypedEnvelope}; use std::sync::Arc; use util::ResultExt; -use crate::channel_buffer::ChannelBuffer; - pub type ChannelId = u64; pub struct ChannelStore { @@ -25,6 +18,7 @@ pub struct ChannelStore { channels_with_admin_privileges: HashSet, outgoing_invites: HashSet<(ChannelId, UserId)>, update_channels_tx: mpsc::UnboundedSender, + opened_buffers: HashMap, client: Arc, user_store: ModelHandle, _rpc_subscription: Subscription, @@ -59,6 +53,11 @@ pub enum ChannelMemberStatus { NotMember, } +enum OpenedChannelBuffer { + Open(WeakModelHandle), + Loading(Shared, Arc>>>), +} + impl ChannelStore { pub fn new( client: Arc, @@ -89,6 +88,7 @@ impl ChannelStore { } } }); + Self { channels_by_id: HashMap::default(), channel_invitations: Vec::default(), @@ -96,6 +96,7 @@ impl ChannelStore { channel_participants: Default::default(), channels_with_admin_privileges: Default::default(), outgoing_invites: Default::default(), + opened_buffers: Default::default(), update_channels_tx, client, user_store, @@ -154,11 +155,61 @@ impl ChannelStore { } pub fn open_channel_buffer( - &self, + &mut self, channel_id: ChannelId, cx: &mut ModelContext, ) -> Task>> { - ChannelBuffer::new(cx.handle(), channel_id, self.client.clone(), cx) + // Make sure that a given channel buffer is only opened once per + // app instance, even if this method is called multiple times + // with the same channel id while the first task is still running. 
+ let task = loop { + match self.opened_buffers.entry(channel_id) { + hash_map::Entry::Occupied(e) => match e.get() { + OpenedChannelBuffer::Open(buffer) => { + if let Some(buffer) = buffer.upgrade(cx) { + break Task::ready(Ok(buffer)).shared(); + } else { + self.opened_buffers.remove(&channel_id); + continue; + } + } + OpenedChannelBuffer::Loading(task) => break task.clone(), + }, + hash_map::Entry::Vacant(e) => { + let task = cx + .spawn(|this, cx| { + ChannelBuffer::new(this, channel_id, self.client.clone(), cx) + .map_err(Arc::new) + }) + .shared(); + e.insert(OpenedChannelBuffer::Loading(task.clone())); + cx.spawn({ + let task = task.clone(); + |this, mut cx| async move { + let result = task.await; + this.update(&mut cx, |this, cx| { + if let Ok(buffer) = result { + cx.observe_release(&buffer, move |this, _, _| { + this.opened_buffers.remove(&channel_id); + }) + .detach(); + this.opened_buffers.insert( + channel_id, + OpenedChannelBuffer::Open(buffer.downgrade()), + ); + } else { + this.opened_buffers.remove(&channel_id); + } + }); + } + }) + .detach(); + break task; + } + } + }; + cx.foreground() + .spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) }) } pub fn is_user_admin(&self, channel_id: ChannelId) -> bool { diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 6a9ef3fc13..f7e5751a37 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -3,6 +3,7 @@ use call::ActiveCall; use client::UserId; use collab_ui::channel_view::ChannelView; use collections::HashMap; +use futures::future; use gpui::{executor::Deterministic, ModelHandle, TestAppContext}; use rpc::{proto, RECEIVE_TIMEOUT}; use serde_json::json; @@ -283,6 +284,61 @@ async fn test_channel_buffer_replica_ids( }); } +#[gpui::test] +async fn test_reopen_channel_buffer(deterministic: Arc, cx_a: &mut TestAppContext) { + deterministic.forbid_parking(); + let mut server = TestServer::start(&deterministic).await; + let client_a = server.create_client(cx_a, "user_a").await; + + let zed_id = server.make_channel("zed", (&client_a, cx_a), &mut []).await; + + let channel_buffer_1 = client_a + .channel_store() + .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx)); + let channel_buffer_2 = client_a + .channel_store() + .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx)); + let channel_buffer_3 = client_a + .channel_store() + .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx)); + + // All concurrent tasks for opening a channel buffer return the same model handle. + let (channel_buffer_1, channel_buffer_2, channel_buffer_3) = + future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3) + .await + .unwrap(); + let model_id = channel_buffer_1.id(); + assert_eq!(channel_buffer_1, channel_buffer_2); + assert_eq!(channel_buffer_1, channel_buffer_3); + + channel_buffer_1.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "hello")], None, cx); + }) + }); + deterministic.run_until_parked(); + + cx_a.update(|_| { + drop(channel_buffer_1); + drop(channel_buffer_2); + drop(channel_buffer_3); + }); + deterministic.run_until_parked(); + + // The channel buffer can be reopened after dropping it. 
+ let channel_buffer = client_a + .channel_store() + .update(cx_a, |channel, cx| channel.open_channel_buffer(zed_id, cx)) + .await + .unwrap(); + assert_ne!(channel_buffer.id(), model_id); + channel_buffer.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, _| { + assert_eq!(buffer.text(), "hello"); + }) + }); +} + #[track_caller] fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option]) { assert_eq!( From 1ae54ca62099b5eb9f762ca1f70bc5b179880481 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 14:29:04 -0700 Subject: [PATCH 17/22] Dedup channel views Co-authored-by: Mikayla --- .../collab/src/tests/channel_buffer_tests.rs | 15 +-- crates/collab_ui/src/channel_view.rs | 123 +++++++++++------- crates/collab_ui/src/collab_panel.rs | 42 ++---- 3 files changed, 94 insertions(+), 86 deletions(-) diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index f7e5751a37..0ecd4588c5 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -213,17 +213,12 @@ async fn test_channel_buffer_replica_ids( assert_eq!(channel_buffer.buffer().read(cx).replica_id(), 0); }); - let channel_window_a = cx_a - .add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), None, cx)); - let channel_window_b = cx_b - .add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), None, cx)); + let channel_window_a = + cx_a.add_window(|cx| ChannelView::new(project_a.clone(), channel_buffer_a.clone(), cx)); + let channel_window_b = + cx_b.add_window(|cx| ChannelView::new(project_b.clone(), channel_buffer_b.clone(), cx)); let channel_window_c = cx_c.add_window(|cx| { - ChannelView::new( - separate_project_c.clone(), - channel_buffer_c.clone(), - None, - cx, - ) + ChannelView::new(separate_project_c.clone(), channel_buffer_c.clone(), cx) }); let channel_view_a = channel_window_a.root(cx_a); diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index be186fc2e2..0e2d3636aa 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -1,4 +1,8 @@ -use channel::channel_buffer::{self, ChannelBuffer}; +use anyhow::{anyhow, Result}; +use channel::{ + channel_buffer::{self, ChannelBuffer}, + ChannelId, +}; use client::proto; use clock::ReplicaId; use collections::HashMap; @@ -6,15 +10,13 @@ use editor::Editor; use gpui::{ actions, elements::{ChildView, Label}, - AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, View, + AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, }; -use language::Language; use project::Project; -use std::sync::Arc; use workspace::{ item::{FollowableItem, Item, ItemHandle}, - register_followable_item, ViewId, + register_followable_item, Pane, ViewId, Workspace, WorkspaceId, }; actions!(channel_view, [Deploy]); @@ -32,14 +34,47 @@ pub struct ChannelView { } impl ChannelView { + pub fn open( + channel_id: ChannelId, + pane: ViewHandle, + workspace: ViewHandle, + cx: &mut AppContext, + ) -> Task>> { + let workspace = workspace.read(cx); + let project = workspace.project().to_owned(); + let channel_store = workspace.app_state().channel_store.clone(); + let markdown = workspace + .app_state() + .languages + .language_for_name("Markdown"); + let channel_buffer = + channel_store.update(cx, |store, cx| store.open_channel_buffer(channel_id, cx)); 
+ + cx.spawn(|mut cx| async move { + let channel_buffer = channel_buffer.await?; + let markdown = markdown.await?; + channel_buffer.update(&mut cx, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.set_language(Some(markdown), cx); + }) + }); + + pane.update(&mut cx, |pane, cx| { + pane.items_of_type::() + .find(|channel_view| channel_view.read(cx).channel_buffer == channel_buffer) + .unwrap_or_else(|| cx.add_view(|cx| Self::new(project, channel_buffer, cx))) + }) + .ok_or_else(|| anyhow!("pane was dropped")) + }) + } + pub fn new( project: ModelHandle, channel_buffer: ModelHandle, - language: Option>, cx: &mut ViewContext, ) -> Self { let buffer = channel_buffer.read(cx).buffer(); - buffer.update(cx, |buffer, cx| buffer.set_language(language, cx)); + // buffer.update(cx, |buffer, cx| buffer.set_language(language, cx)); let editor = cx.add_view(|cx| Editor::for_buffer(buffer, None, cx)); let _editor_event_subscription = cx.subscribe(&editor, |_, _, e, cx| cx.emit(e.clone())); @@ -157,6 +192,14 @@ impl Item for ChannelView { }); Label::new(channel_name, style.label.to_owned()).into_any() } + + fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext) -> Option { + Some(Self::new( + self.project.clone(), + self.channel_buffer.clone(), + cx, + )) + } } impl FollowableItem for ChannelView { @@ -180,7 +223,7 @@ impl FollowableItem for ChannelView { } fn from_state_proto( - _: ViewHandle, + pane: ViewHandle, workspace: ViewHandle, remote_id: workspace::ViewId, state: &mut Option, @@ -189,48 +232,38 @@ impl FollowableItem for ChannelView { let Some(proto::view::Variant::ChannelView(_)) = state else { return None }; let Some(proto::view::Variant::ChannelView(state)) = state.take() else { unreachable!() }; - let channel_store = &workspace.read(cx).app_state().channel_store.clone(); - let open_channel_buffer = channel_store.update(cx, |store, cx| { - store.open_channel_buffer(state.channel_id, cx) - }); - let project = workspace.read(cx).project().to_owned(); - let language = workspace.read(cx).app_state().languages.clone(); - let get_markdown = language.language_for_name("Markdown"); + let open = ChannelView::open(state.channel_id, pane, workspace, cx); Some(cx.spawn(|mut cx| async move { - let channel_buffer = open_channel_buffer.await?; - let markdown = get_markdown.await?; + let this = open.await?; - let this = workspace - .update(&mut cx, move |_, cx| { - cx.add_view(|cx| { - let mut this = Self::new(project, channel_buffer, Some(markdown), cx); - this.remote_id = Some(remote_id); - this - }) + let task = this + .update(&mut cx, |this, cx| { + this.remote_id = Some(remote_id); + + if let Some(state) = state.editor { + Some(this.editor.update(cx, |editor, cx| { + editor.apply_update_proto( + &this.project, + proto::update_view::Variant::Editor(proto::update_view::Editor { + selections: state.selections, + pending_selection: state.pending_selection, + scroll_top_anchor: state.scroll_top_anchor, + scroll_x: state.scroll_x, + scroll_y: state.scroll_y, + ..Default::default() + }), + cx, + ) + })) + } else { + None + } }) - .ok_or_else(|| anyhow::anyhow!("workspace dropped"))?; + .ok_or_else(|| anyhow!("window was closed"))?; - if let Some(state) = state.editor { - let task = this.update(&mut cx, |this, cx| { - this.editor.update(cx, |editor, cx| { - editor.apply_update_proto( - &this.project, - proto::update_view::Variant::Editor(proto::update_view::Editor { - selections: state.selections, - pending_selection: state.pending_selection, - scroll_top_anchor: 
state.scroll_top_anchor, - scroll_x: state.scroll_x, - scroll_y: state.scroll_y, - ..Default::default() - }), - cx, - ) - }) - }); - if let Some(task) = task { - task.await?; - } + if let Some(task) = task { + task.await?; } Ok(this) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index a6bd09e43b..5bdc6ad6b7 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -2220,38 +2220,18 @@ impl CollabPanel { } fn open_channel_buffer(&mut self, action: &OpenChannelBuffer, cx: &mut ViewContext) { - let workspace = self.workspace; - let open = self.channel_store.update(cx, |channel_store, cx| { - channel_store.open_channel_buffer(action.channel_id, cx) - }); - - cx.spawn(|_, mut cx| async move { - let channel_buffer = open.await?; - - let markdown = workspace - .read_with(&cx, |workspace, _| { - workspace - .app_state() - .languages - .language_for_name("Markdown") - })? - .await?; - - workspace.update(&mut cx, |workspace, cx| { - let channel_view = cx.add_view(|cx| { - ChannelView::new( - workspace.project().to_owned(), - channel_buffer, - Some(markdown), - cx, - ) + if let Some(workspace) = self.workspace.upgrade(cx) { + let pane = workspace.read(cx).active_pane().clone(); + let channel_view = ChannelView::open(action.channel_id, pane.clone(), workspace, cx); + cx.spawn(|_, mut cx| async move { + let channel_view = channel_view.await?; + pane.update(&mut cx, |pane, cx| { + pane.add_item(Box::new(channel_view), true, true, None, cx) }); - workspace.add_item(Box::new(channel_view), cx); - })?; - - anyhow::Ok(()) - }) - .detach(); + anyhow::Ok(()) + }) + .detach(); + } } fn show_inline_context_menu(&mut self, _: &menu::ShowContextMenu, cx: &mut ViewContext) { From a327320f7dd317b40ea397d2cbc43fefc40dbc62 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 15:00:54 -0700 Subject: [PATCH 18/22] Show channel notes in current call section of collab panel Co-authored-by: Mikayla --- crates/collab_ui/src/collab_panel.rs | 188 +++++++++++++++++++++------ 1 file changed, 146 insertions(+), 42 deletions(-) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 5bdc6ad6b7..dece04cb8b 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -18,13 +18,14 @@ use gpui::{ MouseEventHandler, Orientation, OverlayPositionMode, Padding, ParentElement, SafeStylable, Stack, Svg, }, + fonts::TextStyle, geometry::{ rect::RectF, vector::{vec2f, Vector2F}, }, impl_actions, platform::{CursorStyle, MouseButton, PromptLevel}, - serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, ModelHandle, + serde_json, AnyElement, AppContext, AsyncAppContext, Element, Entity, FontCache, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle, }; use menu::{Confirm, SelectNext, SelectPrev}; @@ -183,6 +184,7 @@ pub struct CollabPanel { #[derive(Serialize, Deserialize)] struct SerializedChannelsPanel { width: Option, + collapsed_channels: Vec, } #[derive(Debug)] @@ -227,6 +229,9 @@ enum ListEntry { channel: Arc, depth: usize, }, + ChannelNotes { + channel_id: ChannelId, + }, ChannelEditor { depth: usize, }, @@ -370,6 +375,12 @@ impl CollabPanel { return channel_row; } } + ListEntry::ChannelNotes { channel_id } => this.render_channel_notes( + *channel_id, + &theme.collab_panel, + is_selected, + cx, + ), ListEntry::ChannelInvite(channel) => Self::render_channel_invite( channel.clone(), this.channel_store.clone(), @@ -509,6 +520,7 
@@ impl CollabPanel { if let Some(serialized_panel) = serialized_panel { panel.update(cx, |panel, cx| { panel.width = serialized_panel.width; + panel.collapsed_channels = serialized_panel.collapsed_channels; cx.notify(); }); } @@ -519,12 +531,16 @@ impl CollabPanel { fn serialize(&mut self, cx: &mut ViewContext) { let width = self.width; + let collapsed_channels = self.collapsed_channels.clone(); self.pending_serialization = cx.background().spawn( async move { KEY_VALUE_STORE .write_kvp( COLLABORATION_PANEL_KEY.into(), - serde_json::to_string(&SerializedChannelsPanel { width })?, + serde_json::to_string(&SerializedChannelsPanel { + width, + collapsed_channels, + })?, ) .await?; anyhow::Ok(()) @@ -548,6 +564,10 @@ impl CollabPanel { if !self.collapsed_sections.contains(&Section::ActiveCall) { let room = room.read(cx); + if let Some(channel_id) = room.channel_id() { + self.entries.push(ListEntry::ChannelNotes { channel_id }) + } + // Populate the active user. if let Some(user) = user_store.current_user() { self.match_candidates.clear(); @@ -1007,25 +1027,19 @@ impl CollabPanel { ) -> AnyElement { enum JoinProject {} - let font_cache = cx.font_cache(); - let host_avatar_height = theme + let host_avatar_width = theme .contact_avatar .width .or(theme.contact_avatar.height) .unwrap_or(0.); - let row = &theme.project_row.inactive_state().default; let tree_branch = theme.tree_branch; - let line_height = row.name.text.line_height(font_cache); - let cap_height = row.name.text.cap_height(font_cache); - let baseline_offset = - row.name.text.baseline_offset(font_cache) + (theme.row_height - line_height) / 2.; let project_name = if worktree_root_names.is_empty() { "untitled".to_string() } else { worktree_root_names.join(", ") }; - MouseEventHandler::new::(project_id as usize, cx, |mouse_state, _| { + MouseEventHandler::new::(project_id as usize, cx, |mouse_state, cx| { let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state); let row = theme .project_row @@ -1033,39 +1047,20 @@ impl CollabPanel { .style_for(mouse_state); Flex::row() + .with_child(render_tree_branch( + tree_branch, + &row.name.text, + is_last, + vec2f(host_avatar_width, theme.row_height), + cx.font_cache(), + )) .with_child( - Stack::new() - .with_child(Canvas::new(move |scene, bounds, _, _, _| { - let start_x = - bounds.min_x() + (bounds.width() / 2.) 
- (tree_branch.width / 2.); - let end_x = bounds.max_x(); - let start_y = bounds.min_y(); - let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.); - - scene.push_quad(gpui::Quad { - bounds: RectF::from_points( - vec2f(start_x, start_y), - vec2f( - start_x + tree_branch.width, - if is_last { end_y } else { bounds.max_y() }, - ), - ), - background: Some(tree_branch.color), - border: gpui::Border::default(), - corner_radii: (0.).into(), - }); - scene.push_quad(gpui::Quad { - bounds: RectF::from_points( - vec2f(start_x, end_y), - vec2f(end_x, end_y + tree_branch.width), - ), - background: Some(tree_branch.color), - border: gpui::Border::default(), - corner_radii: (0.).into(), - }); - })) + Svg::new("icons/file_icons/folder.svg") + .with_color(theme.channel_hash.color) .constrained() - .with_width(host_avatar_height), + .with_width(theme.channel_hash.width) + .aligned() + .left(), ) .with_child( Label::new(project_name, row.name.text.clone()) @@ -1240,7 +1235,7 @@ impl CollabPanel { }); if let Some(name) = channel_name { - Cow::Owned(format!("Current Call - #{}", name)) + Cow::Owned(format!("#{}", name)) } else { Cow::Borrowed("Current Call") } @@ -1676,6 +1671,61 @@ impl CollabPanel { .into_any() } + fn render_channel_notes( + &self, + channel_id: ChannelId, + theme: &theme::CollabPanel, + is_selected: bool, + cx: &mut ViewContext, + ) -> AnyElement { + enum ChannelNotes {} + let host_avatar_width = theme + .contact_avatar + .width + .or(theme.contact_avatar.height) + .unwrap_or(0.); + + MouseEventHandler::new::(channel_id as usize, cx, |state, cx| { + let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state); + let row = theme.project_row.in_state(is_selected).style_for(state); + + Flex::::row() + .with_child(render_tree_branch( + tree_branch, + &row.name.text, + true, + vec2f(host_avatar_width, theme.row_height), + cx.font_cache(), + )) + .with_child( + Svg::new("icons/radix/file.svg") + .with_color(theme.channel_hash.color) + .constrained() + .with_width(theme.channel_hash.width) + .aligned() + .left(), + ) + .with_child( + Label::new("notes", theme.channel_name.text.clone()) + .contained() + .with_style(theme.channel_name.container) + .aligned() + .left() + .flex(1., true), + ) + .constrained() + .with_height(theme.row_height) + .contained() + .with_style(*theme.channel_row.style_for(is_selected, state)) + .with_padding_left(theme.channel_row.default_style().padding.left) + }) + .on_click(MouseButton::Left, move |_, this, cx| { + this.open_channel_buffer(&OpenChannelBuffer { channel_id }, cx); + }) + .with_cursor_style(CursorStyle::PointingHand) + .into_any() + } + fn render_channel_invite( channel: Arc, channel_store: ModelHandle, @@ -2114,6 +2164,7 @@ impl CollabPanel { self.collapsed_channels.insert(ix, channel_id); } }; + self.serialize(cx); self.update_entries(true, cx); cx.notify(); cx.focus_self(); @@ -2392,6 +2443,51 @@ impl CollabPanel { } } +fn render_tree_branch( + branch_style: theme::TreeBranch, + row_style: &TextStyle, + is_last: bool, + size: Vector2F, + font_cache: &FontCache, +) -> gpui::elements::ConstrainedBox { + let line_height = row_style.line_height(font_cache); + let cap_height = row_style.cap_height(font_cache); + let baseline_offset = row_style.baseline_offset(font_cache) + (size.y() - line_height) / 2.; + + Canvas::new(move |scene, bounds, _, _, _| { + scene.paint_layer(None, |scene| { + let start_x = bounds.min_x() + (bounds.width() / 2.) 
- (branch_style.width / 2.); + let end_x = bounds.max_x(); + let start_y = bounds.min_y(); + let end_y = bounds.min_y() + baseline_offset - (cap_height / 2.); + + scene.push_quad(gpui::Quad { + bounds: RectF::from_points( + vec2f(start_x, start_y), + vec2f( + start_x + branch_style.width, + if is_last { end_y } else { bounds.max_y() }, + ), + ), + background: Some(branch_style.color), + border: gpui::Border::default(), + corner_radii: (0.).into(), + }); + scene.push_quad(gpui::Quad { + bounds: RectF::from_points( + vec2f(start_x, end_y), + vec2f(end_x, end_y + branch_style.width), + ), + background: Some(branch_style.color), + border: gpui::Border::default(), + corner_radii: (0.).into(), + }); + }) + }) + .constrained() + .with_width(size.x()) +} + impl View for CollabPanel { fn ui_name() -> &'static str { "CollabPanel" @@ -2601,6 +2697,14 @@ impl PartialEq for ListEntry { return channel_1.id == channel_2.id && depth_1 == depth_2; } } + ListEntry::ChannelNotes { channel_id } => { + if let ListEntry::ChannelNotes { + channel_id: other_id, + } = other + { + return channel_id == other_id; + } + } ListEntry::ChannelInvite(channel_1) => { if let ListEntry::ChannelInvite(channel_2) = other { return channel_1.id == channel_2.id; From 358a20494c177799474baf4c25ab666104c0bb19 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 16:50:13 -0700 Subject: [PATCH 19/22] Make channel notes read-only when disconnected Co-authored-by: Mikayla --- crates/channel/src/channel_buffer.rs | 58 +++++---- crates/channel/src/channel_store.rs | 115 ++++++++++++------ crates/collab/src/rpc.rs | 5 +- .../collab/src/tests/channel_buffer_tests.rs | 76 ++++++++++++ crates/collab/src/tests/channel_tests.rs | 7 +- crates/collab_ui/src/channel_view.rs | 36 +++--- 6 files changed, 217 insertions(+), 80 deletions(-) diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index c19899501a..29f4d3493c 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -1,4 +1,4 @@ -use crate::{Channel, ChannelId, ChannelStore}; +use crate::Channel; use anyhow::Result; use client::Client; use gpui::{AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle}; @@ -13,39 +13,43 @@ pub(crate) fn init(client: &Arc) { } pub struct ChannelBuffer { - channel_id: ChannelId, + pub(crate) channel: Arc, + connected: bool, collaborators: Vec, buffer: ModelHandle, - channel_store: ModelHandle, client: Arc, - _subscription: client::Subscription, + subscription: Option, } pub enum Event { CollaboratorsChanged, + Disconnected, } impl Entity for ChannelBuffer { type Event = Event; fn release(&mut self, _: &mut AppContext) { - self.client - .send(proto::LeaveChannelBuffer { - channel_id: self.channel_id, - }) - .log_err(); + if self.connected { + self.client + .send(proto::LeaveChannelBuffer { + channel_id: self.channel.id, + }) + .log_err(); + } } } impl ChannelBuffer { pub(crate) async fn new( - channel_store: ModelHandle, - channel_id: ChannelId, + channel: Arc, client: Arc, mut cx: AsyncAppContext, ) -> Result> { let response = client - .request(proto::JoinChannelBuffer { channel_id }) + .request(proto::JoinChannelBuffer { + channel_id: channel.id, + }) .await?; let base_text = response.base_text; @@ -62,7 +66,7 @@ impl ChannelBuffer { }); buffer.update(&mut cx, |buffer, cx| buffer.apply_ops(operations, cx))?; - let subscription = client.subscribe_to_entity(channel_id)?; + let subscription = client.subscribe_to_entity(channel.id)?; anyhow::Ok(cx.add_model(|cx| { 
cx.subscribe(&buffer, Self::on_buffer_update).detach(); @@ -70,10 +74,10 @@ impl ChannelBuffer { Self { buffer, client, - channel_id, - channel_store, + connected: true, collaborators, - _subscription: subscription.set_model(&cx.handle(), &mut cx.to_async()), + channel, + subscription: Some(subscription.set_model(&cx.handle(), &mut cx.to_async())), } })) } @@ -155,7 +159,7 @@ impl ChannelBuffer { let operation = language::proto::serialize_operation(operation); self.client .send(proto::UpdateChannelBuffer { - channel_id: self.channel_id, + channel_id: self.channel.id, operations: vec![operation], }) .log_err(); @@ -170,11 +174,21 @@ impl ChannelBuffer { &self.collaborators } - pub fn channel(&self, cx: &AppContext) -> Option> { - self.channel_store - .read(cx) - .channel_for_id(self.channel_id) - .cloned() + pub fn channel(&self) -> Arc { + self.channel.clone() + } + + pub(crate) fn disconnect(&mut self, cx: &mut ModelContext) { + if self.connected { + self.connected = false; + self.subscription.take(); + cx.emit(Event::Disconnected); + cx.notify() + } + } + + pub fn is_connected(&self) -> bool { + self.connected } pub fn replica_id(&self, cx: &AppContext) -> u16 { diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 1d83bd1d7f..861f731331 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -2,7 +2,7 @@ use crate::channel_buffer::ChannelBuffer; use anyhow::{anyhow, Result}; use client::{Client, Status, Subscription, User, UserId, UserStore}; use collections::{hash_map, HashMap, HashSet}; -use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt, TryFutureExt}; +use futures::{channel::mpsc, future::Shared, Future, FutureExt, StreamExt}; use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, Task, WeakModelHandle}; use rpc::{proto, TypedEnvelope}; use std::sync::Arc; @@ -71,16 +71,14 @@ impl ChannelStore { let mut connection_status = client.status(); let watch_connection_status = cx.spawn_weak(|this, mut cx| async move { while let Some(status) = connection_status.next().await { - if matches!(status, Status::ConnectionLost | Status::SignedOut) { + if !status.is_connected() { if let Some(this) = this.upgrade(&cx) { this.update(&mut cx, |this, cx| { - this.channels_by_id.clear(); - this.channel_invitations.clear(); - this.channel_participants.clear(); - this.channels_with_admin_privileges.clear(); - this.channel_paths.clear(); - this.outgoing_invites.clear(); - cx.notify(); + if matches!(status, Status::ConnectionLost | Status::SignedOut) { + this.handle_disconnect(cx); + } else { + this.disconnect_buffers(cx); + } }); } else { break; @@ -176,9 +174,17 @@ impl ChannelStore { OpenedChannelBuffer::Loading(task) => break task.clone(), }, hash_map::Entry::Vacant(e) => { + let client = self.client.clone(); let task = cx - .spawn(|this, cx| { - ChannelBuffer::new(this, channel_id, self.client.clone(), cx) + .spawn(|this, cx| async move { + let channel = this.read_with(&cx, |this, _| { + this.channel_for_id(channel_id).cloned().ok_or_else(|| { + Arc::new(anyhow!("no channel for id: {}", channel_id)) + }) + })?; + + ChannelBuffer::new(channel, client, cx) + .await .map_err(Arc::new) }) .shared(); @@ -187,8 +193,8 @@ impl ChannelStore { let task = task.clone(); |this, mut cx| async move { let result = task.await; - this.update(&mut cx, |this, cx| { - if let Ok(buffer) = result { + this.update(&mut cx, |this, cx| match result { + Ok(buffer) => { cx.observe_release(&buffer, move |this, _, _| { 
this.opened_buffers.remove(&channel_id); }) @@ -197,7 +203,9 @@ impl ChannelStore { channel_id, OpenedChannelBuffer::Open(buffer.downgrade()), ); - } else { + } + Err(error) => { + log::error!("failed to open channel buffer {error:?}"); this.opened_buffers.remove(&channel_id); } }); @@ -474,6 +482,27 @@ impl ChannelStore { Ok(()) } + fn handle_disconnect(&mut self, cx: &mut ModelContext<'_, ChannelStore>) { + self.disconnect_buffers(cx); + self.channels_by_id.clear(); + self.channel_invitations.clear(); + self.channel_participants.clear(); + self.channels_with_admin_privileges.clear(); + self.channel_paths.clear(); + self.outgoing_invites.clear(); + cx.notify(); + } + + fn disconnect_buffers(&mut self, cx: &mut ModelContext) { + for (_, buffer) in self.opened_buffers.drain() { + if let OpenedChannelBuffer::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade(cx) { + buffer.update(cx, |buffer, cx| buffer.disconnect(cx)); + } + } + } + } + pub(crate) fn update_channels( &mut self, payload: proto::UpdateChannels, @@ -508,38 +537,44 @@ impl ChannelStore { .retain(|channel_id, _| !payload.remove_channels.contains(channel_id)); self.channels_with_admin_privileges .retain(|channel_id| !payload.remove_channels.contains(channel_id)); + + for channel_id in &payload.remove_channels { + let channel_id = *channel_id; + if let Some(OpenedChannelBuffer::Open(buffer)) = + self.opened_buffers.remove(&channel_id) + { + if let Some(buffer) = buffer.upgrade(cx) { + buffer.update(cx, ChannelBuffer::disconnect); + } + } + } } - for channel in payload.channels { - if let Some(existing_channel) = self.channels_by_id.get_mut(&channel.id) { - // FIXME: We may be missing a path for this existing channel in certain cases - let existing_channel = Arc::make_mut(existing_channel); - existing_channel.name = channel.name; - continue; - } + for channel_proto in payload.channels { + if let Some(existing_channel) = self.channels_by_id.get_mut(&channel_proto.id) { + Arc::make_mut(existing_channel).name = channel_proto.name; + } else { + let channel = Arc::new(Channel { + id: channel_proto.id, + name: channel_proto.name, + }); + self.channels_by_id.insert(channel.id, channel.clone()); - self.channels_by_id.insert( - channel.id, - Arc::new(Channel { - id: channel.id, - name: channel.name, - }), - ); - - if let Some(parent_id) = channel.parent_id { - let mut ix = 0; - while ix < self.channel_paths.len() { - let path = &self.channel_paths[ix]; - if path.ends_with(&[parent_id]) { - let mut new_path = path.clone(); - new_path.push(channel.id); - self.channel_paths.insert(ix + 1, new_path); + if let Some(parent_id) = channel_proto.parent_id { + let mut ix = 0; + while ix < self.channel_paths.len() { + let path = &self.channel_paths[ix]; + if path.ends_with(&[parent_id]) { + let mut new_path = path.clone(); + new_path.push(channel.id); + self.channel_paths.insert(ix + 1, new_path); + ix += 1; + } ix += 1; } - ix += 1; + } else { + self.channel_paths.push(vec![channel.id]); } - } else { - self.channel_paths.push(vec![channel.id]); } } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 2bd39c861d..18587c2ba8 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -854,10 +854,13 @@ async fn connection_lost( .await .trace_err(); + leave_channel_buffers_for_session(&session) + .await + .trace_err(); + futures::select_biased! 
{ _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => { leave_room_for_session(&session).await.trace_err(); - leave_channel_buffers_for_session(&session).await.trace_err(); if !session .connection_pool() diff --git a/crates/collab/src/tests/channel_buffer_tests.rs b/crates/collab/src/tests/channel_buffer_tests.rs index 0ecd4588c5..8ac4dbbd3f 100644 --- a/crates/collab/src/tests/channel_buffer_tests.rs +++ b/crates/collab/src/tests/channel_buffer_tests.rs @@ -1,5 +1,6 @@ use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer}; use call::ActiveCall; +use channel::Channel; use client::UserId; use collab_ui::channel_view::ChannelView; use collections::HashMap; @@ -334,6 +335,81 @@ async fn test_reopen_channel_buffer(deterministic: Arc, cx_a: &mu }); } +#[gpui::test] +async fn test_channel_buffer_disconnect( + deterministic: Arc, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + deterministic.forbid_parking(); + let mut server = TestServer::start(&deterministic).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel("zed", (&client_a, cx_a), &mut [(&client_b, cx_b)]) + .await; + + let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |channel, cx| { + channel.open_channel_buffer(channel_id, cx) + }) + .await + .unwrap(); + + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |channel, cx| { + channel.open_channel_buffer(channel_id, cx) + }) + .await + .unwrap(); + + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + channel_buffer_a.update(cx_a, |buffer, _| { + assert_eq!( + buffer.channel().as_ref(), + &Channel { + id: channel_id, + name: "zed".to_string() + } + ); + assert!(!buffer.is_connected()); + }); + + deterministic.run_until_parked(); + + server.allow_connections(); + deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT); + + deterministic.run_until_parked(); + + client_a + .channel_store() + .update(cx_a, |channel_store, _| { + channel_store.remove_channel(channel_id) + }) + .await + .unwrap(); + deterministic.run_until_parked(); + + // Channel buffer observed the deletion + channel_buffer_b.update(cx_b, |buffer, _| { + assert_eq!( + buffer.channel().as_ref(), + &Channel { + id: channel_id, + name: "zed".to_string() + } + ); + assert!(!buffer.is_connected()); + }); +} + #[track_caller] fn assert_collaborators(collaborators: &[proto::Collaborator], ids: &[Option]) { assert_eq!( diff --git a/crates/collab/src/tests/channel_tests.rs b/crates/collab/src/tests/channel_tests.rs index 41d2286772..b54b4d349b 100644 --- a/crates/collab/src/tests/channel_tests.rs +++ b/crates/collab/src/tests/channel_tests.rs @@ -799,7 +799,7 @@ async fn test_lost_channel_creation( deterministic.run_until_parked(); - // Sanity check + // Sanity check, B has the invitation assert_channel_invitations( client_b.channel_store(), cx_b, @@ -811,6 +811,7 @@ async fn test_lost_channel_creation( }], ); + // A creates a subchannel while the invite is still pending. 
let subchannel_id = client_a .channel_store() .update(cx_a, |channel_store, cx| { @@ -841,7 +842,7 @@ async fn test_lost_channel_creation( ], ); - // Accept the invite + // Client B accepts the invite client_b .channel_store() .update(cx_b, |channel_store, _| { @@ -852,7 +853,7 @@ async fn test_lost_channel_creation( deterministic.run_until_parked(); - // B should now see the channel + // Client B should now see the channel assert_channels( client_b.channel_store(), cx_b, diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index 0e2d3636aa..9c125117e1 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -114,10 +114,18 @@ impl ChannelView { fn handle_channel_buffer_event( &mut self, _: ModelHandle, - _: &channel_buffer::Event, + event: &channel_buffer::Event, cx: &mut ViewContext, ) { - self.refresh_replica_id_map(cx); + match event { + channel_buffer::Event::CollaboratorsChanged => { + self.refresh_replica_id_map(cx); + } + channel_buffer::Event::Disconnected => self.editor.update(cx, |editor, cx| { + editor.set_read_only(true); + cx.notify(); + }), + } } /// Build a mapping of channel buffer replica ids to the corresponding @@ -183,14 +191,13 @@ impl Item for ChannelView { style: &theme::Tab, cx: &gpui::AppContext, ) -> AnyElement { - let channel_name = self - .channel_buffer - .read(cx) - .channel(cx) - .map_or("[Deleted channel]".to_string(), |channel| { - format!("#{}", channel.name) - }); - Label::new(channel_name, style.label.to_owned()).into_any() + let channel_name = &self.channel_buffer.read(cx).channel().name; + let label = if self.channel_buffer.read(cx).is_connected() { + format!("#{}", channel_name) + } else { + format!("#{} (disconnected)", channel_name) + }; + Label::new(label, style.label.to_owned()).into_any() } fn clone_on_split(&self, _: WorkspaceId, cx: &mut ViewContext) -> Option { @@ -208,8 +215,9 @@ impl FollowableItem for ChannelView { } fn to_state_proto(&self, cx: &AppContext) -> Option { - self.channel_buffer.read(cx).channel(cx).map(|channel| { - proto::view::Variant::ChannelView(proto::view::ChannelView { + let channel = self.channel_buffer.read(cx).channel(); + Some(proto::view::Variant::ChannelView( + proto::view::ChannelView { channel_id: channel.id, editor: if let Some(proto::view::Variant::Editor(proto)) = self.editor.read(cx).to_state_proto(cx) @@ -218,8 +226,8 @@ impl FollowableItem for ChannelView { } else { None }, - }) - }) + }, + )) } fn from_state_proto( From c7c220309dbfdf8e90625dd0a47401c6da5019a6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 16:55:39 -0700 Subject: [PATCH 20/22] Avoid creating redundant snapshots of channel notes buffers Co-authored-by: Mikayla --- crates/collab/src/db/queries/buffers.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index a38693bace..354accc01a 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -326,9 +326,11 @@ impl Database { .ok_or_else(|| anyhow!("no such buffer"))?; let (base_text, operations) = self.get_buffer_state(&buffer, tx).await?; + if operations.is_empty() { + return Ok(()); + } let mut text_buffer = text::Buffer::new(0, 0, base_text); - text_buffer .apply_ops(operations.into_iter().filter_map(operation_from_wire)) .unwrap(); From 7b6c0c539c9f8a8e30f98413269f8d17a1b7c224 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 17:17:20 
-0700 Subject: [PATCH 21/22] Show non-admin context menu items for all channel members Co-authored-by: Mikayla --- crates/collab_ui/src/collab_panel.rs | 81 +++++++++++++++------------- 1 file changed, 43 insertions(+), 38 deletions(-) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index dece04cb8b..411a3a2598 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -1923,47 +1923,52 @@ impl CollabPanel { channel_id: u64, cx: &mut ViewContext, ) { - if self.channel_store.read(cx).is_user_admin(channel_id) { - self.context_menu_on_selected = position.is_none(); + self.context_menu_on_selected = position.is_none(); - self.context_menu.update(cx, |context_menu, cx| { - context_menu.set_position_mode(if self.context_menu_on_selected { - OverlayPositionMode::Local - } else { - OverlayPositionMode::Window - }); - - let expand_action_name = if self.is_channel_collapsed(channel_id) { - "Expand Subchannels" - } else { - "Collapse Subchannels" - }; - - context_menu.show( - position.unwrap_or_default(), - if self.context_menu_on_selected { - gpui::elements::AnchorCorner::TopRight - } else { - gpui::elements::AnchorCorner::BottomLeft - }, - vec![ - ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }), - ContextMenuItem::action("New Subchannel", NewChannel { channel_id }), - ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }), - ContextMenuItem::Separator, - ContextMenuItem::action("Invite to Channel", InviteMembers { channel_id }), - ContextMenuItem::Separator, - ContextMenuItem::action("Rename", RenameChannel { channel_id }), - ContextMenuItem::action("Manage", ManageMembers { channel_id }), - ContextMenuItem::Separator, - ContextMenuItem::action("Delete", RemoveChannel { channel_id }), - ], - cx, - ); + self.context_menu.update(cx, |context_menu, cx| { + context_menu.set_position_mode(if self.context_menu_on_selected { + OverlayPositionMode::Local + } else { + OverlayPositionMode::Window }); - cx.notify(); - } + let expand_action_name = if self.is_channel_collapsed(channel_id) { + "Expand Subchannels" + } else { + "Collapse Subchannels" + }; + + let mut items = vec![ + ContextMenuItem::action(expand_action_name, ToggleCollapse { channel_id }), + ContextMenuItem::action("Open Notes", OpenChannelBuffer { channel_id }), + ]; + + if self.channel_store.read(cx).is_user_admin(channel_id) { + items.extend([ + ContextMenuItem::Separator, + ContextMenuItem::action("New Subchannel", NewChannel { channel_id }), + ContextMenuItem::action("Rename", RenameChannel { channel_id }), + ContextMenuItem::Separator, + ContextMenuItem::action("Invite Members", InviteMembers { channel_id }), + ContextMenuItem::action("Manage Members", ManageMembers { channel_id }), + ContextMenuItem::Separator, + ContextMenuItem::action("Delete", RemoveChannel { channel_id }), + ]); + } + + context_menu.show( + position.unwrap_or_default(), + if self.context_menu_on_selected { + gpui::elements::AnchorCorner::TopRight + } else { + gpui::elements::AnchorCorner::BottomLeft + }, + items, + cx, + ); + }); + + cx.notify(); } fn cancel(&mut self, _: &Cancel, cx: &mut ViewContext) { From a95dcfa8bc74dbfec05174af5f9b1498737862b5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 24 Aug 2023 17:18:18 -0700 Subject: [PATCH 22/22] Make channel notes view searchable and navigable via pane history Co-authored-by: Mikayla --- crates/collab_ui/src/channel_view.rs | 37 +++++++++++++++++++++++++++- crates/editor/src/items.rs | 2 
+- crates/vim/src/visual.rs | 12 ++++----- crates/workspace/src/item.rs | 8 +++--- 4 files changed, 46 insertions(+), 13 deletions(-) diff --git a/crates/collab_ui/src/channel_view.rs b/crates/collab_ui/src/channel_view.rs index 9c125117e1..bb1e840ffc 100644 --- a/crates/collab_ui/src/channel_view.rs +++ b/crates/collab_ui/src/channel_view.rs @@ -10,13 +10,17 @@ use editor::Editor; use gpui::{ actions, elements::{ChildView, Label}, + geometry::vector::Vector2F, AnyElement, AnyViewHandle, AppContext, Element, Entity, ModelHandle, Subscription, Task, View, ViewContext, ViewHandle, }; use project::Project; +use std::any::Any; use workspace::{ item::{FollowableItem, Item, ItemHandle}, - register_followable_item, Pane, ViewId, Workspace, WorkspaceId, + register_followable_item, + searchable::SearchableItemHandle, + ItemNavHistory, Pane, ViewId, Workspace, WorkspaceId, }; actions!(channel_view, [Deploy]); @@ -207,6 +211,37 @@ impl Item for ChannelView { cx, )) } + + fn is_singleton(&self, _cx: &AppContext) -> bool { + true + } + + fn navigate(&mut self, data: Box, cx: &mut ViewContext) -> bool { + self.editor + .update(cx, |editor, cx| editor.navigate(data, cx)) + } + + fn deactivated(&mut self, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| Item::deactivated(editor, cx)) + } + + fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext) { + self.editor + .update(cx, |editor, cx| Item::set_nav_history(editor, history, cx)) + } + + fn as_searchable(&self, _: &ViewHandle) -> Option> { + Some(Box::new(self.editor.clone())) + } + + fn show_toolbar(&self) -> bool { + true + } + + fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option { + self.editor.read(cx).pixel_position_of_cursor(cx) + } } impl FollowableItem for ChannelView { diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 657aae5ff9..477eab41ac 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -754,7 +754,7 @@ impl Item for Editor { Some(Box::new(handle.clone())) } - fn pixel_position_of_cursor(&self) -> Option { + fn pixel_position_of_cursor(&self, _: &AppContext) -> Option { self.pixel_position_of_newest_cursor } diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs index 1a11721a4e..5e60ef59fc 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -391,7 +391,7 @@ mod test { the lazy dog" }) .await; - let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor()); + let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx)); // entering visual mode should select the character // under cursor @@ -400,7 +400,7 @@ mod test { fox jumps over the lazy dog"}) .await; - cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor())); + cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx))); // forwards motions should extend the selection cx.simulate_shared_keystrokes(["w", "j"]).await; @@ -430,7 +430,7 @@ mod test { b "}) .await; - let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor()); + let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx)); cx.simulate_shared_keystrokes(["v"]).await; cx.assert_shared_state(indoc! 
{" a @@ -438,7 +438,7 @@ mod test { ˇ»b "}) .await; - cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor())); + cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx))); // toggles off again cx.simulate_shared_keystrokes(["v"]).await; @@ -510,7 +510,7 @@ mod test { b ˇ"}) .await; - let cursor = cx.update_editor(|editor, _| editor.pixel_position_of_cursor()); + let cursor = cx.update_editor(|editor, cx| editor.pixel_position_of_cursor(cx)); cx.simulate_shared_keystrokes(["shift-v"]).await; cx.assert_shared_state(indoc! {" a @@ -518,7 +518,7 @@ mod test { ˇ"}) .await; assert_eq!(cx.mode(), cx.neovim_mode().await); - cx.update_editor(|editor, _| assert_eq!(cursor, editor.pixel_position_of_cursor())); + cx.update_editor(|editor, cx| assert_eq!(cursor, editor.pixel_position_of_cursor(cx))); cx.simulate_shared_keystrokes(["x"]).await; cx.assert_shared_state(indoc! {" a diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 4b5b7a7931..c218a85234 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -158,9 +158,7 @@ pub trait Item: View { fn should_update_tab_on_event(_: &Self::Event) -> bool { false } - fn is_edit_event(_: &Self::Event) -> bool { - false - } + fn act_as_type<'a>( &'a self, type_id: TypeId, @@ -205,7 +203,7 @@ pub trait Item: View { fn show_toolbar(&self) -> bool { true } - fn pixel_position_of_cursor(&self) -> Option { + fn pixel_position_of_cursor(&self, _: &AppContext) -> Option { None } } @@ -623,7 +621,7 @@ impl ItemHandle for ViewHandle { } fn pixel_position_of_cursor(&self, cx: &AppContext) -> Option { - self.read(cx).pixel_position_of_cursor() + self.read(cx).pixel_position_of_cursor(cx) } }