Merge branch 'main' into randomized-tests-operation-script

Max Brunsfeld 2023-02-20 10:39:00 -08:00
commit 51cea1b1fb
207 changed files with 7644 additions and 3698 deletions

View file

@@ -595,7 +595,16 @@ impl Database {
.await
}
pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<()> {
/// Returns a bool indicating whether the removed contact had originally been accepted
///
/// Deletes the contact identified by the requester and responder ids, and then returns
/// whether the deleted contact had been accepted or was still a pending contact request.
///
/// # Arguments
///
/// * `requester_id` - The user that initiates this request
/// * `responder_id` - The user on the other side of the contact being removed
pub async fn remove_contact(&self, requester_id: UserId, responder_id: UserId) -> Result<bool> {
self.transaction(|tx| async move {
let (id_a, id_b) = if responder_id < requester_id {
(responder_id, requester_id)
@@ -603,20 +612,18 @@ impl Database {
(requester_id, responder_id)
};
let result = contact::Entity::delete_many()
let contact = contact::Entity::find()
.filter(
contact::Column::UserIdA
.eq(id_a)
.and(contact::Column::UserIdB.eq(id_b)),
)
.exec(&*tx)
.await?;
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such contact"))?;
if result.rows_affected == 1 {
Ok(())
} else {
Err(anyhow!("no such contact"))?
}
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
Ok(contact.accepted)
})
.await
}
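For context on the signature change above, here is a minimal sketch of how a caller might consume the new boolean; the actual server-side handler change appears later in this diff, and the variable names here are illustrative only.

// Sketch only: branch on whether the removed contact had been accepted.
let was_accepted = db.remove_contact(requester_id, responder_id).await?;
if was_accepted {
    // The two users were contacts; tell both sides to drop each other
    // from their contact lists.
} else {
    // Only a pending request existed; clear the outgoing request on the
    // requester's side and the incoming request on the responder's side.
}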
@@ -1586,12 +1593,8 @@ impl Database {
.filter(
Condition::all()
.add(
room_participant::Column::CallingConnectionId
.eq(connection.id as i32),
)
.add(
room_participant::Column::CallingConnectionServerId
.eq(connection.owner_id as i32),
room_participant::Column::CallingUserId
.eq(leaving_participant.user_id),
)
.add(room_participant::Column::AnsweringConnectionId.is_null()),
)
@@ -1917,7 +1920,9 @@ impl Database {
};
if let Some(db_worktree) = db_worktree {
project.worktree_root_names.push(db_worktree.root_name);
if db_worktree.visible {
project.worktree_root_names.push(db_worktree.root_name);
}
}
}
}

View file

@@ -1961,23 +1961,31 @@ async fn remove_contact(
let requester_id = session.user_id;
let responder_id = UserId::from_proto(request.user_id);
let db = session.db().await;
db.remove_contact(requester_id, responder_id).await?;
let contact_accepted = db.remove_contact(requester_id, responder_id).await?;
let pool = session.connection_pool().await;
// Update outgoing contact requests of requester
let mut update = proto::UpdateContacts::default();
update
.remove_outgoing_requests
.push(responder_id.to_proto());
if contact_accepted {
update.remove_contacts.push(responder_id.to_proto());
} else {
update
.remove_outgoing_requests
.push(responder_id.to_proto());
}
for connection_id in pool.user_connection_ids(requester_id) {
session.peer.send(connection_id, update.clone())?;
}
// Update incoming contact requests of responder
let mut update = proto::UpdateContacts::default();
update
.remove_incoming_requests
.push(requester_id.to_proto());
if contact_accepted {
update.remove_contacts.push(requester_id.to_proto());
} else {
update
.remove_incoming_requests
.push(requester_id.to_proto());
}
for connection_id in pool.user_connection_ids(responder_id) {
session.peer.send(connection_id, update.clone())?;
}
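The two branches above differ only in which list of the update message they touch. A hypothetical helper, not part of this change, could capture that symmetry, assuming only the proto::UpdateContacts fields already shown in this hunk.

// Hypothetical helper: build the contact-removal update for one side of the
// relationship. `outgoing` is true when building the requester's update.
fn contact_removal_update(other_id: UserId, accepted: bool, outgoing: bool) -> proto::UpdateContacts {
    let mut update = proto::UpdateContacts::default();
    if accepted {
        update.remove_contacts.push(other_id.to_proto());
    } else if outgoing {
        update.remove_outgoing_requests.push(other_id.to_proto());
    } else {
        update.remove_incoming_requests.push(other_id.to_proto());
    }
    update
}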

View file

@@ -11,7 +11,7 @@ use client::{
EstablishConnectionError, UserStore,
};
use collections::{HashMap, HashSet};
use fs::{FakeFs, HomeDir};
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
use gpui::{
executor::Deterministic, test::EmptyView, ModelHandle, Task, TestAppContext, ViewHandle,
@@ -101,7 +101,6 @@ impl TestServer {
async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
cx.update(|cx| {
cx.set_global(HomeDir(Path::new("/tmp/").to_path_buf()));
cx.set_global(Settings::test(cx));
});
@@ -197,7 +196,7 @@ impl TestServer {
languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
themes: ThemeRegistry::new((), cx.font_cache()),
fs: fs.clone(),
build_window_options: Default::default,
build_window_options: |_, _, _| Default::default(),
initialize_workspace: |_, _, _| unimplemented!(),
dock_default_item_factory: |_, _| unimplemented!(),
});

View file

@@ -166,9 +166,67 @@ async fn test_basic_calls(
}
);
// Call user C again from user A.
active_call_a
.update(cx_a, |call, cx| {
call.invite(client_c.user_id().unwrap(), None, cx)
})
.await
.unwrap();
deterministic.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
remote: vec!["user_b".to_string()],
pending: vec!["user_c".to_string()]
}
);
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
remote: vec!["user_a".to_string()],
pending: vec!["user_c".to_string()]
}
);
// User C accepts the call.
let call_c = incoming_call_c.next().await.unwrap().unwrap();
assert_eq!(call_c.calling_user.github_login, "user_a");
active_call_c
.update(cx_c, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
assert!(incoming_call_c.next().await.unwrap().is_none());
let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
deterministic.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
remote: vec!["user_b".to_string(), "user_c".to_string()],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
remote: vec!["user_a".to_string(), "user_c".to_string()],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_c, cx_c),
RoomParticipants {
remote: vec!["user_a".to_string(), "user_b".to_string()],
pending: Default::default()
}
);
// User A shares their screen
let display = MacOSDisplay::new();
let events_b = active_call_events(cx_b);
let events_c = active_call_events(cx_c);
active_call_a
.update(cx_a, |call, cx| {
call.room().unwrap().update(cx, |room, cx| {
@@ -181,9 +239,10 @@ async fn test_basic_calls(
deterministic.run_until_parked();
// User B observes the remote screen sharing track.
assert_eq!(events_b.borrow().len(), 1);
let event = events_b.borrow().first().unwrap().clone();
if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event {
let event_b = events_b.borrow().first().unwrap().clone();
if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b {
assert_eq!(participant_id, client_a.peer_id().unwrap());
room_b.read_with(cx_b, |room, _| {
assert_eq!(
@@ -197,6 +256,23 @@ async fn test_basic_calls(
panic!("unexpected event")
}
// User C observes the remote screen sharing track.
assert_eq!(events_c.borrow().len(), 1);
let event_c = events_c.borrow().first().unwrap().clone();
if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c {
assert_eq!(participant_id, client_a.peer_id().unwrap());
room_c.read_with(cx_c, |room, _| {
assert_eq!(
room.remote_participants()[&client_a.user_id().unwrap()]
.tracks
.len(),
1
);
});
} else {
panic!("unexpected event")
}
// User A leaves the room.
active_call_a.update(cx_a, |call, cx| {
call.hang_up(cx).unwrap();
@@ -213,18 +289,28 @@ async fn test_basic_calls(
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
remote: Default::default(),
remote: vec!["user_c".to_string()],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_c, cx_c),
RoomParticipants {
remote: vec!["user_b".to_string()],
pending: Default::default()
}
);
// User B gets disconnected from the LiveKit server, which causes them
// to automatically leave the room.
// to automatically leave the room. User C leaves the room as well because
// nobody else is in there.
server
.test_live_kit_server
.disconnect_client(client_b.peer_id().unwrap().to_string())
.disconnect_client(client_b.user_id().unwrap().to_string())
.await;
active_call_b.update(cx_b, |call, _| assert!(call.room().is_none()));
deterministic.run_until_parked();
active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
active_call_c.read_with(cx_c, |call, _| assert!(call.room().is_none()));
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -239,6 +325,141 @@ async fn test_basic_calls(
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_c, cx_c),
RoomParticipants {
remote: Default::default(),
pending: Default::default()
}
);
}
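The assertions in this test compare against the room_participants helper defined elsewhere in the file. A rough sketch of what it collects is shown below; remote_participants() appears later in this diff, but the pending-participants accessor and the user/github_login field paths are assumptions.

// Sketch only: gather the github logins of remote and pending participants
// so tests can compare them against expected RoomParticipants values.
#[derive(Debug, Default, PartialEq)]
struct RoomParticipants {
    remote: Vec<String>,
    pending: Vec<String>,
}

fn room_participants(room: &ModelHandle<Room>, cx: &mut TestAppContext) -> RoomParticipants {
    room.read_with(cx, |room, _| {
        let mut remote: Vec<String> = room
            .remote_participants()
            .values()
            .map(|participant| participant.user.github_login.clone())
            .collect();
        let mut pending: Vec<String> = room
            .pending_participants()
            .iter()
            .map(|user| user.github_login.clone())
            .collect();
        // Sort so assertions don't depend on hash-map iteration order.
        remote.sort();
        pending.sort();
        RoomParticipants { remote, pending }
    })
}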
#[gpui::test(iterations = 10)]
async fn test_calling_multiple_users_simultaneously(
deterministic: Arc<Deterministic>,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
cx_d: &mut TestAppContext,
) {
deterministic.forbid_parking();
let mut server = TestServer::start(&deterministic).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
let client_d = server.create_client(cx_d, "user_d").await;
server
.make_contacts(&mut [
(&client_a, cx_a),
(&client_b, cx_b),
(&client_c, cx_c),
(&client_d, cx_d),
])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);
let active_call_c = cx_c.read(ActiveCall::global);
let active_call_d = cx_d.read(ActiveCall::global);
// Simultaneously call user B and user C from client A.
let b_invite = active_call_a.update(cx_a, |call, cx| {
call.invite(client_b.user_id().unwrap(), None, cx)
});
let c_invite = active_call_a.update(cx_a, |call, cx| {
call.invite(client_c.user_id().unwrap(), None, cx)
});
b_invite.await.unwrap();
c_invite.await.unwrap();
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
deterministic.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
remote: Default::default(),
pending: vec!["user_b".to_string(), "user_c".to_string()]
}
);
// Call client D from client A.
active_call_a
.update(cx_a, |call, cx| {
call.invite(client_d.user_id().unwrap(), None, cx)
})
.await
.unwrap();
deterministic.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
remote: Default::default(),
pending: vec![
"user_b".to_string(),
"user_c".to_string(),
"user_d".to_string()
]
}
);
// Accept the call on all clients simultaneously.
let accept_b = active_call_b.update(cx_b, |call, cx| call.accept_incoming(cx));
let accept_c = active_call_c.update(cx_c, |call, cx| call.accept_incoming(cx));
let accept_d = active_call_d.update(cx_d, |call, cx| call.accept_incoming(cx));
accept_b.await.unwrap();
accept_c.await.unwrap();
accept_d.await.unwrap();
deterministic.run_until_parked();
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone());
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
remote: vec![
"user_b".to_string(),
"user_c".to_string(),
"user_d".to_string(),
],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
remote: vec![
"user_a".to_string(),
"user_c".to_string(),
"user_d".to_string(),
],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_c, cx_c),
RoomParticipants {
remote: vec![
"user_a".to_string(),
"user_b".to_string(),
"user_d".to_string(),
],
pending: Default::default()
}
);
assert_eq!(
room_participants(&room_d, cx_d),
RoomParticipants {
remote: vec![
"user_a".to_string(),
"user_b".to_string(),
"user_c".to_string(),
],
pending: Default::default()
}
);
}
#[gpui::test(iterations = 10)]
@@ -2023,7 +2244,7 @@ async fn test_propagate_saves_and_fs_changes(
});
// Edit the buffer as the host and concurrently save as guest B.
let save_b = buffer_b.update(cx_b, |buf, cx| buf.save(cx));
let save_b = project_b.update(cx_b, |project, cx| project.save_buffer(buffer_b.clone(), cx));
buffer_a.update(cx_a, |buf, cx| buf.edit([(0..0, "hi-a, ")], None, cx));
save_b.await.unwrap();
assert_eq!(
@@ -2092,6 +2313,41 @@ async fn test_propagate_saves_and_fs_changes(
assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js"));
assert_eq!(&*buffer.language().unwrap().name(), "JavaScript");
});
let new_buffer_a = project_a
.update(cx_a, |p, cx| p.create_buffer("", None, cx))
.unwrap();
let new_buffer_id = new_buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id());
let new_buffer_b = project_b
.update(cx_b, |p, cx| p.open_buffer_by_id(new_buffer_id, cx))
.await
.unwrap();
new_buffer_b.read_with(cx_b, |buffer, _| {
assert!(buffer.file().is_none());
});
new_buffer_a.update(cx_a, |buffer, cx| {
buffer.edit([(0..0, "ok")], None, cx);
});
project_a
.update(cx_a, |project, cx| {
project.save_buffer_as(new_buffer_a.clone(), "/a/file3.rs".into(), cx)
})
.await
.unwrap();
deterministic.run_until_parked();
new_buffer_b.read_with(cx_b, |buffer_b, _| {
assert_eq!(
buffer_b.file().unwrap().path().as_ref(),
Path::new("file3.rs")
);
new_buffer_a.read_with(cx_a, |buffer_a, _| {
assert_eq!(buffer_b.saved_mtime(), buffer_a.saved_mtime());
assert_eq!(buffer_b.saved_version(), buffer_a.saved_version());
});
});
}
#[gpui::test(iterations = 10)]
@@ -2571,6 +2827,8 @@ async fn test_fs_operations(
})
.await
.unwrap();
deterministic.run_until_parked();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree
@@ -2659,7 +2917,9 @@ async fn test_buffer_conflict_after_save(
assert!(!buf.has_conflict());
});
buffer_b.update(cx_b, |buf, cx| buf.save(cx)).await.unwrap();
project_b.update(cx_b, |project, cx| project.save_buffer(buffer_b.clone(), cx))
.await
.unwrap();
cx_a.foreground().forbid_parking();
buffer_b.read_with(cx_b, |buffer_b, _| assert!(!buffer_b.is_dirty()));
buffer_b.read_with(cx_b, |buf, _| {
@@ -5291,6 +5551,27 @@ async fn test_contacts(
[("user_b".to_string(), "online", "free")]
);
// Test removing a contact
client_b
.user_store
.update(cx_b, |store, cx| {
store.remove_contact(client_c.user_id().unwrap(), cx)
})
.await
.unwrap();
deterministic.run_until_parked();
assert_eq!(
contacts(&client_b, cx_b),
[
("user_a".to_string(), "offline", "free"),
("user_d".to_string(), "online", "free")
]
);
assert_eq!(
contacts(&client_c, cx_c),
[("user_a".to_string(), "offline", "free"),]
);
fn contacts(
client: &TestClient,
cx: &TestAppContext,
@@ -5602,7 +5883,6 @@ async fn test_following(
.downcast::<Editor>()
.unwrap()
});
assert!(cx_b.read(|cx| editor_b2.is_focused(cx)));
assert_eq!(
cx_b.read(|cx| editor_b2.project_path(cx)),
Some((worktree_id, "2.txt").into())

View file

@@ -397,16 +397,18 @@ async fn apply_server_operation(
log::info!("Added connection for {}", username);
}
Operation::RemoveConnection { user_id } => {
log::info!("Simulating full disconnection of user {}", user_id);
Operation::RemoveConnection {
user_id: removed_user_id,
} => {
log::info!("Simulating full disconnection of user {}", removed_user_id);
let client_ix = clients
.iter()
.position(|(client, cx)| client.current_user_id(cx) == user_id);
.position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
let Some(client_ix) = client_ix else { return false };
let user_connection_ids = server
.connection_pool
.lock()
.user_connection_ids(user_id)
.user_connection_ids(removed_user_id)
.collect::<Vec<_>>();
assert_eq!(user_connection_ids.len(), 1);
let removed_peer_id = user_connection_ids[0].into();
@@ -417,7 +419,7 @@ async fn apply_server_operation(
server.disconnect_client(removed_peer_id);
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
deterministic.start_waiting();
log::info!("Waiting for user {} to exit...", user_id);
log::info!("Waiting for user {} to exit...", removed_user_id);
client_task.await;
deterministic.finish_waiting();
server.allow_connections();
@@ -441,19 +443,17 @@ async fn apply_server_operation(
.unwrap();
let pool = server.connection_pool.lock();
for contact in contacts {
if let db::Contact::Accepted { user_id: id, .. } = contact {
if pool.is_user_online(id) {
assert_ne!(
id, user_id,
"removed client is still a contact of another peer"
);
if let db::Contact::Accepted { user_id, busy, .. } = contact {
if user_id == removed_user_id {
assert!(!pool.is_user_online(user_id));
assert!(!busy);
}
}
}
}
log::info!("{} removed", client.username);
plan.lock().user(user_id).online = false;
plan.lock().user(removed_user_id).online = false;
client_cx.update(|cx| {
cx.clear_globals();
drop(client);
@@ -806,8 +806,8 @@ async fn apply_client_operation(
);
ensure_project_shared(&project, client, cx).await;
let (requested_version, save) =
buffer.update(cx, |buffer, cx| (buffer.version(), buffer.save(cx)));
let requested_version = buffer.read_with(cx, |buffer, _| buffer.version());
let save = project.update(cx, |project, cx| project.save_buffer(buffer, cx));
let save = cx.background().spawn(async move {
let (saved_version, _, _) = save
.await
@@ -1972,15 +1972,3 @@ fn path_env_var(name: &str) -> Option<PathBuf> {
}
Some(path)
}
async fn child_file_paths(client: &TestClient, dir_path: &Path) -> Vec<PathBuf> {
let mut child_paths = client.fs.read_dir(dir_path).await.unwrap();
let mut child_file_paths = Vec::new();
while let Some(child_path) = child_paths.next().await {
let child_path = child_path.unwrap();
if client.fs.is_file(&child_path).await {
child_file_paths.push(child_path);
}
}
child_file_paths
}