Share new worktrees when resharing project
Co-Authored-By: Nathan Sobo <nathan@zed.dev>
parent 585c23e9f6
commit 8487ae77e7
5 changed files with 54 additions and 70 deletions
@@ -66,7 +66,7 @@ impl Entity for Room {
     fn release(&mut self, _: &mut MutableAppContext) {
         if self.status.is_online() {
             log::info!("room was released, sending leave message");
-            self.client.send(proto::LeaveRoom {}).log_err();
+            let _ = self.client.send(proto::LeaveRoom {});
         }
     }
 }
@@ -672,15 +672,17 @@ impl<'a> Drop for ConnectionPoolGuard<'a> {
 }
 
 fn broadcast<F>(
-    sender_id: ConnectionId,
+    sender_id: Option<ConnectionId>,
     receiver_ids: impl IntoIterator<Item = ConnectionId>,
     mut f: F,
 ) where
     F: FnMut(ConnectionId) -> anyhow::Result<()>,
 {
     for receiver_id in receiver_ids {
-        if receiver_id != sender_id {
-            f(receiver_id).trace_err();
+        if Some(receiver_id) != sender_id {
+            if let Err(error) = f(receiver_id) {
+                tracing::error!("failed to send to {:?} {}", receiver_id, error);
+            }
         }
     }
 }
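With this change `broadcast` takes an optional sender: passing `Some(id)` skips that connection as before, while `None` reaches every receiver (as `room_updated` does further down). A minimal standalone sketch of the helper, with a stubbed `ConnectionId` and `eprintln!` standing in for the server's `tracing` call:

// Minimal sketch of the broadcast helper after this change. `ConnectionId`
// is stubbed as a plain newtype; the real server wires this to its Peer type.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct ConnectionId(u32);

fn broadcast<F>(
    sender_id: Option<ConnectionId>,
    receiver_ids: impl IntoIterator<Item = ConnectionId>,
    mut f: F,
) where
    F: FnMut(ConnectionId) -> Result<(), String>,
{
    for receiver_id in receiver_ids {
        // `Some(receiver_id) != sender_id` is always true when sender_id is
        // None, so a `None` sender broadcasts to every receiver.
        if Some(receiver_id) != sender_id {
            if let Err(error) = f(receiver_id) {
                eprintln!("failed to send to {:?} {}", receiver_id, error);
            }
        }
    }
}

fn main() {
    let peers = [ConnectionId(1), ConnectionId(2), ConnectionId(3)];

    // Skip the sender, as the RPC handlers below do.
    broadcast(Some(ConnectionId(2)), peers, |id| {
        println!("forwarding to {:?}", id);
        Ok(())
    });

    // No sender: reach everyone, as the new `room_updated` does.
    broadcast(None, peers, |id| {
        println!("notifying {:?}", id);
        Ok(())
    });
}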
@@ -998,7 +1000,7 @@ async fn rejoin_room(
     }
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project
             .collaborators
             .iter()
@@ -1279,7 +1281,7 @@ async fn unshare_project(message: proto::UnshareProject, session: Session) -> Re
         .await?;
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         guest_connection_ids.iter().copied(),
         |conn_id| session.peer.send(conn_id, message.clone()),
     );
@@ -1430,7 +1432,7 @@ async fn update_project(
         .update_project(project_id, session.connection_id, &request.worktrees)
         .await?;
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         guest_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1456,7 +1458,7 @@ async fn update_worktree(
         .await?;
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         guest_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1479,7 +1481,7 @@ async fn update_diagnostic_summary(
         .await?;
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         guest_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1502,7 +1504,7 @@ async fn start_language_server(
         .await?;
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         guest_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1525,7 +1527,7 @@ async fn update_language_server(
         .project_connection_ids(project_id, session.connection_id)
         .await?;
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1600,11 +1602,15 @@ async fn save_buffer(
     let project_connection_ids = collaborators
         .iter()
         .map(|collaborator| collaborator.connection_id);
-    broadcast(host_connection_id, project_connection_ids, |conn_id| {
+    broadcast(
+        Some(host_connection_id),
+        project_connection_ids,
+        |conn_id| {
         session
             .peer
             .forward_send(host_connection_id, conn_id, response_payload.clone())
-    });
+        },
+    );
     response.send(response_payload)?;
     Ok(())
 }
@@ -1637,7 +1643,7 @@ async fn update_buffer(
     session.executor.record_backtrace();
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1658,7 +1664,7 @@ async fn update_buffer_file(request: proto::UpdateBufferFile, session: Session)
         .await?;
 
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1677,7 +1683,7 @@ async fn buffer_reloaded(request: proto::BufferReloaded, session: Session) -> Re
         .project_connection_ids(project_id, session.connection_id)
         .await?;
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1696,7 +1702,7 @@ async fn buffer_saved(request: proto::BufferSaved, session: Session) -> Result<(
         .project_connection_ids(project_id, session.connection_id)
         .await?;
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -1988,7 +1994,7 @@ async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> R
         .project_connection_ids(project_id, session.connection_id)
         .await?;
     broadcast(
-        session.connection_id,
+        Some(session.connection_id),
         project_connection_ids.iter().copied(),
         |connection_id| {
             session
@@ -2098,21 +2104,20 @@ fn contact_for_user(
 }
 
 fn room_updated(room: &proto::Room, peer: &Peer) {
-    for participant in &room.participants {
-        if let Some(peer_id) = participant
-            .peer_id
-            .ok_or_else(|| anyhow!("invalid participant peer id"))
-            .trace_err()
-        {
+    broadcast(
+        None,
+        room.participants
+            .iter()
+            .filter_map(|participant| Some(participant.peer_id?.into())),
+        |peer_id| {
             peer.send(
                 peer_id.into(),
                 proto::RoomUpdated {
                     room: Some(room.clone()),
                 },
             )
-            .trace_err();
-        }
-    }
+        },
+    );
 }
 
 async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> {
@@ -290,10 +290,11 @@ async fn test_random_collaboration(
             assert_eq!(
                 guest_snapshot.entries(false).collect::<Vec<_>>(),
                 host_snapshot.entries(false).collect::<Vec<_>>(),
-                "{} has different snapshot than the host for worktree {} ({:?})",
+                "{} has different snapshot than the host for worktree {} ({:?}) and project {:?}",
                 client.username,
                 id,
-                host_snapshot.abs_path()
+                host_snapshot.abs_path(),
+                host_project.read_with(host_cx, |project, _| project.remote_id())
             );
             assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id());
         }
@@ -1037,7 +1037,7 @@ impl Project {
             if update_project.is_ok() {
                 for worktree in worktrees {
                     worktree.update(&mut cx, |worktree, cx| {
-                        let worktree = &mut worktree.as_local_mut().unwrap();
+                        let worktree = worktree.as_local_mut().unwrap();
                         worktree.share(project_id, cx).detach_and_log_err(cx)
                     });
                 }
@@ -1062,17 +1062,7 @@ impl Project {
         cx: &mut ModelContext<Self>,
     ) -> Result<()> {
         self.set_collaborators_from_proto(message.collaborators, cx)?;
-        for worktree in self.worktrees.iter() {
-            if let Some(worktree) = worktree.upgrade(&cx) {
-                worktree.update(cx, |worktree, _| {
-                    if let Some(worktree) = worktree.as_local_mut() {
-                        worktree.reshare()
-                    } else {
-                        Ok(())
-                    }
-                })?;
-            }
-        }
+        let _ = self.metadata_changed(cx);
         Ok(())
     }
 
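This hunk is the heart of the commit title: on reshare, `shared` no longer walks the existing worktrees and calls a dedicated `reshare` method; it simply calls `metadata_changed`, and that path (see the `update_project.is_ok()` hunk above) resends the project metadata and calls `share` on every local worktree, so worktrees added while offline get shared too. A rough, hypothetical model of that flow, with stand-in types rather than Zed's real `Project`/`Worktree` API:

// Hypothetical, simplified model of the reshare flow; `Project`, `Worktree`,
// and `metadata_changed` are stand-ins here, not Zed's real types.
struct Worktree {
    id: u64,
    shared: bool,
}

impl Worktree {
    // In the real code this starts (or resumes) streaming worktree updates.
    fn share(&mut self, project_id: u64) {
        self.shared = true;
        println!("worktree {} shared as part of project {}", self.id, project_id);
    }
}

struct Project {
    remote_id: u64,
    worktrees: Vec<Worktree>,
}

impl Project {
    // Stand-in for `metadata_changed`: after resending the project metadata,
    // share every local worktree, whether or not it was shared before.
    fn metadata_changed(&mut self) {
        let project_id = self.remote_id;
        for worktree in &mut self.worktrees {
            worktree.share(project_id);
        }
    }

    // Stand-in for `shared`: runs when the server confirms the project is
    // shared again after a reconnect.
    fn shared(&mut self) {
        self.metadata_changed();
    }
}

fn main() {
    let mut project = Project {
        remote_id: 42,
        worktrees: vec![
            Worktree { id: 1, shared: true },  // was already shared
            Worktree { id: 2, shared: false }, // added while offline
        ],
    };
    project.shared();
    assert!(project.worktrees.iter().all(|w| w.shared));
}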
@@ -6259,18 +6249,14 @@ impl Entity for Project {
     fn release(&mut self, _: &mut gpui::MutableAppContext) {
         match &self.client_state {
             Some(ProjectClientState::Local { remote_id, .. }) => {
-                self.client
-                    .send(proto::UnshareProject {
+                let _ = self.client.send(proto::UnshareProject {
                     project_id: *remote_id,
-                    })
-                    .log_err();
+                });
             }
             Some(ProjectClientState::Remote { remote_id, .. }) => {
-                self.client
-                    .send(proto::LeaveProject {
+                let _ = self.client.send(proto::LeaveProject {
                     project_id: *remote_id,
-                    })
-                    .log_err();
+                });
             }
             _ => {}
         }
@@ -166,7 +166,7 @@ enum ScanState {
 struct ShareState {
     project_id: u64,
     snapshots_tx: watch::Sender<LocalSnapshot>,
-    reshared: watch::Sender<()>,
+    resume_updates: watch::Sender<()>,
     _maintain_remote_snapshot: Task<Option<()>>,
 }
 
@@ -975,12 +975,12 @@ impl LocalWorktree {
     pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
         let (share_tx, share_rx) = oneshot::channel();
 
-        if self.share.is_some() {
+        if let Some(share) = self.share.as_mut() {
             let _ = share_tx.send(());
+            *share.resume_updates.borrow_mut() = ();
         } else {
             let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
-            let (reshared_tx, mut reshared_rx) = watch::channel();
-            let _ = reshared_rx.try_recv();
+            let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
             let worktree_id = cx.model_id() as u64;
 
             for (path, summary) in self.diagnostic_summaries.iter() {
@@ -1022,10 +1022,11 @@ impl LocalWorktree {
                     let update =
                         snapshot.build_update(&prev_snapshot, project_id, worktree_id, true);
                     for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
+                        let _ = resume_updates_rx.try_recv();
                         while let Err(error) = client.request(update.clone()).await {
                             log::error!("failed to send worktree update: {}", error);
-                            log::info!("waiting for worktree to be reshared");
-                            if reshared_rx.next().await.is_none() {
+                            log::info!("waiting to resume updates");
+                            if resume_updates_rx.next().await.is_none() {
                                 return Ok(());
                             }
                         }
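With the rename, the retry loop in `share` drains the `resume_updates` watch channel before each send and parks on it after a failure, so either a reshare or a newly shared worktree can nudge a stalled update stream back to life. A standalone sketch of that pause-until-resumed pattern, using `tokio::sync::watch` as a stand-in for the watch channel used here and a hypothetical `send_update` in place of `client.request`:

// Standalone sketch of the "pause until resumed" retry pattern, using
// tokio::sync::watch in place of the watch channel used in the worktree code.
// Assumed dependency: tokio = { version = "1", features = ["full"] }.
use tokio::sync::watch;

// Hypothetical stand-in for `client.request(update)`: fail the first attempt
// to force the loop to wait for a resume signal.
async fn send_update(attempt: u32) -> Result<(), &'static str> {
    if attempt == 0 {
        Err("connection lost")
    } else {
        Ok(())
    }
}

#[tokio::main]
async fn main() {
    let (resume_updates_tx, mut resume_updates_rx) = watch::channel(());

    // Simulate the host resharing the project a moment later.
    let resharer = tokio::spawn(async move {
        tokio::time::sleep(std::time::Duration::from_millis(50)).await;
        let _ = resume_updates_tx.send(());
    });

    let mut attempt = 0;
    // Drain any stale resume signal before sending, as the new code does
    // with `resume_updates_rx.try_recv()`.
    resume_updates_rx.borrow_and_update();
    while let Err(error) = send_update(attempt).await {
        eprintln!("failed to send worktree update: {}", error);
        eprintln!("waiting to resume updates");
        // Equivalent of `resume_updates_rx.next().await.is_none()`: give up
        // if the sender side (the share state) was dropped.
        if resume_updates_rx.changed().await.is_err() {
            return;
        }
        attempt += 1;
    }
    println!("update sent after {} retries", attempt);

    let _ = resharer.await;
}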
@@ -1046,7 +1047,7 @@ impl LocalWorktree {
             self.share = Some(ShareState {
                 project_id,
                 snapshots_tx,
-                reshared: reshared_tx,
+                resume_updates: resume_updates_tx,
                 _maintain_remote_snapshot,
             });
         }
@@ -1059,15 +1060,6 @@ impl LocalWorktree {
         self.share.take();
     }
 
-    pub fn reshare(&mut self) -> Result<()> {
-        let share = self
-            .share
-            .as_mut()
-            .ok_or_else(|| anyhow!("can't reshare a worktree that wasn't shared"))?;
-        *share.reshared.borrow_mut() = ();
-        Ok(())
-    }
-
     pub fn is_shared(&self) -> bool {
         self.share.is_some()
     }