Fix up/down project_id confusion (#18099)

Release Notes:

- ssh remoting: Fix LSP queries run over collab
This commit is contained in:
Conrad Irwin 2024-09-23 09:11:58 -06:00 committed by GitHub
parent 35a80f07e0
commit a36706aed6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 161 additions and 100 deletions

View file

@ -36,19 +36,27 @@ struct MatchingEntry {
respond: oneshot::Sender<ProjectPath>,
}
/// Backing state for a `WorktreeStore`: either the worktrees live on this
/// machine and are scanned through `fs`, or they are mirrored from an
/// upstream (remote) project reached over a protocol client.
enum WorktreeStoreState {
    Local {
        // Filesystem used to create and scan local worktrees.
        fs: Arc<dyn Fs>,
    },
    Remote {
        // Present only for dev-server projects — TODO confirm against callers.
        dev_server_project_id: Option<DevServerProjectId>,
        // Client for talking to the upstream peer that owns the worktrees.
        upstream_client: AnyProtoClient,
        // Id of this project as known to the upstream peer; keeping it here
        // (rather than a single shared `remote_id`) avoids confusing the
        // upstream project id with the downstream one.
        upstream_project_id: u64,
    },
}
pub struct WorktreeStore {
next_entry_id: Arc<AtomicUsize>,
upstream_client: Option<AnyProtoClient>,
downstream_client: Option<AnyProtoClient>,
remote_id: u64,
dev_server_project_id: Option<DevServerProjectId>,
downstream_client: Option<(AnyProtoClient, u64)>,
retain_worktrees: bool,
worktrees: Vec<WorktreeHandle>,
worktrees_reordered: bool,
#[allow(clippy::type_complexity)]
loading_worktrees:
HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
fs: Arc<dyn Fs>,
state: WorktreeStoreState,
}
pub enum WorktreeStoreEvent {
@ -69,27 +77,37 @@ impl WorktreeStore {
client.add_model_request_handler(Self::handle_expand_project_entry);
}
pub fn new(
upstream_client: Option<AnyProtoClient>,
retain_worktrees: bool,
fs: Arc<dyn Fs>,
) -> Self {
pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
Self {
next_entry_id: Default::default(),
loading_worktrees: Default::default(),
dev_server_project_id: None,
downstream_client: None,
worktrees: Vec::new(),
worktrees_reordered: false,
retain_worktrees,
remote_id: 0,
upstream_client,
fs,
state: WorktreeStoreState::Local { fs },
}
}
pub fn set_dev_server_project_id(&mut self, id: DevServerProjectId) {
self.dev_server_project_id = Some(id);
    /// Creates a `WorktreeStore` whose worktrees are owned by an upstream
    /// (remote) project rather than scanned from the local filesystem.
    ///
    /// * `retain_worktrees` — initial retention policy for worktree handles.
    /// * `upstream_client` — protocol client used to reach the upstream peer.
    /// * `upstream_project_id` — id of this project on the upstream peer.
    /// * `dev_server_project_id` — set when the upstream is a dev-server
    ///   project; presumably selects the dev-server worktree-creation path —
    ///   verify against `create_worktree`.
    pub fn remote(
        retain_worktrees: bool,
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        dev_server_project_id: Option<DevServerProjectId>,
    ) -> Self {
        Self {
            next_entry_id: Default::default(),
            loading_worktrees: Default::default(),
            // Not shared yet; populated later (together with the downstream
            // project id) when the project is shared.
            downstream_client: None,
            worktrees: Vec::new(),
            worktrees_reordered: false,
            retain_worktrees,
            state: WorktreeStoreState::Remote {
                upstream_client,
                upstream_project_id,
                dev_server_project_id,
            },
        }
    }
/// Iterates through all worktrees, including ones that don't appear in the project panel
@ -159,14 +177,28 @@ impl WorktreeStore {
) -> Task<Result<Model<Worktree>>> {
let path: Arc<Path> = abs_path.as_ref().into();
if !self.loading_worktrees.contains_key(&path) {
let task = if let Some(client) = self.upstream_client.clone() {
if let Some(dev_server_project_id) = self.dev_server_project_id {
self.create_dev_server_worktree(client, dev_server_project_id, abs_path, cx)
} else {
self.create_ssh_worktree(client, abs_path, visible, cx)
let task = match &self.state {
WorktreeStoreState::Remote {
upstream_client,
dev_server_project_id,
..
} => {
if let Some(dev_server_project_id) = dev_server_project_id {
self.create_dev_server_worktree(
upstream_client.clone(),
*dev_server_project_id,
abs_path,
cx,
)
} else if upstream_client.is_via_collab() {
Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
} else {
self.create_ssh_worktree(upstream_client.clone(), abs_path, visible, cx)
}
}
WorktreeStoreState::Local { fs } => {
self.create_local_worktree(fs.clone(), abs_path, visible, cx)
}
} else {
self.create_local_worktree(abs_path, visible, cx)
};
self.loading_worktrees.insert(path.clone(), task.shared());
@ -236,11 +268,11 @@ impl WorktreeStore {
fn create_local_worktree(
&mut self,
fs: Arc<dyn Fs>,
abs_path: impl AsRef<Path>,
visible: bool,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
let fs = self.fs.clone();
let next_entry_id = self.next_entry_id.clone();
let path: Arc<Path> = abs_path.as_ref().into();
@ -374,6 +406,17 @@ impl WorktreeStore {
self.worktrees_reordered = worktrees_reordered;
}
fn upstream_client(&self) -> Option<(AnyProtoClient, u64)> {
match &self.state {
WorktreeStoreState::Remote {
upstream_client,
upstream_project_id,
..
} => Some((upstream_client.clone(), *upstream_project_id)),
WorktreeStoreState::Local { .. } => None,
}
}
pub fn set_worktrees_from_proto(
&mut self,
worktrees: Vec<proto::WorktreeMetadata>,
@ -389,8 +432,8 @@ impl WorktreeStore {
})
.collect::<HashMap<_, _>>();
let client = self
.upstream_client
let (client, project_id) = self
.upstream_client()
.clone()
.ok_or_else(|| anyhow!("invalid project"))?;
@ -408,7 +451,7 @@ impl WorktreeStore {
self.worktrees.push(handle);
} else {
self.add(
&Worktree::remote(self.remote_id, replica_id, worktree, client.clone(), cx),
&Worktree::remote(project_id, replica_id, worktree, client.clone(), cx),
cx,
);
}
@ -477,10 +520,9 @@ impl WorktreeStore {
}
pub fn send_project_updates(&mut self, cx: &mut ModelContext<Self>) {
let Some(downstream_client) = self.downstream_client.clone() else {
let Some((downstream_client, project_id)) = self.downstream_client.clone() else {
return;
};
let project_id = self.remote_id;
let update = proto::UpdateProject {
project_id,
@ -549,8 +591,7 @@ impl WorktreeStore {
cx: &mut ModelContext<Self>,
) {
self.retain_worktrees = true;
self.remote_id = remote_id;
self.downstream_client = Some(downsteam_client);
self.downstream_client = Some((downsteam_client, remote_id));
// When shared, retain all worktrees
for worktree_handle in self.worktrees.iter_mut() {