Merge branch 'main' into language-api-docs

Max Brunsfeld 2024-01-18 15:04:54 -08:00
commit b65cf6d2d9
382 changed files with 12764 additions and 7823 deletions

@@ -99,20 +99,6 @@ pub trait Item {
     fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
 }
 
-// Language server state is stored across 3 collections:
-//     language_servers =>
-//         a mapping from unique server id to LanguageServerState which can either be a task for a
-//         server in the process of starting, or a running server with adapter and language server arcs
-//     language_server_ids => a mapping from worktreeId and server name to the unique server id
-//     language_server_statuses => a mapping from unique server id to the current server status
-//
-// Multiple worktrees can map to the same language server, for example when you jump to the definition
-// of a file in the standard library. So language_server_ids is used to look up which server is active
-// for a given worktree and language server name.
-//
-// When starting a language server, first the id map is checked to make sure a server isn't already available
-// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
-// the Starting variant of LanguageServerState is stored in the language_servers map.
 pub struct Project {
     worktrees: Vec<WorktreeHandle>,
     active_entry: Option<ProjectEntryId>,
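
The comment block removed above is still the clearest description of how language server state is organized. A minimal standalone sketch of that shape, using simplified stand-in types (everything here except the three field names and their roles is an assumption, not the crate's real definitions):

```rust
use std::collections::HashMap;

// Stand-in types; only the three-map layout comes from the removed comment.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct LanguageServerId(usize);

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct WorktreeId(u64);

enum LanguageServerState {
    Starting, // stand-in for the task driving a server that is still starting
    Running,  // stand-in for a running server with its adapter and server arcs
}

struct LanguageServerStatus; // stand-in for the real status type

struct LanguageServerBookkeeping {
    // unique server id -> starting task or running server
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    // (worktree id, server name) -> unique server id; several worktrees can
    // resolve to the same server, e.g. after jumping into a stdlib file
    language_server_ids: HashMap<(WorktreeId, String), LanguageServerId>,
    // unique server id -> current status
    language_server_statuses: HashMap<LanguageServerId, LanguageServerStatus>,
}
```

Startup then follows the comment's protocol: look up the (worktree, server name) pair in language_server_ids and finish early on a hit; otherwise allocate a fresh id and store a Starting entry in language_servers.
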
@@ -130,7 +116,7 @@ pub struct Project {
     next_diagnostic_group_id: usize,
     user_store: Model<UserStore>,
     fs: Arc<dyn Fs>,
-    client_state: Option<ProjectClientState>,
+    client_state: ProjectClientState,
     collaborators: HashMap<proto::PeerId, Collaborator>,
     client_subscriptions: Vec<client::Subscription>,
     _subscriptions: Vec<gpui::Subscription>,
@@ -254,8 +240,10 @@ enum WorktreeHandle {
     Weak(WeakModel<Worktree>),
 }
 
+#[derive(Debug)]
 enum ProjectClientState {
-    Local {
+    Local,
+    Shared {
         remote_id: u64,
         updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
         _send_updates: Task<Result<()>>,
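
Pieced together from this hunk and the constructor hunks below, the refactored enum plausibly reads as follows. The Remote field types are inferred from the accessors later in the diff, so treat this as a sketch over the crate's own types rather than the exact source:

```rust
#[derive(Debug)]
enum ProjectClientState {
    // A local project that is not currently shared.
    Local,
    // A local project that is being shared; updates stream to collaborators.
    Shared {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<Result<()>>,
    },
    // A project joined from another host.
    Remote {
        sharing_has_stopped: bool,
        capability: Capability,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
```

The payoff is that the old Option<ProjectClientState> goes away: previously None meant an unshared local project while Local { .. } meant a shared one. With a unit Local and an explicit Shared variant, shared() moves Local to Shared, unshare moves it back, and Remote is reserved for joined projects.
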
@@ -657,7 +645,7 @@ impl Project {
             local_buffer_ids_by_entry_id: Default::default(),
             buffer_snapshots: Default::default(),
             join_project_response_message_id: 0,
-            client_state: None,
+            client_state: ProjectClientState::Local,
             opened_buffer: watch::channel(),
             client_subscriptions: Vec::new(),
             _subscriptions: vec![
@@ -756,12 +744,12 @@ impl Project {
                 cx.on_app_quit(Self::shutdown_language_servers),
             ],
             client: client.clone(),
-            client_state: Some(ProjectClientState::Remote {
+            client_state: ProjectClientState::Remote {
                 sharing_has_stopped: false,
                 capability: Capability::ReadWrite,
                 remote_id,
                 replica_id,
-            }),
+            },
             supplementary_language_servers: HashMap::default(),
             language_servers: Default::default(),
             language_server_ids: Default::default(),
@@ -799,7 +787,7 @@ impl Project {
             prettiers_per_worktree: HashMap::default(),
             prettier_instances: HashMap::default(),
         };
-        this.set_role(role);
+        this.set_role(role, cx);
         for worktree in worktrees {
             let _ = this.add_worktree(&worktree, cx);
         }
@@ -828,16 +816,16 @@ impl Project {
     fn release(&mut self, cx: &mut AppContext) {
         match &self.client_state {
-            Some(ProjectClientState::Local { .. }) => {
+            ProjectClientState::Local => {}
+            ProjectClientState::Shared { .. } => {
                 let _ = self.unshare_internal(cx);
             }
-            Some(ProjectClientState::Remote { remote_id, .. }) => {
+            ProjectClientState::Remote { remote_id, .. } => {
                 let _ = self.client.send(proto::LeaveProject {
                     project_id: *remote_id,
                 });
                 self.disconnected_from_host_internal(cx);
             }
-            _ => {}
         }
     }
@@ -1058,21 +1046,22 @@ impl Project {
     }
 
     pub fn remote_id(&self) -> Option<u64> {
-        match self.client_state.as_ref()? {
-            ProjectClientState::Local { remote_id, .. }
-            | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
+        match self.client_state {
+            ProjectClientState::Local => None,
+            ProjectClientState::Shared { remote_id, .. }
+            | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
         }
     }
 
     pub fn replica_id(&self) -> ReplicaId {
-        match &self.client_state {
-            Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
+        match self.client_state {
+            ProjectClientState::Remote { replica_id, .. } => replica_id,
             _ => 0,
         }
     }
 
     fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
-        if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
+        if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
             updates_tx
                 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                 .ok();
@@ -1362,7 +1351,7 @@ impl Project {
     }
 
     pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
-        if self.client_state.is_some() {
+        if !matches!(self.client_state, ProjectClientState::Local) {
             return Err(anyhow!("project was already shared"));
         }
         self.client_subscriptions.push(
@@ -1423,7 +1412,7 @@ impl Project {
         let (updates_tx, mut updates_rx) = mpsc::unbounded();
         let client = self.client.clone();
-        self.client_state = Some(ProjectClientState::Local {
+        self.client_state = ProjectClientState::Shared {
             remote_id: project_id,
             updates_tx,
             _send_updates: cx.spawn(move |this, mut cx| async move {
@@ -1508,7 +1497,7 @@ impl Project {
                 }
                 Ok(())
             }),
-        });
+        };
 
         self.metadata_changed(cx);
         cx.emit(Event::RemoteIdChanged(Some(project_id)));
@@ -1578,7 +1567,8 @@ impl Project {
             return Err(anyhow!("attempted to unshare a remote project"));
         }
 
-        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
+        if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
+            self.client_state = ProjectClientState::Local;
             self.collaborators.clear();
             self.shared_buffers.clear();
             self.client_subscriptions.clear();
@@ -1622,22 +1612,30 @@ impl Project {
         cx.notify();
     }
 
-    pub fn set_role(&mut self, role: proto::ChannelRole) {
-        if let Some(ProjectClientState::Remote { capability, .. }) = &mut self.client_state {
-            *capability = if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin
-            {
+    pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
+        let new_capability =
+            if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
                 Capability::ReadWrite
             } else {
                 Capability::ReadOnly
             };
+        if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
+            if *capability == new_capability {
+                return;
+            }
+            *capability = new_capability;
+            for buffer in self.opened_buffers() {
+                buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
+            }
         }
     }
 
     fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
-        if let Some(ProjectClientState::Remote {
+        if let ProjectClientState::Remote {
             sharing_has_stopped,
             ..
-        }) = &mut self.client_state
+        } = &mut self.client_state
         {
             *sharing_has_stopped = true;
@@ -1676,18 +1674,18 @@ impl Project {
     pub fn is_disconnected(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Remote {
+            ProjectClientState::Remote {
                 sharing_has_stopped,
                 ..
-            }) => *sharing_has_stopped,
+            } => *sharing_has_stopped,
             _ => false,
         }
     }
 
     pub fn capability(&self) -> Capability {
         match &self.client_state {
-            Some(ProjectClientState::Remote { capability, .. }) => *capability,
-            Some(ProjectClientState::Local { .. }) | None => Capability::ReadWrite,
+            ProjectClientState::Remote { capability, .. } => *capability,
+            ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
         }
     }
@@ -1697,8 +1695,8 @@ impl Project {
     pub fn is_local(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Remote { .. }) => false,
-            _ => true,
+            ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
+            ProjectClientState::Remote { .. } => false,
         }
     }
@@ -5578,7 +5576,7 @@ impl Project {
         // 3. We run a scan over all the candidate buffers on multiple background threads.
         //    We cannot assume that there will even be a match - while at least one match
         //    is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
-        // There is also an auxilliary background thread responsible for result gathering.
+        // There is also an auxiliary background thread responsible for result gathering.
         // This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
         // it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
         // As soon as the match info on the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
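
The gathering step this comment describes (accept per-buffer results in any order, emit them in sorted order) is the subtle part; here is a minimal self-contained sketch of the pattern, with the channel shape, types, and names all invented for illustration:

```rust
use std::collections::BTreeMap;
use std::sync::mpsc::Receiver;

// Scanner threads send (position_in_sorted_order, has_match) as they finish;
// this gatherer re-emits positions in sorted order, skipping non-matches.
fn gather_sorted(rx: Receiver<(usize, bool)>, total: usize) -> Vec<usize> {
    let mut pending = BTreeMap::new(); // out-of-order results parked here
    let mut next = 0; // next position in sorted order we may report
    let mut reported = Vec::new();
    while next < total {
        let (position, has_match) = match rx.recv() {
            Ok(msg) => msg,
            Err(_) => break, // scanners hung up early
        };
        pending.insert(position, has_match);
        // Flush everything that is now contiguous from `next`.
        while let Some(has_match) = pending.remove(&next) {
            if has_match {
                reported.push(next);
            }
            next += 1;
        }
    }
    reported
}
```

Feeding it results for three buffers in the order (2, true), (0, false), (1, true) yields [1, 2]: nothing is emitted until position 0 resolves, then positions 1 and 2 flush in sorted order.
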
@@ -6155,8 +6153,8 @@ impl Project {
     pub fn is_shared(&self) -> bool {
         match &self.client_state {
-            Some(ProjectClientState::Local { .. }) => true,
-            _ => false,
+            ProjectClientState::Shared { .. } => true,
+            ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
         }
     }
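
Across the predicates this commit touches, each variant now has a consistent profile. A hedged restatement as one table-like function (State and profile are illustrations, not code from the commit):

```rust
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Capability {
    ReadWrite,
    ReadOnly,
}

// Simplified stand-in for ProjectClientState, trimmed to the fields the
// predicates consult.
enum State {
    Local,
    Shared { remote_id: u64 },
    Remote { remote_id: u64, capability: Capability },
}

// (is_local, is_shared, capability, remote_id) for each variant, mirroring
// the match arms in is_local, is_shared, capability, and remote_id above.
fn profile(state: &State) -> (bool, bool, Capability, Option<u64>) {
    match state {
        State::Local => (true, false, Capability::ReadWrite, None),
        State::Shared { remote_id } => (true, true, Capability::ReadWrite, Some(*remote_id)),
        State::Remote {
            remote_id,
            capability,
        } => (false, false, *capability, Some(*remote_id)),
    }
}
```

Only Remote can additionally report is_disconnected, via its sharing_has_stopped flag.
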
@@ -7944,7 +7942,7 @@ impl Project {
         cx: &mut AppContext,
     ) -> u64 {
         let buffer_id = buffer.read(cx).remote_id();
-        if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
+        if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
             updates_tx
                 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
                 .ok();
@@ -7993,21 +7991,21 @@ impl Project {
     }
 
     fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
-        let project_id = match self.client_state.as_ref() {
-            Some(ProjectClientState::Remote {
+        let project_id = match self.client_state {
+            ProjectClientState::Remote {
                 sharing_has_stopped,
                 remote_id,
                 ..
-            }) => {
-                if *sharing_has_stopped {
+            } => {
+                if sharing_has_stopped {
                     return Task::ready(Err(anyhow!(
                         "can't synchronize remote buffers on a readonly project"
                     )));
                 } else {
-                    *remote_id
+                    remote_id
                 }
             }
-            Some(ProjectClientState::Local { .. }) | None => {
+            ProjectClientState::Shared { .. } | ProjectClientState::Local => {
                 return Task::ready(Err(anyhow!(
                     "can't synchronize remote buffers on a local project"
                 )))
@@ -8550,7 +8548,7 @@ fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
             break;
         } else {
             if i > 0 {
-                // Acount for separator prior to this part
+                // Account for separator prior to this part
                 literal_end += path::MAIN_SEPARATOR.len_utf8();
             }
             literal_end += part.len();
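
For context, the function this last hunk touches computes how much of a glob is a literal path prefix. A hedged reconstruction, assuming the loop breaks at the first component containing glob metacharacters; only the separator-accounting lines are verbatim from the diff:

```rust
use std::path;

fn glob_literal_prefix(glob: &str) -> &str {
    let mut literal_end = 0;
    for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
        if part.contains(&['*', '?', '{', '}']) {
            // First component with glob syntax ends the literal prefix.
            break;
        } else {
            if i > 0 {
                // Account for separator prior to this part
                literal_end += path::MAIN_SEPARATOR.len_utf8();
            }
            literal_end += part.len();
        }
    }
    &glob[..literal_end]
}
```

With / as the separator, glob_literal_prefix("src/**/*.rs") would return "src", since "**" is the first component containing glob syntax.
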