Make worktree UpdatedEntries events fully describe all changes (#2533)
This PR makes the worktree's change events more useful in a few ways:

* The changes are now described by a cheaply cloneable collection, so that they can be used in background tasks. Right now, I'm using a simple Arc slice (sketched below).
* The `UpdatedEntries` event now captures not only file-system changes, but also newly-loaded paths that are discovered during the initial scan.
* The `UpdatedGitRepositories` event now includes repositories whose work directory changed even though their git directory didn't. A boolean flag indicates whether the git content itself changed.
* The `UpdatedEntries` and `UpdatedGitRepositories` events are now *used* to compute the worktree's `UpdateWorktree` messages, which sync changes to guests. This unifies two closely-related code paths and makes the host more efficient when collaborating, because the `UpdateWorktree` message computation used to require walking the entire `entries` tree on every FS change.
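To make the new event payloads concrete, here is a minimal, self-contained sketch. The tuple layout `(Arc<Path>, ProjectEntryId, PathChange)`, the `PathChange` variants, and the `git_dir_changed` flag come straight from the hunks below; everything else (the simplified type definitions, the `UpdatedEntriesSet` alias, the `RepositoryChange` struct, and the example values) is illustrative and not Zed's actual implementation.

```rust
use std::path::Path;
use std::sync::Arc;
use std::thread;

// Simplified stand-ins for the worktree event payloads. Only the shapes that
// appear in this PR's hunks (the tuple layout, the `PathChange` variants, and
// the `git_dir_changed` flag) are taken from the diff; the definitions below
// are illustrative, not Zed's real types.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct ProjectEntryId(usize);

#[allow(dead_code)] // variants mirror the diff; not all are exercised below
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum PathChange {
    Loaded, // path discovered during the initial scan, not an FS change
    Added,
    Removed,
    Updated,
    AddedOrUpdated,
}

// An `Arc` slice: cloning the whole change set is just a reference-count bump.
type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;

// One entry per repository whose work directory was touched.
#[derive(Clone, Debug)]
struct RepositoryChange {
    work_dir: Arc<Path>,
    git_dir_changed: bool, // false when only the work-dir contents changed
}
type UpdatedGitRepositoriesSet = Arc<[RepositoryChange]>;

fn main() {
    let changes: UpdatedEntriesSet = Arc::from(vec![
        (Arc::from(Path::new("src/main.rs")), ProjectEntryId(1), PathChange::Updated),
        (Arc::from(Path::new("src/lib.rs")), ProjectEntryId(2), PathChange::Loaded),
    ]);

    // Hand the same change set to a background task without copying it.
    let background_changes = changes.clone();
    let worker = thread::spawn(move || {
        for (path, entry_id, change) in background_changes.iter() {
            println!("background: {entry_id:?} {change:?} {}", path.display());
        }
    });

    // Meanwhile the foreground can keep using it, e.g. to tell real FS
    // changes apart from initial-scan loads.
    let fs_changes = changes
        .iter()
        .filter(|(_, _, change)| *change != PathChange::Loaded)
        .count();
    println!("{fs_changes} entries changed on disk");

    let repo_changes: UpdatedGitRepositoriesSet = Arc::from(vec![RepositoryChange {
        work_dir: Arc::from(Path::new("")),
        git_dir_changed: false,
    }]);
    for repo in repo_changes.iter() {
        if repo.git_dir_changed {
            println!("reload git state for {:?}", repo.work_dir);
        } else {
            println!("only the work dir changed for {:?}", repo.work_dir);
        }
    }

    worker.join().unwrap();
}
```

The `UpdatedGitRepositoriesSet` name does appear in the diff below; its definition here, like the rest of the sketch, is only a stand-in to show why an Arc-backed, flag-carrying payload is cheap to fan out to subscribers and background tasks.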
commit e4530471de
7 changed files with 733 additions and 641 deletions
@@ -434,7 +434,9 @@ impl<T: Entity> ModelHandle<T> {
             Duration::from_secs(1)
         };

+        let executor = cx.background().clone();
         async move {
+            executor.start_waiting();
             let notification = crate::util::timeout(duration, rx.next())
                 .await
                 .expect("next notification timed out");
@@ -876,6 +876,14 @@ impl Background {
             }
         }
     }
+
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn start_waiting(&self) {
+        match self {
+            Self::Deterministic { executor, .. } => executor.start_waiting(),
+            _ => panic!("this method can only be called on a deterministic executor"),
+        }
+    }
 }

 impl Default for Background {
@@ -796,6 +796,12 @@ impl LanguageRegistry {
         http_client: Arc<dyn HttpClient>,
         cx: &mut AppContext,
     ) -> Option<PendingLanguageServer> {
+        let server_id = self.state.write().next_language_server_id();
+        log::info!(
+            "starting language server name:{}, path:{root_path:?}, id:{server_id}",
+            adapter.name.0
+        );
+
         #[cfg(any(test, feature = "test-support"))]
         if language.fake_adapter.is_some() {
             let task = cx.spawn(|cx| async move {
@@ -825,7 +831,6 @@ impl LanguageRegistry {
                 Ok(server)
             });

-            let server_id = self.state.write().next_language_server_id();
             return Some(PendingLanguageServer { server_id, task });
         }

@@ -834,7 +839,6 @@ impl LanguageRegistry {
             .clone()
             .ok_or_else(|| anyhow!("language server download directory has not been assigned"))
             .log_err()?;
-
         let this = self.clone();
         let language = language.clone();
         let http_client = http_client.clone();
@@ -843,7 +847,6 @@ impl LanguageRegistry {
         let adapter = adapter.clone();
         let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
         let login_shell_env_loaded = self.login_shell_env_loaded.clone();
-        let server_id = self.state.write().next_language_server_id();

         let task = cx.spawn(|cx| async move {
             login_shell_env_loaded.await;
@@ -849,10 +849,12 @@ impl FakeLanguageServer {
         T: request::Request,
         T::Result: 'static + Send,
     {
+        self.server.executor.start_waiting();
         self.server.request::<T>(params).await
     }

     pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
+        self.server.executor.start_waiting();
         self.try_receive_notification::<T>().await.unwrap()
     }

@@ -1459,7 +1459,7 @@ impl Project {
         };

         cx.foreground().spawn(async move {
-            pump_loading_buffer_reciever(loading_watch)
+            wait_for_loading_buffer(loading_watch)
                 .await
                 .map_err(|error| anyhow!("{}", error))
         })
@@ -4847,7 +4847,7 @@ impl Project {
         if worktree.read(cx).is_local() {
             cx.subscribe(worktree, |this, worktree, event, cx| match event {
                 worktree::Event::UpdatedEntries(changes) => {
-                    this.update_local_worktree_buffers(&worktree, &changes, cx);
+                    this.update_local_worktree_buffers(&worktree, changes, cx);
                     this.update_local_worktree_language_servers(&worktree, changes, cx);
                 }
                 worktree::Event::UpdatedGitRepositories(updated_repos) => {
@@ -4881,13 +4881,13 @@ impl Project {
     fn update_local_worktree_buffers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
         cx: &mut ModelContext<Self>,
     ) {
         let snapshot = worktree_handle.read(cx).snapshot();

         let mut renamed_buffers = Vec::new();
-        for (path, entry_id) in changes.keys() {
+        for (path, entry_id, _) in changes {
             let worktree_id = worktree_handle.read(cx).id();
             let project_path = ProjectPath {
                 worktree_id,
@@ -4993,7 +4993,7 @@ impl Project {
     fn update_local_worktree_language_servers(
         &mut self,
         worktree_handle: &ModelHandle<Worktree>,
-        changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
         cx: &mut ModelContext<Self>,
     ) {
         if changes.is_empty() {
@@ -5024,23 +5024,21 @@ impl Project {
                 let params = lsp::DidChangeWatchedFilesParams {
                     changes: changes
                         .iter()
-                        .filter_map(|((path, _), change)| {
-                            if watched_paths.is_match(&path) {
-                                Some(lsp::FileEvent {
-                                    uri: lsp::Url::from_file_path(abs_path.join(path))
-                                        .unwrap(),
-                                    typ: match change {
-                                        PathChange::Added => lsp::FileChangeType::CREATED,
-                                        PathChange::Removed => lsp::FileChangeType::DELETED,
-                                        PathChange::Updated
-                                        | PathChange::AddedOrUpdated => {
-                                            lsp::FileChangeType::CHANGED
-                                        }
-                                    },
-                                })
-                            } else {
-                                None
-                            }
+                        .filter_map(|(path, _, change)| {
+                            if !watched_paths.is_match(&path) {
+                                return None;
+                            }
+                            let typ = match change {
+                                PathChange::Loaded => return None,
+                                PathChange::Added => lsp::FileChangeType::CREATED,
+                                PathChange::Removed => lsp::FileChangeType::DELETED,
+                                PathChange::Updated => lsp::FileChangeType::CHANGED,
+                                PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
+                            };
+                            Some(lsp::FileEvent {
+                                uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
+                                typ,
+                            })
                         })
                         .collect(),
                 };
@@ -5059,98 +5057,102 @@ impl Project {
     fn update_local_worktree_buffers_git_repos(
         &mut self,
         worktree_handle: ModelHandle<Worktree>,
-        repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
+        changed_repos: &UpdatedGitRepositoriesSet,
         cx: &mut ModelContext<Self>,
     ) {
         debug_assert!(worktree_handle.read(cx).is_local());

-        // Setup the pending buffers
+        // Identify the loading buffers whose containing repository that has changed.
         let future_buffers = self
             .loading_buffers_by_path
             .iter()
-            .filter_map(|(path, receiver)| {
-                let path = &path.path;
-                let (work_directory, repo) = repos
-                    .iter()
-                    .find(|(work_directory, _)| path.starts_with(work_directory))?;
-
-                let repo_relative_path = path.strip_prefix(work_directory).log_err()?;
-
+            .filter_map(|(project_path, receiver)| {
+                if project_path.worktree_id != worktree_handle.read(cx).id() {
+                    return None;
+                }
+                let path = &project_path.path;
+                changed_repos.iter().find(|(work_dir, change)| {
+                    path.starts_with(work_dir) && change.git_dir_changed
+                })?;
                 let receiver = receiver.clone();
-                let repo_ptr = repo.repo_ptr.clone();
-                let repo_relative_path = repo_relative_path.to_owned();
+                let path = path.clone();
                 Some(async move {
-                    pump_loading_buffer_reciever(receiver)
+                    wait_for_loading_buffer(receiver)
                         .await
                         .ok()
-                        .map(|buffer| (buffer, repo_relative_path, repo_ptr))
+                        .map(|buffer| (buffer, path))
                 })
             })
-            .collect::<FuturesUnordered<_>>()
-            .filter_map(|result| async move {
-                let (buffer_handle, repo_relative_path, repo_ptr) = result?;
+            .collect::<FuturesUnordered<_>>();

-                let lock = repo_ptr.lock();
-                lock.load_index_text(&repo_relative_path)
-                    .map(|diff_base| (diff_base, buffer_handle))
-            });
+        // Identify the current buffers whose containing repository has changed.
+        let current_buffers = self
+            .opened_buffers
+            .values()
+            .filter_map(|buffer| {
+                let buffer = buffer.upgrade(cx)?;
+                let file = File::from_dyn(buffer.read(cx).file())?;
+                if file.worktree != worktree_handle {
+                    return None;
+                }
+                let path = file.path();
+                changed_repos.iter().find(|(work_dir, change)| {
+                    path.starts_with(work_dir) && change.git_dir_changed
+                })?;
+                Some((buffer, path.clone()))
+            })
+            .collect::<Vec<_>>();

-        let update_diff_base_fn = update_diff_base(self);
-        cx.spawn(|_, mut cx| async move {
-            let diff_base_tasks = cx
+        if future_buffers.len() + current_buffers.len() == 0 {
+            return;
+        }
+
+        let remote_id = self.remote_id();
+        let client = self.client.clone();
+        cx.spawn_weak(move |_, mut cx| async move {
+            // Wait for all of the buffers to load.
+            let future_buffers = future_buffers.collect::<Vec<_>>().await;
+
+            // Reload the diff base for every buffer whose containing git repository has changed.
+            let snapshot =
+                worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
+            let diff_bases_by_buffer = cx
                 .background()
-                .spawn(future_buffers.collect::<Vec<_>>())
+                .spawn(async move {
+                    future_buffers
+                        .into_iter()
+                        .filter_map(|e| e)
+                        .chain(current_buffers)
+                        .filter_map(|(buffer, path)| {
+                            let (work_directory, repo) =
+                                snapshot.repository_and_work_directory_for_path(&path)?;
+                            let repo = snapshot.get_local_repo(&repo)?;
+                            let relative_path = path.strip_prefix(&work_directory).ok()?;
+                            let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
+                            Some((buffer, base_text))
+                        })
+                        .collect::<Vec<_>>()
+                })
                 .await;

-            for (diff_base, buffer) in diff_base_tasks.into_iter() {
-                update_diff_base_fn(Some(diff_base), buffer, &mut cx);
+            // Assign the new diff bases on all of the buffers.
+            for (buffer, diff_base) in diff_bases_by_buffer {
+                let buffer_id = buffer.update(&mut cx, |buffer, cx| {
+                    buffer.set_diff_base(diff_base.clone(), cx);
+                    buffer.remote_id()
+                });
+                if let Some(project_id) = remote_id {
+                    client
+                        .send(proto::UpdateDiffBase {
+                            project_id,
+                            buffer_id,
+                            diff_base,
+                        })
+                        .log_err();
+                }
             }
         })
         .detach();
-
-        // And the current buffers
-        for (_, buffer) in &self.opened_buffers {
-            if let Some(buffer) = buffer.upgrade(cx) {
-                let file = match File::from_dyn(buffer.read(cx).file()) {
-                    Some(file) => file,
-                    None => continue,
-                };
-                if file.worktree != worktree_handle {
-                    continue;
-                }
-
-                let path = file.path().clone();
-
-                let worktree = worktree_handle.read(cx);
-
-                let (work_directory, repo) = match repos
-                    .iter()
-                    .find(|(work_directory, _)| path.starts_with(work_directory))
-                {
-                    Some(repo) => repo.clone(),
-                    None => continue,
-                };
-
-                let relative_repo = match path.strip_prefix(work_directory).log_err() {
-                    Some(relative_repo) => relative_repo.to_owned(),
-                    None => continue,
-                };
-
-                drop(worktree);
-
-                let update_diff_base_fn = update_diff_base(self);
-                let git_ptr = repo.repo_ptr.clone();
-                let diff_base_task = cx
-                    .background()
-                    .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
-
-                cx.spawn(|_, mut cx| async move {
-                    let diff_base = diff_base_task.await;
-                    update_diff_base_fn(diff_base, buffer, &mut cx);
-                })
-                .detach();
-            }
-        }
     }

     pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
@@ -7072,7 +7074,7 @@ impl Item for Buffer {
     }
 }

-async fn pump_loading_buffer_reciever(
+async fn wait_for_loading_buffer(
     mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
 ) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
     loop {
@@ -7085,26 +7087,3 @@ async fn pump_loading_buffer_reciever(
         receiver.next().await;
     }
 }
-
-fn update_diff_base(
-    project: &Project,
-) -> impl Fn(Option<String>, ModelHandle<Buffer>, &mut AsyncAppContext) {
-    let remote_id = project.remote_id();
-    let client = project.client().clone();
-    move |diff_base, buffer, cx| {
-        let buffer_id = buffer.update(cx, |buffer, cx| {
-            buffer.set_diff_base(diff_base.clone(), cx);
-            buffer.remote_id()
-        });
-
-        if let Some(project_id) = remote_id {
-            client
-                .send(proto::UpdateDiffBase {
-                    project_id,
-                    buffer_id: buffer_id as u64,
-                    diff_base,
-                })
-                .log_err();
-        }
-    }
-}
|
@ -1193,7 +1193,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
|
|||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
#[gpui::test(iterations = 3)]
|
||||
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
|
@@ -1273,7 +1273,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {

     // The diagnostics have moved down since they were created.
     buffer.next_notification(cx).await;
-    buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
     buffer.read_with(cx, |buffer, _| {
         assert_eq!(
             buffer
@@ -1352,6 +1352,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
     });

     buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
     buffer.read_with(cx, |buffer, _| {
         assert_eq!(
             buffer
@@ -1444,6 +1445,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
     });

     buffer.next_notification(cx).await;
+    cx.foreground().run_until_parked();
     buffer.read_with(cx, |buffer, _| {
         assert_eq!(
             buffer
@@ -2524,29 +2526,21 @@ async fn test_rescan_and_remote_updates(

     // Create a remote copy of this worktree.
     let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-    let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
-    let remote = cx.update(|cx| {
-        Worktree::remote(
-            1,
-            1,
-            proto::WorktreeMetadata {
-                id: initial_snapshot.id().to_proto(),
-                root_name: initial_snapshot.root_name().into(),
-                abs_path: initial_snapshot
-                    .abs_path()
-                    .as_os_str()
-                    .to_string_lossy()
-                    .into(),
-                visible: true,
-            },
-            rpc.clone(),
-            cx,
-        )
-    });
-    remote.update(cx, |remote, _| {
-        let update = initial_snapshot.build_initial_update(1);
-        remote.as_remote_mut().unwrap().update_from_remote(update);
-    });
+
+    let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
+
+    let updates = Arc::new(Mutex::new(Vec::new()));
+    tree.update(cx, |tree, cx| {
+        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+            let updates = updates.clone();
+            move |update| {
+                updates.lock().push(update);
+                async { true }
+            }
+        });
+    });
+
+    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
+    deterministic.run_until_parked();

     cx.read(|cx| {
@@ -2612,14 +2606,11 @@ async fn test_rescan_and_remote_updates(

     // Update the remote worktree. Check that it becomes consistent with the
     // local worktree.
-    remote.update(cx, |remote, cx| {
-        let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
-            &initial_snapshot,
-            1,
-            1,
-            true,
-        );
-        remote.as_remote_mut().unwrap().update_from_remote(update);
+    deterministic.run_until_parked();
+    remote.update(cx, |remote, _| {
+        for update in updates.lock().drain(..) {
+            remote.as_remote_mut().unwrap().update_from_remote(update);
+        }
     });
-
+    deterministic.run_until_parked();
     remote.read_with(cx, |remote, _| {
File diff suppressed because it is too large.