cx.background_executor().spawn(...) -> cx.background_spawn(...) (#25103)

Done automatically with

> ast-grep -p '$A.background_executor().spawn($B)' -r '$A.background_spawn($B)' --update-all --globs "\!crates/gpui"

Followed by:

* `cargo fmt`
* Unexpectedly needed to remove some trailing whitespace.
* Manually added imports of `gpui::{AppContext as _}`, which provides `background_spawn` (see the sketch below).
* Added `AppContext as _` to existing uses of `AppContext`.
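
For illustration, here is a minimal before/after sketch of the rewrite at a single call site. The helper function and its body are hypothetical (not part of this commit); `App`, `Task`, and the `AppContext` trait providing `background_spawn` are the gpui items the diff below relies on:

```rust
use gpui::{App, AppContext as _, Task};

// Hypothetical helper mirroring the call sites touched by this commit.
fn sum_in_background(cx: &App) -> Task<u64> {
    // Before: cx.background_executor().spawn(async move { ... })
    // After the ast-grep rewrite, the same future goes through the
    // `AppContext` trait method, so `AppContext as _` must be in scope.
    cx.background_spawn(async move {
        // Work that should stay off the foreground thread.
        (1..=1_000u64).sum::<u64>()
    })
}
```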

Release Notes:

- N/A
Michael Sloan 2025-02-18 13:30:33 -07:00 committed by GitHub
parent f606b0641e
commit b1872e3afd
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
120 changed files with 1146 additions and 1267 deletions

@@ -346,7 +346,7 @@ impl RemoteBufferStore {
fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
let project_id = self.project_id;
let client = self.upstream_client.clone();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let response = client
.request(proto::OpenUnstagedDiff {
project_id,
@@ -366,7 +366,7 @@ impl RemoteBufferStore {
let project_id = self.project_id;
let client = self.upstream_client.clone();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let response = client
.request(proto::OpenUncommittedDiff {
project_id,
@@ -402,9 +402,7 @@ impl RemoteBufferStore {
return Ok(buffer);
}
cx.background_executor()
.spawn(async move { rx.await? })
.await
cx.background_spawn(async move { rx.await? }).await
})
}
@@ -843,8 +841,7 @@ impl LocalBufferStore {
let snapshot =
worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
let diff_bases_changes_by_buffer = cx
.background_executor()
.spawn(async move {
.background_spawn(async move {
diff_state_updates
.into_iter()
.filter_map(
@@ -1129,8 +1126,7 @@ impl LocalBufferStore {
cx.spawn(move |_, mut cx| async move {
let loaded = load_file.await?;
let text_buffer = cx
.background_executor()
.spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
.background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
.await;
cx.insert_entity(reservation, |_| {
Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
@@ -1347,8 +1343,7 @@ impl BufferStore {
}
};
cx.background_executor()
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
pub fn open_unstaged_diff(
@@ -1388,8 +1383,7 @@ impl BufferStore {
}
};
cx.background_executor()
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
pub fn open_uncommitted_diff(
@@ -1409,7 +1403,7 @@ impl BufferStore {
BufferStoreState::Local(this) => {
let committed_text = this.load_committed_text(&buffer, cx);
let staged_text = this.load_staged_text(&buffer, cx);
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let committed_text = committed_text.await?;
let staged_text = staged_text.await?;
let diff_bases_change = if committed_text == staged_text {
@@ -1445,8 +1439,7 @@ impl BufferStore {
}
};
cx.background_executor()
.spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
async fn open_diff_internal(
@@ -1587,7 +1580,7 @@ impl BufferStore {
anyhow::Ok(Some((repo, relative_path, content)))
});
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let Some((repo, relative_path, content)) = blame_params? else {
return Ok(None);
};
@@ -2106,24 +2099,23 @@ impl BufferStore {
})
.log_err();
cx.background_executor()
.spawn(
async move {
let operations = operations.await;
for chunk in split_operations(operations) {
client
.request(proto::UpdateBuffer {
project_id,
buffer_id: buffer_id.into(),
operations: chunk,
})
.await?;
}
anyhow::Ok(())
}
.log_err(),
)
.detach();
cx.background_spawn(
async move {
let operations = operations.await;
for chunk in split_operations(operations) {
client
.request(proto::UpdateBuffer {
project_id,
buffer_id: buffer_id.into(),
operations: chunk,
})
.await?;
}
anyhow::Ok(())
}
.log_err(),
)
.detach();
}
}
Ok(response)
@@ -2558,27 +2550,26 @@ impl BufferStore {
if client.send(initial_state).log_err().is_some() {
let client = client.clone();
cx.background_executor()
.spawn(async move {
let mut chunks = split_operations(operations).peekable();
while let Some(chunk) = chunks.next() {
let is_last = chunks.peek().is_none();
client.send(proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
proto::BufferChunk {
buffer_id: buffer_id.into(),
operations: chunk,
is_last,
},
)),
})?;
}
anyhow::Ok(())
})
.await
.log_err();
cx.background_spawn(async move {
let mut chunks = split_operations(operations).peekable();
while let Some(chunk) = chunks.next() {
let is_last = chunks.peek().is_none();
client.send(proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
proto::BufferChunk {
buffer_id: buffer_id.into(),
operations: chunk,
is_last,
},
)),
})?;
}
anyhow::Ok(())
})
.await
.log_err();
}
Ok(())
})

@@ -135,8 +135,7 @@ impl ProjectEnvironment {
cx.spawn(|this, mut cx| async move {
let (mut shell_env, error_message) = cx
.background_executor()
.spawn({
.background_spawn({
let worktree_abs_path = worktree_abs_path.clone();
async move {
load_worktree_shell_environment(&worktree_abs_path, &load_direnv).await

@@ -11,8 +11,8 @@ use git::{
status::{GitSummary, TrackedSummary},
};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
WeakEntity,
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription,
Task, WeakEntity,
};
use language::{Buffer, LanguageRegistry};
use rpc::proto::{git_reset, ToProto};
@@ -242,10 +242,7 @@ impl GitStore {
mpsc::unbounded::<(Message, oneshot::Sender<Result<()>>)>();
cx.spawn(|_, cx| async move {
while let Some((msg, respond)) = update_receiver.next().await {
let result = cx
.background_executor()
.spawn(Self::process_git_msg(msg))
.await;
let result = cx.background_spawn(Self::process_git_msg(msg)).await;
respond.send(result).ok();
}
})
@@ -841,15 +838,14 @@ impl Repository {
match self.git_repo.clone() {
GitRepo::Local(git_repository) => {
let commit = commit.to_string();
cx.background_executor()
.spawn(async move { git_repository.show(&commit) })
cx.background_spawn(async move { git_repository.show(&commit) })
}
GitRepo::Remote {
project_id,
client,
worktree_id,
work_directory_id,
} => cx.background_executor().spawn(async move {
} => cx.background_spawn(async move {
let resp = client
.request(proto::GitShow {
project_id: project_id.0,

@@ -404,7 +404,7 @@ impl ImageStore {
}
};
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
Self::wait_for_loading_image(loading_watch)
.await
.map_err(|e| e.cloned())

@@ -2144,7 +2144,7 @@ impl LocalLspStore {
cx: &mut Context<LspStore>,
) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let snapshot = snapshot?;
let mut lsp_edits = lsp_edits
.into_iter()
@@ -3282,16 +3282,15 @@ impl LspStore {
}
} else if let Some((upstream_client, upstream_project_id)) = self.upstream_client() {
let buffer_id = buffer.read(cx).remote_id().to_proto();
cx.background_executor()
.spawn(async move {
upstream_client
.request(proto::RegisterBufferWithLanguageServers {
project_id: upstream_project_id,
buffer_id,
})
.await
})
.detach();
cx.background_spawn(async move {
upstream_client
.request(proto::RegisterBufferWithLanguageServers {
project_id: upstream_project_id,
buffer_id,
})
.await
})
.detach();
} else {
panic!("oops!");
}
@@ -6707,7 +6706,7 @@ impl LspStore {
return Err(anyhow!("No language server {id}"));
};
Ok(cx.background_executor().spawn(async move {
Ok(cx.background_spawn(async move {
let can_resolve = server
.capabilities()
.completion_provider
@@ -7375,9 +7374,7 @@ impl LspStore {
.map(|b| b.read(cx).remote_id().to_proto())
.collect(),
});
cx.background_executor()
.spawn(request)
.detach_and_log_err(cx);
cx.background_spawn(request).detach_and_log_err(cx);
} else {
let Some(local) = self.as_local_mut() else {
return;
@@ -7406,9 +7403,7 @@ impl LspStore {
.collect::<Vec<_>>();
cx.spawn(|this, mut cx| async move {
cx.background_executor()
.spawn(futures::future::join_all(tasks))
.await;
cx.background_spawn(futures::future::join_all(tasks)).await;
this.update(&mut cx, |this, cx| {
for buffer in buffers {
this.register_buffer_with_language_servers(&buffer, true, cx);
@@ -7737,9 +7732,7 @@ impl LspStore {
},
)),
});
cx.background_executor()
.spawn(request)
.detach_and_log_err(cx);
cx.background_spawn(request).detach_and_log_err(cx);
} else if let Some(local) = self.as_local() {
let servers = buffers
.into_iter()
@@ -7795,9 +7788,7 @@ impl LspStore {
),
),
});
cx.background_executor()
.spawn(request)
.detach_and_log_err(cx);
cx.background_spawn(request).detach_and_log_err(cx);
}
}

@@ -12,7 +12,7 @@ use futures::{
stream::FuturesUnordered,
FutureExt,
};
use gpui::{AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity};
use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity};
use language::{
language_settings::{Formatter, LanguageSettings, SelectedFormatter},
Buffer, LanguageRegistry, LocalFile,
@@ -121,8 +121,7 @@ impl PrettierStore {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.spawn(|lsp_store, mut cx| async move {
match cx
.background_executor()
.spawn(async move {
.background_spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
@@ -234,8 +233,7 @@ impl PrettierStore {
.unwrap_or_default();
cx.spawn(|lsp_store, mut cx| async move {
match cx
.background_executor()
.spawn(async move {
.background_spawn(async move {
Prettier::locate_prettier_ignore(
fs.as_ref(),
&prettier_ignores,
@@ -483,31 +481,30 @@ impl PrettierStore {
}))
.collect::<Vec<_>>();
cx.background_executor()
.spawn(async move {
let _: Vec<()> = future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_instance)| {
async move {
if let Some(instance) = prettier_instance.prettier {
match instance.await {
Ok(prettier) => {
prettier.clear_cache().log_err().await;
},
Err(e) => {
match prettier_path {
Some(prettier_path) => log::error!(
"Failed to clear prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
),
None => log::error!(
"Failed to clear default prettier cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
),
}
},
}
}
}
}))
.await;
})
.detach();
cx.background_spawn(async move {
let _: Vec<()> = future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_instance)| {
async move {
if let Some(instance) = prettier_instance.prettier {
match instance.await {
Ok(prettier) => {
prettier.clear_cache().log_err().await;
},
Err(e) => {
match prettier_path {
Some(prettier_path) => log::error!(
"Failed to clear prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
),
None => log::error!(
"Failed to clear default prettier cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
),
}
},
}
}
}
}))
.await;
})
.detach();
}
}
@@ -539,7 +536,7 @@ impl PrettierStore {
}) {
Some(locate_from) => {
let installed_prettiers = self.prettier_instances.keys().cloned().collect();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
Prettier::locate_prettier_installation(
fs.as_ref(),
&installed_prettiers,
@@ -631,13 +628,12 @@ impl PrettierStore {
})?;
if needs_install {
let installed_plugins = new_plugins.clone();
cx.background_executor()
.spawn(async move {
install_prettier_packages(fs.as_ref(), new_plugins, node).await?;
// Save the server file last, so the reinstall need could be determined by the absence of the file.
save_prettier_server_file(fs.as_ref()).await?;
anyhow::Ok(())
})
cx.background_spawn(async move {
install_prettier_packages(fs.as_ref(), new_plugins, node).await?;
// Save the server file last, so the reinstall need could be determined by the absence of the file.
save_prettier_server_file(fs.as_ref()).await?;
anyhow::Ok(())
})
.await
.context("prettier & plugins install")
.map_err(Arc::new)?;

@@ -562,7 +562,7 @@ impl DirectoryLister {
}
DirectoryLister::Local(fs) => {
let fs = fs.clone();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let mut results = vec![];
let expanded = shellexpand::tilde(&path);
let query = Path::new(expanded.as_ref());
@@ -1163,13 +1163,12 @@ impl Project {
.read(cx)
.shutdown_processes(Some(proto::ShutdownRemoteServer {}));
cx.background_executor()
.spawn(async move {
if let Some(shutdown) = shutdown {
shutdown.await;
}
})
.detach()
cx.background_spawn(async move {
if let Some(shutdown) = shutdown {
shutdown.await;
}
})
.detach()
}
match &self.client_state {
@@ -3138,7 +3137,7 @@ impl Project {
let buffer = buffer.clone();
let query = query.clone();
let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
chunk_results.push(cx.background_executor().spawn(async move {
chunk_results.push(cx.background_spawn(async move {
let ranges = query
.search(&snapshot, None)
.await
@@ -3377,7 +3376,7 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Option<ResolvedPath>> {
let resolve_task = self.resolve_abs_path(path, cx);
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let resolved_path = resolve_task.await;
resolved_path.filter(|path| path.is_file())
})
@@ -3391,7 +3390,7 @@ impl Project {
if self.is_local() {
let expanded = PathBuf::from(shellexpand::tilde(&path).into_owned());
let fs = self.fs.clone();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let path = expanded.as_path();
let metadata = fs.metadata(path).await.ok().flatten();
@@ -3409,7 +3408,7 @@ impl Project {
project_id: SSH_PROJECT_ID,
path: request_path.to_proto(),
});
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let response = request.await.log_err()?;
if response.exists {
Some(ResolvedPath::AbsPath {
@@ -3490,7 +3489,7 @@ impl Project {
};
let response = session.read(cx).proto_client().request(request);
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let response = response.await?;
Ok(response.entries.into_iter().map(PathBuf::from).collect())
})
@@ -3906,8 +3905,7 @@ impl Project {
if let Some(remote_id) = this.remote_id() {
let mut payload = envelope.payload.clone();
payload.project_id = remote_id;
cx.background_executor()
.spawn(this.client.request(payload))
cx.background_spawn(this.client.request(payload))
.detach_and_log_err(cx);
}
this.buffer_store.clone()
@@ -3924,8 +3922,7 @@ impl Project {
if let Some(ssh) = &this.ssh_client {
let mut payload = envelope.payload.clone();
payload.project_id = SSH_PROJECT_ID;
cx.background_executor()
.spawn(ssh.read(cx).proto_client().request(payload))
cx.background_spawn(ssh.read(cx).proto_client().request(payload))
.detach_and_log_err(cx);
}
this.buffer_store.clone()
@@ -4146,7 +4143,7 @@ impl Project {
if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
let operations =
buffer.read(cx).serialize_ops(Some(remote_version), cx);
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let operations = operations.await;
for chunk in split_operations(operations) {
client
@@ -4169,12 +4166,11 @@ impl Project {
// Any incomplete buffers have open requests waiting. Request that the host sends
// creates these buffers for us again to unblock any waiting futures.
for id in incomplete_buffer_ids {
cx.background_executor()
.spawn(client.request(proto::OpenBufferById {
project_id,
id: id.into(),
}))
.detach();
cx.background_spawn(client.request(proto::OpenBufferById {
project_id,
id: id.into(),
}))
.detach();
}
futures::future::join_all(send_updates_for_buffers)

@@ -13,7 +13,7 @@ use std::{
};
use collections::HashMap;
use gpui::{App, AppContext, Context, Entity, EventEmitter, Subscription};
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription};
use language::{CachedLspAdapter, LspAdapterDelegate};
use lsp::LanguageServerName;
use path_trie::{LabelPresence, RootPathTrie, TriePath};

@@ -15,7 +15,7 @@ use std::{
};
use collections::{HashMap, IndexMap};
use gpui::{App, AppContext, Entity, Subscription};
use gpui::{App, AppContext as _, Entity, Subscription};
use itertools::Itertools;
use language::{
language_settings::AllLanguageSettings, Attach, LanguageName, LanguageRegistry,

@@ -325,16 +325,15 @@ impl LocalToolchainStore {
.ok()?
.await;
cx.background_executor()
.spawn(async move {
let language = registry
.language_for_name(language_name.as_ref())
.await
.ok()?;
let toolchains = language.toolchain_lister()?;
Some(toolchains.list(root.to_path_buf(), project_env).await)
})
.await
cx.background_spawn(async move {
let language = registry
.language_for_name(language_name.as_ref())
.await
.ok()?;
let toolchains = language.toolchain_lister()?;
Some(toolchains.list(root.to_path_buf(), project_env).await)
})
.await
})
}
pub(crate) fn active_toolchain(

@@ -13,7 +13,9 @@ use futures::{
FutureExt, SinkExt,
};
use git::repository::Branch;
use gpui::{App, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity};
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EntityId, EventEmitter, Task, WeakEntity,
};
use postage::oneshot;
use rpc::{
proto::{self, FromProto, ToProto, SSH_PROJECT_ID},
@@ -179,8 +181,7 @@ impl WorktreeStore {
Task::ready(Ok((tree, relative_path)))
} else {
let worktree = self.create_worktree(abs_path, visible, cx);
cx.background_executor()
.spawn(async move { Ok((worktree.await?, PathBuf::new())) })
cx.background_spawn(async move { Ok((worktree.await?, PathBuf::new())) })
}
}
@@ -679,7 +680,7 @@ impl WorktreeStore {
let (output_tx, output_rx) = smol::channel::bounded(64);
let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();
let input = cx.background_executor().spawn({
let input = cx.background_spawn({
let fs = fs.clone();
let query = query.clone();
async move {
@@ -696,7 +697,7 @@
}
});
const MAX_CONCURRENT_FILE_SCANS: usize = 64;
let filters = cx.background_executor().spawn(async move {
let filters = cx.background_spawn(async move {
let fs = &fs;
let query = &query;
executor
@@ -712,25 +713,24 @@
})
.await;
});
cx.background_executor()
.spawn(async move {
let mut matched = 0;
while let Ok(mut receiver) = output_rx.recv().await {
let Some(path) = receiver.next().await else {
continue;
};
let Ok(_) = matching_paths_tx.send(path).await else {
break;
};
matched += 1;
if matched == limit {
break;
}
}
drop(input);
drop(filters);
})
.detach();
cx.background_spawn(async move {
let mut matched = 0;
while let Ok(mut receiver) = output_rx.recv().await {
let Some(path) = receiver.next().await else {
continue;
};
let Ok(_) = matching_paths_tx.send(path).await else {
break;
};
matched += 1;
if matched == limit {
break;
}
}
drop(input);
drop(filters);
})
.detach();
matching_paths_rx
}
@@ -934,7 +934,7 @@ impl WorktreeStore {
}),
});
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let response = request.await?;
let branches = response
@@ -1021,7 +1021,7 @@ impl WorktreeStore {
branch_name: new_branch,
});
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
request.await?;
Ok(())
})