cx.background_executor().spawn(...) -> cx.background_spawn(...) (#25103)

Done automatically with

> ast-grep -p '$A.background_executor().spawn($B)' -r
'$A.background_spawn($B)' --update-all --globs "\!crates/gpui"

Followed by:

* `cargo fmt`
* Removing some trailing whitespace that the rewrite unexpectedly left behind.
* Manually adding imports of `gpui::{AppContext as _}`, which provides
`background_spawn`
* Adding `AppContext as _` to existing uses of `AppContext`

Release Notes:

- N/A
This commit is contained in:
Michael Sloan 2025-02-18 13:30:33 -07:00 committed by GitHub
parent f606b0641e
commit b1872e3afd
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
120 changed files with 1146 additions and 1267 deletions

View file

@ -10,7 +10,7 @@ use fs::Fs;
use fs::MTime;
use futures::{stream::StreamExt, FutureExt as _};
use futures_batch::ChunksTimeoutStreamExt;
use gpui::{App, Entity, Task};
use gpui::{App, AppContext as _, Entity, Task};
use heed::types::{SerdeBincode, Str};
use language::LanguageRegistry;
use log;
@ -102,7 +102,7 @@ impl EmbeddingIndex {
let db_connection = self.db_connection.clone();
let db = self.db;
let entries_being_indexed = self.entry_ids_being_indexed.clone();
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create read transaction")?;
@ -185,7 +185,7 @@ impl EmbeddingIndex {
let (updated_entries_tx, updated_entries_rx) = channel::bounded(512);
let (deleted_entry_ranges_tx, deleted_entry_ranges_rx) = channel::bounded(128);
let entries_being_indexed = self.entry_ids_being_indexed.clone();
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
for (path, entry_id, status) in updated_entries.iter() {
match status {
project::PathChange::Added
@ -278,7 +278,7 @@ impl EmbeddingIndex {
) -> EmbedFiles {
let embedding_provider = embedding_provider.clone();
let (embedded_files_tx, embedded_files_rx) = channel::bounded(512);
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
let mut chunked_file_batches =
pin!(chunked_files.chunks_timeout(512, Duration::from_secs(2)));
while let Some(chunked_files) = chunked_file_batches.next().await {
@ -361,7 +361,7 @@ impl EmbeddingIndex {
let db_connection = self.db_connection.clone();
let db = self.db;
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let mut deleted_entry_ranges = pin!(deleted_entry_ranges);
let mut embedded_files = pin!(embedded_files);
loop {
@ -397,7 +397,7 @@ impl EmbeddingIndex {
pub fn paths(&self, cx: &App) -> Task<Result<Vec<Arc<Path>>>> {
let connection = self.db_connection.clone();
let db = self.db;
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let tx = connection
.read_txn()
.context("failed to create read transaction")?;
@ -413,7 +413,7 @@ impl EmbeddingIndex {
pub fn chunks_for_path(&self, path: Arc<Path>, cx: &App) -> Task<Result<Vec<EmbeddedChunk>>> {
let connection = self.db_connection.clone();
let db = self.db;
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let tx = connection
.read_txn()
.context("failed to create read transaction")?;

View file

@ -7,7 +7,9 @@ use anyhow::{anyhow, Context as _, Result};
use collections::HashMap;
use fs::Fs;
use futures::FutureExt;
use gpui::{App, Context, Entity, EntityId, EventEmitter, Subscription, Task, WeakEntity};
use gpui::{
App, AppContext as _, Context, Entity, EntityId, EventEmitter, Subscription, Task, WeakEntity,
};
use language::LanguageRegistry;
use log;
use project::{Project, Worktree, WorktreeId};
@ -250,7 +252,7 @@ impl ProjectIndex {
let worktree_id = index.worktree().read(cx).id();
let db_connection = index.db_connection().clone();
let db = *index.embedding_index().db();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create read transaction")?;
@ -432,7 +434,7 @@ impl ProjectIndex {
let file_digest_db = summary_index.file_digest_db();
let summary_db = summary_index.summary_db();
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create db read transaction")?;
@ -519,8 +521,9 @@ impl ProjectIndex {
index
.read_with(&cx, |index, cx| {
cx.background_executor()
.spawn(index.summary_index().flush_backlog(worktree_abs_path, cx))
cx.background_spawn(
index.summary_index().flush_backlog(worktree_abs_path, cx),
)
})?
.await
})

View file

@ -42,8 +42,7 @@ impl SemanticDb {
cx: &mut AsyncApp,
) -> Result<Self> {
let db_connection = cx
.background_executor()
.spawn(async move {
.background_spawn(async move {
std::fs::create_dir_all(&db_path)?;
unsafe {
heed::EnvOpenOptions::new()
@ -432,8 +431,7 @@ mod tests {
let worktree = search_result.worktree.read(cx);
let entry_abs_path = worktree.abs_path().join(&search_result.path);
let fs = project.read(cx).fs().clone();
cx.background_executor()
.spawn(async move { fs.load(&entry_abs_path).await.unwrap() })
cx.background_spawn(async move { fs.load(&entry_abs_path).await.unwrap() })
})
.await;

View file

@ -3,7 +3,7 @@ use arrayvec::ArrayString;
use fs::{Fs, MTime};
use futures::{stream::StreamExt, TryFutureExt};
use futures_batch::ChunksTimeoutStreamExt;
use gpui::{App, Entity, Task};
use gpui::{App, AppContext as _, Entity, Task};
use heed::{
types::{SerdeBincode, Str},
RoTxn,
@ -254,7 +254,7 @@ impl SummaryIndex {
let db_connection = self.db_connection.clone();
let db = self.summary_db;
let (needs_summary_tx, needs_summary_rx) = channel::bounded(512);
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
let mut might_need_summary = pin!(might_need_summary);
while let Some(file) = might_need_summary.next().await {
let tx = db_connection
@ -291,7 +291,7 @@ impl SummaryIndex {
let db_connection = self.db_connection.clone();
let digest_db = self.file_digest_db;
let backlog = Arc::clone(&self.backlog);
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create read transaction")?;
@ -368,7 +368,7 @@ impl SummaryIndex {
let db_connection = self.db_connection.clone();
let digest_db = self.file_digest_db;
let backlog = Arc::clone(&self.backlog);
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
let txn = db_connection
.read_txn()
.context("failed to create read transaction")?;
@ -538,7 +538,7 @@ impl SummaryIndex {
.available_models(cx)
.find(|model| &model.id() == &summary_model_id)
else {
return cx.background_executor().spawn(async move {
return cx.background_spawn(async move {
Err(anyhow!("Couldn't find the preferred summarization model ({:?}) in the language registry's available models", summary_model_id))
});
};
@ -566,31 +566,30 @@ impl SummaryIndex {
let code_len = code.len();
cx.spawn(|cx| async move {
let stream = model.stream_completion(request, &cx);
cx.background_executor()
.spawn(async move {
let answer: String = stream
.await?
.filter_map(|event| async {
if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
Some(text)
} else {
None
}
})
.collect()
.await;
cx.background_spawn(async move {
let answer: String = stream
.await?
.filter_map(|event| async {
if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
Some(text)
} else {
None
}
})
.collect()
.await;
log::info!(
"It took {:?} to summarize {:?} bytes of code.",
start.elapsed(),
code_len
);
log::info!(
"It took {:?} to summarize {:?} bytes of code.",
start.elapsed(),
code_len
);
log::debug!("Summary was: {:?}", &answer);
log::debug!("Summary was: {:?}", &answer);
Ok(answer)
})
.await
Ok(answer)
})
.await
// TODO if summarization failed, put it back in the backlog!
})
@ -604,7 +603,7 @@ impl SummaryIndex {
let db_connection = self.db_connection.clone();
let digest_db = self.file_digest_db;
let summary_db = self.summary_db;
cx.background_executor().spawn(async move {
cx.background_spawn(async move {
let mut summaries = pin!(summaries.chunks_timeout(4096, Duration::from_secs(2)));
while let Some(summaries) = summaries.next().await {
let mut txn = db_connection.write_txn()?;
@ -650,7 +649,7 @@ impl SummaryIndex {
backlog.drain().collect()
};
let task = cx.background_executor().spawn(async move {
let task = cx.background_spawn(async move {
tx.send(needs_summary).await?;
Ok(())
});

View file

@ -52,8 +52,7 @@ impl WorktreeIndex {
cx.spawn(|mut cx| async move {
let entries_being_indexed = Arc::new(IndexingEntrySet::new(status_tx));
let (embedding_index, summary_index) = cx
.background_executor()
.spawn({
.background_spawn({
let entries_being_indexed = Arc::clone(&entries_being_indexed);
let db_connection = db_connection.clone();
async move {