cx.background_executor().spawn(...) -> cx.background_spawn(...) (#25103)

Done automatically with

> ast-grep -p '$A.background_executor().spawn($B)' -r '$A.background_spawn($B)' --update-all --globs "\!crates/gpui"
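
Concretely, `$A` captures the receiver expression and `$B` the spawned future, so every call of the first shape below becomes the second. A minimal sketch of the two shapes, using a made-up `sum_in_background` helper and assuming `background_spawn` returns the same `gpui::Task` that `BackgroundExecutor::spawn` does:

```rust
use gpui::{App, AppContext as _, Task};

// Hypothetical helper, for illustration only; not part of this change.
fn sum_in_background(cx: &App) -> Task<u64> {
    // Before the rewrite, matched by `$A.background_executor().spawn($B)`:
    //     cx.background_executor().spawn(async move { (1..=100u64).sum::<u64>() })
    //
    // After the rewrite, produced by `$A.background_spawn($B)`:
    cx.background_spawn(async move { (1..=100u64).sum::<u64>() })
}
```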

Followed by:

* `cargo fmt`
* Unexpectedly, some trailing whitespace also needed to be removed.
* Manually adding imports of `gpui::{AppContext as _}`, which provides `background_spawn` (see the sketch after this list).
* Adding `AppContext as _` to existing `use` statements that already reference `AppContext`.
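
The import step is needed because `background_spawn` comes from the `AppContext` trait, so the rewritten calls only resolve once that trait is in scope. A sketch of the two import situations described above (illustrative; the second mirrors the first hunk of the diff below):

```rust
// Case 1: the file had no suitable gpui import, so the trait is brought into
// scope anonymously, purely for its methods.
use gpui::AppContext as _;

// Case 2: an existing braced gpui import is extended with (or its named
// `AppContext` entry swapped for) the anonymous form, e.g.:
// use gpui::{AnyView, AppContext as _, AsyncApp, Task};
```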

Release Notes:

- N/A
Michael Sloan, 2025-02-18 13:30:33 -07:00 (committed by GitHub)
parent f606b0641e
commit b1872e3afd
120 changed files with 1146 additions and 1267 deletions

```diff
@@ -3,7 +3,8 @@ use collections::BTreeMap;
 use editor::{Editor, EditorElement, EditorStyle};
 use futures::{future::BoxFuture, stream::BoxStream, FutureExt, StreamExt};
 use gpui::{
-    AnyView, AppContext, AsyncApp, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
+    AnyView, AppContext as _, AsyncApp, Entity, FontStyle, Subscription, Task, TextStyle,
+    WhiteSpace,
 };
 use http_client::HttpClient;
 use language_model::{
@@ -269,26 +270,25 @@ impl LanguageModel for DeepSeekLanguageModel {
         request: LanguageModelRequest,
         cx: &App,
     ) -> BoxFuture<'static, Result<usize>> {
-        cx.background_executor()
-            .spawn(async move {
-                let messages = request
-                    .messages
-                    .into_iter()
-                    .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
-                        role: match message.role {
-                            Role::User => "user".into(),
-                            Role::Assistant => "assistant".into(),
-                            Role::System => "system".into(),
-                        },
-                        content: Some(message.string_contents()),
-                        name: None,
-                        function_call: None,
-                    })
-                    .collect::<Vec<_>>();
-
-                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
-            })
-            .boxed()
+        cx.background_spawn(async move {
+            let messages = request
+                .messages
+                .into_iter()
+                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
+                    role: match message.role {
+                        Role::User => "user".into(),
+                        Role::Assistant => "assistant".into(),
+                        Role::System => "system".into(),
+                    },
+                    content: Some(message.string_contents()),
+                    name: None,
+                    function_call: None,
+                })
+                .collect::<Vec<_>>();
+
+            tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
+        })
+        .boxed()
     }

     fn stream_completion(
```