deps: Bump smol to 2.0 (#22956)

A side effect of this change is that code size increases by ~300 kB, but I
think we can stomach it.

Release Notes:

- N/A
This commit is contained in:
Piotr Osiewicz 2025-01-10 14:38:00 +01:00 committed by GitHub
parent 1f84c1b6c7
commit 9e113bccd0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
25 changed files with 187 additions and 330 deletions

View file

@ -8,7 +8,7 @@ use collections::Bound;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use fs::MTime;
use futures::stream::StreamExt;
use futures::{stream::StreamExt, FutureExt as _};
use futures_batch::ChunksTimeoutStreamExt;
use gpui::{AppContext, Model, Task};
use heed::types::{SerdeBincode, Str};
@ -17,8 +17,7 @@ use log;
use project::{Entry, UpdatedEntriesSet, Worktree};
use serde::{Deserialize, Serialize};
use smol::channel;
use smol::future::FutureExt;
use std::{cmp::Ordering, future::Future, iter, path::Path, sync::Arc, time::Duration};
use std::{cmp::Ordering, future::Future, iter, path::Path, pin::pin, sync::Arc, time::Duration};
use util::ResultExt;
use worktree::Snapshot;
@ -284,7 +283,7 @@ impl EmbeddingIndex {
let (embedded_files_tx, embedded_files_rx) = channel::bounded(512);
let task = cx.background_executor().spawn(async move {
let mut chunked_file_batches =
chunked_files.chunks_timeout(512, Duration::from_secs(2));
pin!(chunked_files.chunks_timeout(512, Duration::from_secs(2)));
while let Some(chunked_files) = chunked_file_batches.next().await {
// View the batch of files as a vec of chunks
// Flatten out to a vec of chunks that we can subdivide into batch sized pieces
@ -358,14 +357,16 @@ impl EmbeddingIndex {
fn persist_embeddings(
&self,
mut deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
mut embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
deleted_entry_ranges: channel::Receiver<(Bound<String>, Bound<String>)>,
embedded_files: channel::Receiver<(EmbeddedFile, IndexingEntryHandle)>,
cx: &AppContext,
) -> Task<Result<()>> {
let db_connection = self.db_connection.clone();
let db = self.db;
cx.background_executor().spawn(async move {
let mut deleted_entry_ranges = pin!(deleted_entry_ranges);
let mut embedded_files = pin!(embedded_files);
loop {
// Interleave deletions and persists of embedded files
futures::select_biased! {

View file

@ -6,7 +6,7 @@ use crate::{
use anyhow::{anyhow, Context, Result};
use collections::HashMap;
use fs::Fs;
use futures::{stream::StreamExt, FutureExt};
use futures::FutureExt;
use gpui::{
AppContext, Entity, EntityId, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel,
};
@ -80,7 +80,7 @@ impl ProjectIndex {
) -> Self {
let language_registry = project.read(cx).languages().clone();
let fs = project.read(cx).fs().clone();
let (status_tx, mut status_rx) = channel::unbounded();
let (status_tx, status_rx) = channel::unbounded();
let mut this = ProjectIndex {
db_connection,
project: project.downgrade(),
@ -92,7 +92,7 @@ impl ProjectIndex {
embedding_provider,
_subscription: cx.subscribe(&project, Self::handle_project_event),
_maintain_status: cx.spawn(|this, mut cx| async move {
while status_rx.next().await.is_some() {
while status_rx.recv().await.is_ok() {
if this
.update(&mut cx, |this, cx| this.update_status(cx))
.is_err()

View file

@ -278,7 +278,7 @@ mod tests {
use project::{Project, ProjectEntryId};
use serde_json::json;
use settings::SettingsStore;
use smol::{channel, stream::StreamExt};
use smol::channel;
use std::{future, path::Path, sync::Arc};
fn init_test(cx: &mut TestAppContext) {
@ -496,9 +496,9 @@ mod tests {
cx.update(|cx| EmbeddingIndex::embed_files(provider.clone(), chunked_files_rx, cx));
embed_files_task.task.await.unwrap();
let mut embedded_files_rx = embed_files_task.files;
let embedded_files_rx = embed_files_task.files;
let mut embedded_files = Vec::new();
while let Some((embedded_file, _)) = embedded_files_rx.next().await {
while let Ok((embedded_file, _)) = embedded_files_rx.recv().await {
embedded_files.push(embedded_file);
}

View file

@ -20,6 +20,7 @@ use smol::channel;
use std::{
future::Future,
path::Path,
pin::pin,
sync::Arc,
time::{Duration, Instant},
};
@ -247,13 +248,14 @@ impl SummaryIndex {
fn check_summary_cache(
&self,
mut might_need_summary: channel::Receiver<UnsummarizedFile>,
might_need_summary: channel::Receiver<UnsummarizedFile>,
cx: &AppContext,
) -> NeedsSummary {
let db_connection = self.db_connection.clone();
let db = self.summary_db;
let (needs_summary_tx, needs_summary_rx) = channel::bounded(512);
let task = cx.background_executor().spawn(async move {
let mut might_need_summary = pin!(might_need_summary);
while let Some(file) = might_need_summary.next().await {
let tx = db_connection
.read_txn()
@ -484,12 +486,12 @@ impl SummaryIndex {
fn summarize_files(
&self,
mut unsummarized_files: channel::Receiver<UnsummarizedFile>,
unsummarized_files: channel::Receiver<UnsummarizedFile>,
cx: &AppContext,
) -> SummarizeFiles {
let (summarized_tx, summarized_rx) = channel::bounded(512);
let task = cx.spawn(|cx| async move {
while let Some(file) = unsummarized_files.next().await {
while let Ok(file) = unsummarized_files.recv().await {
log::debug!("Summarizing {:?}", file);
let summary = cx
.update(|cx| Self::summarize_code(&file.contents, &file.path, cx))?
@ -607,7 +609,7 @@ impl SummaryIndex {
let digest_db = self.file_digest_db;
let summary_db = self.summary_db;
cx.background_executor().spawn(async move {
let mut summaries = summaries.chunks_timeout(4096, Duration::from_secs(2));
let mut summaries = pin!(summaries.chunks_timeout(4096, Duration::from_secs(2)));
while let Some(summaries) = summaries.next().await {
let mut txn = db_connection.write_txn()?;
for file in &summaries {