deps: Bump smol to 2.0 (#22956)
A side effect of this change is that code size increases by ~300 kB, but I think we can stomach it. Release Notes: - N/A
This commit is contained in:
parent
1f84c1b6c7
commit
9e113bccd0
25 changed files with 187 additions and 330 deletions
|
@ -30,6 +30,7 @@ use std::{
|
|||
io,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
pin::pin,
|
||||
str::FromStr as _,
|
||||
sync::Arc,
|
||||
time::Instant,
|
||||
|
@ -1483,7 +1484,7 @@ impl BufferStore {
|
|||
}
|
||||
|
||||
const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
|
||||
let mut project_paths_rx = self
|
||||
let project_paths_rx = self
|
||||
.worktree_store
|
||||
.update(cx, |worktree_store, cx| {
|
||||
worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
|
||||
|
@ -1495,6 +1496,7 @@ impl BufferStore {
|
|||
tx.send(buffer).await.ok();
|
||||
}
|
||||
|
||||
let mut project_paths_rx = pin!(project_paths_rx);
|
||||
while let Some(project_paths) = project_paths_rx.next().await {
|
||||
let buffers = this.update(&mut cx, |this, cx| {
|
||||
project_paths
|
||||
|
|
|
@ -818,7 +818,7 @@ impl LocalLspStore {
|
|||
let name = name.to_string();
|
||||
async move {
|
||||
let actions = params.actions.unwrap_or_default();
|
||||
let (tx, mut rx) = smol::channel::bounded(1);
|
||||
let (tx, rx) = smol::channel::bounded(1);
|
||||
let request = LanguageServerPromptRequest {
|
||||
level: match params.typ {
|
||||
lsp::MessageType::ERROR => PromptLevel::Critical,
|
||||
|
@ -837,9 +837,9 @@ impl LocalLspStore {
|
|||
})
|
||||
.is_ok();
|
||||
if did_update {
|
||||
let response = rx.next().await;
|
||||
let response = rx.recv().await?;
|
||||
|
||||
Ok(response)
|
||||
Ok(Some(response))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
|
|
|
@ -79,6 +79,7 @@ use std::{
|
|||
borrow::Cow,
|
||||
ops::Range,
|
||||
path::{Component, Path, PathBuf},
|
||||
pin::pin,
|
||||
str,
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
|
@ -3019,6 +3020,7 @@ impl Project {
|
|||
// 64 buffers at a time to avoid overwhelming the main thread. For each
|
||||
// opened buffer, we will spawn a background task that retrieves all the
|
||||
// ranges in the buffer matched by the query.
|
||||
let mut chunks = pin!(chunks);
|
||||
'outer: while let Some(matching_buffer_chunk) = chunks.next().await {
|
||||
let mut chunk_results = Vec::new();
|
||||
for buffer in matching_buffer_chunk {
|
||||
|
@ -3748,6 +3750,7 @@ impl Project {
|
|||
// next `flush_effects()` call.
|
||||
drop(this);
|
||||
|
||||
let mut rx = pin!(rx);
|
||||
let answer = rx.next().await;
|
||||
|
||||
Ok(LanguageServerPromptResponse {
|
||||
|
@ -3889,7 +3892,7 @@ impl Project {
|
|||
.query
|
||||
.ok_or_else(|| anyhow!("missing query field"))?,
|
||||
)?;
|
||||
let mut results = this.update(&mut cx, |this, cx| {
|
||||
let results = this.update(&mut cx, |this, cx| {
|
||||
this.find_search_candidate_buffers(&query, message.limit as _, cx)
|
||||
})?;
|
||||
|
||||
|
@ -3897,7 +3900,7 @@ impl Project {
|
|||
buffer_ids: Vec::new(),
|
||||
};
|
||||
|
||||
while let Some(buffer) = results.next().await {
|
||||
while let Ok(buffer) = results.recv().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
|
||||
response.buffer_ids.push(buffer_id.to_proto());
|
||||
|
|
|
@ -5657,9 +5657,9 @@ async fn search(
|
|||
query: SearchQuery,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> Result<HashMap<String, Vec<Range<usize>>>> {
|
||||
let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
|
||||
let search_rx = project.update(cx, |project, cx| project.search(query, cx));
|
||||
let mut results = HashMap::default();
|
||||
while let Some(search_result) = search_rx.next().await {
|
||||
while let Ok(search_result) = search_rx.recv().await {
|
||||
match search_result {
|
||||
SearchResult::Buffer { buffer, ranges } => {
|
||||
results.entry(buffer).or_insert(ranges);
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
pin::pin,
|
||||
sync::{atomic::AtomicUsize, Arc},
|
||||
};
|
||||
|
||||
|
@ -648,7 +649,7 @@ impl WorktreeStore {
|
|||
// We spawn a number of workers that take items from the filter channel and check the query
|
||||
// against the version of the file on disk.
|
||||
let (filter_tx, filter_rx) = smol::channel::bounded(64);
|
||||
let (output_tx, mut output_rx) = smol::channel::bounded(64);
|
||||
let (output_tx, output_rx) = smol::channel::bounded(64);
|
||||
let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();
|
||||
|
||||
let input = cx.background_executor().spawn({
|
||||
|
@ -685,7 +686,7 @@ impl WorktreeStore {
|
|||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let mut matched = 0;
|
||||
while let Some(mut receiver) = output_rx.next().await {
|
||||
while let Ok(mut receiver) = output_rx.recv().await {
|
||||
let Some(path) = receiver.next().await else {
|
||||
continue;
|
||||
};
|
||||
|
@ -990,6 +991,7 @@ impl WorktreeStore {
|
|||
mut input: Receiver<MatchingEntry>,
|
||||
query: &SearchQuery,
|
||||
) -> Result<()> {
|
||||
let mut input = pin!(input);
|
||||
while let Some(mut entry) = input.next().await {
|
||||
let abs_path = entry.worktree_path.join(&entry.path.path);
|
||||
let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue