Use anyhow more idiomatically (#31052)
https://github.com/zed-industries/zed/issues/30972 brought up another case where our error context is not enough to track down the actual source of an issue: we get a generic top-level error with no inner error. The cause was a top-level `.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;`. This PR reworks the way we use anyhow to reduce such issues (or at least make them simpler to bubble up later in a fix). On top of that, it uses a few more anyhow methods for better readability:

* `.ok_or_else(|| anyhow!("..."))`, `map_err`, and similar error-conversion/option-reporting calls are replaced with `context` and `with_context`
* various `anyhow!("failed to do ...")` messages are replaced with `.context("Doing ...")` to drop the parasitic "failed to" text
* `anyhow::ensure!` is used instead of `if ... { return Err(...); }`
* `anyhow::bail!` is used instead of `return Err(anyhow!(...));`

A minimal before/after sketch of these idioms follows the release notes.

Release Notes:

- N/A
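The snippet below is an illustrative sketch only, not code from this PR: `Repo`, `permalink_old`, and `permalink_new` are hypothetical stand-ins for the buffer/git/LSP plumbing the diff actually touches. It contrasts the old hand-rolled wrapping with the `context`/`with_context`/`ensure!`/`bail!` style adopted here.

```rust
use anyhow::{Context as _, Result, anyhow};
use std::collections::HashMap;

// Hypothetical stand-in type for illustration; the real changes live in
// Zed's buffer/git/LSP stores.
struct Repo {
    remotes: HashMap<String, String>,
    head: Option<String>,
}

impl Repo {
    fn remote_url(&self, name: &str) -> Option<String> {
        self.remotes.get(name).cloned()
    }

    fn head_sha(&self) -> Option<String> {
        self.head.clone()
    }
}

// Before: every `None` and every guard is wrapped by hand.
fn permalink_old(repo: &Repo, remote: &str, shared: bool) -> Result<String> {
    if !shared {
        return Err(anyhow!("project was not shared"));
    }
    let origin = repo
        .remote_url(remote)
        .ok_or_else(|| anyhow!("remote {:?} not found", remote))?;
    let sha = repo
        .head_sha()
        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
    if sha.is_empty() {
        return Err(anyhow!("empty HEAD SHA for remote {}", origin));
    }
    Ok(format!("{origin}/commit/{sha}"))
}

// After: `Option`s get `.context`/`.with_context`, guards use `ensure!`,
// and hard failures use `bail!`.
fn permalink_new(repo: &Repo, remote: &str, shared: bool) -> Result<String> {
    anyhow::ensure!(shared, "project was not shared");
    let origin = repo
        .remote_url(remote)
        .with_context(|| format!("remote {remote:?} not found"))?;
    let sha = repo.head_sha().context("reading HEAD SHA")?;
    if sha.is_empty() {
        anyhow::bail!("empty HEAD SHA for remote {origin}");
    }
    Ok(format!("{origin}/commit/{sha}"))
}

fn main() {
    let repo = Repo {
        remotes: HashMap::new(),
        head: None,
    };
    // Both fail on the missing remote; only the message style differs.
    println!("old: {:?}", permalink_old(&repo, "origin", true));
    println!("new: {:?}", permalink_new(&repo, "origin", true));
}
```

Dropping the literal "failed to ..." prefix matters because `context` layers already read as a chain of causes when the error is printed with `{:#}` or `{:?}`; repeating "failed to" at every layer just adds noise.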
commit 16366cf9f2 (parent 1e51a7ac44)
294 changed files with 2037 additions and 2610 deletions
@@ -58,7 +58,7 @@ struct RemoteBufferStore {
project_id: u64,
loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
remote_buffer_listeners:
HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
HashMap<BufferId, Vec<oneshot::Sender<anyhow::Result<Entity<Buffer>>>>>,
worktree_store: Entity<WorktreeStore>,
}

@@ -152,11 +152,7 @@ impl RemoteBufferStore {
capability: Capability,
cx: &mut Context<BufferStore>,
) -> Result<Option<Entity<Buffer>>> {
match envelope
.payload
.variant
.ok_or_else(|| anyhow!("missing variant"))?
{
match envelope.payload.variant.context("missing variant")? {
proto::create_buffer_for_peer::Variant::State(mut state) => {
let buffer_id = BufferId::new(state.id)?;

@@ -168,8 +164,8 @@ impl RemoteBufferStore {
.worktree_store
.read(cx)
.worktree_for_id(worktree_id, cx)
.ok_or_else(|| {
anyhow!("no worktree found for id {}", file.worktree_id)
.with_context(|| {
format!("no worktree found for id {}", file.worktree_id)
})?;
buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
as Arc<dyn language::File>);

@@ -197,8 +193,8 @@ impl RemoteBufferStore {
.loading_remote_buffers_by_id
.get(&buffer_id)
.cloned()
.ok_or_else(|| {
anyhow!(
.with_context(|| {
format!(
"received chunk for buffer {} without initial state",
chunk.buffer_id
)

@@ -341,10 +337,7 @@ impl RemoteBufferStore {
});
cx.spawn(async move |this, cx| {
let response = request
.await?
.transaction
.ok_or_else(|| anyhow!("missing transaction"))?;
let response = request.await?.transaction.context("missing transaction")?;
this.update(cx, |this, cx| {
this.deserialize_project_transaction(response, push_to_history, cx)
})?

@@ -913,8 +906,8 @@ impl BufferStore {
if is_remote {
return Ok(());
} else {
debug_panic!("buffer {} was already registered", remote_id);
Err(anyhow!("buffer {} was already registered", remote_id))?;
debug_panic!("buffer {remote_id} was already registered");
anyhow::bail!("buffer {remote_id} was already registered");
}
}
entry.insert(open_buffer);

@@ -963,7 +956,7 @@ impl BufferStore {
pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
self.get(buffer_id)
.ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
.with_context(|| format!("unknown buffer id {buffer_id}"))
}
pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {

@@ -1279,9 +1272,9 @@ impl BufferStore {
capability: Capability,
cx: &mut Context<Self>,
) -> Result<()> {
let Some(remote) = self.as_remote_mut() else {
return Err(anyhow!("buffer store is not a remote"));
};
let remote = self
.as_remote_mut()
.context("buffer store is not a remote")?;
if let Some(buffer) =
remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?

@@ -1303,12 +1296,12 @@ impl BufferStore {
this.update(&mut cx, |this, cx| {
let payload = envelope.payload.clone();
if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
let file = payload.file.context("invalid file")?;
let worktree = this
.worktree_store
.read(cx)
.worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
.ok_or_else(|| anyhow!("no such worktree"))?;
.context("no such worktree")?;
let file = File::from_proto(file, worktree, cx)?;
let old_file = buffer.update(cx, |buffer, cx| {
let old_file = buffer.file().cloned();

@@ -1445,7 +1438,7 @@ impl BufferStore {
let mtime = envelope.payload.mtime.clone().map(|time| time.into());
let line_ending = deserialize_line_ending(
proto::LineEnding::from_i32(envelope.payload.line_ending)
.ok_or_else(|| anyhow!("missing line ending"))?,
.context("missing line ending")?,
);
this.update(&mut cx, |this, cx| {
if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {

@@ -1495,7 +1488,7 @@ impl BufferStore {
let buffer_id = BufferId::new(*buffer_id)?;
buffers.insert(this.get_existing(buffer_id)?);
}
Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
anyhow::Ok(this.reload_buffers(buffers, false, cx))
})??;
let project_transaction = reload.await?;
@@ -233,9 +233,10 @@ impl ContextServerStore {
}
pub fn stop_server(&mut self, id: &ContextServerId, cx: &mut Context<Self>) -> Result<()> {
let Some(state) = self.servers.remove(id) else {
return Err(anyhow::anyhow!("Context server not found"));
};
let state = self
.servers
.remove(id)
.context("Context server not found")?;
let server = state.server();
let configuration = state.configuration();

@@ -336,9 +337,10 @@ impl ContextServerStore {
}
fn remove_server(&mut self, id: &ContextServerId, cx: &mut Context<Self>) -> Result<()> {
let Some(state) = self.servers.remove(id) else {
return Err(anyhow::anyhow!("Context server not found"));
};
let state = self
.servers
.remove(id)
.context("Context server not found")?;
drop(state);
cx.emit(Event::ServerStatusChanged {
server_id: id.clone(),

@@ -1097,7 +1099,7 @@ mod tests {
self.tx
.unbounded_send(response.to_string())
.map_err(|e| anyhow::anyhow!("Failed to send message: {}", e))?;
.context("sending a message")?;
}
}
}
@@ -1,7 +1,7 @@
//! Module for managing breakpoints in a project.
//!
//! Breakpoints are separate from a session because they're not associated with any particular debug session. They can also be set up without a session running.
use anyhow::{Result, anyhow};
use anyhow::{Context as _, Result};
pub use breakpoints_in_file::{BreakpointSessionState, BreakpointWithPosition};
use breakpoints_in_file::{BreakpointsInFile, StatefulBreakpoint};
use collections::{BTreeMap, HashMap};

@@ -219,7 +219,7 @@ impl BreakpointStore {
})
.ok()
.flatten()
.ok_or_else(|| anyhow!("Invalid project path"))?
.context("Invalid project path")?
.await?;
breakpoints.update(&mut cx, move |this, cx| {

@@ -272,25 +272,25 @@ impl BreakpointStore {
.update(&mut cx, |this, cx| {
this.project_path_for_absolute_path(message.payload.path.as_ref(), cx)
})?
.ok_or_else(|| anyhow!("Could not resolve provided abs path"))?;
.context("Could not resolve provided abs path")?;
let buffer = this
.update(&mut cx, |this, cx| {
this.buffer_store().read(cx).get_by_path(&path, cx)
})?
.ok_or_else(|| anyhow!("Could not find buffer for a given path"))?;
.context("Could not find buffer for a given path")?;
let breakpoint = message
.payload
.breakpoint
.ok_or_else(|| anyhow!("Breakpoint not present in RPC payload"))?;
.context("Breakpoint not present in RPC payload")?;
let position = language::proto::deserialize_anchor(
breakpoint
.position
.clone()
.ok_or_else(|| anyhow!("Anchor not present in RPC payload"))?,
.context("Anchor not present in RPC payload")?,
)
.ok_or_else(|| anyhow!("Anchor deserialization failed"))?;
let breakpoint = Breakpoint::from_proto(breakpoint)
.ok_or_else(|| anyhow!("Could not deserialize breakpoint"))?;
.context("Anchor deserialization failed")?;
let breakpoint =
Breakpoint::from_proto(breakpoint).context("Could not deserialize breakpoint")?;
breakpoints.update(&mut cx, |this, cx| {
this.toggle_breakpoint(
@@ -1,6 +1,6 @@
use std::sync::Arc;
use anyhow::{Ok, Result, anyhow};
use anyhow::{Context as _, Ok, Result};
use dap::{
Capabilities, ContinueArguments, ExceptionFilterOptions, InitializeRequestArguments,
InitializeRequestArgumentsPathFormat, NextArguments, SetVariableResponse, SourceBreakpoint,

@@ -1766,7 +1766,7 @@ impl DapCommand for LocationsCommand {
source: response
.source
.map(<dap::Source as ProtoConversion>::from_proto)
.ok_or_else(|| anyhow!("Missing `source` field in Locations proto"))?,
.context("Missing `source` field in Locations proto")?,
line: response.line,
column: response.column,
end_line: response.end_line,
@@ -237,9 +237,7 @@ impl DapStore {
let binary = DebugAdapterBinary::from_proto(response)?;
let mut ssh_command = ssh_client.update(cx, |ssh, _| {
anyhow::Ok(SshCommand {
arguments: ssh
.ssh_args()
.ok_or_else(|| anyhow!("SSH arguments not found"))?,
arguments: ssh.ssh_args().context("SSH arguments not found")?,
})
})??;

@@ -316,10 +314,10 @@ impl DapStore {
return Ok(result);
}
Err(anyhow!(
anyhow::bail!(
"None of the locators for task `{}` completed successfully",
build_command.label
))
)
})
} else {
Task::ready(Err(anyhow!(

@@ -735,7 +733,7 @@ impl DapStore {
let task = envelope
.payload
.build_command
.ok_or_else(|| anyhow!("missing definition"))?;
.context("missing definition")?;
let build_task = SpawnInTerminal::from_proto(task);
let locator = envelope.payload.locator;
let request = this

@@ -753,10 +751,7 @@ impl DapStore {
mut cx: AsyncApp,
) -> Result<proto::DebugAdapterBinary> {
let definition = DebugTaskDefinition::from_proto(
envelope
.payload
.definition
.ok_or_else(|| anyhow!("missing definition"))?,
envelope.payload.definition.context("missing definition")?,
)?;
let (tx, mut rx) = mpsc::unbounded();
let session_id = envelope.payload.session_id;
@@ -1,4 +1,4 @@
use anyhow::{Result, anyhow};
use anyhow::{Context as _, Result};
use async_trait::async_trait;
use dap::{DapLocator, DebugRequest, adapters::DebugAdapterName};
use gpui::SharedString;

@@ -90,11 +90,10 @@ impl DapLocator for CargoLocator {
}
async fn run(&self, build_config: SpawnInTerminal) -> Result<DebugRequest> {
let Some(cwd) = build_config.cwd.clone() else {
return Err(anyhow!(
"Couldn't get cwd from debug config which is needed for locators"
));
};
let cwd = build_config
.cwd
.clone()
.context("Couldn't get cwd from debug config which is needed for locators")?;
let builder = ShellBuilder::new(true, &build_config.shell).non_interactive();
let (program, args) = builder.build(
"cargo".into(),

@@ -119,9 +118,7 @@ impl DapLocator for CargoLocator {
}
let status = child.status().await?;
if !status.success() {
return Err(anyhow::anyhow!("Cargo command failed"));
}
anyhow::ensure!(status.success(), "Cargo command failed");
let executables = output
.lines()

@@ -133,9 +130,10 @@ impl DapLocator for CargoLocator {
.map(String::from)
})
.collect::<Vec<_>>();
if executables.is_empty() {
return Err(anyhow!("Couldn't get executable in cargo locator"));
};
anyhow::ensure!(
!executables.is_empty(),
"Couldn't get executable in cargo locator"
);
let is_test = build_config.args.first().map_or(false, |arg| arg == "test");
let mut test_name = None;

@@ -161,7 +159,7 @@ impl DapLocator for CargoLocator {
};
let Some(executable) = executable.or_else(|| executables.first().cloned()) else {
return Err(anyhow!("Couldn't get executable in cargo locator"));
anyhow::bail!("Couldn't get executable in cargo locator");
};
let args = test_name.into_iter().collect();
@@ -12,7 +12,7 @@ use super::dap_command::{
TerminateThreadsCommand, ThreadsCommand, VariablesCommand,
};
use super::dap_store::DapStore;
use anyhow::{Result, anyhow};
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet, IndexMap, IndexSet};
use dap::adapters::{DebugAdapterBinary, DebugAdapterName};
use dap::messages::Response;

@@ -487,8 +487,7 @@ impl Mode {
match self {
Mode::Running(debug_adapter_client) => debug_adapter_client.request(request),
Mode::Building => Task::ready(Err(anyhow!(
"no adapter running to send request: {:?}",
request
"no adapter running to send request: {request:?}"
))),
}
}

@@ -1736,7 +1735,7 @@ impl Session {
anyhow::Ok(
task.await
.map(|response| response.targets)
.ok_or_else(|| anyhow!("failed to fetch completions"))?,
.context("failed to fetch completions")?,
)
})
}
@@ -976,7 +976,7 @@ impl GitStore {
return cx.spawn(async move |cx| {
let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
.map_err(|_| anyhow!("no permalink available"))
.context("no permalink available")
});
// TODO remote case

@@ -997,23 +997,20 @@ impl GitStore {
RepositoryState::Local { backend, .. } => {
let origin_url = backend
.remote_url(&remote)
.ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
.with_context(|| format!("remote \"{remote}\" not found"))?;
let sha = backend
.head_sha()
.await
.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
let sha = backend.head_sha().await.context("reading HEAD SHA")?;
let provider_registry =
cx.update(GitHostingProviderRegistry::default_global)?;
let (provider, remote) =
parse_git_remote_url(provider_registry, &origin_url)
.ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
.context("parsing Git remote URL")?;
let path = repo_path
.to_str()
.ok_or_else(|| anyhow!("failed to convert path to string"))?;
let path = repo_path.to_str().with_context(|| {
format!("converting repo path {repo_path:?} to string")
})?;
Ok(provider.build_permalink(
remote,

@@ -1966,7 +1963,7 @@ impl GitStore {
let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
debug_panic!("no askpass found");
return Err(anyhow::anyhow!("no askpass found"));
anyhow::bail!("no askpass found");
};
let response = askpass.ask_password(envelope.payload.prompt).await?;

@@ -2035,7 +2032,7 @@ impl GitStore {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_unstaged_diff(buffer, cx))
})?
.ok_or_else(|| anyhow!("no such buffer"))?
.context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
let shared_diffs = this

@@ -2059,7 +2056,7 @@ impl GitStore {
let buffer = this.buffer_store.read(cx).get(buffer_id)?;
Some(this.open_uncommitted_diff(buffer, cx))
})?
.ok_or_else(|| anyhow!("no such buffer"))?
.context("missing buffer")?
.await?;
this.update(&mut cx, |this, _| {
let shared_diffs = this

@@ -3915,7 +3912,7 @@ impl Repository {
self.send_job(None, |repo, _cx| async move {
match repo {
RepositoryState::Local { backend, .. } => backend.checkpoint().await,
RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}

@@ -3929,7 +3926,7 @@ impl Repository {
RepositoryState::Local { backend, .. } => {
backend.restore_checkpoint(checkpoint).await
}
RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}

@@ -3984,7 +3981,7 @@ impl Repository {
RepositoryState::Local { backend, .. } => {
backend.compare_checkpoints(left, right).await
}
RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}

@@ -4001,7 +3998,7 @@ impl Repository {
.diff_checkpoints(base_checkpoint, target_checkpoint)
.await
}
RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
}
})
}

@@ -4064,7 +4061,7 @@ impl Repository {
cx.spawn(async move |_, cx| {
let environment = project_environment
.upgrade()
.ok_or_else(|| anyhow!("missing project environment"))?
.context("missing project environment")?
.update(cx, |project_environment, cx| {
project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
})?

@@ -4076,7 +4073,7 @@ impl Repository {
let backend = cx
.background_spawn(async move {
fs.open_repo(&dot_git_abs_path)
.ok_or_else(|| anyhow!("failed to build repository"))
.with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
})
.await?;

@@ -4215,8 +4212,7 @@ impl Repository {
buffer_id: buffer_id.to_proto(),
})
.await?;
let mode =
Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
let bases = match mode {
Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
Mode::IndexAndHead => DiffBasesChange::SetEach {

@@ -4353,7 +4349,7 @@ fn get_permalink_in_rust_registry_src(
let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
.ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
.context("parsing package.repository field of manifest")?;
let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
let permalink = provider.build_permalink(
remote,

@@ -4597,7 +4593,7 @@ fn status_from_proto(
let Some(variant) = status.and_then(|status| status.variant) else {
let code = proto::GitStatus::from_i32(simple_status)
.ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
.with_context(|| format!("Invalid git status code: {simple_status}"))?;
let result = match code {
proto::GitStatus::Added => TrackedStatus {
worktree_status: StatusCode::Added,

@@ -4619,7 +4615,7 @@ fn status_from_proto(
index_status: StatusCode::Unmodified,
}
.into(),
_ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
_ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
};
return Ok(result);
};

@@ -4631,12 +4627,12 @@ fn status_from_proto(
let [first_head, second_head] =
[unmerged.first_head, unmerged.second_head].map(|head| {
let code = proto::GitStatus::from_i32(head)
.ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
.with_context(|| format!("Invalid git status code: {head}"))?;
let result = match code {
proto::GitStatus::Added => UnmergedStatusCode::Added,
proto::GitStatus::Updated => UnmergedStatusCode::Updated,
proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
_ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
_ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
};
Ok(result)
});

@@ -4651,7 +4647,7 @@ fn status_from_proto(
let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
.map(|status| {
let code = proto::GitStatus::from_i32(status)
.ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
.with_context(|| format!("Invalid git status code: {status}"))?;
let result = match code {
proto::GitStatus::Modified => StatusCode::Modified,
proto::GitStatus::TypeChanged => StatusCode::TypeChanged,

@@ -4660,7 +4656,7 @@ fn status_from_proto(
proto::GitStatus::Renamed => StatusCode::Renamed,
proto::GitStatus::Copied => StatusCode::Copied,
proto::GitStatus::Unmodified => StatusCode::Unmodified,
_ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
_ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
};
Ok(result)
});
@@ -2,7 +2,7 @@ use crate::{
Project, ProjectEntryId, ProjectItem, ProjectPath,
worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
use anyhow::{Context as _, Result, anyhow};
use anyhow::{Context as _, Result};
use collections::{HashMap, HashSet, hash_map};
use futures::{StreamExt, channel::oneshot};
use gpui::{

@@ -128,7 +128,7 @@ impl ImageItem {
let file_metadata = fs
.metadata(image_path.as_path())
.await?
.ok_or_else(|| anyhow!("failed to load image metadata"))?;
.context("failed to load image metadata")?;
Ok(ImageMetadata {
width,

@@ -223,7 +223,7 @@ impl ProjectItem for ImageItem {
project: &Entity<Project>,
path: &ProjectPath,
cx: &mut App,
) -> Option<Task<gpui::Result<Entity<Self>>>> {
) -> Option<Task<anyhow::Result<Entity<Self>>>> {
if is_image_file(&project, &path, cx) {
Some(cx.spawn({
let path = path.clone();

@@ -702,7 +702,7 @@ fn create_gpui_image(content: Vec<u8>) -> anyhow::Result<Arc<gpui::Image>> {
image::ImageFormat::Gif => gpui::ImageFormat::Gif,
image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
_ => Err(anyhow::anyhow!("Image format not supported"))?,
format => anyhow::bail!("Image format {format:?} not supported"),
},
content,
)))
@@ -7,7 +7,7 @@ use crate::{
PrepareRenameResponse, ProjectTransaction, ResolveState,
lsp_store::{LocalLspStore, LspStore},
};
use anyhow::{Context as _, Result, anyhow};
use anyhow::{Context as _, Result};
use async_trait::async_trait;
use client::proto::{self, PeerId};
use clock::Global;

@@ -48,9 +48,7 @@ pub fn lsp_formatting_options(settings: &LanguageSettings) -> lsp::FormattingOpt
pub(crate) fn file_path_to_lsp_url(path: &Path) -> Result<lsp::Url> {
match lsp::Url::from_file_path(path) {
Ok(url) => Ok(url),
Err(()) => Err(anyhow!(
"Invalid file path provided to LSP request: {path:?}"
)),
Err(()) => anyhow::bail!("Invalid file path provided to LSP request: {path:?}"),
}
}

@@ -293,7 +291,7 @@ impl LspCommand for PrepareRename {
Some(lsp::OneOf::Left(true)) => Ok(LspParamsOrResponse::Response(
PrepareRenameResponse::OnlyUnpreparedRenameSupported,
)),
_ => Err(anyhow!("Rename not supported")),
_ => anyhow::bail!("Rename not supported"),
}
}

@@ -359,7 +357,7 @@ impl LspCommand for PrepareRename {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -422,9 +420,9 @@ impl LspCommand for PrepareRename {
) {
Ok(PrepareRenameResponse::Success(start..end))
} else {
Err(anyhow!(
anyhow::bail!(
"Missing start or end position in remote project PrepareRenameResponse"
))
);
}
} else if message.only_unprepared_rename_supported {
Ok(PrepareRenameResponse::OnlyUnpreparedRenameSupported)

@@ -508,7 +506,7 @@ impl LspCommand for PerformRename {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -543,9 +541,7 @@ impl LspCommand for PerformRename {
_: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<ProjectTransaction> {
let message = message
.transaction
.ok_or_else(|| anyhow!("missing transaction"))?;
let message = message.transaction.context("missing transaction")?;
lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {

@@ -622,7 +618,7 @@ impl LspCommand for GetDefinition {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -721,7 +717,7 @@ impl LspCommand for GetDeclaration {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -813,7 +809,7 @@ impl LspCommand for GetImplementation {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -912,7 +908,7 @@ impl LspCommand for GetTypeDefinition {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -963,7 +959,7 @@ fn language_server_for_buffer(
.map(|(adapter, server)| (adapter.clone(), server.clone()))
})
})?
.ok_or_else(|| anyhow!("no language server found for buffer"))
.context("no language server found for buffer")
}
pub async fn location_links_from_proto(

@@ -997,11 +993,11 @@ pub fn location_link_from_proto(
let start = origin
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin start"))?;
.context("missing origin start")?;
let end = origin
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing origin end"))?;
.context("missing origin end")?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;

@@ -1013,7 +1009,7 @@ pub fn location_link_from_proto(
None => None,
};
let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
let target = link.target.context("missing target")?;
let buffer_id = BufferId::new(target.buffer_id)?;
let buffer = lsp_store
.update(cx, |lsp_store, cx| {

@@ -1023,11 +1019,11 @@ pub fn location_link_from_proto(
let start = target
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
.context("missing target start")?;
let end = target
..end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
.context("missing target end")?;
buffer
.update(cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;

@@ -1337,7 +1333,7 @@ impl LspCommand for GetReferences {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -1393,11 +1389,11 @@ impl LspCommand for GetReferences {
let start = location
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
.context("missing target start")?;
let end = location
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
.context("missing target end")?;
target_buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;

@@ -1494,7 +1490,7 @@ impl LspCommand for GetDocumentHighlights {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -1540,11 +1536,11 @@ impl LspCommand for GetDocumentHighlights {
let start = highlight
.start
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target start"))?;
.context("missing target start")?;
let end = highlight
.end
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("missing target end"))?;
.context("missing target end")?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))?
.await?;

@@ -1723,19 +1719,15 @@ impl LspCommand for GetDocumentSymbols {
let kind =
unsafe { mem::transmute::<i32, lsp::SymbolKind>(serialized_symbol.kind) };
let start = serialized_symbol
.start
.ok_or_else(|| anyhow!("invalid start"))?;
let end = serialized_symbol
.end
.ok_or_else(|| anyhow!("invalid end"))?;
let start = serialized_symbol.start.context("invalid start")?;
let end = serialized_symbol.end.context("invalid end")?;
let selection_start = serialized_symbol
.selection_start
.ok_or_else(|| anyhow!("invalid selection start"))?;
.context("invalid selection start")?;
let selection_end = serialized_symbol
.selection_end
.ok_or_else(|| anyhow!("invalid selection end"))?;
.context("invalid selection end")?;
Ok(DocumentSymbol {
name: serialized_symbol.name,

@@ -1993,7 +1985,7 @@ impl LspCommand for GetHover {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -2329,7 +2321,7 @@ impl LspCommand for GetCompletions {
buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
})
})
.ok_or_else(|| anyhow!("invalid position"))??;
.context("invalid position")??;
Ok(Self {
position,
context: CompletionContext {

@@ -2597,11 +2589,11 @@ impl LspCommand for GetCodeActions {
let start = message
.start
.and_then(language::proto::deserialize_anchor)
.ok_or_else(|| anyhow!("invalid start"))?;
.context("invalid start")?;
let end = message
.end
.and_then(language::proto::deserialize_anchor)
.ok_or_else(|| anyhow!("invalid end"))?;
.context("invalid end")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -2767,7 +2759,7 @@ impl LspCommand for OnTypeFormatting {
let position = message
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))

@@ -3576,15 +3568,13 @@ impl LspCommand for LinkedEditingRange {
buffer: Entity<Buffer>,
mut cx: AsyncApp,
) -> Result<Self> {
let position = message
.position
.ok_or_else(|| anyhow!("invalid position"))?;
let position = message.position.context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| {
buffer.wait_for_version(deserialize_version(&message.version))
})?
.await?;
let position = deserialize_anchor(position).ok_or_else(|| anyhow!("invalid position"))?;
let position = deserialize_anchor(position).context("invalid position")?;
buffer
.update(&mut cx, |buffer, _| buffer.wait_for_anchors([position]))?
.await?;
@@ -1204,7 +1204,7 @@ impl LocalLspStore {
buffer.finalize_last_transaction();
let transaction_id = buffer
.start_transaction()
.ok_or_else(|| anyhow!("transaction already open"))?;
.context("transaction already open")?;
let transaction = buffer
.get_transaction(transaction_id)
.expect("transaction started")

@@ -1862,14 +1862,14 @@ impl LocalLspStore {
let capabilities = &language_server.capabilities();
let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
if range_formatting_provider.map_or(false, |provider| provider == &OneOf::Left(false)) {
return Err(anyhow!(
anyhow::bail!(
"{} language server does not support range formatting",
language_server.name()
));
);
}
let uri = lsp::Url::from_file_path(abs_path)
.map_err(|_| anyhow!("failed to convert abs path to uri"))?;
.map_err(|()| anyhow!("failed to convert abs path to uri"))?;
let text_document = lsp::TextDocumentIdentifier::new(uri);
let lsp_edits = {

@@ -1934,7 +1934,7 @@ impl LocalLspStore {
zlog::info!(logger => "Formatting via LSP");
let uri = lsp::Url::from_file_path(abs_path)
.map_err(|_| anyhow!("failed to convert abs path to uri"))?;
.map_err(|()| anyhow!("failed to convert abs path to uri"))?;
let text_document = lsp::TextDocumentIdentifier::new(uri);
let capabilities = &language_server.capabilities();

@@ -2026,10 +2026,7 @@ impl LocalLspStore {
.stderr(smol::process::Stdio::piped())
.spawn()?;
let stdin = child
.stdin
.as_mut()
.ok_or_else(|| anyhow!("failed to acquire stdin"))?;
let stdin = child.stdin.as_mut().context("failed to acquire stdin")?;
let text = buffer
.handle
.update(cx, |buffer, _| buffer.as_rope().clone())?;

@@ -2039,14 +2036,13 @@ impl LocalLspStore {
stdin.flush().await?;
let output = child.output().await?;
if !output.status.success() {
return Err(anyhow!(
"command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
output.status.code(),
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr),
));
}
anyhow::ensure!(
output.status.success(),
"command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
output.status.code(),
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr),
);
let stdout = String::from_utf8(output.stdout)?;
Ok(Some(

@@ -2570,9 +2566,7 @@ impl LocalLspStore {
// We detect this case and treat it as if the version was `None`.
return Ok(buffer.read(cx).text_snapshot());
} else {
return Err(anyhow!(
"no snapshots found for buffer {buffer_id} and server {server_id}"
));
anyhow::bail!("no snapshots found for buffer {buffer_id} and server {server_id}");
};
let found_snapshot = snapshots

@@ -2617,7 +2611,7 @@ impl LocalLspStore {
push_to_history: bool,
project_transaction: &mut ProjectTransaction,
cx: &mut AsyncApp,
) -> Result<(), anyhow::Error> {
) -> anyhow::Result<()> {
for mut action in actions {
Self::try_resolve_code_action(language_server, &mut action)
.await

@@ -2846,7 +2840,7 @@ impl LocalLspStore {
let abs_path = op
.uri
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
.map_err(|()| anyhow!("can't convert URI to path"))?;
if let Some(parent_path) = abs_path.parent() {
fs.create_dir(parent_path).await?;

@@ -2871,11 +2865,11 @@ impl LocalLspStore {
let source_abs_path = op
.old_uri
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
.map_err(|()| anyhow!("can't convert URI to path"))?;
let target_abs_path = op
.new_uri
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
.map_err(|()| anyhow!("can't convert URI to path"))?;
fs.rename(
&source_abs_path,
&target_abs_path,

@@ -2893,7 +2887,7 @@ impl LocalLspStore {
let abs_path = op
.uri
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
.map_err(|()| anyhow!("can't convert URI to path"))?;
let options = op
.options
.map(|options| fs::RemoveOptions {

@@ -3042,12 +3036,10 @@ impl LocalLspStore {
adapter: Arc<CachedLspAdapter>,
cx: &mut AsyncApp,
) -> Result<lsp::ApplyWorkspaceEditResponse> {
let this = this
.upgrade()
.ok_or_else(|| anyhow!("project project closed"))?;
let this = this.upgrade().context("project project closed")?;
let language_server = this
.update(cx, |this, _| this.language_server_for_id(server_id))?
.ok_or_else(|| anyhow!("language server not found"))?;
.context("language server not found")?;
let transaction = Self::deserialize_workspace_edit(
this.clone(),
params.edit,

@@ -4372,13 +4364,13 @@ impl LspStore {
err
);
log::warn!("{message}");
anyhow!(message)
anyhow::anyhow!(message)
})?;
let response = request
.response_from_lsp(
response,
this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
this.upgrade().context("no app context")?,
buffer_handle,
language_server.server_id(),
cx.clone(),

@@ -4591,7 +4583,7 @@ impl LspStore {
.request(request)
.await?
.transaction
.ok_or_else(|| anyhow!("missing transaction"))?;
.context("missing transaction")?;
buffer_store
.update(cx, |buffer_store, cx| {

@@ -4613,7 +4605,7 @@ impl LspStore {
if let Some(edit) = action.lsp_action.edit() {
if edit.changes.is_some() || edit.document_changes.is_some() {
return LocalLspStore::deserialize_workspace_edit(
this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
this.upgrade().context("no app present")?,
edit.clone(),
push_to_history,
lsp_adapter.clone(),

@@ -5715,7 +5707,7 @@ impl LspStore {
LspCommand::response_from_proto(
lsp_request,
response,
project.upgrade().ok_or_else(|| anyhow!("No project"))?,
project.upgrade().context("No project")?,
buffer_handle.clone(),
cx.clone(),
)

@@ -6525,7 +6517,7 @@ impl LspStore {
mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
filter: F,
cx: &mut Context<Self>,
) -> Result<(), anyhow::Error> {
) -> anyhow::Result<()> {
let Some((worktree, relative_path)) =
self.worktree_store.read(cx).find_worktree(&abs_path, cx)
else {

@@ -6730,7 +6722,7 @@ impl LspStore {
let abs_path = abs_path
.to_file_path()
.map_err(|_| anyhow!("can't convert URI to path"))?;
.map_err(|()| anyhow!("can't convert URI to path"))?;
let p = abs_path.clone();
let yarn_worktree = lsp_store
.update(cx, move |lsp_store, cx| match lsp_store.as_local() {

@@ -7094,12 +7086,8 @@ impl LspStore {
mut cx: AsyncApp,
) -> Result<proto::ApplyCodeActionResponse> {
let sender_id = envelope.original_sender_id().unwrap_or_default();
let action = Self::deserialize_code_action(
envelope
.payload
.action
.ok_or_else(|| anyhow!("invalid action"))?,
)?;
let action =
Self::deserialize_code_action(envelope.payload.action.context("invalid action")?)?;
let apply_code_action = this.update(&mut cx, |this, cx| {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;

@@ -7198,7 +7186,7 @@ impl LspStore {
)
})
})?
.ok_or_else(|| anyhow!("worktree not found"))?;
.context("worktree not found")?;
let (old_abs_path, new_abs_path) = {
let root_path = worktree.update(&mut cx, |this, _| this.abs_path())?;
let new_path = PathBuf::from_proto(envelope.payload.new_path.clone());

@@ -7288,10 +7276,7 @@ impl LspStore {
envelope: TypedEnvelope<proto::StartLanguageServer>,
mut cx: AsyncApp,
) -> Result<()> {
let server = envelope
.payload
.server
.ok_or_else(|| anyhow!("invalid server"))?;
let server = envelope.payload.server.context("invalid server")?;
this.update(&mut cx, |this, cx| {
let server_id = LanguageServerId(server.id as usize);

@@ -7322,11 +7307,7 @@ impl LspStore {
this.update(&mut cx, |this, cx| {
let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
match envelope
.payload
.variant
.ok_or_else(|| anyhow!("invalid variant"))?
{
match envelope.payload.variant.context("invalid variant")? {
proto::update_language_server::Variant::WorkStart(payload) => {
this.on_lsp_work_start(
language_server_id,

@@ -7903,11 +7884,11 @@ impl LspStore {
let completion = this
.read_with(&cx, |this, cx| {
let id = LanguageServerId(envelope.payload.language_server_id as usize);
let Some(server) = this.language_server_for_id(id) else {
return Err(anyhow!("No language server {id}"));
};
let server = this
.language_server_for_id(id)
.with_context(|| format!("No language server {id}"))?;
Ok(cx.background_spawn(async move {
anyhow::Ok(cx.background_spawn(async move {
let can_resolve = server
.capabilities()
.completion_provider

@@ -7994,8 +7975,8 @@ impl LspStore {
.payload
.position
.and_then(deserialize_anchor)
.ok_or_else(|| anyhow!("invalid position"))?;
Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
.context("invalid position")?;
anyhow::Ok(this.apply_on_type_formatting(
buffer,
position,
envelope.payload.trigger.clone(),

@@ -8114,18 +8095,12 @@ impl LspStore {
mut cx: AsyncApp,
) -> Result<proto::OpenBufferForSymbolResponse> {
let peer_id = envelope.original_sender_id().unwrap_or_default();
let symbol = envelope
.payload
.symbol
.ok_or_else(|| anyhow!("invalid symbol"))?;
let symbol = envelope.payload.symbol.context("invalid symbol")?;
let symbol = Self::deserialize_symbol(symbol)?;
let symbol = this.update(&mut cx, |this, _| {
let signature = this.symbol_signature(&symbol.path);
if signature == symbol.signature {
Ok(symbol)
} else {
Err(anyhow!("invalid symbol signature"))
}
anyhow::ensure!(signature == symbol.signature, "invalid symbol signature");
Ok(symbol)
})??;
let buffer = this
.update(&mut cx, |this, cx| {

@@ -8268,10 +8243,7 @@ impl LspStore {
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
let buffer = this.buffer_store.read(cx).get_existing(buffer_id)?;
let completion = Self::deserialize_completion(
envelope
.payload
.completion
.ok_or_else(|| anyhow!("invalid completion"))?,
envelope.payload.completion.context("invalid completion")?,
)?;
anyhow::Ok((buffer, completion))
})??;

@@ -8365,10 +8337,7 @@ impl LspStore {
let ranges = match &target {
LspFormatTarget::Buffers => None,
LspFormatTarget::Ranges(ranges) => {
let Some(ranges) = ranges.get(&id) else {
return Err(anyhow!("No format ranges provided for buffer"));
};
Some(ranges.clone())
Some(ranges.get(&id).context("No format ranges provided for buffer")?.clone())
}
};

@@ -8498,17 +8467,20 @@ impl LspStore {
buffers.insert(this.buffer_store.read(cx).get_existing(buffer_id)?);
}
let kind = match envelope.payload.kind.as_str() {
"" => Ok(CodeActionKind::EMPTY),
"quickfix" => Ok(CodeActionKind::QUICKFIX),
"refactor" => Ok(CodeActionKind::REFACTOR),
"refactor.extract" => Ok(CodeActionKind::REFACTOR_EXTRACT),
"refactor.inline" => Ok(CodeActionKind::REFACTOR_INLINE),
"refactor.rewrite" => Ok(CodeActionKind::REFACTOR_REWRITE),
"source" => Ok(CodeActionKind::SOURCE),
"source.organizeImports" => Ok(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
"source.fixAll" => Ok(CodeActionKind::SOURCE_FIX_ALL),
_ => Err(anyhow!("Invalid code action kind")),
}?;
"" => CodeActionKind::EMPTY,
"quickfix" => CodeActionKind::QUICKFIX,
"refactor" => CodeActionKind::REFACTOR,
"refactor.extract" => CodeActionKind::REFACTOR_EXTRACT,
"refactor.inline" => CodeActionKind::REFACTOR_INLINE,
"refactor.rewrite" => CodeActionKind::REFACTOR_REWRITE,
"source" => CodeActionKind::SOURCE,
"source.organizeImports" => CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
"source.fixAll" => CodeActionKind::SOURCE_FIX_ALL,
_ => anyhow::bail!(
"Invalid code action kind {}",
envelope.payload.kind.as_str()
),
};
anyhow::Ok(this.apply_code_action_kind(buffers, kind, false, cx))
})??;

@@ -8778,7 +8750,7 @@ impl LspStore {
let abs_path = params
.uri
.to_file_path()
.map_err(|_| anyhow!("URI is not a file"))?;
.map_err(|()| anyhow!("URI is not a file"))?;
let mut diagnostics = Vec::default();
let mut primary_diagnostic_group_ids = HashMap::default();
let mut sources_by_group_id = HashMap::default();

@@ -9320,12 +9292,8 @@ impl LspStore {
path: Arc::<Path>::from_proto(serialized_symbol.path),
};
let start = serialized_symbol
.start
.ok_or_else(|| anyhow!("invalid start"))?;
let end = serialized_symbol
.end
.ok_or_else(|| anyhow!("invalid end"))?;
let start = serialized_symbol.start.context("invalid start")?;
let end = serialized_symbol.end.context("invalid end")?;
Ok(CoreSymbol {
language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
source_worktree_id,

@@ -10307,15 +10275,14 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
.output()
.await?;
if output.status.success() {
return Ok(());
}
Err(anyhow!(
anyhow::ensure!(
output.status.success(),
"{}, stdout: {:?}, stderr: {:?}",
output.status,
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
))
);
Ok(())
}
fn update_status(&self, server_name: LanguageServerName, status: language::BinaryStatus) {
@@ -761,8 +761,7 @@ pub(super) async fn format_with_prettier(
.log_err();
Some(Err(anyhow!(
"{} failed to spawn: {error:#}",
prettier_description
"{prettier_description} failed to spawn: {error:#}"
)))
}
}
@@ -2022,7 +2022,7 @@ impl Project {
worktree.expand_all_for_entry(entry_id, cx)
});
Some(cx.spawn(async move |this, cx| {
task.ok_or_else(|| anyhow!("no task"))?.await?;
task.context("no task")?.await?;
this.update(cx, |_, cx| {
cx.emit(Event::ExpandedAllForEntry(worktree_id, entry_id));
})?;

@@ -2031,9 +2031,10 @@ impl Project {
}
pub fn shared(&mut self, project_id: u64, cx: &mut Context<Self>) -> Result<()> {
if !matches!(self.client_state, ProjectClientState::Local) {
return Err(anyhow!("project was already shared"));
}
anyhow::ensure!(
matches!(self.client_state, ProjectClientState::Local),
"project was already shared"
);
self.client_subscriptions.extend([
self.client

@@ -2151,9 +2152,10 @@ impl Project {
}
fn unshare_internal(&mut self, cx: &mut App) -> Result<()> {
if self.is_via_collab() {
return Err(anyhow!("attempted to unshare a remote project"));
}
anyhow::ensure!(
!self.is_via_collab(),
"attempted to unshare a remote project"
);
if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
self.client_state = ProjectClientState::Local;

@@ -2189,7 +2191,7 @@ impl Project {
.ok();
Ok(())
} else {
Err(anyhow!("attempted to unshare an unshared project"))
anyhow::bail!("attempted to unshare an unshared project");
}
}

@@ -2431,7 +2433,7 @@ impl Project {
if let Some(buffer) = self.buffer_for_id(id, cx) {
Task::ready(Ok(buffer))
} else if self.is_local() || self.is_via_ssh() {
Task::ready(Err(anyhow!("buffer {} does not exist", id)))
Task::ready(Err(anyhow!("buffer {id} does not exist")))
} else if let Some(project_id) = self.remote_id() {
let request = self.client.request(proto::OpenBufferById {
project_id,

@@ -2521,9 +2523,7 @@ impl Project {
let weak_project = cx.entity().downgrade();
cx.spawn(async move |_, cx| {
let image_item = open_image_task.await?;
let project = weak_project
.upgrade()
.ok_or_else(|| anyhow!("Project dropped"))?;
let project = weak_project.upgrade().context("Project dropped")?;
let metadata = ImageItem::load_image_metadata(image_item.clone(), project, cx).await?;
image_item.update(cx, |image_item, cx| {

@@ -4272,7 +4272,7 @@ impl Project {
.payload
.collaborator
.take()
.ok_or_else(|| anyhow!("empty collaborator"))?;
.context("empty collaborator")?;
let collaborator = Collaborator::from_proto(collaborator)?;
this.update(&mut cx, |this, cx| {

@@ -4296,16 +4296,16 @@ impl Project {
let old_peer_id = envelope
.payload
.old_peer_id
.ok_or_else(|| anyhow!("missing old peer id"))?;
.context("missing old peer id")?;
let new_peer_id = envelope
.payload
.new_peer_id
.ok_or_else(|| anyhow!("missing new peer id"))?;
.context("missing new peer id")?;
this.update(&mut cx, |this, cx| {
let collaborator = this
.collaborators
.remove(&old_peer_id)
.ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
.context("received UpdateProjectCollaborator for unknown peer")?;
let is_host = collaborator.is_host;
this.collaborators.insert(new_peer_id, collaborator);

@@ -4336,14 +4336,11 @@ impl Project {
mut cx: AsyncApp,
) -> Result<()> {
this.update(&mut cx, |this, cx| {
let peer_id = envelope
.payload
.peer_id
.ok_or_else(|| anyhow!("invalid peer id"))?;
let peer_id = envelope.payload.peer_id.context("invalid peer id")?;
let replica_id = this
.collaborators
.remove(&peer_id)
.ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
.with_context(|| format!("unknown peer {peer_id:?}"))?
.replica_id;
this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.forget_shared_buffers_for(&peer_id);

@@ -4557,11 +4554,7 @@ impl Project {
) -> Result<proto::FindSearchCandidatesResponse> {
let peer_id = envelope.original_sender_id()?;
let message = envelope.payload;
let query = SearchQuery::from_proto(
message
.query
.ok_or_else(|| anyhow!("missing query field"))?,
)?;
let query = SearchQuery::from_proto(message.query.context("missing query field")?)?;
let results = this.update(&mut cx, |this, cx| {
this.find_search_candidate_buffers(&query, message.limit as _, cx)
})?;

@@ -4639,13 +4632,10 @@ impl Project {
.file()
.map(|f| f.is_private())
.unwrap_or_default();
if is_private {
Err(anyhow!(ErrorCode::UnsharedItem))
} else {
Ok(proto::OpenBufferResponse {
buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
})
}
anyhow::ensure!(!is_private, ErrorCode::UnsharedItem);
Ok(proto::OpenBufferResponse {
buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
})
})?
}
@@ -450,10 +450,7 @@ impl WorktreeStore {
})
.collect::<HashMap<_, _>>();
let (client, project_id) = self
.upstream_client()
.clone()
.ok_or_else(|| anyhow!("invalid project"))?;
let (client, project_id) = self.upstream_client().clone().context("invalid project")?;
for worktree in worktrees {
if let Some(old_worktree) =

@@ -916,7 +913,7 @@ impl WorktreeStore {
let worktree = this.update(&mut cx, |this, cx| {
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
this.worktree_for_id(worktree_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
.context("worktree not found")
})??;
Worktree::handle_create_entry(worktree, envelope.payload, cx).await
}

@@ -929,7 +926,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
.context("worktree not found")
})??;
Worktree::handle_copy_entry(worktree, envelope.payload, cx).await
}

@@ -942,7 +939,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this.update(&mut cx, |this, cx| {
this.worktree_for_entry(entry_id, cx)
.ok_or_else(|| anyhow!("worktree not found"))
.context("worktree not found")
})??;
Worktree::handle_delete_entry(worktree, envelope.payload, cx).await
}

@@ -955,7 +952,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
.ok_or_else(|| anyhow!("invalid request"))?;
.context("invalid request")?;
Worktree::handle_expand_entry(worktree, envelope.payload, cx).await
}

@@ -967,7 +964,7 @@ impl WorktreeStore {
let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
let worktree = this
.update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
.ok_or_else(|| anyhow!("invalid request"))?;
.context("invalid request")?;
Worktree::handle_expand_all_for_entry(worktree, envelope.payload, cx).await
}
}