Use anyhow
more idiomatically (#31052)
https://github.com/zed-industries/zed/issues/30972 brought up another case where our context is not enough to track the actual source of the issue: we get a general top-level error without an inner error. The reason for this was `.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?; ` on the top level. The PR finally reworks the way we use anyhow to reduce such issues (or at least make it simpler to bubble them up later in a fix). On top of that, it uses a few more anyhow methods for better readability. * `.ok_or_else(|| anyhow!("..."))`, `map_err` and other similar error conversion/option reporting cases are replaced with `context` and `with_context` calls * in addition to that, various `anyhow!("failed to do ...")` are replaced with `.context("Doing ...")` messages instead to remove the parasitic `failed to` text * `anyhow::ensure!` is used instead of `if ... { return Err(...); }` calls * `anyhow::bail!` is used instead of `return Err(anyhow!(...));` Release Notes: - N/A
This commit is contained in:
parent
1e51a7ac44
commit
16366cf9f2
294 changed files with 2037 additions and 2610 deletions
|
@ -1212,12 +1212,7 @@ impl AgentPanel {
|
|||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(workspace) = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace dropped"))
|
||||
.log_err()
|
||||
else {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use crate::context::ContextLoadResult;
|
||||
use crate::inline_prompt_editor::CodegenStatus;
|
||||
use crate::{context::load_context, context_store::ContextStore};
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::HashSet;
|
||||
|
@ -419,16 +419,16 @@ impl CodegenAlternative {
|
|||
if start_buffer.remote_id() == end_buffer.remote_id() {
|
||||
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("invalid transformation range"));
|
||||
anyhow::bail!("invalid transformation range");
|
||||
}
|
||||
} else {
|
||||
return Err(anyhow::anyhow!("invalid transformation range"));
|
||||
anyhow::bail!("invalid transformation range");
|
||||
};
|
||||
|
||||
let prompt = self
|
||||
.builder
|
||||
.generate_inline_transformation_prompt(user_prompt, language_name, buffer, range)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to generate content prompt: {}", e))?;
|
||||
.context("generating content prompt")?;
|
||||
|
||||
let context_task = self.context_store.as_ref().map(|context_store| {
|
||||
if let Some(project) = self.project.upgrade() {
|
||||
|
|
|
@ -2,7 +2,7 @@ use std::ops::Range;
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_context_editor::AssistantContext;
|
||||
use collections::{HashSet, IndexSet};
|
||||
use futures::{self, FutureExt};
|
||||
|
@ -142,17 +142,12 @@ impl ContextStore {
|
|||
remove_if_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<Option<AgentContextHandle>> {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return Err(anyhow!("failed to read project"));
|
||||
};
|
||||
|
||||
let Some(entry_id) = project
|
||||
let project = self.project.upgrade().context("failed to read project")?;
|
||||
let entry_id = project
|
||||
.read(cx)
|
||||
.entry_for_path(project_path, cx)
|
||||
.map(|entry| entry.id)
|
||||
else {
|
||||
return Err(anyhow!("no entry found for directory context"));
|
||||
};
|
||||
.context("no entry found for directory context")?;
|
||||
|
||||
let context_id = self.next_context_id.post_inc();
|
||||
let context = AgentContextHandle::Directory(DirectoryContextHandle {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{collections::VecDeque, path::Path, sync::Arc};
|
||||
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use anyhow::Context as _;
|
||||
use assistant_context_editor::{AssistantContext, SavedContextMetadata};
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::future::{TryFutureExt as _, join_all};
|
||||
|
@ -130,7 +130,10 @@ impl HistoryStore {
|
|||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async { Err(anyhow!("no thread store")) }.boxed()
|
||||
async {
|
||||
anyhow::bail!("no thread store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
SerializedRecentEntry::Context(id) => context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
|
@ -140,7 +143,10 @@ impl HistoryStore {
|
|||
.boxed()
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
async { Err(anyhow!("no context store")) }.boxed()
|
||||
async {
|
||||
anyhow::bail!("no context store");
|
||||
}
|
||||
.boxed()
|
||||
}),
|
||||
});
|
||||
let entries = join_all(entries)
|
||||
|
|
|
@ -1630,7 +1630,7 @@ impl Thread {
|
|||
CompletionRequestStatus::Failed {
|
||||
code, message, request_id
|
||||
} => {
|
||||
return Err(anyhow!("completion request failed. request_id: {request_id}, code: {code}, message: {message}"));
|
||||
anyhow::bail!("completion request failed. request_id: {request_id}, code: {code}, message: {message}");
|
||||
}
|
||||
CompletionRequestStatus::UsageUpdated {
|
||||
amount, limit
|
||||
|
|
|
@ -419,7 +419,7 @@ impl ThreadStore {
|
|||
let thread = database
|
||||
.try_find_thread(id.clone())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
|
||||
.with_context(|| format!("no thread found with ID: {id:?}"))?;
|
||||
|
||||
let thread = this.update_in(cx, |this, window, cx| {
|
||||
cx.new(|cx| {
|
||||
|
@ -699,20 +699,14 @@ impl SerializedThread {
|
|||
SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
|
||||
saved_thread_json,
|
||||
)?),
|
||||
_ => Err(anyhow!(
|
||||
"unrecognized serialized thread version: {}",
|
||||
version
|
||||
)),
|
||||
_ => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
|
||||
},
|
||||
None => {
|
||||
let saved_thread =
|
||||
serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
|
||||
Ok(saved_thread.upgrade())
|
||||
}
|
||||
version => Err(anyhow!(
|
||||
"unrecognized serialized thread version: {:?}",
|
||||
version
|
||||
)),
|
||||
version => anyhow::bail!("unrecognized serialized thread version: {version:?}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -90,7 +90,7 @@ impl Model {
|
|||
} else if id.starts_with("claude-3-haiku") {
|
||||
Ok(Self::Claude3Haiku)
|
||||
} else {
|
||||
Err(anyhow!("invalid model id"))
|
||||
anyhow::bail!("invalid model id {id}");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -385,10 +385,10 @@ impl RateLimitInfo {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
|
||||
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> anyhow::Result<&'a str> {
|
||||
Ok(headers
|
||||
.get(key)
|
||||
.ok_or_else(|| anyhow!("missing header `{key}`"))?
|
||||
.with_context(|| format!("missing header `{key}`"))?
|
||||
.to_str()?)
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// This crate was essentially pulled out verbatim from main `zed` crate to avoid having to run RustEmbed macro whenever zed has to be rebuilt. It saves a second or two on an incremental build.
|
||||
use anyhow::anyhow;
|
||||
|
||||
use anyhow::Context as _;
|
||||
use gpui::{App, AssetSource, Result, SharedString};
|
||||
use rust_embed::RustEmbed;
|
||||
|
||||
|
@ -21,7 +21,7 @@ impl AssetSource for Assets {
|
|||
fn load(&self, path: &str) -> Result<Option<std::borrow::Cow<'static, [u8]>>> {
|
||||
Self::get(path)
|
||||
.map(|f| Some(f.data))
|
||||
.ok_or_else(|| anyhow!("could not find asset at path \"{}\"", path))
|
||||
.with_context(|| format!("loading asset at path {path:?}"))
|
||||
}
|
||||
|
||||
fn list(&self, path: &str) -> Result<Vec<SharedString>> {
|
||||
|
@ -39,7 +39,7 @@ impl AssetSource for Assets {
|
|||
|
||||
impl Assets {
|
||||
/// Populate the [`TextSystem`] of the given [`AppContext`] with all `.ttf` fonts in the `fonts` directory.
|
||||
pub fn load_fonts(&self, cx: &App) -> gpui::Result<()> {
|
||||
pub fn load_fonts(&self, cx: &App) -> anyhow::Result<()> {
|
||||
let font_paths = self.list("fonts")?;
|
||||
let mut embedded_fonts = Vec::new();
|
||||
for font_path in font_paths {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#[cfg(test)]
|
||||
mod context_tests;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, bail};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::{
|
||||
SlashCommandContent, SlashCommandEvent, SlashCommandLine, SlashCommandOutputSection,
|
||||
|
@ -3011,7 +3011,7 @@ impl SavedContext {
|
|||
let saved_context_json = serde_json::from_str::<serde_json::Value>(json)?;
|
||||
match saved_context_json
|
||||
.get("version")
|
||||
.ok_or_else(|| anyhow!("version not found"))?
|
||||
.context("version not found")?
|
||||
{
|
||||
serde_json::Value::String(version) => match version.as_str() {
|
||||
SavedContext::VERSION => {
|
||||
|
@ -3032,9 +3032,9 @@ impl SavedContext {
|
|||
serde_json::from_value::<SavedContextV0_1_0>(saved_context_json)?;
|
||||
Ok(saved_context.upgrade())
|
||||
}
|
||||
_ => Err(anyhow!("unrecognized saved context version: {}", version)),
|
||||
_ => anyhow::bail!("unrecognized saved context version: {version:?}"),
|
||||
},
|
||||
_ => Err(anyhow!("version not found on saved context")),
|
||||
_ => anyhow::bail!("version not found on saved context"),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::{
|
|||
AssistantContext, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext,
|
||||
SavedContextMetadata,
|
||||
};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_slash_command::{SlashCommandId, SlashCommandWorkingSet};
|
||||
use client::{Client, TypedEnvelope, proto, telemetry::Telemetry};
|
||||
use clock::ReplicaId;
|
||||
|
@ -164,16 +164,18 @@ impl ContextStore {
|
|||
) -> Result<proto::OpenContextResponse> {
|
||||
let context_id = ContextId::from_proto(envelope.payload.context_id);
|
||||
let operations = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host contexts can be opened"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"only the host contexts can be opened"
|
||||
);
|
||||
|
||||
let context = this
|
||||
.loaded_context_for_id(&context_id, cx)
|
||||
.context("context not found")?;
|
||||
if context.read(cx).replica_id() != ReplicaId::default() {
|
||||
return Err(anyhow!("context must be opened via the host"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
context.read(cx).replica_id() == ReplicaId::default(),
|
||||
"context must be opened via the host"
|
||||
);
|
||||
|
||||
anyhow::Ok(
|
||||
context
|
||||
|
@ -193,9 +195,10 @@ impl ContextStore {
|
|||
mut cx: AsyncApp,
|
||||
) -> Result<proto::CreateContextResponse> {
|
||||
let (context_id, operations) = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("can only create contexts as the host"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"can only create contexts as the host"
|
||||
);
|
||||
|
||||
let context = this.create(cx);
|
||||
let context_id = context.read(cx).id().clone();
|
||||
|
@ -237,9 +240,10 @@ impl ContextStore {
|
|||
mut cx: AsyncApp,
|
||||
) -> Result<proto::SynchronizeContextsResponse> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host can synchronize contexts"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
"only the host can synchronize contexts"
|
||||
);
|
||||
|
||||
let mut local_versions = Vec::new();
|
||||
for remote_version_proto in envelope.payload.contexts {
|
||||
|
@ -370,7 +374,7 @@ impl ContextStore {
|
|||
) -> Task<Result<Entity<AssistantContext>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
|
||||
let replica_id = project.replica_id();
|
||||
|
@ -533,7 +537,7 @@ impl ContextStore {
|
|||
) -> Task<Result<Entity<AssistantContext>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
|
||||
if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_slash_command::{
|
||||
AfterCompletion, ArgumentCompletion, SlashCommand, SlashCommandOutput,
|
||||
SlashCommandOutputSection, SlashCommandResult,
|
||||
|
@ -84,9 +84,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
|||
|
||||
if let Some(server) = self.store.read(cx).get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let Some(protocol) = server.client() else {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
|
||||
let completion_result = protocol
|
||||
.completion(
|
||||
|
@ -139,21 +137,16 @@ impl SlashCommand for ContextServerSlashCommand {
|
|||
let store = self.store.read(cx);
|
||||
if let Some(server) = store.get_running_server(&server_id) {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let Some(protocol) = server.client() else {
|
||||
return Err(anyhow!("Context server not initialized"));
|
||||
};
|
||||
let protocol = server.client().context("Context server not initialized")?;
|
||||
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;
|
||||
|
||||
// Check that there are only user roles
|
||||
if result
|
||||
anyhow::ensure!(
|
||||
result
|
||||
.messages
|
||||
.iter()
|
||||
.any(|msg| !matches!(msg.role, context_server::types::Role::User))
|
||||
{
|
||||
return Err(anyhow!(
|
||||
.all(|msg| matches!(msg.role, context_server::types::Role::User)),
|
||||
"Prompt contains non-user roles, which is not supported"
|
||||
));
|
||||
}
|
||||
);
|
||||
|
||||
// Extract text from user messages into a single prompt string
|
||||
let mut prompt = result
|
||||
|
@ -192,9 +185,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
|||
}
|
||||
|
||||
fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String, String)> {
|
||||
if arguments.is_empty() {
|
||||
return Err(anyhow!("No arguments given"));
|
||||
}
|
||||
anyhow::ensure!(!arguments.is_empty(), "No arguments given");
|
||||
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() == 1 => {
|
||||
|
@ -202,16 +193,16 @@ fn completion_argument(prompt: &Prompt, arguments: &[String]) -> Result<(String,
|
|||
let arg_value = arguments.join(" ");
|
||||
Ok((arg_name, arg_value))
|
||||
}
|
||||
Some(_) => Err(anyhow!("Prompt must have exactly one argument")),
|
||||
None => Err(anyhow!("Prompt has no arguments")),
|
||||
Some(_) => anyhow::bail!("Prompt must have exactly one argument"),
|
||||
None => anyhow::bail!("Prompt has no arguments"),
|
||||
}
|
||||
}
|
||||
|
||||
fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<String, String>> {
|
||||
match &prompt.arguments {
|
||||
Some(args) if args.len() > 1 => Err(anyhow!(
|
||||
"Prompt has more than one argument, which is not supported"
|
||||
)),
|
||||
Some(args) if args.len() > 1 => {
|
||||
anyhow::bail!("Prompt has more than one argument, which is not supported");
|
||||
}
|
||||
Some(args) if args.len() == 1 => {
|
||||
if !arguments.is_empty() {
|
||||
let mut map = HashMap::default();
|
||||
|
@ -220,15 +211,15 @@ fn prompt_arguments(prompt: &Prompt, arguments: &[String]) -> Result<HashMap<Str
|
|||
} else if arguments.is_empty() && args[0].required == Some(false) {
|
||||
Ok(HashMap::default())
|
||||
} else {
|
||||
Err(anyhow!("Prompt expects argument but none given"))
|
||||
anyhow::bail!("Prompt expects argument but none given");
|
||||
}
|
||||
}
|
||||
Some(_) | None => {
|
||||
if arguments.is_empty() {
|
||||
anyhow::ensure!(
|
||||
arguments.is_empty(),
|
||||
"Prompt expects no arguments but some were given"
|
||||
);
|
||||
Ok(HashMap::default())
|
||||
} else {
|
||||
Err(anyhow!("Prompt expects no arguments but some were given"))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -118,10 +118,7 @@ impl SlashCommand for DeltaSlashCommand {
|
|||
}
|
||||
}
|
||||
|
||||
if !changes_detected {
|
||||
return Err(anyhow!("no new changes detected"));
|
||||
}
|
||||
|
||||
anyhow::ensure!(changes_detected, "no new changes detected");
|
||||
Ok(output.to_event_stream())
|
||||
})
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
|
@ -189,7 +189,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
|||
window.spawn(cx, async move |_| {
|
||||
task.await?
|
||||
.map(|output| output.to_event_stream())
|
||||
.ok_or_else(|| anyhow!("No diagnostics found"))
|
||||
.context("No diagnostics found")
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::sync::Arc;
|
|||
use std::sync::atomic::AtomicBool;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
|
@ -52,15 +52,16 @@ impl DocsSlashCommand {
|
|||
.is_none()
|
||||
{
|
||||
let index_provider_deps = maybe!({
|
||||
let workspace = workspace.clone().ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.as_ref()
|
||||
.context("no workspace")?
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
.context("workspace dropped")?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let cargo_workspace_root = Self::path_to_cargo_toml(project, cx)
|
||||
.and_then(|path| path.parent().map(|path| path.to_path_buf()))
|
||||
.ok_or_else(|| anyhow!("no Cargo workspace root found"))?;
|
||||
.context("no Cargo workspace root found")?;
|
||||
|
||||
anyhow::Ok((fs, cargo_workspace_root))
|
||||
});
|
||||
|
@ -78,10 +79,11 @@ impl DocsSlashCommand {
|
|||
.is_none()
|
||||
{
|
||||
let http_client = maybe!({
|
||||
let workspace = workspace.ok_or_else(|| anyhow!("no workspace"))?;
|
||||
let workspace = workspace
|
||||
.as_ref()
|
||||
.context("no workspace")?
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace was dropped"))?;
|
||||
.context("workspace was dropped")?;
|
||||
let project = workspace.read(cx).project().clone();
|
||||
anyhow::Ok(project.read(cx).client().http_client())
|
||||
});
|
||||
|
@ -174,7 +176,7 @@ impl SlashCommand for DocsSlashCommand {
|
|||
let args = DocsSlashCommandArgs::parse(arguments);
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.context("no docs provider specified")
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
cx.background_spawn(async move {
|
||||
fn build_completions(items: Vec<String>) -> Vec<ArgumentCompletion> {
|
||||
|
@ -287,7 +289,7 @@ impl SlashCommand for DocsSlashCommand {
|
|||
let task = cx.background_spawn({
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.context("no docs provider specified")
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
async move {
|
||||
let (provider, key) = match args.clone() {
|
||||
|
|
|
@ -3,7 +3,7 @@ use std::rc::Rc;
|
|||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
|
|
|
@ -230,7 +230,10 @@ fn collect_files(
|
|||
})
|
||||
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
|
||||
else {
|
||||
return futures::stream::once(async { Err(anyhow!("invalid path")) }).boxed();
|
||||
return futures::stream::once(async {
|
||||
anyhow::bail!("invalid path");
|
||||
})
|
||||
.boxed();
|
||||
};
|
||||
|
||||
let project_handle = project.downgrade();
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::ActionLog;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use language::{OutlineItem, ParseStatus};
|
||||
use project::Project;
|
||||
|
@ -22,7 +22,7 @@ pub async fn file_outline(
|
|||
let project_path = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.find_project_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Path {path} not found in project"))
|
||||
.with_context(|| format!("Path {path} not found in project"))
|
||||
})??;
|
||||
|
||||
project
|
||||
|
@ -41,9 +41,9 @@ pub async fn file_outline(
|
|||
}
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let Some(outline) = snapshot.outline(None) else {
|
||||
return Err(anyhow!("No outline information available for this file."));
|
||||
};
|
||||
let outline = snapshot
|
||||
.outline(None)
|
||||
.context("No outline information available for this file at path {path}")?;
|
||||
|
||||
render_outline(
|
||||
outline
|
||||
|
|
|
@ -27,12 +27,10 @@ fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
|
|||
const UNSUPPORTED_KEYS: [&str; 4] = ["if", "then", "else", "$ref"];
|
||||
|
||||
for key in UNSUPPORTED_KEYS {
|
||||
if obj.contains_key(key) {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Schema cannot be made compatible because it contains \"{}\" ",
|
||||
key
|
||||
));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!obj.contains_key(key),
|
||||
"Schema cannot be made compatible because it contains \"{key}\""
|
||||
);
|
||||
}
|
||||
|
||||
const KEYS_TO_REMOVE: [&str; 5] = [
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::AnyWindowHandle;
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
|
@ -107,17 +107,13 @@ impl Tool for CopyPathTool {
|
|||
});
|
||||
|
||||
cx.background_spawn(async move {
|
||||
match copy_task.await {
|
||||
Ok(_) => Ok(
|
||||
format!("Copied {} to {}", input.source_path, input.destination_path).into(),
|
||||
),
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to copy {} to {}: {}",
|
||||
input.source_path,
|
||||
input.destination_path,
|
||||
err
|
||||
)),
|
||||
}
|
||||
let _ = copy_task.await.with_context(|| {
|
||||
format!(
|
||||
"Copying {} to {}",
|
||||
input.source_path, input.destination_path
|
||||
)
|
||||
})?;
|
||||
Ok(format!("Copied {} to {}", input.source_path, input.destination_path).into())
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::AnyWindowHandle;
|
||||
use gpui::{App, Entity, Task};
|
||||
|
@ -86,7 +86,7 @@ impl Tool for CreateDirectoryTool {
|
|||
project.create_entry(project_path.clone(), true, cx)
|
||||
})?
|
||||
.await
|
||||
.map_err(|err| anyhow!("Unable to create directory {destination_path}: {err}"))?;
|
||||
.with_context(|| format!("Creating directory {destination_path}"))?;
|
||||
|
||||
Ok(format!("Created directory {destination_path}").into())
|
||||
})
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use futures::{SinkExt, StreamExt, channel::mpsc};
|
||||
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
|
||||
|
@ -122,19 +122,17 @@ impl Tool for DeletePathTool {
|
|||
}
|
||||
}
|
||||
|
||||
let delete = project.update(cx, |project, cx| {
|
||||
let deletion_task = project
|
||||
.update(cx, |project, cx| {
|
||||
project.delete_file(project_path, false, cx)
|
||||
})?
|
||||
.with_context(|| {
|
||||
format!("Couldn't delete {path_str} because that path isn't in this project.")
|
||||
})?;
|
||||
|
||||
match delete {
|
||||
Some(deletion_task) => match deletion_task.await {
|
||||
Ok(()) => Ok(format!("Deleted {path_str}").into()),
|
||||
Err(err) => Err(anyhow!("Failed to delete {path_str}: {err}")),
|
||||
},
|
||||
None => Err(anyhow!(
|
||||
"Couldn't delete {path_str} because that path isn't in this project."
|
||||
)),
|
||||
}
|
||||
deletion_task
|
||||
.await
|
||||
.with_context(|| format!("Deleting {path_str}"))?;
|
||||
Ok(format!("Deleted {path_str}").into())
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
|
|
@ -6,7 +6,6 @@ use crate::{
|
|||
list_directory_tool::ListDirectoryToolInput,
|
||||
};
|
||||
use Role::*;
|
||||
use anyhow::anyhow;
|
||||
use assistant_tool::ToolRegistry;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
|
@ -1207,10 +1206,7 @@ impl EvalAssertion {
|
|||
}
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"No score found in response. Raw output: {}",
|
||||
output
|
||||
))
|
||||
anyhow::bail!("No score found in response. Raw output: {output}");
|
||||
})
|
||||
}
|
||||
|
||||
|
|
|
@ -98,21 +98,21 @@ impl BlameEntry {
|
|||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
|
|
@ -80,7 +80,7 @@ async fn run_git_blame(
|
|||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
.context("starting git blame process")?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
|
@ -92,10 +92,7 @@ async fn run_git_blame(
|
|||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
|
@ -103,7 +100,7 @@ async fn run_git_blame(
|
|||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
|
@ -144,21 +141,21 @@ impl BlameEntry {
|
|||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
|
|
@ -5272,7 +5272,7 @@ impl Editor {
|
|||
task.await?;
|
||||
}
|
||||
|
||||
Ok::<_, anyhow::Error>(())
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
@ -10369,8 +10369,8 @@ impl Editor {
|
|||
.map(|line| {
|
||||
line.strip_prefix(&line_prefix)
|
||||
.or_else(|| line.trim_start().strip_prefix(&line_prefix.trim_start()))
|
||||
.ok_or_else(|| {
|
||||
anyhow!("line did not start with prefix {line_prefix:?}: {line:?}")
|
||||
.with_context(|| {
|
||||
format!("line did not start with prefix {line_prefix:?}: {line:?}")
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
|
@ -16944,7 +16944,7 @@ impl Editor {
|
|||
Err(err) => {
|
||||
let message = format!("Failed to copy permalink: {err}");
|
||||
|
||||
Err::<(), anyhow::Error>(err).log_err();
|
||||
anyhow::Result::<()>::Err(err).log_err();
|
||||
|
||||
if let Some(workspace) = workspace {
|
||||
workspace
|
||||
|
@ -16999,7 +16999,7 @@ impl Editor {
|
|||
Err(err) => {
|
||||
let message = format!("Failed to open permalink: {err}");
|
||||
|
||||
Err::<(), anyhow::Error>(err).log_err();
|
||||
anyhow::Result::<()>::Err(err).log_err();
|
||||
|
||||
if let Some(workspace) = workspace {
|
||||
workspace
|
||||
|
|
|
@ -80,7 +80,7 @@ async fn run_git_blame(
|
|||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
.context("starting git blame process")?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
|
@ -92,10 +92,7 @@ async fn run_git_blame(
|
|||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
handle_command_output(output)
|
||||
}
|
||||
|
@ -107,7 +104,7 @@ fn handle_command_output(output: std::process::Output) -> Result<String> {
|
|||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
|
@ -148,21 +145,21 @@ impl BlameEntry {
|
|||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
|
|
@ -80,7 +80,7 @@ async fn run_git_blame(
|
|||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()
|
||||
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
|
||||
.context("starting git blame process")?;
|
||||
|
||||
let stdin = child
|
||||
.stdin
|
||||
|
@ -92,10 +92,7 @@ async fn run_git_blame(
|
|||
}
|
||||
stdin.flush().await?;
|
||||
|
||||
let output = child
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
|
||||
let output = child.output().await.context("reading git blame output")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
|
@ -103,7 +100,7 @@ async fn run_git_blame(
|
|||
if trimmed == GIT_BLAME_NO_COMMIT_ERROR || trimmed.contains(GIT_BLAME_NO_PATH) {
|
||||
return Ok(String::new());
|
||||
}
|
||||
return Err(anyhow!("git blame process failed: {}", stderr));
|
||||
anyhow::bail!("git blame process failed: {stderr}");
|
||||
}
|
||||
|
||||
Ok(String::from_utf8(output.stdout)?)
|
||||
|
@ -144,21 +141,21 @@ impl BlameEntry {
|
|||
let sha = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<Oid>().ok())
|
||||
.ok_or_else(|| anyhow!("failed to parse sha"))?;
|
||||
.with_context(|| format!("parsing sha from {line}"))?;
|
||||
|
||||
let original_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
|
||||
.with_context(|| format!("parsing original line number from {line}"))?;
|
||||
let final_line_number = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing final line number from {line}"))?;
|
||||
|
||||
let line_count = parts
|
||||
.next()
|
||||
.and_then(|line| line.parse::<u32>().ok())
|
||||
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
|
||||
.with_context(|| format!("parsing line count from {line}"))?;
|
||||
|
||||
let start_line = final_line_number.saturating_sub(1);
|
||||
let end_line = start_line + line_count;
|
||||
|
|
|
@ -20,7 +20,7 @@ use std::{
|
|||
|
||||
#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))]
|
||||
use anyhow::Error;
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use etcetera::BaseStrategy as _;
|
||||
use fs4::fs_std::FileExt;
|
||||
use indoc::indoc;
|
||||
|
@ -875,16 +875,13 @@ impl Loader {
|
|||
|
||||
FileExt::unlock(lock_file)?;
|
||||
fs::remove_file(lock_path)?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
output.status.success(),
|
||||
"Parser compilation failed.\nStdout: {}\nStderr: {}",
|
||||
String::from_utf8_lossy(&output.stdout),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
))
|
||||
}
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
|
@ -941,17 +938,13 @@ impl Loader {
|
|||
.map(|f| format!(" `{f}`"))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
return Err(anyhow!(format!(
|
||||
indoc! {"
|
||||
anyhow::bail!(format!(indoc! {"
|
||||
Missing required functions in the external scanner, parsing won't work without these!
|
||||
|
||||
{}
|
||||
{missing}
|
||||
|
||||
You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
|
||||
"},
|
||||
missing,
|
||||
)));
|
||||
"}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1008,9 +1001,9 @@ impl Loader {
|
|||
{
|
||||
EmccSource::Podman
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
anyhow::bail!(
|
||||
"You must have either emcc, docker, or podman on your PATH to run this command"
|
||||
));
|
||||
);
|
||||
};
|
||||
|
||||
let mut command = match source {
|
||||
|
@ -1103,12 +1096,11 @@ impl Loader {
|
|||
.spawn()
|
||||
.with_context(|| "Failed to run emcc command")?
|
||||
.wait()?;
|
||||
if !status.success() {
|
||||
return Err(anyhow!("emcc command failed"));
|
||||
}
|
||||
|
||||
fs::rename(src_path.join(output_name), output_path)
|
||||
.context("failed to rename wasm output file")?;
|
||||
anyhow::ensure!(status.success(), "emcc command failed");
|
||||
let source_path = src_path.join(output_name);
|
||||
fs::rename(&source_path, &output_path).with_context(|| {
|
||||
format!("failed to rename wasm output file from {source_path:?} to {output_path:?}")
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -1185,11 +1177,8 @@ impl Loader {
|
|||
.map(|path| {
|
||||
let path = parser_path.join(path);
|
||||
// prevent p being above/outside of parser_path
|
||||
if path.starts_with(parser_path) {
|
||||
anyhow::ensure!(path.starts_with(parser_path), "External file path {path:?} is outside of parser directory {parser_path:?}");
|
||||
Ok(path)
|
||||
} else {
|
||||
Err(anyhow!("External file path {path:?} is outside of parser directory {parser_path:?}"))
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()
|
||||
}).transpose()?,
|
||||
|
@ -1324,11 +1313,8 @@ impl Loader {
|
|||
let name = GRAMMAR_NAME_REGEX
|
||||
.captures(&first_three_lines)
|
||||
.and_then(|c| c.get(1))
|
||||
.ok_or_else(|| {
|
||||
anyhow!(
|
||||
"Failed to parse the language name from grammar.json at {}",
|
||||
grammar_path.display()
|
||||
)
|
||||
.with_context(|| {
|
||||
format!("Failed to parse the language name from grammar.json at {grammar_path:?}")
|
||||
})?;
|
||||
|
||||
Ok(name.as_str().to_string())
|
||||
|
@ -1347,7 +1333,7 @@ impl Loader {
|
|||
{
|
||||
Ok(config.0)
|
||||
} else {
|
||||
Err(anyhow!("Unknown scope '{scope}'"))
|
||||
anyhow::bail!("Unknown scope '{scope}'")
|
||||
}
|
||||
} else if let Some((lang, _)) = self
|
||||
.language_configuration_for_file_name(path)
|
||||
|
@ -1371,7 +1357,7 @@ impl Loader {
|
|||
} else if let Some(lang) = self.language_configuration_for_first_line_regex(path)? {
|
||||
Ok(lang.0)
|
||||
} else {
|
||||
Err(anyhow!("No language found"))
|
||||
anyhow::bail!("No language found");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
|||
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
|
||||
schema::json_schema_for,
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{
|
||||
ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput,
|
||||
ToolUseStatus,
|
||||
|
@ -279,15 +279,15 @@ impl Tool for EditFileTool {
|
|||
|
||||
let input_path = input.path.display();
|
||||
if diff.is_empty() {
|
||||
if hallucinated_old_text {
|
||||
Err(anyhow!(formatdoc! {"
|
||||
anyhow::ensure!(
|
||||
!hallucinated_old_text,
|
||||
formatdoc! {"
|
||||
Some edits were produced but none of them could be applied.
|
||||
Read the relevant sections of {input_path} again so that
|
||||
I can perform the requested edits.
|
||||
"}))
|
||||
} else {
|
||||
"}
|
||||
);
|
||||
Ok("No edits were made.".to_string().into())
|
||||
}
|
||||
} else {
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(format!(
|
||||
|
@ -347,53 +347,52 @@ fn resolve_path(
|
|||
EditFileMode::Edit | EditFileMode::Overwrite => {
|
||||
let path = project
|
||||
.find_project_path(&input.path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
.context("Can't edit file: path not found")?;
|
||||
|
||||
let entry = project
|
||||
.entry_for_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
if !entry.is_file() {
|
||||
return Err(anyhow!("Can't edit file: path is a directory"));
|
||||
}
|
||||
.context("Can't edit file: path not found")?;
|
||||
|
||||
anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory");
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
EditFileMode::Create => {
|
||||
if let Some(path) = project.find_project_path(&input.path, cx) {
|
||||
if project.entry_for_path(&path, cx).is_some() {
|
||||
return Err(anyhow!("Can't create file: file already exists"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
project.entry_for_path(&path, cx).is_none(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
}
|
||||
|
||||
let parent_path = input
|
||||
.path
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("Can't create file: incorrect path"))?;
|
||||
.context("Can't create file: incorrect path")?;
|
||||
|
||||
let parent_project_path = project.find_project_path(&parent_path, cx);
|
||||
|
||||
let parent_entry = parent_project_path
|
||||
.as_ref()
|
||||
.and_then(|path| project.entry_for_path(&path, cx))
|
||||
.ok_or_else(|| anyhow!("Can't create file: parent directory doesn't exist"))?;
|
||||
.context("Can't create file: parent directory doesn't exist")?;
|
||||
|
||||
if !parent_entry.is_dir() {
|
||||
return Err(anyhow!("Can't create file: parent is not a directory"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
parent_entry.is_dir(),
|
||||
"Can't create file: parent is not a directory"
|
||||
);
|
||||
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Can't create file: invalid filename"))?;
|
||||
.context("Can't create file: invalid filename")?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
..parent
|
||||
});
|
||||
|
||||
new_file_path.ok_or_else(|| anyhow!("Can't create file"))
|
||||
new_file_path.context("Can't create file")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -917,8 +916,6 @@ async fn build_buffer_diff(
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::result::Result;
|
||||
|
||||
use super::*;
|
||||
use client::TelemetrySettings;
|
||||
use fs::FakeFs;
|
||||
|
@ -1019,7 +1016,7 @@ mod tests {
|
|||
mode: &EditFileMode,
|
||||
path: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Result<ProjectPath, anyhow::Error> {
|
||||
) -> anyhow::Result<ProjectPath> {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
@ -1046,7 +1043,7 @@ mod tests {
|
|||
result
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: Result<ProjectPath, anyhow::Error>, expected: &str) {
|
||||
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
|
|
|
@ -109,7 +109,7 @@ impl Tool for GrepTool {
|
|||
let input = match serde_json::from_value::<GrepToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(error) => {
|
||||
return Task::ready(Err(anyhow!("Failed to parse input: {}", error))).into();
|
||||
return Task::ready(Err(anyhow!("Failed to parse input: {error}"))).into();
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -122,7 +122,7 @@ impl Tool for GrepTool {
|
|||
) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(error) => {
|
||||
return Task::ready(Err(anyhow!("invalid include glob pattern: {}", error))).into();
|
||||
return Task::ready(Err(anyhow!("invalid include glob pattern: {error}"))).into();
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use gpui::{AnyWindowHandle, App, AppContext, Entity, Task};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
|
@ -117,17 +117,10 @@ impl Tool for MovePathTool {
|
|||
});
|
||||
|
||||
cx.background_spawn(async move {
|
||||
match rename_task.await {
|
||||
Ok(_) => {
|
||||
let _ = rename_task.await.with_context(|| {
|
||||
format!("Moving {} to {}", input.source_path, input.destination_path)
|
||||
})?;
|
||||
Ok(format!("Moved {} to {}", input.source_path, input.destination_path).into())
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to move {} to {}: {}",
|
||||
input.source_path,
|
||||
input.destination_path,
|
||||
err
|
||||
)),
|
||||
}
|
||||
})
|
||||
.into()
|
||||
}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use assistant_tool::{ToolResultContent, outline};
|
||||
use gpui::{AnyWindowHandle, App, Entity, Task};
|
||||
|
@ -129,7 +129,7 @@ impl Tool for ReadFileTool {
|
|||
let language_model_image = cx
|
||||
.update(|cx| LanguageModelImage::from_image(image, cx))?
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Failed to process image"))?;
|
||||
.context("processing image")?;
|
||||
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Image(language_model_image),
|
||||
|
@ -152,7 +152,7 @@ impl Tool for ReadFileTool {
|
|||
.as_ref()
|
||||
.map_or(true, |file| !file.disk_state().exists())
|
||||
})? {
|
||||
return Err(anyhow!("{} not found", file_path));
|
||||
anyhow::bail!("{file_path} not found");
|
||||
}
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
|
|
|
@ -382,13 +382,11 @@ fn working_dir(
|
|||
|
||||
match worktrees.next() {
|
||||
Some(worktree) => {
|
||||
if worktrees.next().is_none() {
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
worktrees.next().is_none(),
|
||||
"'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
|
||||
))
|
||||
}
|
||||
);
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
|
@ -409,9 +407,7 @@ fn working_dir(
|
|||
}
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"`cd` directory {cd:?} was not in any of the project's worktrees."
|
||||
))
|
||||
anyhow::bail!("`cd` directory {cd:?} was not in any of the project's worktrees.");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{io::Cursor, sync::Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AssetSource, Global};
|
||||
use rodio::{
|
||||
|
@ -44,8 +44,8 @@ impl SoundRegistry {
|
|||
let bytes = self
|
||||
.assets
|
||||
.load(&path)?
|
||||
.map(Ok)
|
||||
.unwrap_or_else(|| Err(anyhow::anyhow!("No such asset available")))?
|
||||
.map(anyhow::Ok)
|
||||
.with_context(|| format!("No asset available for path {path}"))??
|
||||
.into_owned();
|
||||
let cursor = Cursor::new(bytes);
|
||||
let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{Client, TelemetrySettings};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
|
@ -367,7 +367,7 @@ impl AutoUpdater {
|
|||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("auto-update not initialized"))
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
|
@ -411,7 +411,7 @@ impl AutoUpdater {
|
|||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("auto-update not initialized"))
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
|
@ -465,12 +465,11 @@ impl AutoUpdater {
|
|||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
));
|
||||
}
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
|
@ -557,10 +556,10 @@ impl AutoUpdater {
|
|||
|
||||
let installer_dir = InstallerDir::new().await?;
|
||||
let filename = match OS {
|
||||
"macos" => Ok("Zed.dmg"),
|
||||
"macos" => anyhow::Ok("Zed.dmg"),
|
||||
"linux" => Ok("zed.tar.gz"),
|
||||
"windows" => Ok("ZedUpdateInstaller.exe"),
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
|
||||
}?;
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
|
@ -581,7 +580,7 @@ impl AutoUpdater {
|
|||
"macos" => install_release_macos(&installer_dir, downloaded_asset, &cx).await,
|
||||
"linux" => install_release_linux(&installer_dir, downloaded_asset, &cx).await,
|
||||
"windows" => install_release_windows(downloaded_asset).await,
|
||||
_ => Err(anyhow!("not supported: {:?}", OS)),
|
||||
unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
|
||||
}?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
|
@ -640,12 +639,11 @@ async fn download_remote_server_binary(
|
|||
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
if !response.status().is_success() {
|
||||
return Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to download remote server release: {:?}",
|
||||
response.status()
|
||||
));
|
||||
}
|
||||
);
|
||||
smol::io::copy(response.body_mut(), &mut temp_file).await?;
|
||||
smol::fs::rename(&temp, &target_path).await?;
|
||||
|
||||
|
@ -792,7 +790,7 @@ async fn install_release_macos(
|
|||
let running_app_path = cx.update(|cx| cx.app_path())??;
|
||||
let running_app_filename = running_app_path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("invalid running app path"))?;
|
||||
.with_context(|| format!("invalid running app path {running_app_path:?}"))?;
|
||||
|
||||
let mount_path = temp_dir.path().join("Zed");
|
||||
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
|
||||
|
|
|
@ -22,7 +22,7 @@ mod windows_impl {
|
|||
|
||||
use super::dialog::create_dialog_window;
|
||||
use super::updater::perform_update;
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context as _, Result};
|
||||
use windows::{
|
||||
Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
|
|
|
@ -4,7 +4,7 @@ use std::{
|
|||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context as _, Result};
|
||||
use windows::Win32::{
|
||||
Foundation::{HWND, LPARAM, WPARAM},
|
||||
System::Threading::CREATE_NEW_PROCESS_GROUP,
|
||||
|
@ -124,9 +124,7 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>) -> Result<()>
|
|||
for job in JOBS.iter() {
|
||||
let start = Instant::now();
|
||||
loop {
|
||||
if start.elapsed().as_secs() > 2 {
|
||||
return Err(anyhow::anyhow!("Timed out"));
|
||||
}
|
||||
anyhow::ensure!(start.elapsed().as_secs() <= 2, "Timed out");
|
||||
match (*job)(app_dir) {
|
||||
Ok(_) => {
|
||||
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
|
||||
|
|
|
@ -3,7 +3,7 @@ mod models;
|
|||
use std::collections::HashMap;
|
||||
use std::pin::Pin;
|
||||
|
||||
use anyhow::{Error, Result, anyhow};
|
||||
use anyhow::{Context as _, Error, Result, anyhow};
|
||||
use aws_sdk_bedrockruntime as bedrock;
|
||||
pub use aws_sdk_bedrockruntime as bedrock_client;
|
||||
pub use aws_sdk_bedrockruntime::types::{
|
||||
|
@ -97,7 +97,7 @@ pub async fn stream_completion(
|
|||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|err| anyhow!("failed to spawn task: {err:?}"))?
|
||||
.context("spawning a task")?
|
||||
}
|
||||
|
||||
pub fn aws_document_to_value(document: &Document) -> Value {
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
use anyhow::anyhow;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::EnumIter;
|
||||
|
||||
|
@ -107,7 +106,7 @@ impl Model {
|
|||
} else if id.starts_with("claude-3-7-sonnet-thinking") {
|
||||
Ok(Self::Claude3_7SonnetThinking)
|
||||
} else {
|
||||
Err(anyhow!("invalid model id"))
|
||||
anyhow::bail!("invalid model id {id}");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -294,7 +293,7 @@ impl Model {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> Result<String, anyhow::Error> {
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
|
||||
let region_group = if region.starts_with("us-gov-") {
|
||||
"us-gov"
|
||||
} else if region.starts_with("us-") {
|
||||
|
@ -307,8 +306,7 @@ impl Model {
|
|||
// Canada and South America regions - default to US profiles
|
||||
"us"
|
||||
} else {
|
||||
// Unknown region
|
||||
return Err(anyhow!("Unsupported Region"));
|
||||
anyhow::bail!("Unsupported Region {region}");
|
||||
};
|
||||
|
||||
let model_id = self.id();
|
||||
|
|
|
@ -2,7 +2,7 @@ pub mod participant;
|
|||
pub mod room;
|
||||
|
||||
use crate::call_settings::CallSettings;
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::Audio;
|
||||
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
|
||||
use collections::HashSet;
|
||||
|
@ -187,7 +187,7 @@ impl ActiveCall {
|
|||
|
||||
let invite = if let Some(room) = room {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let room = room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
let room = room.await.map_err(|err| anyhow!("{err:?}"))?;
|
||||
|
||||
let initial_project_id = if let Some(initial_project) = initial_project {
|
||||
Some(
|
||||
|
@ -236,7 +236,7 @@ impl ActiveCall {
|
|||
.shared();
|
||||
self.pending_room_creation = Some(room.clone());
|
||||
cx.background_spawn(async move {
|
||||
room.await.map_err(|err| anyhow!("{:?}", err))?;
|
||||
room.await.map_err(|err| anyhow!("{err:?}"))?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
};
|
||||
|
@ -326,7 +326,7 @@ impl ActiveCall {
|
|||
.0
|
||||
.borrow_mut()
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("no incoming call"))?;
|
||||
.context("no incoming call")?;
|
||||
telemetry::event!("Incoming Call Declined", room_id = call.room_id);
|
||||
self.client.send(proto::DeclineCall {
|
||||
room_id: call.room_id,
|
||||
|
@ -399,12 +399,9 @@ impl ActiveCall {
|
|||
project: Entity<Project>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<()> {
|
||||
if let Some((room, _)) = self.room.as_ref() {
|
||||
let (room, _) = self.room.as_ref().context("no active call")?;
|
||||
self.report_call_event("Project Unshared", cx);
|
||||
room.update(cx, |room, cx| room.unshare_project(project, cx))
|
||||
} else {
|
||||
Err(anyhow!("no active call"))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn location(&self) -> Option<&WeakEntity<Project>> {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{ParticipantIndex, User, proto};
|
||||
use collections::HashMap;
|
||||
use gpui::WeakEntity;
|
||||
|
@ -18,17 +18,17 @@ pub enum ParticipantLocation {
|
|||
|
||||
impl ParticipantLocation {
|
||||
pub fn from_proto(location: Option<proto::ParticipantLocation>) -> Result<Self> {
|
||||
match location.and_then(|l| l.variant) {
|
||||
Some(proto::participant_location::Variant::SharedProject(project)) => {
|
||||
match location
|
||||
.and_then(|l| l.variant)
|
||||
.context("participant location was not provided")?
|
||||
{
|
||||
proto::participant_location::Variant::SharedProject(project) => {
|
||||
Ok(Self::SharedProject {
|
||||
project_id: project.id,
|
||||
})
|
||||
}
|
||||
Some(proto::participant_location::Variant::UnsharedProject(_)) => {
|
||||
Ok(Self::UnsharedProject)
|
||||
}
|
||||
Some(proto::participant_location::Variant::External(_)) => Ok(Self::External),
|
||||
None => Err(anyhow!("participant location was not provided")),
|
||||
proto::participant_location::Variant::UnsharedProject(_) => Ok(Self::UnsharedProject),
|
||||
proto::participant_location::Variant::External(_) => Ok(Self::External),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::{
|
|||
call_settings::CallSettings,
|
||||
participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::{Audio, Sound};
|
||||
use client::{
|
||||
ChannelId, Client, ParticipantIndex, TypedEnvelope, User, UserStore,
|
||||
|
@ -165,7 +165,7 @@ impl Room {
|
|||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(async move |cx| {
|
||||
let response = client.request(proto::CreateRoom {}).await?;
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
let room = cx.new(|cx| {
|
||||
let mut room = Self::new(
|
||||
room_proto.id,
|
||||
|
@ -270,7 +270,7 @@ impl Room {
|
|||
user_store: Entity<UserStore>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<Entity<Self>> {
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
let room = cx.new(|cx| {
|
||||
Self::new(
|
||||
room_proto.id,
|
||||
|
@ -360,7 +360,7 @@ impl Room {
|
|||
log::info!("detected client disconnection");
|
||||
|
||||
this.upgrade()
|
||||
.ok_or_else(|| anyhow!("room was dropped"))?
|
||||
.context("room was dropped")?
|
||||
.update(cx, |this, cx| {
|
||||
this.status = RoomStatus::Rejoining;
|
||||
cx.notify();
|
||||
|
@ -428,9 +428,7 @@ impl Room {
|
|||
log::info!("reconnection failed, leaving room");
|
||||
this.update(cx, |this, cx| this.leave(cx))?.await?;
|
||||
}
|
||||
Err(anyhow!(
|
||||
"can't reconnect to room: client failed to re-establish connection"
|
||||
))
|
||||
anyhow::bail!("can't reconnect to room: client failed to re-establish connection");
|
||||
}
|
||||
|
||||
fn rejoin(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
|
@ -494,7 +492,7 @@ impl Room {
|
|||
let response = response.await?;
|
||||
let message_id = response.message_id;
|
||||
let response = response.payload;
|
||||
let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room_proto = response.room.context("invalid room")?;
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = RoomStatus::Online;
|
||||
this.apply_room_update(room_proto, cx)?;
|
||||
|
@ -645,10 +643,7 @@ impl Room {
|
|||
envelope: TypedEnvelope<proto::RoomUpdated>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let room = envelope
|
||||
.payload
|
||||
.room
|
||||
.ok_or_else(|| anyhow!("invalid room"))?;
|
||||
let room = envelope.payload.room.context("invalid room")?;
|
||||
this.update(&mut cx, |this, cx| this.apply_room_update(room, cx))?
|
||||
}
|
||||
|
||||
|
@ -937,8 +932,11 @@ impl Room {
|
|||
} => {
|
||||
let user_id = participant.identity().0.parse()?;
|
||||
let track_id = track.sid();
|
||||
let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
|
||||
anyhow!(
|
||||
let participant =
|
||||
self.remote_participants
|
||||
.get_mut(&user_id)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"{:?} subscribed to track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
|
@ -972,8 +970,11 @@ impl Room {
|
|||
track, participant, ..
|
||||
} => {
|
||||
let user_id = participant.identity().0.parse()?;
|
||||
let participant = self.remote_participants.get_mut(&user_id).ok_or_else(|| {
|
||||
anyhow!(
|
||||
let participant =
|
||||
self.remote_participants
|
||||
.get_mut(&user_id)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"{:?}, unsubscribed from track by unknown participant {user_id}",
|
||||
self.client.user_id()
|
||||
)
|
||||
|
@ -1324,7 +1325,7 @@ impl Room {
|
|||
let live_kit = this
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
|
||||
let canceled = if let LocalTrack::Pending {
|
||||
publish_id: cur_publish_id,
|
||||
|
@ -1389,7 +1390,7 @@ impl Room {
|
|||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let sources = sources.await??;
|
||||
let source = sources.first().ok_or_else(|| anyhow!("no display found"))?;
|
||||
let source = sources.first().context("no display found")?;
|
||||
|
||||
let publication = participant.publish_screenshare_track(&**source, cx).await;
|
||||
|
||||
|
@ -1397,7 +1398,7 @@ impl Room {
|
|||
let live_kit = this
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
|
||||
let canceled = if let LocalTrack::Pending {
|
||||
publish_id: cur_publish_id,
|
||||
|
@ -1485,16 +1486,14 @@ impl Room {
|
|||
}
|
||||
|
||||
pub fn unshare_screen(&mut self, cx: &mut Context<Self>) -> Result<()> {
|
||||
if self.status.is_offline() {
|
||||
return Err(anyhow!("room is offline"));
|
||||
}
|
||||
anyhow::ensure!(!self.status.is_offline(), "room is offline");
|
||||
|
||||
let live_kit = self
|
||||
.live_kit
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("live-kit was not initialized"))?;
|
||||
.context("live-kit was not initialized")?;
|
||||
match mem::take(&mut live_kit.screen_track) {
|
||||
LocalTrack::None => Err(anyhow!("screen was not shared")),
|
||||
LocalTrack::None => anyhow::bail!("screen was not shared"),
|
||||
LocalTrack::Pending { .. } => {
|
||||
cx.notify();
|
||||
Ok(())
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::{Channel, ChannelStore};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{
|
||||
ChannelId, Client, Subscription, TypedEnvelope, UserId, proto,
|
||||
user::{User, UserStore},
|
||||
|
@ -170,15 +170,16 @@ impl ChannelChat {
|
|||
message: MessageParams,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Result<Task<Result<u64>>> {
|
||||
if message.text.trim().is_empty() {
|
||||
Err(anyhow!("message body can't be empty"))?;
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!message.text.trim().is_empty(),
|
||||
"message body can't be empty"
|
||||
);
|
||||
|
||||
let current_user = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.ok_or_else(|| anyhow!("current_user is not present"))?;
|
||||
.context("current_user is not present")?;
|
||||
|
||||
let channel_id = self.channel_id;
|
||||
let pending_id = ChannelMessageId::Pending(post_inc(&mut self.next_pending_message_id));
|
||||
|
@ -215,7 +216,7 @@ impl ChannelChat {
|
|||
});
|
||||
let response = request.await?;
|
||||
drop(outgoing_message_guard);
|
||||
let response = response.message.ok_or_else(|| anyhow!("invalid message"))?;
|
||||
let response = response.message.context("invalid message")?;
|
||||
let id = response.id;
|
||||
let message = ChannelMessage::from_proto(response, &user_store, cx).await?;
|
||||
this.update(cx, |this, cx| {
|
||||
|
@ -470,7 +471,7 @@ impl ChannelChat {
|
|||
});
|
||||
let response = request.await?;
|
||||
let message = ChannelMessage::from_proto(
|
||||
response.message.ok_or_else(|| anyhow!("invalid message"))?,
|
||||
response.message.context("invalid message")?,
|
||||
&user_store,
|
||||
cx,
|
||||
)
|
||||
|
@ -531,10 +532,7 @@ impl ChannelChat {
|
|||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
|
||||
let message = message
|
||||
.payload
|
||||
.message
|
||||
.ok_or_else(|| anyhow!("empty message"))?;
|
||||
let message = message.payload.message.context("empty message")?;
|
||||
let message_id = message.id;
|
||||
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
|
@ -566,10 +564,7 @@ impl ChannelChat {
|
|||
mut cx: AsyncApp,
|
||||
) -> Result<()> {
|
||||
let user_store = this.update(&mut cx, |this, _| this.user_store.clone())?;
|
||||
let message = message
|
||||
.payload
|
||||
.message
|
||||
.ok_or_else(|| anyhow!("empty message"))?;
|
||||
let message = message.payload.message.context("empty message")?;
|
||||
|
||||
let message = ChannelMessage::from_proto(message, &user_store, &mut cx).await?;
|
||||
|
||||
|
@ -753,10 +748,7 @@ impl ChannelMessage {
|
|||
.collect(),
|
||||
timestamp: OffsetDateTime::from_unix_timestamp(message.timestamp as i64)?,
|
||||
sender,
|
||||
nonce: message
|
||||
.nonce
|
||||
.ok_or_else(|| anyhow!("nonce is required"))?
|
||||
.into(),
|
||||
nonce: message.nonce.context("nonce is required")?.into(),
|
||||
reply_to_message_id: message.reply_to_message_id,
|
||||
edited_at,
|
||||
})
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
mod channel_index;
|
||||
|
||||
use crate::{ChannelMessage, channel_buffer::ChannelBuffer, channel_chat::ChannelChat};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use channel_index::ChannelIndex;
|
||||
use client::{ChannelId, Client, ClientSettings, Subscription, User, UserId, UserStore};
|
||||
use collections::{HashMap, HashSet, hash_map};
|
||||
|
@ -332,9 +332,7 @@ impl ChannelStore {
|
|||
cx.spawn(async move |this, cx| {
|
||||
if let Some(request) = request {
|
||||
let response = request.await?;
|
||||
let this = this
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("channel store dropped"))?;
|
||||
let this = this.upgrade().context("channel store dropped")?;
|
||||
let user_store = this.update(cx, |this, _| this.user_store.clone())?;
|
||||
ChannelMessage::from_proto_vec(response.messages, &user_store, cx).await
|
||||
} else {
|
||||
|
@ -482,7 +480,7 @@ impl ChannelStore {
|
|||
.spawn(async move |this, cx| {
|
||||
let channel = this.update(cx, |this, _| {
|
||||
this.channel_for_id(channel_id).cloned().ok_or_else(|| {
|
||||
Arc::new(anyhow!("no channel for id: {}", channel_id))
|
||||
Arc::new(anyhow!("no channel for id: {channel_id}"))
|
||||
})
|
||||
})??;
|
||||
|
||||
|
@ -514,7 +512,7 @@ impl ChannelStore {
|
|||
}
|
||||
}
|
||||
};
|
||||
cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{}", error)) })
|
||||
cx.background_spawn(async move { task.await.map_err(|error| anyhow!("{error}")) })
|
||||
}
|
||||
|
||||
pub fn is_channel_admin(&self, channel_id: ChannelId) -> bool {
|
||||
|
@ -578,9 +576,7 @@ impl ChannelStore {
|
|||
})
|
||||
.await?;
|
||||
|
||||
let channel = response
|
||||
.channel
|
||||
.ok_or_else(|| anyhow!("missing channel in response"))?;
|
||||
let channel = response.channel.context("missing channel in response")?;
|
||||
let channel_id = ChannelId(channel.id);
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
|
@ -752,7 +748,7 @@ impl ChannelStore {
|
|||
})
|
||||
.await?
|
||||
.channel
|
||||
.ok_or_else(|| anyhow!("missing channel in response"))?;
|
||||
.context("missing channel in response")?;
|
||||
this.update(cx, |this, cx| {
|
||||
let task = this.update_channels(
|
||||
proto::UpdateChannels {
|
||||
|
|
|
@ -169,7 +169,7 @@ fn main() -> Result<()> {
|
|||
"To retrieve the system specs on the command line, run the following command:",
|
||||
&format!("{} --system-specs", path.display()),
|
||||
];
|
||||
return Err(anyhow::anyhow!(msg.join("\n")));
|
||||
anyhow::bail!(msg.join("\n"));
|
||||
}
|
||||
|
||||
#[cfg(all(
|
||||
|
@ -255,11 +255,10 @@ fn main() -> Result<()> {
|
|||
}
|
||||
}
|
||||
|
||||
if let Some(_) = args.dev_server_token {
|
||||
return Err(anyhow::anyhow!(
|
||||
anyhow::ensure!(
|
||||
args.dev_server_token.is_none(),
|
||||
"Dev servers were removed in v0.157.x please upgrade to SSH remoting: https://zed.dev/docs/remote-development"
|
||||
))?;
|
||||
}
|
||||
);
|
||||
|
||||
let sender: JoinHandle<anyhow::Result<()>> = thread::spawn({
|
||||
let exit_status = exit_status.clone();
|
||||
|
@ -400,7 +399,7 @@ mod linux {
|
|||
time::Duration,
|
||||
};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use cli::FORCE_CLI_MODE_ENV_VAR_NAME;
|
||||
use fork::Fork;
|
||||
|
||||
|
@ -417,9 +416,7 @@ mod linux {
|
|||
path.to_path_buf().canonicalize()?
|
||||
} else {
|
||||
let cli = env::current_exe()?;
|
||||
let dir = cli
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("no parent path for cli"))?;
|
||||
let dir = cli.parent().context("no parent path for cli")?;
|
||||
|
||||
// libexec is the standard, lib/zed is for Arch (and other non-libexec distros),
|
||||
// ./zed is for the target directory in development builds.
|
||||
|
@ -428,8 +425,8 @@ mod linux {
|
|||
possible_locations
|
||||
.iter()
|
||||
.find_map(|p| dir.join(p).canonicalize().ok().filter(|path| path != &cli))
|
||||
.ok_or_else(|| {
|
||||
anyhow!("could not find any of: {}", possible_locations.join(", "))
|
||||
.with_context(|| {
|
||||
format!("could not find any of: {}", possible_locations.join(", "))
|
||||
})?
|
||||
};
|
||||
|
||||
|
@ -759,7 +756,7 @@ mod windows {
|
|||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod mac_os {
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use core_foundation::{
|
||||
array::{CFArray, CFIndex},
|
||||
base::TCFType as _,
|
||||
|
@ -800,9 +797,10 @@ mod mac_os {
|
|||
let cli_path = std::env::current_exe()?.canonicalize()?;
|
||||
let mut app_path = cli_path.clone();
|
||||
while app_path.extension() != Some(OsStr::new("app")) {
|
||||
if !app_path.pop() {
|
||||
return Err(anyhow!("cannot find app bundle containing {:?}", cli_path));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
app_path.pop(),
|
||||
"cannot find app bundle containing {cli_path:?}"
|
||||
);
|
||||
}
|
||||
Ok(app_path)
|
||||
}
|
||||
|
|
|
@ -711,9 +711,10 @@ impl Client {
|
|||
let id = (TypeId::of::<T>(), remote_id);
|
||||
|
||||
let mut state = self.handler_set.lock();
|
||||
if state.entities_by_type_and_remote_id.contains_key(&id) {
|
||||
return Err(anyhow!("already subscribed to entity"));
|
||||
}
|
||||
anyhow::ensure!(
|
||||
!state.entities_by_type_and_remote_id.contains_key(&id),
|
||||
"already subscribed to entity"
|
||||
);
|
||||
|
||||
state
|
||||
.entities_by_type_and_remote_id
|
||||
|
@ -962,10 +963,7 @@ impl Client {
|
|||
hello_message_type_name
|
||||
)
|
||||
})?;
|
||||
let peer_id = hello
|
||||
.payload
|
||||
.peer_id
|
||||
.ok_or_else(|| anyhow!("invalid peer id"))?;
|
||||
let peer_id = hello.payload.peer_id.context("invalid peer id")?;
|
||||
Ok(peer_id)
|
||||
};
|
||||
|
||||
|
@ -1075,22 +1073,19 @@ impl Client {
|
|||
}
|
||||
|
||||
let response = http.get(&url, Default::default(), false).await?;
|
||||
let collab_url = if response.status().is_redirection() {
|
||||
response
|
||||
.headers()
|
||||
.get("Location")
|
||||
.ok_or_else(|| anyhow!("missing location header in /rpc response"))?
|
||||
.to_str()
|
||||
.map_err(EstablishConnectionError::other)?
|
||||
.to_string()
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
response.status().is_redirection(),
|
||||
"unexpected /rpc response status {}",
|
||||
response.status()
|
||||
))?
|
||||
};
|
||||
|
||||
Url::parse(&collab_url).context("invalid rpc url")
|
||||
);
|
||||
let collab_url = response
|
||||
.headers()
|
||||
.get("Location")
|
||||
.context("missing location header in /rpc response")?
|
||||
.to_str()
|
||||
.map_err(EstablishConnectionError::other)?
|
||||
.to_string();
|
||||
Url::parse(&collab_url).with_context(|| format!("parsing colab rpc url {collab_url}"))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1132,7 +1127,7 @@ impl Client {
|
|||
let rpc_host = rpc_url
|
||||
.host_str()
|
||||
.zip(rpc_url.port_or_known_default())
|
||||
.ok_or_else(|| anyhow!("missing host in rpc url"))?;
|
||||
.context("missing host in rpc url")?;
|
||||
|
||||
let stream = {
|
||||
let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
|
||||
|
@ -1287,16 +1282,13 @@ impl Client {
|
|||
)
|
||||
.context("failed to respond to login http request")?;
|
||||
return Ok((
|
||||
user_id
|
||||
.ok_or_else(|| anyhow!("missing user_id parameter"))?,
|
||||
access_token.ok_or_else(|| {
|
||||
anyhow!("missing access_token parameter")
|
||||
})?,
|
||||
user_id.context("missing user_id parameter")?,
|
||||
access_token.context("missing access_token parameter")?,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!("didn't receive login redirect"))
|
||||
anyhow::bail!("didn't receive login redirect");
|
||||
})
|
||||
.await?;
|
||||
|
||||
|
@ -1414,13 +1406,12 @@ impl Client {
|
|||
let mut response = http.send(request).await?;
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"admin user request failed {} - {}",
|
||||
response.status().as_u16(),
|
||||
body,
|
||||
))?;
|
||||
}
|
||||
);
|
||||
let response: AuthenticatedUserResponse = serde_json::from_str(&body)?;
|
||||
|
||||
// Use the admin API token to authenticate as the impersonated user.
|
||||
|
@ -1457,7 +1448,7 @@ impl Client {
|
|||
if let Status::Connected { connection_id, .. } = *self.status().borrow() {
|
||||
Ok(connection_id)
|
||||
} else {
|
||||
Err(anyhow!("not connected"))
|
||||
anyhow::bail!("not connected");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
//! socks proxy
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use http_client::Url;
|
||||
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
|
||||
|
||||
|
@ -31,7 +31,7 @@ pub(crate) async fn connect_socks_proxy_stream(
|
|||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
return Err(anyhow!("Parsing proxy url failed"));
|
||||
anyhow::bail!("Parsing proxy url failed");
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use chrono::Duration;
|
||||
use futures::{StreamExt, stream::BoxStream};
|
||||
use gpui::{AppContext as _, BackgroundExecutor, Entity, TestAppContext};
|
||||
|
@ -45,7 +45,7 @@ impl FakeServer {
|
|||
move |cx| {
|
||||
let state = state.clone();
|
||||
cx.spawn(async move |_| {
|
||||
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let state = state.upgrade().context("server dropped")?;
|
||||
let mut state = state.lock();
|
||||
state.auth_count += 1;
|
||||
let access_token = state.access_token.to_string();
|
||||
|
@ -64,8 +64,8 @@ impl FakeServer {
|
|||
let state = state.clone();
|
||||
let credentials = credentials.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
let state = state.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let peer = peer.upgrade().ok_or_else(|| anyhow!("server dropped"))?;
|
||||
let state = state.upgrade().context("server dropped")?;
|
||||
let peer = peer.upgrade().context("server dropped")?;
|
||||
if state.lock().forbid_connections {
|
||||
Err(EstablishConnectionError::Other(anyhow!(
|
||||
"server is forbidding connections"
|
||||
|
@ -155,7 +155,7 @@ impl FakeServer {
|
|||
.expect("not connected")
|
||||
.next()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("other half hung up"))?;
|
||||
.context("other half hung up")?;
|
||||
self.executor.finish_waiting();
|
||||
let type_name = message.payload_type_name();
|
||||
let message = message.into_any();
|
||||
|
|
|
@ -388,9 +388,7 @@ impl UserStore {
|
|||
// Users are fetched in parallel above and cached in call to get_users
|
||||
// No need to parallelize here
|
||||
let mut updated_contacts = Vec::new();
|
||||
let this = this
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("can't upgrade user store handle"))?;
|
||||
let this = this.upgrade().context("can't upgrade user store handle")?;
|
||||
for contact in message.contacts {
|
||||
updated_contacts
|
||||
.push(Arc::new(Contact::from_proto(contact, &this, cx).await?));
|
||||
|
@ -574,7 +572,7 @@ impl UserStore {
|
|||
let client = self.client.upgrade();
|
||||
cx.spawn(async move |_, _| {
|
||||
client
|
||||
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
|
||||
.context("can't upgrade client reference")?
|
||||
.request(proto::RespondToContactRequest {
|
||||
requester_id,
|
||||
response: proto::ContactRequestResponse::Dismiss as i32,
|
||||
|
@ -596,7 +594,7 @@ impl UserStore {
|
|||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = client
|
||||
.ok_or_else(|| anyhow!("can't upgrade client reference"))?
|
||||
.context("can't upgrade client reference")?
|
||||
.request(request)
|
||||
.await;
|
||||
this.update(cx, |this, cx| {
|
||||
|
@ -663,7 +661,7 @@ impl UserStore {
|
|||
this.users
|
||||
.get(user_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("user {} not found", user_id))
|
||||
.with_context(|| format!("user {user_id} not found"))
|
||||
})
|
||||
.collect()
|
||||
})?
|
||||
|
@ -703,7 +701,7 @@ impl UserStore {
|
|||
this.users
|
||||
.get(&user_id)
|
||||
.cloned()
|
||||
.ok_or_else(|| anyhow!("server responded with no users"))
|
||||
.context("server responded with no users")
|
||||
})?
|
||||
})
|
||||
}
|
||||
|
@ -765,20 +763,17 @@ impl UserStore {
|
|||
};
|
||||
|
||||
let client = self.client.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
cx.spawn(async move |this, cx| -> anyhow::Result<()> {
|
||||
let client = client.upgrade().context("client not found")?;
|
||||
let response = client
|
||||
.request(proto::AcceptTermsOfService {})
|
||||
.await
|
||||
.context("error accepting tos")?;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at));
|
||||
cx.emit(Event::PrivateUserInfoUpdated);
|
||||
})
|
||||
} else {
|
||||
Err(anyhow!("client not found"))
|
||||
}
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -897,7 +892,7 @@ impl Contact {
|
|||
impl Collaborator {
|
||||
pub fn from_proto(message: proto::Collaborator) -> Result<Self> {
|
||||
Ok(Self {
|
||||
peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
|
||||
peer_id: message.peer_id.context("invalid peer id")?,
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
|
|
|
@ -10,7 +10,7 @@ use crate::{
|
|||
db::{User, UserId},
|
||||
rpc,
|
||||
};
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Context as _;
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
body::Body,
|
||||
|
@ -220,7 +220,7 @@ async fn create_access_token(
|
|||
.db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let mut impersonated_user_id = None;
|
||||
if let Some(impersonate) = params.impersonate {
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Context, anyhow, bail};
|
||||
use anyhow::{Context as _, bail};
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
extract::{self, Query},
|
||||
|
@ -89,7 +89,7 @@ async fn get_billing_preferences(
|
|||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
|
||||
let preferences = app.db.get_billing_preferences(user.id).await?;
|
||||
|
@ -138,7 +138,7 @@ async fn update_billing_preferences(
|
|||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
|
||||
|
||||
|
@ -241,7 +241,7 @@ async fn list_billing_subscriptions(
|
|||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let subscriptions = app.db.get_billing_subscriptions(user.id).await?;
|
||||
|
||||
|
@ -307,7 +307,7 @@ async fn create_billing_subscription(
|
|||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let Some(stripe_billing) = app.stripe_billing.clone() else {
|
||||
log::error!("failed to retrieve Stripe billing object");
|
||||
|
@ -432,7 +432,7 @@ async fn manage_billing_subscription(
|
|||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
log::error!("failed to retrieve Stripe client");
|
||||
|
@ -454,7 +454,7 @@ async fn manage_billing_subscription(
|
|||
.db
|
||||
.get_billing_customer_by_user_id(user.id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("billing customer not found"))?;
|
||||
.context("billing customer not found")?;
|
||||
let customer_id = CustomerId::from_str(&customer.stripe_customer_id)
|
||||
.context("failed to parse customer ID")?;
|
||||
|
||||
|
@ -462,7 +462,7 @@ async fn manage_billing_subscription(
|
|||
.db
|
||||
.get_billing_subscription_by_id(body.subscription_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("subscription not found"))?;
|
||||
.context("subscription not found")?;
|
||||
let subscription_id = SubscriptionId::from_str(&subscription.stripe_subscription_id)
|
||||
.context("failed to parse subscription ID")?;
|
||||
|
||||
|
@ -559,7 +559,7 @@ async fn manage_billing_subscription(
|
|||
None
|
||||
}
|
||||
})
|
||||
.ok_or_else(|| anyhow!("No subscription item to update"))?;
|
||||
.context("No subscription item to update")?;
|
||||
|
||||
Some(CreateBillingPortalSessionFlowData {
|
||||
type_: CreateBillingPortalSessionFlowDataType::SubscriptionUpdateConfirm,
|
||||
|
@ -653,7 +653,7 @@ async fn migrate_to_new_billing(
|
|||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let old_billing_subscriptions_by_user = app
|
||||
.db
|
||||
|
@ -732,13 +732,13 @@ async fn sync_billing_subscription(
|
|||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let billing_customer = app
|
||||
.db
|
||||
.get_billing_customer_by_user_id(user.id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("billing customer not found"))?;
|
||||
.context("billing customer not found")?;
|
||||
let stripe_customer_id = billing_customer
|
||||
.stripe_customer_id
|
||||
.parse::<stripe::CustomerId>()
|
||||
|
@ -1031,13 +1031,13 @@ async fn sync_subscription(
|
|||
let billing_customer =
|
||||
find_or_create_billing_customer(app, stripe_client, subscription.customer)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("billing customer not found"))?;
|
||||
.context("billing customer not found")?;
|
||||
|
||||
if let Some(SubscriptionKind::ZedProTrial) = subscription_kind {
|
||||
if subscription.status == SubscriptionStatus::Trialing {
|
||||
let current_period_start =
|
||||
DateTime::from_timestamp(subscription.current_period_start, 0)
|
||||
.ok_or_else(|| anyhow!("No trial subscription period start"))?;
|
||||
.context("No trial subscription period start")?;
|
||||
|
||||
app.db
|
||||
.update_billing_customer(
|
||||
|
@ -1243,7 +1243,7 @@ async fn get_monthly_spend(
|
|||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let Some(llm_db) = app.llm_db.clone() else {
|
||||
return Err(Error::http(
|
||||
|
@ -1311,7 +1311,7 @@ async fn get_current_usage(
|
|||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let feature_flags = app.db.get_user_flags(user.id).await?;
|
||||
let has_extended_trial = feature_flags
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
use std::sync::{Arc, OnceLock};
|
||||
|
||||
use anyhow::anyhow;
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
extract::{self, Query},
|
||||
|
@ -39,7 +38,7 @@ impl CheckIsContributorParams {
|
|||
return Ok(ContributorSelector::GitHubLogin { github_login });
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
Err(anyhow::anyhow!(
|
||||
"must be one of `github_user_id` or `github_login`."
|
||||
))?
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::db::ExtensionVersionConstraints;
|
||||
use crate::{AppState, Error, Result, db::NewExtensionVersion};
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use anyhow::Context as _;
|
||||
use aws_sdk_s3::presigning::PresigningConfig;
|
||||
use axum::{
|
||||
Extension, Json, Router,
|
||||
|
@ -181,7 +181,7 @@ async fn download_latest_extension(
|
|||
.db
|
||||
.get_extension(¶ms.extension_id, constraints.as_ref())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("unknown extension"))?;
|
||||
.context("unknown extension")?;
|
||||
download_extension(
|
||||
Extension(app),
|
||||
Path(DownloadExtensionParams {
|
||||
|
@ -238,7 +238,7 @@ async fn download_extension(
|
|||
))
|
||||
.presigned(PresigningConfig::expires_in(EXTENSION_DOWNLOAD_URL_LIFETIME).unwrap())
|
||||
.await
|
||||
.map_err(|e| anyhow!("failed to create presigned extension download url {e}"))?;
|
||||
.context("creating presigned extension download url")?;
|
||||
|
||||
Ok(Redirect::temporary(url.uri()))
|
||||
}
|
||||
|
@ -374,7 +374,7 @@ async fn fetch_extension_manifest(
|
|||
blob_store_bucket: &String,
|
||||
extension_id: &str,
|
||||
version: &str,
|
||||
) -> Result<NewExtensionVersion, anyhow::Error> {
|
||||
) -> anyhow::Result<NewExtensionVersion> {
|
||||
let object = blob_store_client
|
||||
.get_object()
|
||||
.bucket(blob_store_bucket)
|
||||
|
@ -397,8 +397,8 @@ async fn fetch_extension_manifest(
|
|||
String::from_utf8_lossy(&manifest_bytes)
|
||||
)
|
||||
})?;
|
||||
let published_at = object.last_modified.ok_or_else(|| {
|
||||
anyhow!("missing last modified timestamp for extension {extension_id} version {version}")
|
||||
let published_at = object.last_modified.with_context(|| {
|
||||
format!("missing last modified timestamp for extension {extension_id} version {version}")
|
||||
})?;
|
||||
let published_at = time::OffsetDateTime::from_unix_timestamp_nanos(published_at.as_nanos())?;
|
||||
let published_at = PrimitiveDateTime::new(published_at.date(), published_at.time());
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
|
||||
use semantic_version::SemanticVersion;
|
||||
|
@ -13,18 +14,12 @@ pub struct IpsFile {
|
|||
impl IpsFile {
|
||||
pub fn parse(bytes: &[u8]) -> anyhow::Result<IpsFile> {
|
||||
let mut split = bytes.splitn(2, |&b| b == b'\n');
|
||||
let header_bytes = split
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("No header found"))?;
|
||||
let header: Header = serde_json::from_slice(header_bytes)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse header: {}", e))?;
|
||||
let header_bytes = split.next().context("No header found")?;
|
||||
let header: Header = serde_json::from_slice(header_bytes).context("parsing header")?;
|
||||
|
||||
let body_bytes = split
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("No body found"))?;
|
||||
let body_bytes = split.next().context("No body found")?;
|
||||
|
||||
let body: Body = serde_json::from_slice(body_bytes)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to parse body: {}", e))?;
|
||||
let body: Body = serde_json::from_slice(body_bytes).context("parsing body")?;
|
||||
Ok(IpsFile { header, body })
|
||||
}
|
||||
|
||||
|
|
|
@ -3,7 +3,7 @@ use crate::{
|
|||
db::{self, AccessTokenId, Database, UserId},
|
||||
rpc::Principal,
|
||||
};
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use anyhow::Context as _;
|
||||
use axum::{
|
||||
http::{self, Request, StatusCode},
|
||||
middleware::Next,
|
||||
|
@ -85,14 +85,14 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
|
|||
.db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user {} not found", user_id))?;
|
||||
.with_context(|| format!("user {user_id} not found"))?;
|
||||
|
||||
if let Some(impersonator_id) = validate_result.impersonator_id {
|
||||
let admin = state
|
||||
.db
|
||||
.get_user_by_id(impersonator_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user {} not found", impersonator_id))?;
|
||||
.with_context(|| format!("user {impersonator_id} not found"))?;
|
||||
req.extensions_mut()
|
||||
.insert(Principal::Impersonated { user, admin });
|
||||
} else {
|
||||
|
@ -192,7 +192,7 @@ pub async fn verify_access_token(
|
|||
let db_token = db.get_access_token(token.id).await?;
|
||||
let token_user_id = db_token.impersonated_user_id.unwrap_or(db_token.user_id);
|
||||
if token_user_id != user_id {
|
||||
return Err(anyhow!("no such access token"))?;
|
||||
return Err(anyhow::anyhow!("no such access token"))?;
|
||||
}
|
||||
let t0 = Instant::now();
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@ mod tables;
|
|||
pub mod tests;
|
||||
|
||||
use crate::{Error, Result, executor::Executor};
|
||||
use anyhow::anyhow;
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use collections::{BTreeMap, BTreeSet, HashMap, HashSet};
|
||||
use dashmap::DashMap;
|
||||
use futures::StreamExt;
|
||||
|
@ -320,11 +320,9 @@ impl Database {
|
|||
|
||||
let mut tx = Arc::new(Some(tx));
|
||||
let result = f(TransactionHandle(tx.clone())).await;
|
||||
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
|
||||
return Err(anyhow!(
|
||||
"couldn't complete transaction because it's still in use"
|
||||
))?;
|
||||
};
|
||||
let tx = Arc::get_mut(&mut tx)
|
||||
.and_then(|tx| tx.take())
|
||||
.context("couldn't complete transaction because it's still in use")?;
|
||||
|
||||
Ok((tx, result))
|
||||
}
|
||||
|
@ -344,11 +342,9 @@ impl Database {
|
|||
|
||||
let mut tx = Arc::new(Some(tx));
|
||||
let result = f(TransactionHandle(tx.clone())).await;
|
||||
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
|
||||
return Err(anyhow!(
|
||||
"couldn't complete transaction because it's still in use"
|
||||
))?;
|
||||
};
|
||||
let tx = Arc::get_mut(&mut tx)
|
||||
.and_then(|tx| tx.take())
|
||||
.context("couldn't complete transaction because it's still in use")?;
|
||||
|
||||
Ok((tx, result))
|
||||
}
|
||||
|
@ -853,9 +849,7 @@ fn db_status_to_proto(
|
|||
)
|
||||
}
|
||||
_ => {
|
||||
return Err(anyhow!(
|
||||
"Unexpected combination of status fields: {entry:?}"
|
||||
));
|
||||
anyhow::bail!("Unexpected combination of status fields: {entry:?}");
|
||||
}
|
||||
};
|
||||
Ok(proto::StatusEntry {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use sea_orm::sea_query::Query;
|
||||
|
||||
impl Database {
|
||||
|
@ -51,7 +52,7 @@ impl Database {
|
|||
Ok(access_token::Entity::find_by_id(access_token_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such access token"))?)
|
||||
.context("no such access token")?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use anyhow::Context as _;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[derive(Debug)]
|
||||
|
@ -82,7 +84,7 @@ impl Database {
|
|||
Ok(preferences
|
||||
.into_iter()
|
||||
.next()
|
||||
.ok_or_else(|| anyhow!("billing preferences not found"))?)
|
||||
.context("billing preferences not found")?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use anyhow::Context as _;
|
||||
|
||||
use crate::db::billing_subscription::{
|
||||
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
|
||||
};
|
||||
|
@ -51,7 +53,7 @@ impl Database {
|
|||
Ok(billing_subscription::Entity::find_by_id(id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to retrieve inserted billing subscription"))?)
|
||||
.context("failed to retrieve inserted billing subscription")?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use prost::Message;
|
||||
use text::{EditOperation, UndoOperation};
|
||||
|
||||
|
@ -467,7 +468,7 @@ impl Database {
|
|||
.filter(buffer::Column::ChannelId.eq(channel_id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?;
|
||||
.context("no such buffer")?;
|
||||
|
||||
let serialization_version = self
|
||||
.get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &tx)
|
||||
|
@ -606,7 +607,7 @@ impl Database {
|
|||
.into_values::<_, QueryOperationSerializationVersion>()
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("missing buffer snapshot"))?)
|
||||
.context("missing buffer snapshot")?)
|
||||
}
|
||||
|
||||
pub async fn get_channel_buffer(
|
||||
|
@ -621,7 +622,7 @@ impl Database {
|
|||
.find_related(buffer::Entity)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such buffer"))?)
|
||||
.context("no such buffer")?)
|
||||
}
|
||||
|
||||
async fn get_buffer_state(
|
||||
|
@ -643,7 +644,7 @@ impl Database {
|
|||
)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such snapshot"))?;
|
||||
.context("no such snapshot")?;
|
||||
|
||||
let version = snapshot.operation_serialization_version;
|
||||
(snapshot.text, version)
|
||||
|
@ -839,7 +840,7 @@ fn operation_from_storage(
|
|||
_format_version: i32,
|
||||
) -> Result<proto::operation::Variant, Error> {
|
||||
let operation =
|
||||
storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
|
||||
storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{error}"))?;
|
||||
let version = version_from_storage(&operation.version);
|
||||
Ok(if operation.is_undo {
|
||||
proto::operation::Variant::Undo(proto::operation::Undo {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use rpc::{
|
||||
ErrorCode, ErrorCodeExt,
|
||||
proto::{ChannelBufferVersion, VectorClockEntry, channel_member::Kind},
|
||||
|
@ -647,11 +648,8 @@ impl Database {
|
|||
.and(channel_member::Column::UserId.eq(for_user)),
|
||||
)
|
||||
.one(&*tx)
|
||||
.await?;
|
||||
|
||||
let Some(membership) = membership else {
|
||||
Err(anyhow!("no such member"))?
|
||||
};
|
||||
.await?
|
||||
.context("no such member")?;
|
||||
|
||||
let mut update = membership.into_active_model();
|
||||
update.role = ActiveValue::Set(role);
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
use anyhow::Context as _;
|
||||
|
||||
use super::*;
|
||||
|
||||
impl Database {
|
||||
|
@ -215,7 +217,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such contact"))?;
|
||||
.context("no such contact")?;
|
||||
|
||||
contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Context;
|
||||
use chrono::Utc;
|
||||
use sea_orm::sea_query::IntoCondition;
|
||||
use util::ResultExt;
|
||||
|
@ -166,7 +167,7 @@ impl Database {
|
|||
.filter(extension::Column::ExternalId.eq(extension_id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such extension: {extension_id}"))?;
|
||||
.with_context(|| format!("no such extension: {extension_id}"))?;
|
||||
|
||||
let extensions = [extension];
|
||||
let mut versions = self
|
||||
|
@ -274,7 +275,7 @@ impl Database {
|
|||
.filter(extension::Column::ExternalId.eq(*external_id))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to insert extension"))?
|
||||
.context("failed to insert extension")?
|
||||
};
|
||||
|
||||
extension_version::Entity::insert_many(versions.iter().map(|version| {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use rpc::Notification;
|
||||
use sea_orm::{SelectColumns, TryInsertResult};
|
||||
use time::OffsetDateTime;
|
||||
|
@ -330,7 +331,7 @@ impl Database {
|
|||
.filter(channel_message::Column::Nonce.eq(Uuid::from_u128(nonce)))
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to insert message"))?
|
||||
.context("failed to insert message")?
|
||||
.id;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use rpc::Notification;
|
||||
use util::ResultExt;
|
||||
|
||||
|
@ -256,7 +257,7 @@ pub fn model_to_proto(this: &Database, row: notification::Model) -> Result<proto
|
|||
let kind = this
|
||||
.notification_kinds_by_id
|
||||
.get(&row.kind)
|
||||
.ok_or_else(|| anyhow!("Unknown notification kind"))?;
|
||||
.context("Unknown notification kind")?;
|
||||
Ok(proto::Notification {
|
||||
id: row.id.to_proto(),
|
||||
kind: kind.to_string(),
|
||||
|
@ -276,5 +277,5 @@ fn notification_kind_from_proto(
|
|||
.notification_kinds_by_name
|
||||
.get(&proto.kind)
|
||||
.copied()
|
||||
.ok_or_else(|| anyhow!("invalid notification kind {:?}", proto.kind))?)
|
||||
.with_context(|| format!("invalid notification kind {:?}", proto.kind))?)
|
||||
}
|
||||
|
|
|
@ -49,7 +49,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find participant"))?;
|
||||
.context("could not find participant")?;
|
||||
if participant.room_id != room_id {
|
||||
return Err(anyhow!("shared project on unexpected room"))?;
|
||||
}
|
||||
|
@ -128,7 +128,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project not found"))?;
|
||||
.context("project not found")?;
|
||||
let room = if let Some(room_id) = project.room_id {
|
||||
Some(self.get_room(room_id, &tx).await?)
|
||||
} else {
|
||||
|
@ -160,7 +160,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
|
||||
self.update_project_worktrees(project.id, worktrees, &tx)
|
||||
.await?;
|
||||
|
@ -242,7 +242,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project: {project_id}"))?;
|
||||
.with_context(|| format!("no such project: {project_id}"))?;
|
||||
|
||||
// Update metadata.
|
||||
worktree::Entity::update(worktree::ActiveModel {
|
||||
|
@ -624,16 +624,13 @@ impl Database {
|
|||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
let worktree_id = update.worktree_id as i64;
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let summary = update
|
||||
.summary
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid summary"))?;
|
||||
let summary = update.summary.as_ref().context("invalid summary")?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
@ -677,16 +674,13 @@ impl Database {
|
|||
) -> Result<TransactionGuard<Vec<ConnectionId>>> {
|
||||
let project_id = ProjectId::from_proto(update.project_id);
|
||||
self.project_transaction(project_id, |tx| async move {
|
||||
let server = update
|
||||
.server
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid language server"))?;
|
||||
let server = update.server.as_ref().context("invalid language server")?;
|
||||
|
||||
// Ensure the update comes from the host.
|
||||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
@ -732,7 +726,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
if project.host_connection()? != connection {
|
||||
return Err(anyhow!("can't update a project hosted by someone else"))?;
|
||||
}
|
||||
|
@ -778,7 +772,7 @@ impl Database {
|
|||
Ok(project::Entity::find_by_id(id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?)
|
||||
.context("no such project")?)
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
@ -1074,7 +1068,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
let collaborators = project
|
||||
.find_related(project_collaborator::Entity)
|
||||
.all(&*tx)
|
||||
|
@ -1143,7 +1137,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to read project host"))?;
|
||||
.context("failed to read project host")?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
|
@ -1162,7 +1156,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
|
||||
let role_from_room = if let Some(room_id) = project.room_id {
|
||||
room_participant::Entity::find()
|
||||
|
@ -1287,7 +1281,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such project"))?;
|
||||
.context("no such project")?;
|
||||
|
||||
let mut collaborators = project_collaborator::Entity::find()
|
||||
.filter(project_collaborator::Column::ProjectId.eq(project_id))
|
||||
|
|
|
@ -161,7 +161,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user is not in the room"))?;
|
||||
.context("user is not in the room")?;
|
||||
|
||||
let called_user_role = match caller.role.unwrap_or(ChannelRole::Member) {
|
||||
ChannelRole::Admin | ChannelRole::Member => ChannelRole::Member,
|
||||
|
@ -193,7 +193,7 @@ impl Database {
|
|||
|
||||
let room = self.get_room(room_id, &tx).await?;
|
||||
let incoming_call = Self::build_incoming_call(&room, called_user_id)
|
||||
.ok_or_else(|| anyhow!("failed to build incoming call"))?;
|
||||
.context("failed to build incoming call")?;
|
||||
Ok((room, incoming_call))
|
||||
})
|
||||
.await
|
||||
|
@ -279,7 +279,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no call to cancel"))?;
|
||||
.context("no call to cancel")?;
|
||||
|
||||
room_participant::Entity::delete(participant.into_active_model())
|
||||
.exec(&*tx)
|
||||
|
@ -310,7 +310,7 @@ impl Database {
|
|||
.into_values::<_, QueryChannelId>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no such room"))?;
|
||||
.context("no such room")?;
|
||||
|
||||
if channel_id.is_some() {
|
||||
Err(anyhow!("tried to join channel call directly"))?
|
||||
|
@ -462,7 +462,7 @@ impl Database {
|
|||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, tx).await?;
|
||||
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
|
||||
let channel = channel.context("no channel for room")?;
|
||||
Ok(JoinRoom {
|
||||
room,
|
||||
channel: Some(channel),
|
||||
|
@ -505,7 +505,7 @@ impl Database {
|
|||
let project = project::Entity::find_by_id(project_id)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("project does not exist"))?;
|
||||
.context("project does not exist")?;
|
||||
if project.host_user_id != Some(user_id) {
|
||||
return Err(anyhow!("no such project"))?;
|
||||
}
|
||||
|
@ -519,7 +519,7 @@ impl Database {
|
|||
.position(|collaborator| {
|
||||
collaborator.user_id == user_id && collaborator.is_host
|
||||
})
|
||||
.ok_or_else(|| anyhow!("host not found among collaborators"))?;
|
||||
.context("host not found among collaborators")?;
|
||||
let host = collaborators.swap_remove(host_ix);
|
||||
let old_connection_id = host.connection();
|
||||
|
||||
|
@ -1051,11 +1051,7 @@ impl Database {
|
|||
let tx = tx;
|
||||
let location_kind;
|
||||
let location_project_id;
|
||||
match location
|
||||
.variant
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("invalid location"))?
|
||||
{
|
||||
match location.variant.as_ref().context("invalid location")? {
|
||||
proto::participant_location::Variant::SharedProject(project) => {
|
||||
location_kind = 0;
|
||||
location_project_id = Some(ProjectId::from_proto(project.id));
|
||||
|
@ -1119,7 +1115,7 @@ impl Database {
|
|||
)
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("only admins can set participant role"))?;
|
||||
.context("only admins can set participant role")?;
|
||||
|
||||
if role.requires_cla() {
|
||||
self.check_user_has_signed_cla(user_id, room_id, &tx)
|
||||
|
@ -1156,7 +1152,7 @@ impl Database {
|
|||
let channel = room::Entity::find_by_id(room_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find room"))?
|
||||
.context("could not find room")?
|
||||
.find_related(channel::Entity)
|
||||
.one(tx)
|
||||
.await?;
|
||||
|
@ -1297,7 +1293,7 @@ impl Database {
|
|||
let db_room = room::Entity::find_by_id(room_id)
|
||||
.one(tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find room"))?;
|
||||
.context("could not find room")?;
|
||||
|
||||
let mut db_participants = db_room
|
||||
.find_related(room_participant::Entity)
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use anyhow::Context as _;
|
||||
use chrono::NaiveDateTime;
|
||||
|
||||
use super::*;
|
||||
|
@ -247,7 +248,7 @@ impl Database {
|
|||
.into_values::<_, QueryAs>()
|
||||
.one(&*tx)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("could not find user"))?;
|
||||
.context("could not find user")?;
|
||||
Ok(metrics_id.to_string())
|
||||
})
|
||||
.await
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Context as _;
|
||||
use rpc::ConnectionId;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
|
@ -18,10 +18,10 @@ impl Model {
|
|||
pub fn host_connection(&self) -> Result<ConnectionId> {
|
||||
let host_connection_server_id = self
|
||||
.host_connection_server_id
|
||||
.ok_or_else(|| anyhow!("empty host_connection_server_id"))?;
|
||||
.context("empty host_connection_server_id")?;
|
||||
let host_connection_id = self
|
||||
.host_connection_id
|
||||
.ok_or_else(|| anyhow!("empty host_connection_id"))?;
|
||||
.context("empty host_connection_id")?;
|
||||
Ok(ConnectionId {
|
||||
owner_id: host_connection_server_id.0 as u32,
|
||||
id: host_connection_id as u32,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
|
@ -6,8 +6,8 @@ pub fn get_dotenv_vars(current_dir: impl AsRef<Path>) -> Result<Vec<(String, Str
|
|||
let current_dir = current_dir.as_ref();
|
||||
|
||||
let mut vars = Vec::new();
|
||||
let env_content = fs::read_to_string(current_dir.join(".env.toml"))
|
||||
.map_err(|_| anyhow!("no .env.toml file found"))?;
|
||||
let env_content =
|
||||
fs::read_to_string(current_dir.join(".env.toml")).context("no .env.toml file found")?;
|
||||
|
||||
add_vars(env_content, &mut vars)?;
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ pub mod user_backfiller;
|
|||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Context as _;
|
||||
use aws_config::{BehaviorVersion, Region};
|
||||
use axum::{
|
||||
http::{HeaderMap, StatusCode},
|
||||
|
@ -339,7 +339,7 @@ fn build_stripe_client(config: &Config) -> anyhow::Result<stripe::Client> {
|
|||
let api_key = config
|
||||
.stripe_api_key
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing stripe_api_key"))?;
|
||||
.context("missing stripe_api_key")?;
|
||||
Ok(stripe::Client::new(api_key))
|
||||
}
|
||||
|
||||
|
@ -348,11 +348,11 @@ async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::
|
|||
config
|
||||
.blob_store_access_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_access_key"))?,
|
||||
.context("missing blob_store_access_key")?,
|
||||
config
|
||||
.blob_store_secret_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_secret_key"))?,
|
||||
.context("missing blob_store_secret_key")?,
|
||||
None,
|
||||
None,
|
||||
"env",
|
||||
|
@ -363,13 +363,13 @@ async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::
|
|||
config
|
||||
.blob_store_url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_url"))?,
|
||||
.context("missing blob_store_url")?,
|
||||
)
|
||||
.region(Region::new(
|
||||
config
|
||||
.blob_store_region
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_region"))?,
|
||||
.context("missing blob_store_region")?,
|
||||
))
|
||||
.credentials_provider(keys)
|
||||
.load()
|
||||
|
@ -383,11 +383,11 @@ async fn build_kinesis_client(config: &Config) -> anyhow::Result<aws_sdk_kinesis
|
|||
config
|
||||
.kinesis_access_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing kinesis_access_key"))?,
|
||||
.context("missing kinesis_access_key")?,
|
||||
config
|
||||
.kinesis_secret_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing kinesis_secret_key"))?,
|
||||
.context("missing kinesis_secret_key")?,
|
||||
None,
|
||||
None,
|
||||
"env",
|
||||
|
@ -398,7 +398,7 @@ async fn build_kinesis_client(config: &Config) -> anyhow::Result<aws_sdk_kinesis
|
|||
config
|
||||
.kinesis_region
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing kinesis_region"))?,
|
||||
.context("missing kinesis_region")?,
|
||||
))
|
||||
.credentials_provider(keys)
|
||||
.load()
|
||||
|
|
|
@ -19,7 +19,7 @@ use usage_measure::UsageMeasure;
|
|||
use std::future::Future;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Context;
|
||||
pub use sea_orm::ConnectOptions;
|
||||
use sea_orm::prelude::*;
|
||||
use sea_orm::{
|
||||
|
@ -93,7 +93,7 @@ impl LlmDatabase {
|
|||
Ok(self
|
||||
.models
|
||||
.get(&(provider, name.to_string()))
|
||||
.ok_or_else(|| anyhow!("unknown model {provider:?}:{name}"))?)
|
||||
.with_context(|| format!("unknown model {provider:?}:{name}"))?)
|
||||
}
|
||||
|
||||
pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> {
|
||||
|
@ -101,7 +101,7 @@ impl LlmDatabase {
|
|||
.models
|
||||
.values()
|
||||
.find(|model| model.id == id)
|
||||
.ok_or_else(|| anyhow!("no model for ID {id:?}"))?)
|
||||
.with_context(|| format!("no model for ID {id:?}"))?)
|
||||
}
|
||||
|
||||
pub fn options(&self) -> &ConnectOptions {
|
||||
|
@ -142,11 +142,9 @@ impl LlmDatabase {
|
|||
|
||||
let mut tx = Arc::new(Some(tx));
|
||||
let result = f(TransactionHandle(tx.clone())).await;
|
||||
let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
|
||||
return Err(anyhow!(
|
||||
"couldn't complete transaction because it's still in use"
|
||||
))?;
|
||||
};
|
||||
let tx = Arc::get_mut(&mut tx)
|
||||
.and_then(|tx| tx.take())
|
||||
.context("couldn't complete transaction because it's still in use")?;
|
||||
|
||||
Ok((tx, result))
|
||||
}
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::db::billing_subscription::SubscriptionKind;
|
|||
use crate::db::{billing_subscription, user};
|
||||
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
|
||||
use crate::{Config, db::billing_preference};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use chrono::{NaiveDateTime, Utc};
|
||||
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
@ -49,7 +49,7 @@ impl LlmTokenClaims {
|
|||
let secret = config
|
||||
.llm_api_secret
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("no LLM API secret"))?;
|
||||
.context("no LLM API secret")?;
|
||||
|
||||
let plan = if is_staff {
|
||||
Plan::ZedPro
|
||||
|
@ -63,7 +63,7 @@ impl LlmTokenClaims {
|
|||
let subscription_period =
|
||||
billing_subscription::Model::current_period(Some(subscription), is_staff)
|
||||
.map(|(start, end)| (start.naive_utc(), end.naive_utc()))
|
||||
.ok_or_else(|| anyhow!("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started."))?;
|
||||
.context("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started.")?;
|
||||
|
||||
let now = Utc::now();
|
||||
let claims = Self {
|
||||
|
@ -112,7 +112,7 @@ impl LlmTokenClaims {
|
|||
let secret = config
|
||||
.llm_api_secret
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("no LLM API secret"))?;
|
||||
.context("no LLM API secret")?;
|
||||
|
||||
match jsonwebtoken::decode::<Self>(
|
||||
token,
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::anyhow;
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use axum::headers::HeaderMapExt;
|
||||
use axum::{
|
||||
Extension, Router,
|
||||
|
@ -138,11 +138,11 @@ async fn main() -> Result<()> {
|
|||
.config
|
||||
.llm_database_url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?;
|
||||
.context("missing LLM_DATABASE_URL")?;
|
||||
let max_connections = state
|
||||
.config
|
||||
.llm_database_max_connections
|
||||
.ok_or_else(|| anyhow!("missing LLM_DATABASE_MAX_CONNECTIONS"))?;
|
||||
.context("missing LLM_DATABASE_MAX_CONNECTIONS")?;
|
||||
|
||||
let mut db_options = db::ConnectOptions::new(database_url);
|
||||
db_options.max_connections(max_connections);
|
||||
|
@ -287,7 +287,7 @@ async fn setup_llm_database(config: &Config) -> Result<()> {
|
|||
let database_url = config
|
||||
.llm_database_url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("missing LLM_DATABASE_URL"))?;
|
||||
.context("missing LLM_DATABASE_URL")?;
|
||||
|
||||
let db_options = db::ConnectOptions::new(database_url.clone());
|
||||
let db = LlmDatabase::new(db_options, Executor::Production).await?;
|
||||
|
|
|
@ -30,12 +30,11 @@ pub async fn run_database_migrations(
|
|||
for migration in migrations {
|
||||
match applied_migrations.get(&migration.version) {
|
||||
Some(applied_migration) => {
|
||||
if migration.checksum != applied_migration.checksum {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
migration.checksum == applied_migration.checksum,
|
||||
"checksum mismatch for applied migration {}",
|
||||
migration.description
|
||||
))?;
|
||||
}
|
||||
);
|
||||
}
|
||||
None => {
|
||||
let elapsed = connection.apply(&migration).await?;
|
||||
|
|
|
@ -664,7 +664,7 @@ impl Server {
|
|||
Err(error) => {
|
||||
let proto_err = match &error {
|
||||
Error::Internal(err) => err.to_proto(),
|
||||
_ => ErrorCode::Internal.message(format!("{}", error)).to_proto(),
|
||||
_ => ErrorCode::Internal.message(format!("{error}")).to_proto(),
|
||||
};
|
||||
peer.respond_with_error(receipt, proto_err)?;
|
||||
Err(error)
|
||||
|
@ -938,7 +938,7 @@ impl Server {
|
|||
.db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
|
||||
let update_user_plan = make_update_user_plan_message(
|
||||
&self.app_state.db,
|
||||
|
@ -1169,7 +1169,7 @@ pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> Result
|
|||
let metric_families = prometheus::gather();
|
||||
let encoded_metrics = encoder
|
||||
.encode_to_string(&metric_families)
|
||||
.map_err(|err| anyhow!("{}", err))?;
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
Ok(encoded_metrics)
|
||||
}
|
||||
|
||||
|
@ -1685,7 +1685,7 @@ async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<(
|
|||
.await
|
||||
.decline_call(Some(room_id), session.user_id())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("failed to decline call"))?;
|
||||
.context("declining call")?;
|
||||
room_updated(&room, &session.peer);
|
||||
}
|
||||
|
||||
|
@ -1715,9 +1715,7 @@ async fn update_participant_location(
|
|||
session: Session,
|
||||
) -> Result<()> {
|
||||
let room_id = RoomId::from_proto(request.room_id);
|
||||
let location = request
|
||||
.location
|
||||
.ok_or_else(|| anyhow!("invalid location"))?;
|
||||
let location = request.location.context("invalid location")?;
|
||||
|
||||
let db = session.db().await;
|
||||
let room = db
|
||||
|
@ -2246,7 +2244,7 @@ async fn create_buffer_for_peer(
|
|||
session.connection_id,
|
||||
)
|
||||
.await?;
|
||||
let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?;
|
||||
let peer_id = request.peer_id.context("invalid peer id")?;
|
||||
session
|
||||
.peer
|
||||
.forward_send(session.connection_id, peer_id.into(), request)?;
|
||||
|
@ -2377,10 +2375,7 @@ async fn follow(
|
|||
) -> Result<()> {
|
||||
let room_id = RoomId::from_proto(request.room_id);
|
||||
let project_id = request.project_id.map(ProjectId::from_proto);
|
||||
let leader_id = request
|
||||
.leader_id
|
||||
.ok_or_else(|| anyhow!("invalid leader id"))?
|
||||
.into();
|
||||
let leader_id = request.leader_id.context("invalid leader id")?.into();
|
||||
let follower_id = session.connection_id;
|
||||
|
||||
session
|
||||
|
@ -2411,10 +2406,7 @@ async fn follow(
|
|||
async fn unfollow(request: proto::Unfollow, session: Session) -> Result<()> {
|
||||
let room_id = RoomId::from_proto(request.room_id);
|
||||
let project_id = request.project_id.map(ProjectId::from_proto);
|
||||
let leader_id = request
|
||||
.leader_id
|
||||
.ok_or_else(|| anyhow!("invalid leader id"))?
|
||||
.into();
|
||||
let leader_id = request.leader_id.context("invalid leader id")?.into();
|
||||
let follower_id = session.connection_id;
|
||||
|
||||
session
|
||||
|
@ -3358,9 +3350,7 @@ async fn join_channel_internal(
|
|||
};
|
||||
|
||||
channel_updated(
|
||||
&joined_room
|
||||
.channel
|
||||
.ok_or_else(|| anyhow!("channel not returned"))?,
|
||||
&joined_room.channel.context("channel not returned")?,
|
||||
&joined_room.room,
|
||||
&session.peer,
|
||||
&*session.connection_pool().await,
|
||||
|
@ -3568,9 +3558,7 @@ async fn send_channel_message(
|
|||
// TODO: adjust mentions if body is trimmed
|
||||
|
||||
let timestamp = OffsetDateTime::now_utc();
|
||||
let nonce = request
|
||||
.nonce
|
||||
.ok_or_else(|| anyhow!("nonce can't be blank"))?;
|
||||
let nonce = request.nonce.context("nonce can't be blank")?;
|
||||
|
||||
let channel_id = ChannelId::from_proto(request.channel_id);
|
||||
let CreatedChannelMessage {
|
||||
|
@ -3710,10 +3698,7 @@ async fn update_channel_message(
|
|||
)
|
||||
.await?;
|
||||
|
||||
let nonce = request
|
||||
.nonce
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("nonce can't be blank"))?;
|
||||
let nonce = request.nonce.clone().context("nonce can't be blank")?;
|
||||
|
||||
let message = proto::ChannelMessage {
|
||||
sender_id: session.user_id().to_proto(),
|
||||
|
@ -3818,14 +3803,12 @@ async fn get_supermaven_api_key(
|
|||
return Err(anyhow!("supermaven not enabled for this account"))?;
|
||||
}
|
||||
|
||||
let email = session
|
||||
.email()
|
||||
.ok_or_else(|| anyhow!("user must have an email"))?;
|
||||
let email = session.email().context("user must have an email")?;
|
||||
|
||||
let supermaven_admin_api = session
|
||||
.supermaven_client
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("supermaven not configured"))?;
|
||||
.context("supermaven not configured")?;
|
||||
|
||||
let result = supermaven_admin_api
|
||||
.try_get_or_create_user(CreateExternalUserRequest { id: user_id, email })
|
||||
|
@ -3973,7 +3956,7 @@ async fn get_private_user_info(
|
|||
let user = db
|
||||
.get_user_by_id(session.user_id())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
.context("user not found")?;
|
||||
let flags = db.get_user_flags(session.user_id()).await?;
|
||||
|
||||
response.send(proto::GetPrivateUserInfoResponse {
|
||||
|
@ -4019,19 +4002,23 @@ async fn get_llm_api_token(
|
|||
let user = db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user {} not found", user_id))?;
|
||||
.with_context(|| format!("user {user_id} not found"))?;
|
||||
|
||||
if user.accepted_tos_at.is_none() {
|
||||
Err(anyhow!("terms of service not accepted"))?
|
||||
}
|
||||
|
||||
let Some(stripe_client) = session.app_state.stripe_client.as_ref() else {
|
||||
Err(anyhow!("failed to retrieve Stripe client"))?
|
||||
};
|
||||
let stripe_client = session
|
||||
.app_state
|
||||
.stripe_client
|
||||
.as_ref()
|
||||
.context("failed to retrieve Stripe client")?;
|
||||
|
||||
let Some(stripe_billing) = session.app_state.stripe_billing.as_ref() else {
|
||||
Err(anyhow!("failed to retrieve Stripe billing object"))?
|
||||
};
|
||||
let stripe_billing = session
|
||||
.app_state
|
||||
.stripe_billing
|
||||
.as_ref()
|
||||
.context("failed to retrieve Stripe billing object")?;
|
||||
|
||||
let billing_customer =
|
||||
if let Some(billing_customer) = db.get_billing_customer_by_user_id(user.id).await? {
|
||||
|
@ -4047,7 +4034,7 @@ async fn get_llm_api_token(
|
|||
stripe::Expandable::Id(customer_id),
|
||||
)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("billing customer not found"))?
|
||||
.context("billing customer not found")?
|
||||
};
|
||||
|
||||
let billing_subscription =
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::db::{ChannelId, ChannelRole, UserId};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use rpc::ConnectionId;
|
||||
use semantic_version::SemanticVersion;
|
||||
|
@ -77,7 +77,7 @@ impl ConnectionPool {
|
|||
let connection = self
|
||||
.connections
|
||||
.get_mut(&connection_id)
|
||||
.ok_or_else(|| anyhow!("no such connection"))?;
|
||||
.context("no such connection")?;
|
||||
|
||||
let user_id = connection.user_id;
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use super::{RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
|
||||
use crate::{db::UserId, tests::run_randomized_test};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use call::ActiveCall;
|
||||
use collections::{BTreeMap, HashMap};
|
||||
|
@ -782,8 +782,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
|||
let save =
|
||||
project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
|
||||
let save = cx.spawn(|cx| async move {
|
||||
save.await
|
||||
.map_err(|err| anyhow!("save request failed: {:?}", err))?;
|
||||
save.await.context("save request failed")?;
|
||||
assert!(
|
||||
buffer
|
||||
.read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use util::ResultExt;
|
||||
|
||||
|
@ -144,12 +144,9 @@ impl UserBackfiller {
|
|||
}
|
||||
}
|
||||
|
||||
let response = match response.error_for_status() {
|
||||
Ok(response) => response,
|
||||
Err(err) => return Err(anyhow!("failed to fetch GitHub user: {err}")),
|
||||
};
|
||||
|
||||
response
|
||||
.error_for_status()
|
||||
.context("fetching GitHub user")?
|
||||
.json()
|
||||
.await
|
||||
.with_context(|| format!("failed to deserialize GitHub user from '{url}'"))
|
||||
|
|
|
@ -3,6 +3,7 @@ mod contact_finder;
|
|||
|
||||
use self::channel_modal::ChannelModal;
|
||||
use crate::{CollaborationPanelSettings, channel_view::ChannelView, chat_panel::ChatPanel};
|
||||
use anyhow::Context as _;
|
||||
use call::ActiveCall;
|
||||
use channel::{Channel, ChannelEvent, ChannelStore};
|
||||
use client::{ChannelId, Client, Contact, User, UserStore};
|
||||
|
@ -388,9 +389,7 @@ impl CollabPanel {
|
|||
Some(serialization_key) => cx
|
||||
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
|
||||
.await
|
||||
.map_err(|_| {
|
||||
anyhow::anyhow!("Failed to read collaboration panel from key value store")
|
||||
})
|
||||
.context("reading collaboration panel from key value store")
|
||||
.log_err()
|
||||
.flatten()
|
||||
.map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::HashMap;
|
||||
use futures::{FutureExt, StreamExt, channel::oneshot, select};
|
||||
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Task};
|
||||
|
@ -308,7 +308,7 @@ impl Client {
|
|||
.response_handlers
|
||||
.lock()
|
||||
.as_mut()
|
||||
.ok_or_else(|| anyhow!("server shut down"))
|
||||
.context("server shut down")
|
||||
.map(|handlers| {
|
||||
handlers.insert(
|
||||
RequestId::Int(id),
|
||||
|
@ -341,7 +341,7 @@ impl Client {
|
|||
} else if let Some(result) = parsed.result {
|
||||
Ok(serde_json::from_str(result.get())?)
|
||||
} else {
|
||||
Err(anyhow!("Invalid response: no result or error"))
|
||||
anyhow::bail!("Invalid response: no result or error");
|
||||
}
|
||||
}
|
||||
Err(_) => anyhow::bail!("cancelled")
|
||||
|
|
|
@ -46,12 +46,11 @@ impl ModelContextProtocol {
|
|||
.request(types::RequestType::Initialize.as_str(), params)
|
||||
.await?;
|
||||
|
||||
if !Self::supported_protocols().contains(&response.protocol_version) {
|
||||
return Err(anyhow::anyhow!(
|
||||
anyhow::ensure!(
|
||||
Self::supported_protocols().contains(&response.protocol_version),
|
||||
"Unsupported protocol version: {:?}",
|
||||
response.protocol_version
|
||||
));
|
||||
}
|
||||
);
|
||||
|
||||
log::trace!("mcp server info {:?}", response.server_info);
|
||||
|
||||
|
@ -96,14 +95,11 @@ impl InitializedContextServerProtocol {
|
|||
}
|
||||
|
||||
fn check_capability(&self, capability: ServerCapability) -> Result<()> {
|
||||
if self.capable(capability) {
|
||||
anyhow::ensure!(
|
||||
self.capable(capability),
|
||||
"Server does not support {capability:?} capability"
|
||||
);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow::anyhow!(
|
||||
"Server does not support {:?} capability",
|
||||
capability
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
/// List the MCP prompts.
|
||||
|
|
|
@ -133,21 +133,20 @@ enum CopilotServer {
|
|||
impl CopilotServer {
|
||||
fn as_authenticated(&mut self) -> Result<&mut RunningCopilotServer> {
|
||||
let server = self.as_running()?;
|
||||
if matches!(server.sign_in_status, SignInStatus::Authorized { .. }) {
|
||||
anyhow::ensure!(
|
||||
matches!(server.sign_in_status, SignInStatus::Authorized { .. }),
|
||||
"must sign in before using copilot"
|
||||
);
|
||||
Ok(server)
|
||||
} else {
|
||||
Err(anyhow!("must sign in before using copilot"))
|
||||
}
|
||||
}
|
||||
|
||||
fn as_running(&mut self) -> Result<&mut RunningCopilotServer> {
|
||||
match self {
|
||||
CopilotServer::Starting { .. } => Err(anyhow!("copilot is still starting")),
|
||||
CopilotServer::Disabled => Err(anyhow!("copilot is disabled")),
|
||||
CopilotServer::Error(error) => Err(anyhow!(
|
||||
"copilot was not started because of an error: {}",
|
||||
error
|
||||
)),
|
||||
CopilotServer::Starting { .. } => anyhow::bail!("copilot is still starting"),
|
||||
CopilotServer::Disabled => anyhow::bail!("copilot is disabled"),
|
||||
CopilotServer::Error(error) => {
|
||||
anyhow::bail!("copilot was not started because of an error: {error}")
|
||||
}
|
||||
CopilotServer::Running(server) => Ok(server),
|
||||
}
|
||||
}
|
||||
|
@ -648,7 +647,7 @@ impl Copilot {
|
|||
}
|
||||
};
|
||||
|
||||
cx.background_spawn(task.map_err(|err| anyhow!("{:?}", err)))
|
||||
cx.background_spawn(task.map_err(|err| anyhow!("{err:?}")))
|
||||
} else {
|
||||
// If we're downloading, wait until download is finished
|
||||
// If we're in a stuck state, display to the user
|
||||
|
|
|
@ -2,6 +2,7 @@ use std::path::PathBuf;
|
|||
use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use anyhow::Context as _;
|
||||
use anyhow::{Result, anyhow};
|
||||
use chrono::DateTime;
|
||||
use collections::HashSet;
|
||||
|
@ -322,8 +323,8 @@ impl TryFrom<ApiTokenResponse> for ApiToken {
|
|||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
|
||||
let expires_at = DateTime::from_timestamp(response.expires_at, 0)
|
||||
.ok_or_else(|| anyhow!("invalid expires_at"))?;
|
||||
let expires_at =
|
||||
DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;
|
||||
|
||||
Ok(Self {
|
||||
api_key: response.token,
|
||||
|
@ -442,9 +443,11 @@ impl CopilotChat {
|
|||
request: Request,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
let Some(this) = cx.update(|cx| Self::global(cx)).ok().flatten() else {
|
||||
return Err(anyhow!("Copilot chat is not enabled"));
|
||||
};
|
||||
let this = cx
|
||||
.update(|cx| Self::global(cx))
|
||||
.ok()
|
||||
.flatten()
|
||||
.context("Copilot chat is not enabled")?;
|
||||
|
||||
let (oauth_token, api_token, client) = this.read_with(&cx, |this, _| {
|
||||
(
|
||||
|
@ -454,7 +457,7 @@ impl CopilotChat {
|
|||
)
|
||||
})?;
|
||||
|
||||
let oauth_token = oauth_token.ok_or_else(|| anyhow!("No OAuth token available"))?;
|
||||
let oauth_token = oauth_token.context("No OAuth token available")?;
|
||||
|
||||
let token = match api_token {
|
||||
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
|
||||
|
@ -513,7 +516,11 @@ async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Resul
|
|||
|
||||
let mut response = client.send(request).await?;
|
||||
|
||||
if response.status().is_success() {
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"Failed to request models: {}",
|
||||
response.status()
|
||||
);
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
|
@ -522,9 +529,6 @@ async fn request_models(api_token: String, client: Arc<dyn HttpClient>) -> Resul
|
|||
let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
|
||||
|
||||
Ok(models)
|
||||
} else {
|
||||
Err(anyhow!("Failed to request models: {}", response.status()))
|
||||
}
|
||||
}
|
||||
|
||||
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
|
||||
|
@ -551,8 +555,7 @@ async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Re
|
|||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
let body_str = std::str::from_utf8(&body)?;
|
||||
|
||||
Err(anyhow!("Failed to request API token: {}", body_str))
|
||||
anyhow::bail!("Failed to request API token: {body_str}");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -603,11 +606,11 @@ async fn stream_completion(
|
|||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
let body_str = std::str::from_utf8(&body)?;
|
||||
return Err(anyhow!(
|
||||
anyhow::bail!(
|
||||
"Failed to connect to API: {} {}",
|
||||
response.status(),
|
||||
body_str
|
||||
));
|
||||
);
|
||||
}
|
||||
|
||||
if is_streaming {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use ::fs::Fs;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
|
@ -103,8 +103,8 @@ impl TcpArguments {
|
|||
pub fn from_proto(proto: proto::TcpHost) -> anyhow::Result<Self> {
|
||||
let host = TcpArgumentsTemplate::from_proto(proto)?;
|
||||
Ok(TcpArguments {
|
||||
host: host.host.ok_or_else(|| anyhow!("missing host"))?,
|
||||
port: host.port.ok_or_else(|| anyhow!("missing port"))?,
|
||||
host: host.host.context("missing host")?,
|
||||
port: host.port.context("missing port")?,
|
||||
timeout: host.timeout,
|
||||
})
|
||||
}
|
||||
|
@ -200,9 +200,7 @@ impl DebugTaskDefinition {
|
|||
}
|
||||
|
||||
pub fn from_proto(proto: proto::DebugTaskDefinition) -> Result<Self> {
|
||||
let request = proto
|
||||
.request
|
||||
.ok_or_else(|| anyhow::anyhow!("request is required"))?;
|
||||
let request = proto.request.context("request is required")?;
|
||||
Ok(Self {
|
||||
label: proto.label.into(),
|
||||
initialize_args: proto.initialize_args.map(|v| v.into()),
|
||||
|
@ -346,12 +344,11 @@ pub async fn download_adapter_from_github(
|
|||
.get(&github_version.url, Default::default(), true)
|
||||
.await
|
||||
.context("Error downloading release")?;
|
||||
if !response.status().is_success() {
|
||||
Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"download failed with status {}",
|
||||
response.status().to_string()
|
||||
))?;
|
||||
}
|
||||
);
|
||||
|
||||
match file_type {
|
||||
DownloadedFileType::GzipTar => {
|
||||
|
|
|
@ -2,7 +2,7 @@ use crate::{
|
|||
adapters::DebugAdapterBinary,
|
||||
transport::{IoKind, LogKind, TransportDelegate},
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::Result;
|
||||
use dap_types::{
|
||||
messages::{Message, Response},
|
||||
requests::Request,
|
||||
|
@ -187,10 +187,7 @@ impl DebugAdapterClient {
|
|||
Ok(serde_json::from_value(Default::default())?)
|
||||
}
|
||||
}
|
||||
false => Err(anyhow!(
|
||||
"Request failed: {}",
|
||||
response.message.unwrap_or_default()
|
||||
)),
|
||||
false => anyhow::bail!("Request failed: {}", response.message.unwrap_or_default()),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::proto::{
|
||||
self, DapChecksum, DapChecksumAlgorithm, DapEvaluateContext, DapModule, DapScope,
|
||||
DapScopePresentationHint, DapSource, DapSourcePresentationHint, DapStackFrame, DapVariable,
|
||||
|
@ -311,9 +311,9 @@ impl ProtoConversion for dap_types::Module {
|
|||
fn from_proto(payload: Self::ProtoType) -> Result<Self> {
|
||||
let id = match payload
|
||||
.id
|
||||
.ok_or(anyhow!("All DapModule proto messages must have an id"))?
|
||||
.context("All DapModule proto messages must have an id")?
|
||||
.id
|
||||
.ok_or(anyhow!("All DapModuleID proto messages must have an id"))?
|
||||
.context("All DapModuleID proto messages must have an id")?
|
||||
{
|
||||
proto::dap_module_id::Id::String(string) => dap_types::ModuleId::String(string),
|
||||
proto::dap_module_id::Id::Number(num) => dap_types::ModuleId::Number(num),
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Context, Result, anyhow, bail};
|
||||
use anyhow::{Context as _, Result, bail};
|
||||
use dap_types::{
|
||||
ErrorResponse,
|
||||
messages::{Message, Response},
|
||||
|
@ -226,12 +226,9 @@ impl TransportDelegate {
|
|||
|
||||
pub(crate) async fn send_message(&self, message: Message) -> Result<()> {
|
||||
if let Some(server_tx) = self.server_tx.lock().await.as_ref() {
|
||||
server_tx
|
||||
.send(message)
|
||||
.await
|
||||
.map_err(|e| anyhow!("Failed to send message: {}", e))
|
||||
server_tx.send(message).await.context("sending message")
|
||||
} else {
|
||||
Err(anyhow!("Server tx already dropped"))
|
||||
anyhow::bail!("Server tx already dropped")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -254,7 +251,7 @@ impl TransportDelegate {
|
|||
};
|
||||
|
||||
if bytes_read == 0 {
|
||||
break Err(anyhow!("Debugger log stream closed"));
|
||||
anyhow::bail!("Debugger log stream closed");
|
||||
}
|
||||
|
||||
if let Some(log_handlers) = log_handlers.as_ref() {
|
||||
|
@ -379,7 +376,7 @@ impl TransportDelegate {
|
|||
|
||||
let result = loop {
|
||||
match reader.read_line(&mut buffer).await {
|
||||
Ok(0) => break Err(anyhow!("debugger error stream closed")),
|
||||
Ok(0) => anyhow::bail!("debugger error stream closed"),
|
||||
Ok(_) => {
|
||||
for (kind, log_handler) in log_handlers.lock().iter_mut() {
|
||||
if matches!(kind, LogKind::Adapter) {
|
||||
|
@ -409,13 +406,13 @@ impl TransportDelegate {
|
|||
.and_then(|response| response.error.map(|msg| msg.format))
|
||||
.or_else(|| response.message.clone())
|
||||
{
|
||||
return Err(anyhow!(error_message));
|
||||
anyhow::bail!(error_message);
|
||||
};
|
||||
|
||||
Err(anyhow!(
|
||||
anyhow::bail!(
|
||||
"Received error response from adapter. Response: {:?}",
|
||||
response.clone()
|
||||
))
|
||||
response
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -437,7 +434,7 @@ impl TransportDelegate {
|
|||
.with_context(|| "reading a message from server")?
|
||||
== 0
|
||||
{
|
||||
return Err(anyhow!("debugger reader stream closed"));
|
||||
anyhow::bail!("debugger reader stream closed");
|
||||
};
|
||||
|
||||
if buffer == "\r\n" {
|
||||
|
@ -540,9 +537,10 @@ impl TcpTransport {
|
|||
}
|
||||
|
||||
async fn start(binary: &DebugAdapterBinary, cx: AsyncApp) -> Result<(TransportPipe, Self)> {
|
||||
let Some(connection_args) = binary.connection.as_ref() else {
|
||||
return Err(anyhow!("No connection arguments provided"));
|
||||
};
|
||||
let connection_args = binary
|
||||
.connection
|
||||
.as_ref()
|
||||
.context("No connection arguments provided")?;
|
||||
|
||||
let host = connection_args.host;
|
||||
let port = connection_args.port;
|
||||
|
@ -577,7 +575,7 @@ impl TcpTransport {
|
|||
|
||||
let (mut process, (rx, tx)) = select! {
|
||||
_ = cx.background_executor().timer(Duration::from_millis(timeout)).fuse() => {
|
||||
return Err(anyhow!(format!("Connection to TCP DAP timeout {}:{}", host, port)))
|
||||
anyhow::bail!("Connection to TCP DAP timeout {host}:{port}");
|
||||
},
|
||||
result = cx.spawn(async move |cx| {
|
||||
loop {
|
||||
|
@ -591,7 +589,7 @@ impl TcpTransport {
|
|||
} else {
|
||||
String::from_utf8_lossy(&output.stderr).to_string()
|
||||
};
|
||||
return Err(anyhow!("{}\nerror: process exited before debugger attached.", output));
|
||||
anyhow::bail!("{output}\nerror: process exited before debugger attached.");
|
||||
}
|
||||
cx.background_executor().timer(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
@ -664,14 +662,8 @@ impl StdioTransport {
|
|||
.spawn()
|
||||
.with_context(|| "failed to spawn command.")?;
|
||||
|
||||
let stdin = process
|
||||
.stdin
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to open stdin"))?;
|
||||
let stdout = process
|
||||
.stdout
|
||||
.take()
|
||||
.ok_or_else(|| anyhow!("Failed to open stdout"))?;
|
||||
let stdin = process.stdin.take().context("Failed to open stdin")?;
|
||||
let stdout = process.stdout.take().context("Failed to open stdout")?;
|
||||
let stderr = process
|
||||
.stderr
|
||||
.take()
|
||||
|
@ -793,7 +785,7 @@ impl FakeTransport {
|
|||
|
||||
match message {
|
||||
Err(error) => {
|
||||
break anyhow!(error);
|
||||
break anyhow::anyhow!(error);
|
||||
}
|
||||
Ok(message) => {
|
||||
match message {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use dap::adapters::{DebugTaskDefinition, latest_github_release};
|
||||
use futures::StreamExt;
|
||||
|
@ -69,22 +69,16 @@ impl CodeLldbDebugAdapter {
|
|||
let arch = match std::env::consts::ARCH {
|
||||
"aarch64" => "arm64",
|
||||
"x86_64" => "x64",
|
||||
_ => {
|
||||
return Err(anyhow!(
|
||||
"unsupported architecture {}",
|
||||
std::env::consts::ARCH
|
||||
));
|
||||
unsupported => {
|
||||
anyhow::bail!("unsupported architecture {unsupported}");
|
||||
}
|
||||
};
|
||||
let platform = match std::env::consts::OS {
|
||||
"macos" => "darwin",
|
||||
"linux" => "linux",
|
||||
"windows" => "win32",
|
||||
_ => {
|
||||
return Err(anyhow!(
|
||||
"unsupported operating system {}",
|
||||
std::env::consts::OS
|
||||
));
|
||||
unsupported => {
|
||||
anyhow::bail!("unsupported operating system {unsupported}");
|
||||
}
|
||||
};
|
||||
let asset_name = format!("codelldb-{platform}-{arch}.vsix");
|
||||
|
@ -94,7 +88,7 @@ impl CodeLldbDebugAdapter {
|
|||
.assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == asset_name)
|
||||
.ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
|
||||
.with_context(|| format!("no asset found matching {asset_name:?}"))?
|
||||
.browser_download_url
|
||||
.clone(),
|
||||
};
|
||||
|
@ -138,10 +132,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
|
|||
version_path
|
||||
} else {
|
||||
let mut paths = delegate.fs().read_dir(&adapter_path).await?;
|
||||
paths
|
||||
.next()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("No adapter found"))??
|
||||
paths.next().await.context("No adapter found")??
|
||||
};
|
||||
let adapter_dir = version_path.join("extension").join("adapter");
|
||||
let path = adapter_dir.join("codelldb").to_string_lossy().to_string();
|
||||
|
|
|
@ -8,7 +8,7 @@ mod ruby;
|
|||
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use codelldb::CodeLldbDebugAdapter;
|
||||
use dap::{
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use std::{collections::HashMap, ffi::OsStr};
|
||||
|
||||
use anyhow::{Result, bail};
|
||||
use anyhow::{Context as _, Result, bail};
|
||||
use async_trait::async_trait;
|
||||
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
|
||||
use gpui::AsyncApp;
|
||||
|
@ -78,7 +78,7 @@ impl DebugAdapter for GdbDebugAdapter {
|
|||
.which(OsStr::new("gdb"))
|
||||
.await
|
||||
.and_then(|p| p.to_str().map(|s| s.to_string()))
|
||||
.ok_or(anyhow!("Could not find gdb in path"));
|
||||
.context("Could not find gdb in path");
|
||||
|
||||
if gdb_path.is_err() && user_setting_path.is_none() {
|
||||
bail!("Could not find gdb path or it's not installed");
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use anyhow::Context as _;
|
||||
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
use language::LanguageName;
|
||||
|
@ -59,18 +60,14 @@ impl DebugAdapter for GoDebugAdapter {
|
|||
.which(OsStr::new("dlv"))
|
||||
.await
|
||||
.and_then(|p| p.to_str().map(|p| p.to_string()))
|
||||
.ok_or(anyhow!("Dlv not found in path"))?;
|
||||
.context("Dlv not found in path")?;
|
||||
|
||||
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: delve_path,
|
||||
arguments: vec![
|
||||
"dap".into(),
|
||||
"--listen".into(),
|
||||
format!("{}:{}", host, port),
|
||||
],
|
||||
arguments: vec!["dap".into(), "--listen".into(), format!("{host}:{port}")],
|
||||
cwd: None,
|
||||
envs: HashMap::default(),
|
||||
connection: Some(adapters::TcpArguments {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use adapters::latest_github_release;
|
||||
use anyhow::Context as _;
|
||||
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
|
||||
use gpui::AsyncApp;
|
||||
use std::{collections::HashMap, path::PathBuf, sync::OnceLock};
|
||||
|
@ -74,7 +75,7 @@ impl JsDebugAdapter {
|
|||
.assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == asset_name)
|
||||
.ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
|
||||
.with_context(|| format!("no asset found matching {asset_name:?}"))?
|
||||
.browser_download_url
|
||||
.clone(),
|
||||
})
|
||||
|
@ -98,7 +99,7 @@ impl JsDebugAdapter {
|
|||
file_name.starts_with(&file_name_prefix)
|
||||
})
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Couldn't find JavaScript dap directory"))?
|
||||
.context("Couldn't find JavaScript dap directory")?
|
||||
};
|
||||
|
||||
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use adapters::latest_github_release;
|
||||
use anyhow::Context as _;
|
||||
use dap::adapters::{DebugTaskDefinition, TcpArguments};
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
use language::LanguageName;
|
||||
|
@ -58,7 +59,7 @@ impl PhpDebugAdapter {
|
|||
.assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == asset_name)
|
||||
.ok_or_else(|| anyhow!("no asset found matching {:?}", asset_name))?
|
||||
.with_context(|| format!("no asset found matching {asset_name:?}"))?
|
||||
.browser_download_url
|
||||
.clone(),
|
||||
})
|
||||
|
@ -82,7 +83,7 @@ impl PhpDebugAdapter {
|
|||
file_name.starts_with(&file_name_prefix)
|
||||
})
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Couldn't find PHP dap directory"))?
|
||||
.context("Couldn't find PHP dap directory")?
|
||||
};
|
||||
|
||||
let tcp_connection = config.tcp_connection.clone().unwrap_or_default();
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use crate::*;
|
||||
use anyhow::Context as _;
|
||||
use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
use language::LanguageName;
|
||||
|
@ -112,7 +113,7 @@ impl PythonDebugAdapter {
|
|||
file_name.starts_with(&file_name_prefix)
|
||||
})
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Debugpy directory not found"))?
|
||||
.context("Debugpy directory not found")?
|
||||
};
|
||||
|
||||
let toolchain = delegate
|
||||
|
@ -143,7 +144,7 @@ impl PythonDebugAdapter {
|
|||
};
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
|
||||
command: python_path.context("failed to find binary path for Python")?,
|
||||
arguments: vec![
|
||||
debugpy_dir
|
||||
.join(Self::ADAPTER_PATH)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
use anyhow::{Result, anyhow};
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use dap::{
|
||||
DebugRequest, StartDebuggingRequestArguments,
|
||||
|
@ -54,12 +54,11 @@ impl DebugAdapter for RubyDebugAdapter {
|
|||
.arg("debug")
|
||||
.output()
|
||||
.await?;
|
||||
if !output.status.success() {
|
||||
return Err(anyhow!(
|
||||
anyhow::ensure!(
|
||||
output.status.success(),
|
||||
"Failed to install rdbg:\n{}",
|
||||
String::from_utf8_lossy(&output.stderr).to_string()
|
||||
));
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,7 +7,7 @@ use crate::{
|
|||
ShowStackTrace, StepBack, StepInto, StepOut, StepOver, Stop, ToggleIgnoreBreakpoints,
|
||||
persistence,
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use dap::StartDebuggingRequestArguments;
|
||||
use dap::adapters::DebugAdapterName;
|
||||
|
@ -1021,17 +1021,13 @@ impl DebugPanel {
|
|||
}
|
||||
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(project_path) = workspace
|
||||
workspace
|
||||
.project()
|
||||
.read(cx)
|
||||
.project_path_for_absolute_path(&path, cx)
|
||||
{
|
||||
Ok(project_path)
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Couldn't get project path for .zed/debug.json in active worktree"
|
||||
))
|
||||
}
|
||||
.context(
|
||||
"Couldn't get project path for .zed/debug.json in active worktree",
|
||||
)
|
||||
})?
|
||||
})
|
||||
})
|
||||
|
|
|
@ -9,7 +9,6 @@ use std::{
|
|||
usize,
|
||||
};
|
||||
|
||||
use anyhow::Result;
|
||||
use dap::{
|
||||
DapRegistry, DebugRequest,
|
||||
adapters::{DebugAdapterName, DebugTaskDefinition},
|
||||
|
@ -253,7 +252,7 @@ impl NewSessionModal {
|
|||
cx.emit(DismissEvent);
|
||||
})
|
||||
.ok();
|
||||
Result::<_, anyhow::Error>::Ok(())
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
use anyhow::Context as _;
|
||||
use collections::HashMap;
|
||||
use dap::{Capabilities, adapters::DebugAdapterName};
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
|
@ -96,18 +97,14 @@ pub(crate) async fn serialize_pane_layout(
|
|||
adapter_name: DebugAdapterName,
|
||||
pane_group: SerializedLayout,
|
||||
) -> anyhow::Result<()> {
|
||||
if let Ok(serialized_pane_group) = serde_json::to_string(&pane_group) {
|
||||
let serialized_pane_group = serde_json::to_string(&pane_group)
|
||||
.context("Serializing pane group with serde_json as a string")?;
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(
|
||||
format!("{DEBUGGER_PANEL_PREFIX}-{adapter_name}"),
|
||||
serialized_pane_group,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
Err(anyhow::anyhow!(
|
||||
"Failed to serialize pane group with serde_json as a string"
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn build_serialized_layout(
|
||||
|
|
|
@ -196,7 +196,7 @@ impl FollowableItem for DebugSession {
|
|||
_state: &mut Option<proto::view::Variant>,
|
||||
_window: &mut Window,
|
||||
_cx: &mut App,
|
||||
) -> Option<gpui::Task<gpui::Result<Entity<Self>>>> {
|
||||
) -> Option<gpui::Task<anyhow::Result<Entity<Self>>>> {
|
||||
None
|
||||
}
|
||||
|
||||
|
@ -218,7 +218,7 @@ impl FollowableItem for DebugSession {
|
|||
_message: proto::update_view::Variant,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Self>,
|
||||
) -> gpui::Task<gpui::Result<()>> {
|
||||
) -> gpui::Task<anyhow::Result<()>> {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue