Use anyhow more idiomatically (#31052)
https://github.com/zed-industries/zed/issues/30972 brought up another case where our error context is not enough to track down the actual source of an issue: we get a generic top-level error with no inner error. The cause was an `.ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;` at the top level. This PR finally reworks the way we use anyhow to reduce such issues (or at least make it simpler to bubble them up later in a fix). On top of that, it uses a few more anyhow methods for better readability (sketched below):

* `.ok_or_else(|| anyhow!("..."))`, `map_err`, and other similar error-conversion/option-reporting cases are replaced with `context` and `with_context` calls
* in addition, various `anyhow!("failed to do ...")` messages are replaced with `.context("Doing ...")` to remove the parasitic `failed to` text
* `anyhow::ensure!` is used instead of `if ... { return Err(...); }`
* `anyhow::bail!` is used instead of `return Err(anyhow!(...));`

Release Notes:

- N/A
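A minimal sketch of the before/after patterns listed above. It is not taken from this diff; the `Repo` type and its fields are made up purely for illustration:

```rust
use anyhow::{Context as _, Result, bail, ensure};

// Hypothetical type, used only to illustrate the idioms.
struct Repo {
    head: Option<String>,
    detached: bool,
}

fn head_sha(repo: &Repo) -> Result<String> {
    // Before: repo.head.clone().ok_or_else(|| anyhow!("failed to read HEAD SHA"))?
    // After: Option -> Result via the `Context` extension trait, no "failed to" prefix.
    let sha = repo.head.clone().context("reading HEAD SHA")?;

    // Before: if sha.len() != 40 { return Err(anyhow!("unexpected SHA length")); }
    ensure!(sha.len() == 40, "unexpected SHA length: {}", sha.len());

    // Before: return Err(anyhow!("repository has a detached HEAD"));
    if repo.detached {
        bail!("repository has a detached HEAD");
    }

    Ok(sha)
}
```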
parent 1e51a7ac44
commit 16366cf9f2
294 changed files with 2037 additions and 2610 deletions
@@ -19,7 +19,7 @@ use usage_measure::UsageMeasure;
 use std::future::Future;
 use std::sync::Arc;
 
-use anyhow::anyhow;
+use anyhow::Context;
 pub use sea_orm::ConnectOptions;
 use sea_orm::prelude::*;
 use sea_orm::{
@@ -93,7 +93,7 @@ impl LlmDatabase {
         Ok(self
             .models
             .get(&(provider, name.to_string()))
-            .ok_or_else(|| anyhow!("unknown model {provider:?}:{name}"))?)
+            .with_context(|| format!("unknown model {provider:?}:{name}"))?)
     }
 
     pub fn model_by_id(&self, id: ModelId) -> Result<&model::Model> {
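A note on the hunk above: `.with_context(|| format!(...))` takes a closure, so the formatted message is only built when the lookup actually fails, whereas `.context(...)` evaluates its message eagerly. A minimal standalone sketch of the same pattern, with a hypothetical `lookup` helper rather than the real collab code:

```rust
use std::collections::HashMap;

use anyhow::{Context as _, Result};

// Hypothetical helper mirroring the `.get(...).with_context(...)` pattern above.
fn lookup(models: &HashMap<String, u32>, name: &str) -> Result<u32> {
    models
        .get(name)
        .copied()
        // The closure only runs when `get` returned `None`, so the
        // formatted message is not allocated on the happy path.
        .with_context(|| format!("unknown model {name}"))
}
```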
@@ -101,7 +101,7 @@ impl LlmDatabase {
             .models
             .values()
             .find(|model| model.id == id)
-            .ok_or_else(|| anyhow!("no model for ID {id:?}"))?)
+            .with_context(|| format!("no model for ID {id:?}"))?)
     }
 
     pub fn options(&self) -> &ConnectOptions {
@@ -142,11 +142,9 @@ impl LlmDatabase {
 
         let mut tx = Arc::new(Some(tx));
         let result = f(TransactionHandle(tx.clone())).await;
-        let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
-            return Err(anyhow!(
-                "couldn't complete transaction because it's still in use"
-            ))?;
-        };
+        let tx = Arc::get_mut(&mut tx)
+            .and_then(|tx| tx.take())
+            .context("couldn't complete transaction because it's still in use")?;
 
         Ok((tx, result))
     }
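The transaction hunk above replaces a `let ... else { return Err(...) }` block with a plain `Option` chain ended by `.context(...)`. A minimal sketch of that conversion, using a made-up `take_inner` helper rather than the collab transaction code:

```rust
use std::sync::Arc;

use anyhow::{Context as _, Result};

// Hypothetical stand-in for the transaction-extraction step: try to take the
// inner value out of an `Arc<Option<T>>` that should have no other owners.
fn take_inner<T>(mut slot: Arc<Option<T>>) -> Result<T> {
    // Before (roughly):
    //     let Some(value) = Arc::get_mut(&mut slot).and_then(|v| v.take()) else {
    //         return Err(anyhow!("value is still in use"))?;
    //     };
    // After: the whole chain yields an Option, and `context` turns None into an error.
    Arc::get_mut(&mut slot)
        .and_then(|v| v.take())
        .context("value is still in use")
}
```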
@@ -2,7 +2,7 @@ use crate::db::billing_subscription::SubscriptionKind;
 use crate::db::{billing_subscription, user};
 use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
 use crate::{Config, db::billing_preference};
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result};
 use chrono::{NaiveDateTime, Utc};
 use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
 use serde::{Deserialize, Serialize};
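In the import hunk above the trait arrives as `Context as _`: only its extension methods are needed, not the name itself, so an anonymous import avoids clashing with any other `Context` type in scope. A small standalone sketch of the two things the trait provides; the `parse_port` helper is hypothetical:

```rust
// Anonymous trait import: only `.context`/`.with_context` are needed in scope.
use anyhow::{Context as _, Result};

// Hypothetical helper showing both uses of the trait.
fn parse_port(value: Option<&str>) -> Result<u16> {
    value
        .context("no port configured")? // Option -> Result
        .parse()
        .context("port is not a valid number") // attach context to an existing error
}
```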
@@ -49,7 +49,7 @@ impl LlmTokenClaims {
         let secret = config
             .llm_api_secret
             .as_ref()
-            .ok_or_else(|| anyhow!("no LLM API secret"))?;
+            .context("no LLM API secret")?;
 
         let plan = if is_staff {
             Plan::ZedPro
@@ -63,7 +63,7 @@ impl LlmTokenClaims {
         let subscription_period =
             billing_subscription::Model::current_period(Some(subscription), is_staff)
                 .map(|(start, end)| (start.naive_utc(), end.naive_utc()))
-                .ok_or_else(|| anyhow!("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started."))?;
+                .context("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started.")?;
 
         let now = Utc::now();
         let claims = Self {
@@ -112,7 +112,7 @@ impl LlmTokenClaims {
         let secret = config
             .llm_api_secret
             .as_ref()
-            .ok_or_else(|| anyhow!("no LLM API secret"))?;
+            .context("no LLM API secret")?;
 
         match jsonwebtoken::decode::<Self>(
             token,