Use anyhow more idiomatically (#31052)

https://github.com/zed-industries/zed/issues/30972 brought up another
case where our context is not enough to track the actual source of the
issue: we get a general top-level error without an inner error.

The reason for this was `.ok_or_else(|| anyhow!("failed to read HEAD
SHA"))?; ` on the top level.

The PR finally reworks the way we use anyhow to reduce such issues (or
at least make it simpler to bubble them up later in a fix).
On top of that, it uses a few more anyhow methods for better readability.

* `.ok_or_else(|| anyhow!("..."))`, `map_err` and other similar error
conversion/option reporting cases are replaced with `context` and
`with_context` calls
* in addition to that, various `anyhow!("failed to do ...")` messages are
replaced with `.context("Doing ...")` messages instead, removing the
parasitic `failed to` text
* `anyhow::ensure!` is used instead of `if ... { return Err(...); }`
calls
* `anyhow::bail!` is used instead of `return Err(anyhow!(...));`

Release Notes:

- N/A
This commit is contained in:
Kirill Bulatov 2025-05-21 02:06:07 +03:00 committed by GitHub
parent 1e51a7ac44
commit 16366cf9f2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
294 changed files with 2037 additions and 2610 deletions

View file

@ -37,7 +37,7 @@ impl TryFrom<String> for Role {
"assistant" => Ok(Self::Assistant),
"system" => Ok(Self::System),
"tool" => Ok(Self::Tool),
_ => Err(anyhow!("invalid role '{value}'")),
_ => anyhow::bail!("invalid role '{value}'"),
}
}
}
@ -118,7 +118,7 @@ impl Model {
"o3-mini" => Ok(Self::O3Mini),
"o3" => Ok(Self::O3),
"o4-mini" => Ok(Self::O4Mini),
_ => Err(anyhow!("invalid model id")),
invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
}
}
@ -491,16 +491,15 @@ pub async fn complete(
}
match serde_json::from_str::<OpenAiResponse>(&body) {
Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
"Failed to connect to OpenAI API: {}",
response.error.message,
)),
_ => Err(anyhow!(
),
_ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
)),
),
}
}
}
@ -541,16 +540,15 @@ pub async fn complete_text(
}
match serde_json::from_str::<OpenAiResponse>(&body) {
Ok(response) if !response.error.message.is_empty() => Err(anyhow!(
Ok(response) if !response.error.message.is_empty() => anyhow::bail!(
"Failed to connect to OpenAI API: {}",
response.error.message,
)),
_ => Err(anyhow!(
),
_ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
)),
),
}
}
}
@ -672,11 +670,11 @@ pub async fn stream_completion(
response.error.message,
)),
_ => Err(anyhow!(
_ => anyhow::bail!(
"Failed to connect to OpenAI API: {} {}",
response.status(),
body,
)),
),
}
}
}
@ -732,16 +730,14 @@ pub fn embed<'a>(
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
if response.status().is_success() {
let response: OpenAiEmbeddingResponse =
serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
Ok(response)
} else {
Err(anyhow!(
"error during embedding, status: {:?}, body: {:?}",
response.status(),
body
))
}
anyhow::ensure!(
response.status().is_success(),
"error during embedding, status: {:?}, body: {:?}",
response.status(),
body
);
let response: OpenAiEmbeddingResponse =
serde_json::from_str(&body).context("failed to parse OpenAI embedding response")?;
Ok(response)
}
}