assistant: Propagate LLM stop reason upwards (#17358)

This PR propagates the `stop_reason` from Anthropic up to the
Assistant so that we can take action based on it.
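
For context, here is a minimal sketch of what carrying the stop reason in the completion event stream could look like. The `Stop` variant and the `StopReason` shape below are assumptions modeled on Anthropic's documented stop reasons (`end_turn`, `max_tokens`, `tool_use`), not necessarily the exact types this PR landed:

```rust
// Hypothetical sketch of the event and stop-reason types; not the exact
// code from this PR. The Stop variant is what lets the Assistant branch
// on *why* the model finished instead of just seeing the stream end.
pub struct LanguageModelToolUse {
    pub id: String,
    pub name: String,
    pub input: serde_json::Value, // assumed field type
}

pub enum StopReason {
    EndTurn,   // the model finished its turn normally
    MaxTokens, // output was truncated by the token limit
    ToolUse,   // the model stopped to request a tool invocation
}

pub enum LanguageModelCompletionEvent {
    Text(String),
    ToolUse(LanguageModelToolUse),
    Stop(StopReason),
}
```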

The `extract_content_from_events` function was moved from the `anthropic`
crate to the `anthropic` provider module in `language_model` (and renamed to
`map_to_language_model_completion_events`), since it is more useful when it
can name the `LanguageModelCompletionEvent` type directly; otherwise
we'd need an additional layer of plumbing.
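
To illustrate why the move helps: once the mapper lives in `language_model`, its return type can name `LanguageModelCompletionEvent` directly, so callers no longer have to translate provider-specific content themselves. The sketch below reuses the hypothetical types above and a simplified stand-in for the Anthropic event stream; it is an assumption about the general shape, not the PR's actual implementation:

```rust
use futures::{Stream, StreamExt};

// Hypothetical stand-in for the anthropic crate's streamed events.
enum AnthropicEvent {
    TextDelta(String),
    MessageStop { stop_reason: String },
}

fn map_to_language_model_completion_events(
    events: impl Stream<Item = Result<AnthropicEvent, anyhow::Error>>,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, anyhow::Error>> {
    events.map(|event| {
        event.map(|event| match event {
            AnthropicEvent::TextDelta(text) => LanguageModelCompletionEvent::Text(text),
            AnthropicEvent::MessageStop { stop_reason } => {
                // The propagation this PR is about: translate Anthropic's
                // stop_reason instead of dropping it at this layer.
                LanguageModelCompletionEvent::Stop(match stop_reason.as_str() {
                    "max_tokens" => StopReason::MaxTokens,
                    "tool_use" => StopReason::ToolUse,
                    _ => StopReason::EndTurn,
                })
            }
        })
    })
}
```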

Release Notes:

- N/A
Marshall Bowers 2024-09-04 12:31:10 -04:00 committed by GitHub
parent 7c8f62e943
commit f38956943b
7 changed files with 143 additions and 144 deletions


@@ -1,4 +1,5 @@
 use super::open_ai::count_open_ai_tokens;
+use crate::provider::anthropic::map_to_language_model_completion_events;
 use crate::{
     settings::AllLanguageModelSettings, CloudModel, LanguageModel, LanguageModelCacheConfiguration,
     LanguageModelId, LanguageModelName, LanguageModelProviderId, LanguageModelProviderName,
@@ -33,10 +34,7 @@ use std::{
 use strum::IntoEnumIterator;
 use ui::{prelude::*, TintColor};
-use crate::{
-    LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider,
-    LanguageModelToolUse,
-};
+use crate::{LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider};
 use super::anthropic::count_anthropic_tokens;
@@ -518,30 +516,11 @@ impl LanguageModel for CloudLanguageModel {
                         },
                     )
                     .await?;
-                    Ok(anthropic::extract_content_from_events(Box::pin(
+                    Ok(map_to_language_model_completion_events(Box::pin(
                         response_lines(response).map_err(AnthropicError::Other),
                     )))
                 });
-                async move {
-                    Ok(future
-                        .await?
-                        .map(|result| {
-                            result
-                                .map(|content| match content {
-                                    anthropic::ResponseContent::Text { text } => {
-                                        LanguageModelCompletionEvent::Text(text)
-                                    }
-                                    anthropic::ResponseContent::ToolUse { id, name, input } => {
-                                        LanguageModelCompletionEvent::ToolUse(
-                                            LanguageModelToolUse { id, name, input },
-                                        )
-                                    }
-                                })
-                                .map_err(|err| anyhow!(err))
-                        })
-                        .boxed())
-                }
-                .boxed()
+                async move { Ok(future.await?.boxed()) }.boxed()
             }
             CloudModel::OpenAi(model) => {
                 let client = self.client.clone();