assistant: Add display_name for OpenAI and Gemini (#17508)
parent 85f4c96fef
commit fb9d01b0d5
8 changed files with 34 additions and 16 deletions
@@ -254,11 +254,13 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
             }),
             AvailableProvider::OpenAi => CloudModel::OpenAi(open_ai::Model::Custom {
                 name: model.name.clone(),
+                display_name: model.display_name.clone(),
                 max_tokens: model.max_tokens,
                 max_output_tokens: model.max_output_tokens,
             }),
             AvailableProvider::Google => CloudModel::Google(google_ai::Model::Custom {
                 name: model.name.clone(),
+                display_name: model.display_name.clone(),
                 max_tokens: model.max_tokens,
             }),
         };
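Note: the `display_name` added above is optional per-model metadata. Below is a minimal sketch (not the actual crate code) of how a consumer might prefer it over the raw `name`, falling back when it is absent; the accessor name and the simplified enum are only illustrative assumptions.

// Stand-in for the model type in this diff; not Zed's real definition.
enum Model {
    Custom {
        name: String,
        display_name: Option<String>,
        max_tokens: usize,
    },
}

impl Model {
    // Hypothetical accessor: prefer `display_name`, fall back to `name`.
    fn display_name(&self) -> &str {
        match self {
            Model::Custom { name, display_name, .. } => {
                display_name.as_deref().unwrap_or(name.as_str())
            }
        }
    }
}

fn main() {
    let model = Model::Custom {
        name: "my-proxy-model".into(),
        display_name: Some("My Proxy Model".into()),
        max_tokens: 32_768,
    };
    assert_eq!(model.display_name(), "My Proxy Model");
}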
@@ -37,6 +37,7 @@ pub struct GoogleSettings {
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 pub struct AvailableModel {
     name: String,
+    display_name: Option<String>,
     max_tokens: usize,
 }
 
@@ -170,6 +171,7 @@ impl LanguageModelProvider for GoogleLanguageModelProvider {
                 model.name.clone(),
                 google_ai::Model::Custom {
                     name: model.name.clone(),
+                    display_name: model.display_name.clone(),
                     max_tokens: model.max_tokens,
                 },
             );
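Note: the hunk above registers each configured custom model keyed by its name; the receiving collection is not visible in this excerpt, so the map type below is only an assumption. A rough sketch of that registration pattern:

// Hypothetical, simplified types; not the real provider code.
use std::collections::HashMap;

#[derive(Debug, Clone)]
struct CustomModel {
    name: String,
    display_name: Option<String>,
    max_tokens: usize,
}

fn main() {
    let settings_models = vec![CustomModel {
        name: "gemini-custom".into(),
        display_name: Some("Gemini (custom endpoint)".into()),
        max_tokens: 1_000_000,
    }];

    // Collect configured models keyed by name; later entries with the same
    // name override earlier ones.
    let mut models: HashMap<String, CustomModel> = HashMap::new();
    for model in settings_models {
        models.insert(model.name.clone(), model);
    }

    assert!(models.contains_key("gemini-custom"));
}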
@@ -40,6 +40,7 @@ pub struct OpenAiSettings {
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 pub struct AvailableModel {
     pub name: String,
+    pub display_name: Option<String>,
     pub max_tokens: usize,
     pub max_output_tokens: Option<u32>,
 }
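Note: `AvailableModel` is the serde-facing settings entry, so `display_name: Option<String>` means the field can simply be omitted in user settings. A small sketch using a struct mirrored from this diff (the surrounding settings keys and file layout are not shown here and are assumptions):

// Requires the `serde` (with derive) and `serde_json` crates.
use serde::Deserialize;

// Mirror of the `AvailableModel` fields added in this diff.
#[derive(Debug, Deserialize)]
struct AvailableModel {
    name: String,
    // Optional: omitting "display_name" deserializes to `None`.
    display_name: Option<String>,
    max_tokens: usize,
    max_output_tokens: Option<u32>,
}

fn main() -> serde_json::Result<()> {
    let with_display_name: AvailableModel = serde_json::from_str(
        r#"{ "name": "my-custom-gpt", "display_name": "My Custom GPT", "max_tokens": 128000 }"#,
    )?;
    assert_eq!(with_display_name.display_name.as_deref(), Some("My Custom GPT"));

    let without_display_name: AvailableModel = serde_json::from_str(
        r#"{ "name": "my-proxy-model", "max_tokens": 32768 }"#,
    )?;
    assert_eq!(without_display_name.display_name, None);
    Ok(())
}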
@@ -171,6 +172,7 @@ impl LanguageModelProvider for OpenAiLanguageModelProvider {
                 model.name.clone(),
                 open_ai::Model::Custom {
                     name: model.name.clone(),
+                    display_name: model.display_name.clone(),
                     max_tokens: model.max_tokens,
                     max_output_tokens: model.max_output_tokens,
                 },
@@ -368,11 +370,7 @@ pub fn count_open_ai_tokens(
             })
             .collect::<Vec<_>>();
 
-        if let open_ai::Model::Custom { .. } = model {
-            tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
-        } else {
-            tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
-        }
+        tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
     })
     .boxed()
 }
@@ -175,12 +175,14 @@ impl OpenAiSettingsContent {
                 .filter_map(|model| match model {
                     open_ai::Model::Custom {
                         name,
+                        display_name,
                         max_tokens,
                         max_output_tokens,
                     } => Some(provider::open_ai::AvailableModel {
                         name,
                         max_tokens,
                         max_output_tokens,
+                        display_name,
                     }),
                     _ => None,
                 })
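Note: the `filter_map` above only round-trips `Custom` models back into settings entries; every other variant hits the `_ => None` arm and is dropped. A self-contained sketch of the same pattern with simplified stand-in types (not the real crate definitions):

#[derive(Debug)]
enum Model {
    // Stand-in for a built-in model variant.
    Builtin(&'static str),
    Custom {
        name: String,
        display_name: Option<String>,
        max_tokens: usize,
        max_output_tokens: Option<u32>,
    },
}

#[derive(Debug, PartialEq)]
struct AvailableModel {
    name: String,
    display_name: Option<String>,
    max_tokens: usize,
    max_output_tokens: Option<u32>,
}

fn main() {
    let models = vec![
        Model::Builtin("gpt-4o"),
        Model::Custom {
            name: "my-proxy-model".into(),
            display_name: Some("My Proxy Model".into()),
            max_tokens: 32_768,
            max_output_tokens: Some(4_096),
        },
    ];

    // Keep only custom models, converting each into a settings entry.
    let available: Vec<AvailableModel> = models
        .into_iter()
        .filter_map(|model| match model {
            Model::Custom {
                name,
                display_name,
                max_tokens,
                max_output_tokens,
            } => Some(AvailableModel {
                name,
                display_name,
                max_tokens,
                max_output_tokens,
            }),
            _ => None,
        })
        .collect();

    // The built-in model is filtered out; only the custom entry survives.
    assert_eq!(available.len(), 1);
    assert_eq!(available[0].name, "my-proxy-model");
}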