OpenAI o1-preview and o1-mini support (#17796)

Release Notes:

- Added support for OpenAI o1-mini and o1-preview models.

---------

Co-authored-by: Jason Mancuso <7891333+jvmncs@users.noreply.github.com>
Co-authored-by: Bennet <bennet@zed.dev>
Peter Tripp 2024-09-13 16:23:55 -04:00, committed by GitHub
commit d245f5e75c (parent e145c13f73)
5 changed files with 39 additions and 14 deletions


@@ -102,6 +102,8 @@ impl CloudModel {
                 | open_ai::Model::FourTurbo
                 | open_ai::Model::FourOmni
                 | open_ai::Model::FourOmniMini
+                | open_ai::Model::O1Mini
+                | open_ai::Model::O1Preview
                 | open_ai::Model::Custom { .. } => {
                     LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
                 }
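
This hunk only shows the availability mapping: the two new variants are treated like the other hosted OpenAI models and gated behind the Zed Pro plan. For orientation, here is a rough sketch of how such variants could sit in an `open_ai::Model`-style enum; the surrounding variants and the id strings other than "o1-mini" / "o1-preview" are assumptions for illustration, not something this diff shows.

```rust
// Sketch only: roughly how the new variants could look in an
// open_ai::Model-style enum. Variant set and non-o1 id strings are assumed.
#[derive(Clone, Debug, PartialEq)]
pub enum Model {
    FourOmni,
    FourOmniMini,
    O1Mini,
    O1Preview,
    Custom { name: String },
}

impl Model {
    /// Identifier sent to the OpenAI API for this model.
    pub fn id(&self) -> &str {
        match self {
            Model::FourOmni => "gpt-4o",
            Model::FourOmniMini => "gpt-4o-mini",
            Model::O1Mini => "o1-mini",
            Model::O1Preview => "o1-preview",
            Model::Custom { name } => name,
        }
    }
}

fn main() {
    assert_eq!(Model::O1Mini.id(), "o1-mini");
    assert_eq!(Model::O1Preview.id(), "o1-preview");
}
```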


@@ -372,10 +372,13 @@ pub fn count_open_ai_tokens(
                 })
                 .collect::<Vec<_>>();
-            if let open_ai::Model::Custom { .. } = model {
-                tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
-            } else {
-                tiktoken_rs::num_tokens_from_messages(model.id(), &messages)
+            match model {
+                open_ai::Model::Custom { .. }
+                | open_ai::Model::O1Mini
+                | open_ai::Model::O1Preview => {
+                    tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
+                }
+                _ => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
             }
         })
         .boxed()
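
The change above swaps the `if let` for a `match` so that the o1 models, like custom models, fall back to the `gpt-4` tokenizer when counting tokens, presumably because `tiktoken_rs` had no dedicated entry for them at the time. Below is a minimal, standalone sketch of that fallback applied to a plain string; the `get_bpe_from_model` and `encode_with_special_tokens` calls are from `tiktoken_rs`'s public API as I understand it, and the helper name is made up.

```rust
// Standalone sketch of the tokenizer fallback on a plain string.
// Assumes the `tiktoken-rs` and `anyhow` crates are in Cargo.toml.
use anyhow::Result;

fn count_tokens(model_id: &str, text: &str) -> Result<usize> {
    // Models without their own tokenizer entry borrow gpt-4's encoding; the
    // count is only used for context-window budgeting, so "close" is enough.
    let tokenizer_id = match model_id {
        "o1-mini" | "o1-preview" => "gpt-4",
        other => other,
    };
    let bpe = tiktoken_rs::get_bpe_from_model(tokenizer_id)?;
    Ok(bpe.encode_with_special_tokens(text).len())
}

fn main() -> Result<()> {
    let n = count_tokens("o1-mini", "Count the tokens in this sentence.")?;
    println!("approximate token count: {n}");
    Ok(())
}
```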


@@ -241,6 +241,7 @@ pub struct LanguageModelRequest {
 impl LanguageModelRequest {
     pub fn into_open_ai(self, model: String, max_output_tokens: Option<u32>) -> open_ai::Request {
+        let stream = !model.starts_with("o1-");
         open_ai::Request {
             model,
             messages: self
@@ -259,7 +260,7 @@ impl LanguageModelRequest {
                 },
             })
             .collect(),
-            stream: true,
+            stream,
             stop: self.stop,
             temperature: self.temperature,
             max_tokens: max_output_tokens,
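
These last two hunks replace the hard-coded `stream: true` with a flag computed from the model id: the o1 models did not accept streaming responses when they launched, so any id beginning with "o1-" gets a non-streaming request. A tiny self-contained sketch of just that rule; the helper name is illustrative and not part of Zed's API, where the flag is computed inline in `into_open_ai`.

```rust
// Sketch of the streaming rule added above: model ids starting with "o1-"
// get stream = false, everything else keeps streaming enabled.
fn wants_streaming(model_id: &str) -> bool {
    !model_id.starts_with("o1-")
}

fn main() {
    assert!(wants_streaming("gpt-4o"));
    assert!(wants_streaming("gpt-4o-mini"));
    assert!(!wants_streaming("o1-mini"));
    assert!(!wants_streaming("o1-preview"));
    println!("stream flag computed as in into_open_ai");
}
```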