Add support for gpt-4o when using zed.dev as the model provider (#11794)

Release Notes:

- N/A
Antonio Scandurra · 2024-05-14 13:55:47 +02:00 · committed by GitHub
parent a13a92fbbf
commit 019d98898e
4 changed files with 21 additions and 5 deletions


@@ -1,3 +1,4 @@
+use crate::assistant_settings::ZedDotDevModel;
 use crate::{
     assistant_settings::OpenAiModel, CompletionProvider, LanguageModel, LanguageModelRequest, Role,
 };
@@ -202,7 +203,15 @@ pub fn count_open_ai_tokens(
                 })
                 .collect::<Vec<_>>();
 
-            tiktoken_rs::num_tokens_from_messages(request.model.id(), &messages)
+            match request.model {
+                LanguageModel::OpenAi(OpenAiModel::FourOmni)
+                | LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Omni) => {
+                    // Tiktoken doesn't yet support gpt-4o, so we manually use the
+                    // same tokenizer as GPT-4.
+                    tiktoken_rs::num_tokens_from_messages("gpt-4", &messages)
+                }
+                _ => tiktoken_rs::num_tokens_from_messages(request.model.id(), &messages),
+            }
         })
         .boxed()
 }
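
For context, the fallback this diff implements is simply "count gpt-4o tokens with GPT-4's tokenizer, since tiktoken doesn't know about gpt-4o yet." Below is a minimal standalone sketch of the same idea using tiktoken_rs directly; the `tokenizer_model_for` helper, the plain-string token counting, and the Cargo dependencies on `tiktoken_rs` and `anyhow` are illustrative assumptions, not part of this change.

```rust
use anyhow::Result;

/// Hypothetical helper: choose which tokenizer name to hand to tiktoken_rs.
/// gpt-4o is treated as unknown to the library here, so we fall back to
/// GPT-4's tokenizer, mirroring the fallback in the diff above.
fn tokenizer_model_for(model_id: &str) -> &str {
    match model_id {
        "gpt-4o" => "gpt-4",
        other => other,
    }
}

/// Count tokens in a plain string using the tokenizer chosen for `model_id`.
fn count_tokens(model_id: &str, text: &str) -> Result<usize> {
    let bpe = tiktoken_rs::get_bpe_from_model(tokenizer_model_for(model_id))?;
    Ok(bpe.encode_with_special_tokens(text).len())
}

fn main() -> Result<()> {
    // Same text, counted as gpt-4o (via the GPT-4 fallback) and as gpt-4.
    let text = "Add support for gpt-4o when using zed.dev as the model provider";
    println!("gpt-4o: {}", count_tokens("gpt-4o", text)?);
    println!("gpt-4:  {}", count_tokens("gpt-4", text)?);
    Ok(())
}
```

The counts from the two calls should match, which is the point of the fallback: an approximate but consistent token estimate for gpt-4o until tiktoken ships a dedicated encoding for it.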