Use gpt-4o tokenizer for gpt-5 for now
parent d5b6a4d710
commit a6b9668355
1 changed file with 5 additions and 4 deletions
@@ -673,10 +673,11 @@ pub fn count_open_ai_tokens(
             | Model::O1
             | Model::O3
             | Model::O3Mini
-            | Model::O4Mini
-            | Model::Five
-            | Model::FiveMini
-            | Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+            | Model::O4Mini => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
+            // GPT-5 models don't have tiktoken support yet; fall back on gpt-4o tokenizer
+            Model::Five | Model::FiveMini | Model::FiveNano => {
+                tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
+            }
         }
         .map(|tokens| tokens as u64)
     })
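For context, here is a minimal, self-contained sketch of the fallback this commit introduces. The `Model` enum, its `id()` values, and the `tokenizer_id()` helper below are hypothetical stand-ins invented for illustration; only the idea of routing GPT-5 variants to the "gpt-4o" tokenizer name (the string ultimately passed to tiktoken_rs) comes from the diff above.

// Sketch only: hypothetical Model enum, not the patched crate's real type.
enum Model {
    O4Mini,
    Five,
    FiveMini,
    FiveNano,
}

impl Model {
    // Hypothetical model ids; the real Model::id() lives in the crate being patched.
    fn id(&self) -> &'static str {
        match self {
            Model::O4Mini => "o4-mini",
            Model::Five => "gpt-5",
            Model::FiveMini => "gpt-5-mini",
            Model::FiveNano => "gpt-5-nano",
        }
    }

    // Name handed to the tokenizer: GPT-5 models borrow gpt-4o's tokenizer
    // until tiktoken gains native support for them.
    fn tokenizer_id(&self) -> &'static str {
        match self {
            Model::Five | Model::FiveMini | Model::FiveNano => "gpt-4o",
            other => other.id(),
        }
    }
}

fn main() {
    assert_eq!(Model::Five.tokenizer_id(), "gpt-4o");
    assert_eq!(Model::O4Mini.tokenizer_id(), "o4-mini");
}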