Standardize on u64 for token counts (#32869)

Previously we were using a mix of `u32` and `usize`, e.g. `max_tokens:
usize, max_output_tokens: Option<u32>` in the same `struct`.

Although [tiktoken](https://github.com/openai/tiktoken) uses `usize`,
token counts should be consistent across targets (e.g. the same model
doesn't suddenly get a smaller context window if you're compiling for
wasm32), and these token counts could end up getting serialized using a
binary protocol, so `usize` is not the right choice for token counts.

I chose to standardize on `u64` over `u32` because we don't store many
of them (so the extra size should be insignificant) and future models
may exceed `u32::MAX` tokens.

Release Notes:

- N/A
This commit is contained in:
Richard Feldman 2025-06-17 10:43:07 -04:00 committed by GitHub
parent a391d67366
commit 5405c2c2d3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
32 changed files with 191 additions and 192 deletions

View file

@@ -44,7 +44,7 @@ pub struct LmStudioSettings {
 pub struct AvailableModel {
     pub name: String,
     pub display_name: Option<String>,
-    pub max_tokens: usize,
+    pub max_tokens: u64,
     pub supports_tool_calls: bool,
     pub supports_images: bool,
 }
@@ -414,7 +414,7 @@ impl LanguageModel for LmStudioLanguageModel {
         format!("lmstudio/{}", self.model.id())
     }

-    fn max_token_count(&self) -> usize {
+    fn max_token_count(&self) -> u64 {
         self.model.max_token_count()
     }
@@ -422,7 +422,7 @@ impl LanguageModel for LmStudioLanguageModel {
         &self,
         request: LanguageModelRequest,
         _cx: &App,
-    ) -> BoxFuture<'static, Result<usize>> {
+    ) -> BoxFuture<'static, Result<u64>> {
         // Endpoint for this is coming soon. In the meantime, hacky estimation
         let token_count = request
             .messages
@@ -430,7 +430,7 @@ impl LanguageModel for LmStudioLanguageModel {
             .map(|msg| msg.string_contents().split_whitespace().count())
             .sum::<usize>();
-        let estimated_tokens = (token_count as f64 * 0.75) as usize;
+        let estimated_tokens = (token_count as f64 * 0.75) as u64;
         async move { Ok(estimated_tokens) }.boxed()
     }