Standardize on u64 for token counts (#32869)

Previously we were using a mix of `u32` and `usize`, e.g. `max_tokens:
usize, max_output_tokens: Option<u32>` in the same `struct`.

Although [tiktoken](https://github.com/openai/tiktoken) uses `usize`,
token counts should be consistent across compilation targets — the same
model shouldn't suddenly get a smaller context window just because you're
compiling for wasm32 (where `usize` is 32 bits). These token counts could
also end up serialized over a binary protocol, which needs a fixed-width
type. For both reasons, `usize` is not the right choice for token counts.

I chose to standardize on `u64` over `u32` because we don't store many
of them (so the extra size should be insignificant) and future models
may exceed `u32::MAX` tokens.

Release Notes:

- N/A
This commit is contained in:
Richard Feldman 2025-06-17 10:43:07 -04:00 committed by GitHub
parent a391d67366
commit 5405c2c2d3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
32 changed files with 191 additions and 192 deletions

View file

@ -276,17 +276,17 @@ pub struct PromptFeedback {
#[serde(rename_all = "camelCase")]
pub struct UsageMetadata {
#[serde(skip_serializing_if = "Option::is_none")]
pub prompt_token_count: Option<usize>,
pub prompt_token_count: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cached_content_token_count: Option<usize>,
pub cached_content_token_count: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub candidates_token_count: Option<usize>,
pub candidates_token_count: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub tool_use_prompt_token_count: Option<usize>,
pub tool_use_prompt_token_count: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub thoughts_token_count: Option<usize>,
pub thoughts_token_count: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub total_token_count: Option<usize>,
pub total_token_count: Option<u64>,
}
#[derive(Debug, Serialize, Deserialize)]
@ -395,7 +395,7 @@ pub struct CountTokensRequest {
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CountTokensResponse {
pub total_tokens: usize,
pub total_tokens: u64,
}
#[derive(Debug, Serialize, Deserialize)]
@ -523,7 +523,7 @@ pub enum Model {
name: String,
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
display_name: Option<String>,
max_tokens: usize,
max_tokens: u64,
#[serde(default)]
mode: GoogleModelMode,
},
@ -586,9 +586,9 @@ impl Model {
}
}
pub fn max_token_count(&self) -> usize {
const ONE_MILLION: usize = 1_048_576;
const TWO_MILLION: usize = 2_097_152;
pub fn max_token_count(&self) -> u64 {
const ONE_MILLION: u64 = 1_048_576;
const TWO_MILLION: u64 = 2_097_152;
match self {
Model::Gemini15Pro => TWO_MILLION,
Model::Gemini15Flash => ONE_MILLION,