collab: Clean up LLM token creation (#26955)

This PR cleans up LLM token creation a bit.

We now pass the entire list of feature flags into the `LlmTokenClaims::create`
method, rather than threading through a series of easily confusable `bool`
parameters.
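
To make the change concrete, here is a minimal, self-contained sketch of the pattern: the individual booleans are derived from the flag list inside the constructor instead of being accepted as parameters. The `TokenClaims` struct, the trimmed-down parameter list, and the `&[String]` slice below are illustrative stand-ins rather than the actual collab types or signature; only the two flag names come from the real code.

```rust
// Sketch: derive flag-based claims from the flag list inside `create`
// instead of accepting one `bool` parameter per flag.
struct TokenClaims {
    has_llm_closed_beta_feature_flag: bool,
    has_predict_edits_feature_flag: bool,
}

impl TokenClaims {
    fn create(feature_flags: &[String]) -> Self {
        Self {
            // Each claim is computed by scanning the caller-provided flag list.
            has_llm_closed_beta_feature_flag: feature_flags
                .iter()
                .any(|flag| flag == "llm-closed-beta"),
            has_predict_edits_feature_flag: feature_flags
                .iter()
                .any(|flag| flag == "predict-edits"),
        }
    }
}

fn main() {
    let flags = vec!["predict-edits".to_string()];
    let claims = TokenClaims::create(&flags);
    assert!(!claims.has_llm_closed_beta_feature_flag);
    assert!(claims.has_predict_edits_feature_flag);
}
```

With this shape, call sites just hand over whatever flag list they already have for the user, and adding another flag-derived claim only touches `create` itself.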

Release Notes:

- N/A

Marshall Bowers, 2025-03-17 18:25:43 -04:00, committed by GitHub
parent 2b2b9c1624
commit 1397e01735
3 changed files with 26 additions and 14 deletions

@@ -37,8 +37,7 @@ impl LlmTokenClaims {
         user: &user::Model,
         is_staff: bool,
         billing_preferences: Option<billing_preference::Model>,
-        has_llm_closed_beta_feature_flag: bool,
-        has_predict_edits_feature_flag: bool,
+        feature_flags: &Vec<String>,
         has_llm_subscription: bool,
         plan: rpc::proto::Plan,
         system_id: Option<String>,
@@ -59,8 +58,12 @@ impl LlmTokenClaims {
             metrics_id: user.metrics_id,
             github_user_login: user.github_login.clone(),
             is_staff,
-            has_llm_closed_beta_feature_flag,
-            has_predict_edits_feature_flag,
+            has_llm_closed_beta_feature_flag: feature_flags
+                .iter()
+                .any(|flag| flag == "llm-closed-beta"),
+            has_predict_edits_feature_flag: feature_flags
+                .iter()
+                .any(|flag| flag == "predict-edits"),
             has_llm_subscription,
             max_monthly_spend_in_cents: billing_preferences
                 .map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {