collab: Include more information on some LLM usage log lines (#28116)
This PR updates the `user rate limit` and `user usage` log lines to include more information that will be useful for graphing in Axiom.

Release Notes:

- N/A
This commit is contained in:
parent 435fff94bd
commit 9bd3dbcf28
1 changed file with 3 additions and 0 deletions
@@ -574,6 +574,7 @@ async fn check_usage_limit(
         is_staff = claims.is_staff,
         provider = provider.to_string(),
         model = model.name,
+        usage_measure = resource,
         requests_this_minute = usage.requests_this_minute,
         tokens_this_minute = usage.tokens_this_minute,
         input_tokens_this_minute = usage.input_tokens_this_minute,
@@ -696,6 +697,8 @@ impl<S> Drop for TokenCountingStream<S> {
         login = claims.github_user_login,
         authn.jti = claims.jti,
         is_staff = claims.is_staff,
+        provider = provider.to_string(),
+        model = model,
         requests_this_minute = usage.requests_this_minute,
         tokens_this_minute = usage.tokens_this_minute,
         input_tokens_this_minute = usage.input_tokens_this_minute,
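For context, each `key = value` pair in the diff is a structured field on a `tracing` event, which is what makes these log lines easy to filter and graph in Axiom; the `user rate limit` line in `check_usage_limit` gains `usage_measure` the same way. Below is a minimal, hypothetical sketch of how a `user usage` line with the new `provider` and `model` fields could be emitted. The surrounding function, the `Usage` struct, and the example values are assumptions for illustration (as are the `tracing` and `tracing-subscriber` dependencies); only the field keys come from the diff.

```rust
use tracing::info;

// Hypothetical stand-in for the per-user usage counters referenced in the diff.
struct Usage {
    requests_this_minute: u64,
    tokens_this_minute: u64,
    input_tokens_this_minute: u64,
}

// Sketch of a `user usage` log line carrying the new structured fields.
fn log_user_usage(
    github_user_login: &str,
    jti: &str,
    is_staff: bool,
    provider: &str,
    model: &str,
    usage: &Usage,
) {
    // Each `key = value` pair becomes a structured field on the event,
    // so a log backend like Axiom can group and chart by provider/model.
    info!(
        login = github_user_login,
        authn.jti = jti,
        is_staff = is_staff,
        provider = provider,
        model = model,
        requests_this_minute = usage.requests_this_minute,
        tokens_this_minute = usage.tokens_this_minute,
        input_tokens_this_minute = usage.input_tokens_this_minute,
        "user usage"
    );
}

fn main() {
    // Print structured events to stdout so the fields are visible.
    tracing_subscriber::fmt::init();
    log_user_usage(
        "octocat",
        "example-token-id",
        false,
        "anthropic",
        "example-model",
        &Usage {
            requests_this_minute: 3,
            tokens_this_minute: 1_200,
            input_tokens_this_minute: 900,
        },
    );
}
```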