ollama: Set default max_tokens for llama3.3 (#23558)

This commit is contained in:
Peter Tripp 2025-01-23 12:38:43 -05:00 committed by GitHub
parent 3dee32c43d
commit f38d0ff069
No known key found for this signature in database
GPG key ID: B5690EEEBB952194

View file

@@ -83,8 +83,8 @@ fn get_max_tokens(name: &str) -> usize {
"codellama" | "starcoder2" => 16384,
"mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "qwen2.5-coder"
| "dolphin-mixtral" => 32768,
"llama3.1" | "phi3" | "phi3.5" | "phi4" | "command-r" | "deepseek-coder-v2"
| "deepseek-r1" | "yi-coder" | "llama3.2" => 128000,
"llama3.1" | "llama3.2" | "llama3.3" | "phi3" | "phi3.5" | "phi4" | "command-r"
| "deepseek-coder-v2" | "deepseek-r1" | "yi-coder" => 128000,
_ => DEFAULT_TOKENS,
}
.clamp(1, MAXIMUM_TOKENS)