openai: Don't send prompt_cache_key for OpenAI-compatible models (#36231)
Some APIs fail when they receive this parameter. Closes #36215. Release Notes: - Fixed OpenAI-compatible providers that don't support prompt caching and/or reasoning.
This commit is contained in:
parent
d891348442
commit
2a57b160b0
8 changed files with 29 additions and 2 deletions
|
@ -105,6 +105,10 @@ impl Model {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn supports_prompt_cache_key(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
pub fn supports_tool(&self) -> bool {
|
||||
match self {
|
||||
Self::Grok2Vision
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue