open_ai: Send prompt_cache_key to improve caching (#36065)

Release Notes:

- N/A

Co-authored-by: Michael Sloan <mgsloan@gmail.com>
commit 7167f193c0
parent 7ff0f1525e

2 changed files with 3 additions and 0 deletions
@@ -473,6 +473,7 @@ pub fn into_open_ai(
         } else {
             None
         },
+        prompt_cache_key: request.thread_id,
         tools: request
             .tools
             .into_iter()
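The first hunk populates the new field while converting the editor's internal request into the OpenAI wire format: the thread id is forwarded verbatim, so every request issued from the same conversation thread carries the same cache key and can reuse the cached prefix. A minimal sketch of that mapping, with simplified stand-in types (the `LanguageModelRequest` shape here is an assumption; only the caching-related fields are shown):

// Simplified stand-ins for the real types; surrounding fields are omitted.
struct LanguageModelRequest {
    thread_id: Option<String>,
}

struct Request {
    prompt_cache_key: Option<String>,
}

// The thread id is forwarded verbatim, so all requests from one
// conversation thread share a single cache key.
fn into_open_ai(request: LanguageModelRequest) -> Request {
    Request {
        prompt_cache_key: request.thread_id,
    }
}

fn main() {
    let converted = into_open_ai(LanguageModelRequest {
        thread_id: Some("thread-123".into()),
    });
    assert_eq!(converted.prompt_cache_key.as_deref(), Some("thread-123"));
}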
@@ -244,6 +244,8 @@ pub struct Request {
     pub parallel_tool_calls: Option<bool>,
     #[serde(default, skip_serializing_if = "Vec::is_empty")]
     pub tools: Vec<ToolDefinition>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub prompt_cache_key: Option<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
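The second hunk adds the field to the serialized request body. A small self-contained sketch of the resulting serde behavior, assuming a trimmed-down `Request` struct (the real struct has many more fields): when the key is `None`, `skip_serializing_if` keeps it out of the JSON entirely, so the wire format is unchanged for callers without a thread id; when it is set, it is sent as `prompt_cache_key`.

use serde::{Deserialize, Serialize};

// Trimmed-down stand-in for the real Request struct; only the new field is shown.
#[derive(Debug, Serialize, Deserialize)]
struct Request {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    prompt_cache_key: Option<String>,
}

fn main() {
    // With no cache key the field is omitted from the JSON body entirely.
    let without = Request { prompt_cache_key: None };
    assert_eq!(serde_json::to_string(&without).unwrap(), "{}");

    // With a thread id available, it is sent as prompt_cache_key, giving
    // OpenAI a stable hint for routing repeated requests to the same cache.
    let with = Request { prompt_cache_key: Some("thread-123".to_string()) };
    assert_eq!(
        serde_json::to_string(&with).unwrap(),
        r#"{"prompt_cache_key":"thread-123"}"#
    );
}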