open_ai: Disable parallel_tool_calls (#28056)
This PR disables `parallel_tool_calls` for the models that support it, since the Agent currently expects at most one tool use per turn. It took a bit of trial and error to figure this out: OpenAI's API annoyingly returns an error if `parallel_tool_calls` is passed to a model that doesn't support it, so the parameter can only be sent to models that accept it.

Release Notes:

- N/A
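The gist of the fix described above: send `parallel_tool_calls: false` only to models known to accept the parameter, and omit it entirely everywhere else so OpenAI's API doesn't reject the request. Below is a minimal sketch of that idea; the `supports_parallel_tool_calls` helper, the model-id check inside it, and the request struct are illustrative assumptions, not the exact code from this PR.

```rust
use serde::Serialize;

/// Illustrative subset of an OpenAI chat completion request body.
/// `skip_serializing_if` keeps `parallel_tool_calls` out of the JSON
/// entirely for models that would reject the parameter.
#[derive(Serialize)]
struct ChatRequest {
    model: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    parallel_tool_calls: Option<bool>,
    // ...messages, tools, temperature, etc. omitted for brevity
}

/// Hypothetical capability check; the real model type presumably
/// exposes something equivalent.
fn supports_parallel_tool_calls(model_id: &str) -> bool {
    // Assumption for illustration: GPT-4o-family models accept the parameter.
    model_id.starts_with("gpt-4o")
}

fn build_request(model_id: &str) -> ChatRequest {
    ChatRequest {
        model: model_id.to_string(),
        // Explicitly disable parallel tool calls where the parameter is
        // accepted; omit it everywhere else to avoid an API error.
        parallel_tool_calls: if supports_parallel_tool_calls(model_id) {
            Some(false)
        } else {
            None
        },
    }
}
```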
This commit is contained in:
parent c6e2d20a02
commit 819bb8fffb
3 changed files with 32 additions and 6 deletions
@@ -690,7 +690,7 @@ impl LanguageModel for CloudLanguageModel {
             }
             CloudModel::OpenAi(model) => {
                 let client = self.client.clone();
-                let request = into_open_ai(request, model.id().into(), model.max_output_tokens());
+                let request = into_open_ai(request, model, model.max_output_tokens());
                 let llm_api_token = self.llm_api_token.clone();
                 let future = self.request_limiter.stream(async move {
                     let response = Self::perform_llm_completion(
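The call-site change above hands `into_open_ai` the model value itself rather than just `model.id()`, presumably so the conversion can consult per-model capabilities (such as whether `parallel_tool_calls` is supported) while building the OpenAI request.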