assistant: Add Mistral support (#24879)

Closes #12519.

Release Notes:

- Added support for Mistral to the Assistant.

---------

Co-authored-by: Marshall Bowers <git@maxdeviant.com>
Shidfar Hodizoda 2025-02-14 19:07:41 +01:00 committed by GitHub
parent 5c5caf1ffe
commit 7ee492746d
15 changed files with 1034 additions and 0 deletions

@@ -28,6 +28,7 @@ http_client.workspace = true
image.workspace = true
lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true
mistral = { workspace = true, features = ["schemars"] }
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
parking_lot.workspace = true

@@ -269,6 +269,47 @@ impl LanguageModelRequest {
        }
    }

    pub fn into_mistral(self, model: String, max_output_tokens: Option<u32>) -> mistral::Request {
        let len = self.messages.len();
        let merged_messages =
            self.messages
                .into_iter()
                .fold(Vec::with_capacity(len), |mut acc, msg| {
                    let role = msg.role;
                    let content = msg.string_contents();

                    acc.push(match role {
                        Role::User => mistral::RequestMessage::User { content },
                        Role::Assistant => mistral::RequestMessage::Assistant {
                            content: Some(content),
                            tool_calls: Vec::new(),
                        },
                        Role::System => mistral::RequestMessage::System { content },
                    });
                    acc
                });

        mistral::Request {
            model,
            messages: merged_messages,
            stream: true,
            max_tokens: max_output_tokens,
            temperature: self.temperature,
            response_format: None,
            tools: self
                .tools
                .into_iter()
                .map(|tool| mistral::ToolDefinition::Function {
                    function: mistral::FunctionDefinition {
                        name: tool.name,
                        description: Some(tool.description),
                        parameters: Some(tool.input_schema),
                    },
                })
                .collect(),
        }
    }

    pub fn into_google(self, model: String) -> google_ai::GenerateContentRequest {
        google_ai::GenerateContentRequest {
            model,
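For reference, the `mistral::Request` construction in `into_mistral` implies request-side types roughly like the following. This is only a sketch inferred from the call sites in this hunk, not the crate's actual definitions: the `serde` attributes, the `ToolCall` placeholder, and the use of `serde_json::Value` for `response_format` and tool `parameters` are assumptions.

```rust
use serde::Serialize;
use serde_json::Value;

// Sketch of the request types the new `mistral` crate would need to expose,
// inferred from how `into_mistral` builds them above. Anything not visible in
// the diff (ToolCall payload, response_format, parameters) uses placeholders.
#[derive(Serialize)]
pub struct Request {
    pub model: String,
    pub messages: Vec<RequestMessage>,
    pub stream: bool,
    pub max_tokens: Option<u32>,
    pub temperature: Option<f32>,
    pub response_format: Option<Value>, // assumption: the real crate may use a dedicated type
    pub tools: Vec<ToolDefinition>,
}

// Roles map onto the Mistral chat API; an internally tagged enum keeps the
// serialized JSON ({"role": "user", "content": ...}) close to the wire format.
#[derive(Serialize)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
    User { content: String },
    Assistant {
        content: Option<String>,
        tool_calls: Vec<ToolCall>,
    },
    System { content: String },
}

// Placeholder: the diff only ever passes `tool_calls: Vec::new()`, so the real
// payload shape is not visible here.
#[derive(Serialize)]
pub struct ToolCall(pub Value);

#[derive(Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolDefinition {
    Function { function: FunctionDefinition },
}

#[derive(Serialize)]
pub struct FunctionDefinition {
    pub name: String,
    pub description: Option<String>,
    pub parameters: Option<Value>, // JSON schema; the "schemars" feature suggests schema generation
}
```

The sketch only covers the request-side shapes exercised by `into_mistral`; the streaming response handling added elsewhere in this PR is not shown in this hunk.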