gemini: Pass system prompt as system instructions (#28793)

https://ai.google.dev/gemini-api/docs/text-generation#system-instructions

Release Notes:

- agent: Improve performance of Gemini models
This commit is contained in:
Bennet Bo Fenner 2025-04-15 19:45:47 +02:00 committed by GitHub
parent c381a500f8
commit c7e80c80c6
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 62 additions and 36 deletions

View file

@ -125,6 +125,7 @@ pub struct GenerateContentRequest {
#[serde(default, skip_serializing_if = "String::is_empty")]
pub model: String,
pub contents: Vec<Content>,
pub system_instructions: Option<SystemInstructions>,
pub generation_config: Option<GenerationConfig>,
pub safety_settings: Option<Vec<SafetySetting>>,
#[serde(skip_serializing_if = "Option::is_none")]
@ -159,6 +160,12 @@ pub struct Content {
pub role: Role,
}
/// System-level instructions sent alongside a Gemini `generateContent` request.
///
/// Carries the conversation's system prompt as a list of [`Part`]s, serialized
/// under the request's system-instructions field instead of being prepended to
/// `contents` as an ordinary user message (see the Gemini API
/// "system instructions" documentation).
// NOTE(review): `rename_all = "camelCase"` is a no-op for the single
// `parts` field today; it is presumably here for consistency with the
// sibling serde types and future multi-word fields.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SystemInstructions {
    /// Ordered prompt fragments making up the system instruction text.
    pub parts: Vec<Part>,
}
#[derive(Debug, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum Role {

View file

@ -3,14 +3,16 @@ use collections::BTreeMap;
use credentials_provider::CredentialsProvider;
use editor::{Editor, EditorElement, EditorStyle};
use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
use google_ai::{FunctionDeclaration, GenerateContentResponse, Part, UsageMetadata};
use google_ai::{
FunctionDeclaration, GenerateContentResponse, Part, SystemInstructions, UsageMetadata,
};
use gpui::{
AnyView, App, AsyncApp, Context, Entity, FontStyle, Subscription, Task, TextStyle, WhiteSpace,
};
use http_client::HttpClient;
use language_model::{
AuthenticateError, LanguageModelCompletionEvent, LanguageModelToolSchemaFormat,
LanguageModelToolUse, LanguageModelToolUseId, StopReason,
LanguageModelToolUse, LanguageModelToolUseId, MessageContent, StopReason,
};
use language_model::{
LanguageModel, LanguageModelId, LanguageModelName, LanguageModelProvider,
@ -359,17 +361,11 @@ impl LanguageModel for GoogleLanguageModel {
}
pub fn into_google(
request: LanguageModelRequest,
mut request: LanguageModelRequest,
model: String,
) -> google_ai::GenerateContentRequest {
google_ai::GenerateContentRequest {
model,
contents: request
.messages
.into_iter()
.map(|message| google_ai::Content {
parts: message
.content
fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
content
.into_iter()
.filter_map(|content| match content {
language_model::MessageContent::Text(text) => {
@ -400,7 +396,30 @@ pub fn into_google(
}),
),
})
.collect(),
.collect()
}
let system_instructions = if request
.messages
.first()
.map_or(false, |msg| matches!(msg.role, Role::System))
{
let message = request.messages.remove(0);
Some(SystemInstructions {
parts: map_content(message.content),
})
} else {
None
};
google_ai::GenerateContentRequest {
model,
system_instructions,
contents: request
.messages
.into_iter()
.map(|message| google_ai::Content {
parts: map_content(message.content),
role: match message.role {
Role::User => google_ai::Role::User,
Role::Assistant => google_ai::Role::Model,