language_model: Remove dependencies on individual model provider crates (#25503)

This PR removes the dependencies on the individual model provider crates
from the `language_model` crate.

The various conversion methods for converting a `LanguageModelRequest`
into its provider-specific request type have been inlined into the
various provider modules in the `language_models` crate.

The model providers available through Zed's cloud offering get to stay,
for now.

Release Notes:

- N/A
This commit is contained in:
Marshall Bowers 2025-02-24 16:41:35 -05:00 committed by GitHub
parent 2f7a62780a
commit 0acd556106
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 347 additions and 366 deletions

View file

@ -272,7 +272,7 @@ impl LanguageModel for GoogleLanguageModel {
request: LanguageModelRequest,
cx: &App,
) -> BoxFuture<'static, Result<usize>> {
let request = request.into_google(self.model.id().to_string());
let request = into_google(request, self.model.id().to_string());
let http_client = self.http_client.clone();
let api_key = self.state.read(cx).api_key.clone();
@ -303,7 +303,7 @@ impl LanguageModel for GoogleLanguageModel {
'static,
Result<futures::stream::BoxStream<'static, Result<LanguageModelCompletionEvent>>>,
> {
let request = request.into_google(self.model.id().to_string());
let request = into_google(request, self.model.id().to_string());
let http_client = self.http_client.clone();
let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| {
@ -341,6 +341,38 @@ impl LanguageModel for GoogleLanguageModel {
}
}
/// Converts a [`LanguageModelRequest`] into the Google AI
/// `generateContent` request shape for the given model id.
pub fn into_google(
    request: LanguageModelRequest,
    model: String,
) -> google_ai::GenerateContentRequest {
    // Turn each chat message into a single-part text `Content` entry,
    // mapping our roles onto Google AI's user/model vocabulary.
    let contents = request
        .messages
        .into_iter()
        .map(|message| {
            let role = match message.role {
                Role::User => google_ai::Role::User,
                Role::Assistant => google_ai::Role::Model,
                // Google AI doesn't have a system role, so fold it into user.
                Role::System => google_ai::Role::User,
            };
            google_ai::Content {
                parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
                    text: message.string_contents(),
                })],
                role,
            }
        })
        .collect();

    // Ask for a single candidate; when no temperature was supplied,
    // fall back to 1.0.
    let generation_config = google_ai::GenerationConfig {
        candidate_count: Some(1),
        stop_sequences: Some(request.stop),
        max_output_tokens: None,
        temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
        top_p: None,
        top_k: None,
    };

    google_ai::GenerateContentRequest {
        model,
        contents,
        generation_config: Some(generation_config),
        safety_settings: None,
    }
}
pub fn count_google_tokens(
request: LanguageModelRequest,
cx: &App,