language_model: Remove CloudModel enum (#31322)

This PR removes the `CloudModel` enum, as it is no longer needed after
#31316.

Release Notes:

- N/A
Marshall Bowers 2025-05-23 22:04:51 -04:00 committed by GitHub
parent fc8702a8f8
commit 7fb9569c15
4 changed files with 3 additions and 67 deletions

Cargo.lock (generated)

@@ -8803,19 +8803,16 @@ dependencies = [
  "client",
  "collections",
  "futures 0.3.31",
- "google_ai",
  "gpui",
  "http_client",
  "icons",
  "image",
- "open_ai",
  "parking_lot",
  "proto",
  "schemars",
  "serde",
  "serde_json",
  "smol",
- "strum 0.27.1",
  "telemetry_events",
  "thiserror 2.0.12",
  "util",


@@ -8,7 +8,7 @@ use anyhow::{Result, bail};
 use collections::IndexMap;
 use deepseek::Model as DeepseekModel;
 use gpui::{App, Pixels, SharedString};
-use language_model::{CloudModel, LanguageModel};
+use language_model::LanguageModel;
 use lmstudio::Model as LmStudioModel;
 use mistral::Model as MistralModel;
 use ollama::Model as OllamaModel;

@@ -45,7 +45,7 @@ pub enum NotifyWhenAgentWaiting {
 #[schemars(deny_unknown_fields)]
 pub enum AssistantProviderContentV1 {
     #[serde(rename = "zed.dev")]
-    ZedDotDev { default_model: Option<CloudModel> },
+    ZedDotDev { default_model: Option<String> },
     #[serde(rename = "openai")]
     OpenAi {
         default_model: Option<OpenAiModel>,

@@ -222,7 +222,7 @@ impl AssistantSettingsContent {
             AssistantProviderContentV1::ZedDotDev { default_model } => {
                 default_model.map(|model| LanguageModelSelection {
                     provider: "zed.dev".into(),
-                    model: model.id().to_string(),
+                    model,
                 })
             }
             AssistantProviderContentV1::OpenAi { default_model, .. } => {
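
For context, here is a minimal standalone sketch (hypothetical type and model id, not Zed's actual settings code) of the shape change in the `ZedDotDev` variant: the default model now deserializes as a plain string id rather than a provider-tagged `CloudModel` value.

```rust
use serde::Deserialize;

// Hypothetical, pared-down stand-in for AssistantProviderContentV1,
// showing only the field changed by this PR.
#[derive(Debug, Deserialize)]
#[serde(tag = "name")]
enum ProviderContent {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<String> },
}

fn main() {
    // Previously the model was a provider-tagged CloudModel object; now it is
    // just the model id string ("some-model-id" is illustrative only).
    let json = r#"{ "name": "zed.dev", "default_model": "some-model-id" }"#;
    let parsed: ProviderContent = serde_json::from_str(json).unwrap();
    println!("{parsed:?}");
}
```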


@@ -22,19 +22,16 @@ base64.workspace = true
 client.workspace = true
 collections.workspace = true
 futures.workspace = true
-google_ai = { workspace = true, features = ["schemars"] }
 gpui.workspace = true
 http_client.workspace = true
 icons.workspace = true
 image.workspace = true
-open_ai = { workspace = true, features = ["schemars"] }
 parking_lot.workspace = true
 proto.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 smol.workspace = true
-strum.workspace = true
 telemetry_events.workspace = true
 thiserror.workspace = true
 util.workspace = true


@@ -7,67 +7,9 @@ use gpui::{
     App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal as _,
 };
 use proto::{Plan, TypedEnvelope};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
 use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
-use strum::EnumIter;
 use thiserror::Error;
 
-use crate::LanguageModelToolSchemaFormat;
-
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
-#[serde(tag = "provider", rename_all = "lowercase")]
-pub enum CloudModel {
-    Anthropic(anthropic::Model),
-    OpenAi(open_ai::Model),
-    Google(google_ai::Model),
-}
-
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)]
-pub enum ZedModel {
-    #[serde(rename = "Qwen/Qwen2-7B-Instruct")]
-    Qwen2_7bInstruct,
-}
-
-impl Default for CloudModel {
-    fn default() -> Self {
-        Self::Anthropic(anthropic::Model::default())
-    }
-}
-
-impl CloudModel {
-    pub fn id(&self) -> &str {
-        match self {
-            Self::Anthropic(model) => model.id(),
-            Self::OpenAi(model) => model.id(),
-            Self::Google(model) => model.id(),
-        }
-    }
-
-    pub fn display_name(&self) -> &str {
-        match self {
-            Self::Anthropic(model) => model.display_name(),
-            Self::OpenAi(model) => model.display_name(),
-            Self::Google(model) => model.display_name(),
-        }
-    }
-
-    pub fn max_token_count(&self) -> usize {
-        match self {
-            Self::Anthropic(model) => model.max_token_count(),
-            Self::OpenAi(model) => model.max_token_count(),
-            Self::Google(model) => model.max_token_count(),
-        }
-    }
-
-    pub fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
-        match self {
-            Self::Anthropic(_) | Self::OpenAi(_) => LanguageModelToolSchemaFormat::JsonSchema,
-            Self::Google(_) => LanguageModelToolSchemaFormat::JsonSchemaSubset,
-        }
-    }
-}
-
 #[derive(Error, Debug)]
 pub struct PaymentRequiredError;
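
As a side note on why the enum could go: its methods were pure per-provider forwarding. Below is a minimal, self-contained sketch of that pattern (toy stand-in types and ids, not Zed's real anthropic/open_ai crates), and of a call site carrying just the string id, as the settings migration above now does.

```rust
// Toy stand-ins for the provider model types; names and ids are illustrative.
struct AnthropicModel;
struct OpenAiModel;

impl AnthropicModel {
    fn id(&self) -> &'static str {
        "anthropic-model-id"
    }
}

impl OpenAiModel {
    fn id(&self) -> &'static str {
        "openai-model-id"
    }
}

// The removed wrapper's job was forwarding like this: one match arm per provider.
enum CloudModel {
    Anthropic(AnthropicModel),
    OpenAi(OpenAiModel),
}

impl CloudModel {
    fn id(&self) -> &str {
        match self {
            Self::Anthropic(model) => model.id(),
            Self::OpenAi(model) => model.id(),
        }
    }
}

fn main() {
    let model = CloudModel::Anthropic(AnthropicModel);
    // Call sites that only needed the id can carry the plain string instead.
    let default_model: String = model.id().to_string();
    println!("{default_model}");
}
```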