
This cleans up our settings so they no longer include any `version` fields, now that we have an actual settings migrator. This PR removes `language_models > anthropic > version`, `language_models > openai > version`, and `agent > version`. We have had migration paths in the code for a long time, so in practice almost everyone should already be on the latest version of these settings; the sketch after the release notes illustrates what the migration amounts to.

Release Notes:

- Removed the `version` fields from the `agent`, `language_models > anthropic`, and `language_models > openai` settings. Your settings will be migrated automatically. If you run into issues with this, open an issue [here](https://github.com/zed-industries/zed/issues).
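Conceptually, the migration just drops those keys from the settings JSON. Below is a minimal sketch using `serde_json`; the function name, the standalone shape, and the example `version` values are hypothetical, and the real logic lives in Zed's settings migrator:

```rust
use serde_json::{json, Value};

/// Illustrative sketch only: remove the now-obsolete `version` keys from a
/// parsed `settings.json` value. This is not the actual migrator code.
fn strip_version_fields(settings: &mut Value) {
    for path in ["/agent", "/language_models/anthropic", "/language_models/openai"] {
        if let Some(section) = settings.pointer_mut(path).and_then(Value::as_object_mut) {
            // Removing a missing key is a no-op, so already-migrated settings
            // pass through unchanged.
            section.remove("version");
        }
    }
}

fn main() {
    // The `version` values here are placeholders for whatever is in a user's settings.
    let mut settings = json!({
        "agent": { "version": "2" },
        "language_models": { "anthropic": { "version": "1" } }
    });
    strip_version_fields(&mut settings);
    println!("{settings}");
}
```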
The source file shown with this change (Rust, 85 lines, 2.8 KiB):
```rust
use std::sync::Arc;

use client::{Client, UserStore};
use gpui::{App, Context, Entity};
use language_model::LanguageModelRegistry;
use provider::deepseek::DeepSeekLanguageModelProvider;

pub mod provider;
mod settings;
pub mod ui;

use crate::provider::anthropic::AnthropicLanguageModelProvider;
use crate::provider::bedrock::BedrockLanguageModelProvider;
use crate::provider::cloud::CloudLanguageModelProvider;
use crate::provider::copilot_chat::CopilotChatLanguageModelProvider;
use crate::provider::google::GoogleLanguageModelProvider;
use crate::provider::lmstudio::LmStudioLanguageModelProvider;
use crate::provider::mistral::MistralLanguageModelProvider;
use crate::provider::ollama::OllamaLanguageModelProvider;
use crate::provider::open_ai::OpenAiLanguageModelProvider;
use crate::provider::open_router::OpenRouterLanguageModelProvider;
use crate::provider::vercel::VercelLanguageModelProvider;
pub use crate::settings::*;

pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
    crate::settings::init(cx);
    let registry = LanguageModelRegistry::global(cx);
    registry.update(cx, |registry, cx| {
        register_language_model_providers(registry, user_store, client, cx);
    });
}

fn register_language_model_providers(
    registry: &mut LanguageModelRegistry,
    user_store: Entity<UserStore>,
    client: Arc<Client>,
    cx: &mut Context<LanguageModelRegistry>,
) {
    registry.register_provider(
        CloudLanguageModelProvider::new(user_store.clone(), client.clone(), cx),
        cx,
    );

    registry.register_provider(
        AnthropicLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        OpenAiLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        OllamaLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        LmStudioLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        DeepSeekLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        GoogleLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        MistralLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        BedrockLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        OpenRouterLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(
        VercelLanguageModelProvider::new(client.http_client(), cx),
        cx,
    );
    registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx);
}
```