Support multiple OpenAI compatible providers (#34212)
TODO:
- [x] OpenAI Compatible API icon
- [x] Docs
- [x] Link to docs in the OpenAI provider section about configuring OpenAI API compatible providers

Closes #33992. Related to #30010.

Release Notes:
- agent: Add support for adding multiple OpenAI API compatible providers

---------
Co-authored-by: MrSubidubi <dev@bahn.sh>
Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
This commit is contained in:
parent
1a76a6b0bf
commit
230061a6cb
23 changed files with 1450 additions and 191 deletions
|
@ -1,8 +1,10 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use ::settings::{Settings, SettingsStore};
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashSet;
|
||||
use gpui::{App, Context, Entity};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use language_model::{LanguageModelProviderId, LanguageModelRegistry};
|
||||
use provider::deepseek::DeepSeekLanguageModelProvider;
|
||||
|
||||
pub mod provider;
|
||||
|
@ -18,17 +20,81 @@ use crate::provider::lmstudio::LmStudioLanguageModelProvider;
|
|||
use crate::provider::mistral::MistralLanguageModelProvider;
|
||||
use crate::provider::ollama::OllamaLanguageModelProvider;
|
||||
use crate::provider::open_ai::OpenAiLanguageModelProvider;
|
||||
use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider;
|
||||
use crate::provider::open_router::OpenRouterLanguageModelProvider;
|
||||
use crate::provider::vercel::VercelLanguageModelProvider;
|
||||
use crate::provider::x_ai::XAiLanguageModelProvider;
|
||||
pub use crate::settings::*;
|
||||
|
||||
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
|
||||
crate::settings::init(cx);
|
||||
crate::settings::init_settings(cx);
|
||||
let registry = LanguageModelRegistry::global(cx);
|
||||
registry.update(cx, |registry, cx| {
|
||||
register_language_model_providers(registry, user_store, client, cx);
|
||||
register_language_model_providers(registry, user_store, client.clone(), cx);
|
||||
});
|
||||
|
||||
let mut openai_compatible_providers = AllLanguageModelSettings::get_global(cx)
|
||||
.openai_compatible
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
registry.update(cx, |registry, cx| {
|
||||
register_openai_compatible_providers(
|
||||
registry,
|
||||
&HashSet::default(),
|
||||
&openai_compatible_providers,
|
||||
client.clone(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.observe_global::<SettingsStore>(move |cx| {
|
||||
let openai_compatible_providers_new = AllLanguageModelSettings::get_global(cx)
|
||||
.openai_compatible
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
if openai_compatible_providers_new != openai_compatible_providers {
|
||||
registry.update(cx, |registry, cx| {
|
||||
register_openai_compatible_providers(
|
||||
registry,
|
||||
&openai_compatible_providers,
|
||||
&openai_compatible_providers_new,
|
||||
client.clone(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
openai_compatible_providers = openai_compatible_providers_new;
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn register_openai_compatible_providers(
|
||||
registry: &mut LanguageModelRegistry,
|
||||
old: &HashSet<Arc<str>>,
|
||||
new: &HashSet<Arc<str>>,
|
||||
client: Arc<Client>,
|
||||
cx: &mut Context<LanguageModelRegistry>,
|
||||
) {
|
||||
for provider_id in old {
|
||||
if !new.contains(provider_id) {
|
||||
registry.unregister_provider(LanguageModelProviderId::from(provider_id.clone()), cx);
|
||||
}
|
||||
}
|
||||
|
||||
for provider_id in new {
|
||||
if !old.contains(provider_id) {
|
||||
registry.register_provider(
|
||||
OpenAiCompatibleLanguageModelProvider::new(
|
||||
provider_id.clone(),
|
||||
client.http_client(),
|
||||
cx,
|
||||
),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn register_language_model_providers(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue