Support multiple OpenAI-compatible providers (#34212)

TODO
- [x] OpenAI Compatible API Icon
- [x] Docs
- [x] Link to docs in the OpenAI provider section about configuring OpenAI API-compatible providers

Closes #33992

Related to #30010
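
For reference, configuration for such a provider lands in Zed's `settings.json`. Below is a minimal sketch of what that could look like (the provider name, URL, and model fields are illustrative assumptions, not the exact schema; see https://zed.dev/docs/ai/configuration#openai-api-compatible for the authoritative format):

```json
{
  "language_models": {
    "openai_compatible": {
      // Example provider entry; the name and values here are illustrative only.
      "Together AI": {
        "api_url": "https://api.together.xyz/v1",
        "available_models": [
          {
            "name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
            "display_name": "Mixtral 8x7B",
            "max_tokens": 32768
          }
        ]
      }
    }
  }
}
```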

Release Notes:

- agent: Add support for configuring multiple OpenAI API-compatible providers

---------

Co-authored-by: MrSubidubi <dev@bahn.sh>
Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
Authored by Bennet Bo Fenner on 2025-07-22 17:20:07 +02:00; committed by Orual
parent ead2217683
commit a0732cd9f4
23 changed files with 1450 additions and 191 deletions

@@ -2,7 +2,6 @@ use anyhow::{Context as _, Result, anyhow};
use collections::{BTreeMap, HashMap};
use credentials_provider::CredentialsProvider;
use fs::Fs;
use futures::Stream;
use futures::{FutureExt, StreamExt, future::BoxFuture};
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
@@ -18,7 +17,7 @@ use menu;
use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore, update_settings_file};
use settings::{Settings, SettingsStore};
use std::pin::Pin;
use std::str::FromStr as _;
use std::sync::Arc;
@@ -28,7 +27,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
const PROVIDER_ID: LanguageModelProviderId = language_model::OPEN_AI_PROVIDER_ID;
@@ -621,26 +619,32 @@ struct RawToolCall {
    arguments: String,
}
pub(crate) fn collect_tiktoken_messages(
    request: LanguageModelRequest,
) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
    request
        .messages
        .into_iter()
        .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
            role: match message.role {
                Role::User => "user".into(),
                Role::Assistant => "assistant".into(),
                Role::System => "system".into(),
            },
            content: Some(message.string_contents()),
            name: None,
            function_call: None,
        })
        .collect::<Vec<_>>()
}
pub fn count_open_ai_tokens(
    request: LanguageModelRequest,
    model: Model,
    cx: &App,
) -> BoxFuture<'static, Result<u64>> {
    cx.background_spawn(async move {
        let messages = request
            .messages
            .into_iter()
            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                role: match message.role {
                    Role::User => "user".into(),
                    Role::Assistant => "assistant".into(),
                    Role::System => "system".into(),
                },
                content: Some(message.string_contents()),
                name: None,
                function_call: None,
            })
            .collect::<Vec<_>>();
        let messages = collect_tiktoken_messages(request);
        match model {
            Model::Custom { max_tokens, .. } => {
@@ -678,7 +682,6 @@ pub fn count_open_ai_tokens(
struct ConfigurationView {
    api_key_editor: Entity<SingleLineInput>,
    api_url_editor: Entity<SingleLineInput>,
    state: gpui::Entity<State>,
    load_credentials_task: Option<Task<()>>,
}
@@ -691,23 +694,6 @@ impl ConfigurationView {
                cx,
                "sk-000000000000000000000000000000000000000000000000",
            )
            .label("API key")
        });
        let api_url = AllLanguageModelSettings::get_global(cx)
            .openai
            .api_url
            .clone();
        let api_url_editor = cx.new(|cx| {
            let input = SingleLineInput::new(window, cx, open_ai::OPEN_AI_API_URL).label("API URL");
            if !api_url.is_empty() {
                input.editor.update(cx, |editor, cx| {
                    editor.set_text(&*api_url, window, cx);
                });
            }
            input
        });
        cx.observe(&state, |_, _, cx| {
@@ -735,7 +721,6 @@ impl ConfigurationView {
        Self {
            api_key_editor,
            api_url_editor,
            state,
            load_credentials_task,
        }
@@ -783,57 +768,6 @@ impl ConfigurationView {
        cx.notify();
    }
    fn save_api_url(&mut self, cx: &mut Context<Self>) {
        let api_url = self
            .api_url_editor
            .read(cx)
            .editor()
            .read(cx)
            .text(cx)
            .trim()
            .to_string();
        let current_url = AllLanguageModelSettings::get_global(cx)
            .openai
            .api_url
            .clone();
        let effective_current_url = if current_url.is_empty() {
            open_ai::OPEN_AI_API_URL
        } else {
            &current_url
        };
        if !api_url.is_empty() && api_url != effective_current_url {
            let fs = <dyn Fs>::global(cx);
            update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
                if let Some(settings) = settings.openai.as_mut() {
                    settings.api_url = Some(api_url.clone());
                } else {
                    settings.openai = Some(OpenAiSettingsContent {
                        api_url: Some(api_url.clone()),
                        available_models: None,
                    });
                }
            });
        }
    }
    fn reset_api_url(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_url_editor.update(cx, |input, cx| {
            input.editor.update(cx, |editor, cx| {
                editor.set_text("", window, cx);
            });
        });
        let fs = <dyn Fs>::global(cx);
        update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
            if let Some(settings) = settings.openai.as_mut() {
                settings.api_url = None;
            }
        });
        cx.notify();
    }
    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
@@ -846,7 +780,6 @@ impl Render for ConfigurationView {
        let api_key_section = if self.should_render_editor(cx) {
            v_flex()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use Zed's assistant with OpenAI, you need to add an API key. Follow these steps:"))
                .child(
                    List::new()
@@ -910,59 +843,34 @@
                .into_any()
        };
        let custom_api_url_set =
            AllLanguageModelSettings::get_global(cx).openai.api_url != open_ai::OPEN_AI_API_URL;
        let api_url_section = if custom_api_url_set {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new("Custom API URL configured.")),
                )
                .child(
                    Button::new("reset-api-url", "Reset API URL")
                        .label_size(LabelSize::Small)
                        .icon(IconName::Undo)
                        .icon_size(IconSize::Small)
                        .icon_position(IconPosition::Start)
                        .layer(ElevationIndex::ModalSurface)
                        .on_click(
                            cx.listener(|this, _, window, cx| this.reset_api_url(window, cx)),
                        ),
                )
                .into_any()
        } else {
            v_flex()
                .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| {
                    this.save_api_url(cx);
                    cx.notify();
                }))
                .mt_2()
                .pt_2()
                .border_t_1()
                .border_color(cx.theme().colors().border_variant)
                .gap_1()
                .child(
                    List::new()
                        .child(InstructionListItem::text_only(
                            "Optionally, you can change the base URL for the OpenAI API request.",
                        ))
                        .child(InstructionListItem::text_only(
                            "Paste the new API endpoint below and hit enter",
                        )),
                )
                .child(self.api_url_editor.clone())
                .into_any()
        };
        let compatible_api_section = h_flex()
            .mt_1p5()
            .gap_0p5()
            .flex_wrap()
            .when(self.should_render_editor(cx), |this| {
                this.pt_1p5()
                    .border_t_1()
                    .border_color(cx.theme().colors().border_variant)
            })
            .child(
                h_flex()
                    .gap_2()
                    .child(
                        Icon::new(IconName::Info)
                            .size(IconSize::XSmall)
                            .color(Color::Muted),
                    )
                    .child(Label::new("Zed also supports OpenAI-compatible models.")),
            )
            .child(
                Button::new("docs", "Learn More")
                    .icon(IconName::ArrowUpRight)
                    .icon_size(IconSize::XSmall)
                    .icon_color(Color::Muted)
                    .on_click(move |_, _window, cx| {
                        cx.open_url("https://zed.dev/docs/ai/configuration#openai-api-compatible")
                    }),
            );
        if self.load_credentials_task.is_some() {
            div().child(Label::new("Loading credentials…")).into_any()
@@ -970,7 +878,7 @@ impl Render for ConfigurationView {
            v_flex()
                .size_full()
                .child(api_key_section)
                .child(api_url_section)
                .child(compatible_api_section)
                .into_any()
        }
    }