Allow OpenAI API URL to be configured via assistant.openai_api_url (#7552)

Partially fixes #4321, since the Azure OpenAI API can be adapted to the OpenAI
API format.

Release Notes:

- Added an `assistant.openai_api_url` setting that allows the OpenAI API URL to be configured (see the example below).
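
For reference, a minimal sketch of the user-facing configuration in Zed's `settings.json`, assuming the usual mapping of the dotted setting name onto a nested object; the proxy URL below is a placeholder, and omitting the key keeps the default `https://api.openai.com/v1`:

```json
{
  "assistant": {
    "openai_api_url": "https://my-openai-compatible-proxy.example.com/v1"
  }
}
```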

---------

Co-authored-by: Marshall Bowers <elliott.codes@gmail.com>
Author: Yesterday17, committed by GitHub
Date: 2024-02-13 00:37:27 +08:00
commit 9e17018416 (parent d959719f3e)
7 changed files with 60 additions and 12 deletions


@@ -68,6 +68,7 @@ struct SavedConversation {
messages: Vec<SavedMessage>,
message_metadata: HashMap<MessageId, MessageMetadata>,
summary: String,
api_url: Option<String>,
model: OpenAiModel,
}
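
The hunk above only shows the new field in the saved-conversation shape; as a standalone sketch (simplified, hypothetical types rather than Zed's actual ones), this is how an optional `api_url` round-trips through serde. The `#[serde(default)]` attribute is an assumption here so that conversations saved before this change, which have no `api_url` key, would still deserialize:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct SavedConversationSketch {
    summary: String,
    // Assumed: fall back to None when the key is absent in older saved files.
    #[serde(default)]
    api_url: Option<String>,
    model: String,
}

fn main() -> serde_json::Result<()> {
    // A conversation saved against a non-default endpoint keeps that endpoint.
    let saved = SavedConversationSketch {
        summary: "Refactor notes".into(),
        api_url: Some("https://my-proxy.example.com/v1".into()),
        model: "gpt-4".into(),
    };
    println!("{}", serde_json::to_string_pretty(&saved)?);

    // An older payload without `api_url` still loads; the field becomes None.
    let old = r#"{ "summary": "Old chat", "model": "gpt-4" }"#;
    let parsed: SavedConversationSketch = serde_json::from_str(old)?;
    assert_eq!(parsed.api_url, None);
    Ok(())
}
```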


@@ -7,6 +7,7 @@ use crate::{
SavedMessage, Split, ToggleFocus, ToggleIncludeConversation, ToggleRetrieveContext,
};
use ai::prompts::repository_context::PromptCodeSnippet;
use ai::providers::open_ai::OPEN_AI_API_URL;
use ai::{
auth::ProviderCredential,
completion::{CompletionProvider, CompletionRequest},
@@ -121,10 +122,22 @@ impl AssistantPanel {
.await
.log_err()
.unwrap_or_default();
// Defaulting currently to GPT4, allow for this to be set via config.
let completion_provider =
OpenAiCompletionProvider::new("gpt-4".into(), cx.background_executor().clone())
.await;
let (api_url, model_name) = cx
.update(|cx| {
let settings = AssistantSettings::get_global(cx);
(
settings.openai_api_url.clone(),
settings.default_open_ai_model.full_name().to_string(),
)
})
.log_err()
.unwrap();
let completion_provider = OpenAiCompletionProvider::new(
api_url,
model_name,
cx.background_executor().clone(),
)
.await;
// TODO: deserialize state.
let workspace_handle = workspace.clone();
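
As a rough, standalone illustration of what the extra constructor argument buys (a hypothetical `SketchProvider`, not the real `OpenAiCompletionProvider`), a provider built with a configurable base URL can derive its request endpoint from that URL instead of a hard-coded host:

```rust
// Hypothetical sketch: a provider that keeps the configured base URL and model.
struct SketchProvider {
    api_url: String,
    model_name: String,
}

impl SketchProvider {
    fn new(api_url: String, model_name: String) -> Self {
        Self { api_url, model_name }
    }

    // Join the configured base URL with the chat-completions path.
    fn completions_endpoint(&self) -> String {
        format!("{}/chat/completions", self.api_url.trim_end_matches('/'))
    }
}

fn main() {
    let provider = SketchProvider::new("https://api.openai.com/v1".into(), "gpt-4".into());
    assert_eq!(
        provider.completions_endpoint(),
        "https://api.openai.com/v1/chat/completions"
    );
    println!("{} -> {}", provider.model_name, provider.completions_endpoint());
}
```
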
@@ -1407,6 +1420,7 @@ struct Conversation {
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
model: OpenAiModel,
api_url: Option<String>,
token_count: Option<usize>,
max_token_count: usize,
pending_token_count: Task<Option<()>>,
@@ -1441,6 +1455,7 @@ impl Conversation {
let settings = AssistantSettings::get_global(cx);
let model = settings.default_open_ai_model.clone();
let api_url = settings.openai_api_url.clone();
let mut this = Self {
id: Some(Uuid::new_v4().to_string()),
@@ -1454,6 +1469,7 @@
token_count: None,
max_token_count: tiktoken_rs::model::get_context_size(&model.full_name()),
pending_token_count: Task::ready(None),
api_url: Some(api_url),
model: model.clone(),
_subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
pending_save: Task::ready(Ok(())),
@@ -1499,6 +1515,7 @@ impl Conversation {
.map(|summary| summary.text.clone())
.unwrap_or_default(),
model: self.model.clone(),
api_url: self.api_url.clone(),
}
}
@@ -1513,8 +1530,12 @@ impl Conversation {
None => Some(Uuid::new_v4().to_string()),
};
let model = saved_conversation.model;
let api_url = saved_conversation.api_url;
let completion_provider: Arc<dyn CompletionProvider> = Arc::new(
OpenAiCompletionProvider::new(
api_url
.clone()
.unwrap_or_else(|| OPEN_AI_API_URL.to_string()),
model.full_name().into(),
cx.background_executor().clone(),
)
@@ -1567,6 +1588,7 @@ impl Conversation {
token_count: None,
max_token_count: tiktoken_rs::model::get_context_size(&model.full_name()),
pending_token_count: Task::ready(None),
api_url,
model,
_subscriptions: vec![cx.subscribe(&buffer, Self::handle_buffer_event)],
pending_save: Task::ready(Ok(())),
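
When a saved conversation is reopened, the hunks above prefer its persisted `api_url` and only fall back to the stock endpoint when none was saved; a minimal standalone sketch of that resolution (the constant value matches the documented default):

```rust
// Fallback used when restoring a saved conversation: a missing `api_url`
// resolves to the stock OpenAI endpoint.
const OPEN_AI_API_URL: &str = "https://api.openai.com/v1";

fn resolve_api_url(saved: Option<String>) -> String {
    saved.unwrap_or_else(|| OPEN_AI_API_URL.to_string())
}

fn main() {
    assert_eq!(resolve_api_url(None), OPEN_AI_API_URL);
    assert_eq!(
        resolve_api_url(Some("https://my-proxy.example.com/v1".into())),
        "https://my-proxy.example.com/v1"
    );
}
```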


@@ -55,6 +55,7 @@ pub struct AssistantSettings {
pub default_width: Pixels,
pub default_height: Pixels,
pub default_open_ai_model: OpenAiModel,
pub openai_api_url: String,
}
/// Assistant panel settings
@@ -80,6 +81,10 @@ pub struct AssistantSettingsContent {
///
/// Default: gpt-4-1106-preview
pub default_open_ai_model: Option<OpenAiModel>,
/// OpenAI API base URL to use when starting new conversations.
///
/// Default: https://api.openai.com/v1
pub openai_api_url: Option<String>,
}
impl Settings for AssistantSettings {