Wire up Azure OpenAI completion provider (#8646)

This PR wires up support for [Azure
OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview)
as an alternative AI provider in the assistant panel.

This can be configured by adding the following to your settings file:

```json
{
  "assistant": {
    "provider": {
      "type": "azure_openai",
      "api_url": "https://{your-resource-name}.openai.azure.com",
      "deployment_id": "gpt-4",
      "api_version": "2023-05-15"
    }
  }
}
```

You will need to deploy a model within Azure and update the settings
accordingly: `api_url` should point at your Azure OpenAI resource's endpoint,
and `deployment_id` should match the name of the deployment you created.
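
For context on how these three values are used against Azure, the sketch below is a minimal, hypothetical helper (not code from this PR; the function name is made up) that assembles a chat-completions URL the way the Azure OpenAI REST API expects, i.e. `{api_url}/openai/deployments/{deployment_id}/chat/completions?api-version={api_version}`:

```rust
/// Hypothetical helper, not part of this PR: assembles the Azure OpenAI
/// chat-completions endpoint from the settings shown above.
fn azure_chat_completions_url(api_url: &str, deployment_id: &str, api_version: &str) -> String {
    format!(
        "{}/openai/deployments/{}/chat/completions?api-version={}",
        api_url.trim_end_matches('/'),
        deployment_id,
        api_version
    )
}

fn main() {
    // Using the example settings from the JSON above.
    let url = azure_chat_completions_url(
        "https://my-resource.openai.azure.com",
        "gpt-4",
        "2023-05-15",
    );
    assert_eq!(
        url,
        "https://my-resource.openai.azure.com/openai/deployments/gpt-4/chat/completions?api-version=2023-05-15"
    );
    println!("{url}");
}
```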

Release Notes:

- N/A
Commit eb1ab69606 (parent 7c1ef966f3), authored by Marshall Bowers on 2024-02-29 22:01:33 -05:00, committed by GitHub.
7 changed files with 290 additions and 65 deletions.

```diff
@@ -124,16 +124,18 @@ impl AssistantPanel {
                 .await
                 .log_err()
                 .unwrap_or_default();
-            let (api_url, model_name) = cx.update(|cx| {
+            let (provider_kind, api_url, model_name) = cx.update(|cx| {
                 let settings = AssistantSettings::get_global(cx);
-                (
-                    settings.openai_api_url.clone(),
-                    settings.default_open_ai_model.full_name().to_string(),
-                )
-            })?;
+                anyhow::Ok((
+                    settings.provider_kind()?,
+                    settings.provider_api_url()?,
+                    settings.provider_model_name()?,
+                ))
+            })??;
             let completion_provider = OpenAiCompletionProvider::new(
                 api_url,
-                OpenAiCompletionProviderKind::OpenAi,
+                provider_kind,
                 model_name,
                 cx.background_executor().clone(),
             )
```
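
The hunk above swaps the hard-coded OpenAI URL and default model for three accessors on `AssistantSettings`. Their real implementations live in the settings portion of this PR (not shown here); the sketch below is only an illustration of what such accessors could look like if the `provider` setting deserializes into a tagged enum. Every type, field, and variant name in it is an assumption, and `anyhow` is assumed as the error type since the hunk uses `anyhow::Ok`:

```rust
use anyhow::Result;

// Illustrative stand-ins for the settings types; all names and fields here
// are assumptions for this sketch, not the actual definitions in the codebase.
#[allow(dead_code)]
enum AssistantProvider {
    OpenAi { api_url: String, default_model: String },
    AzureOpenAi { api_url: String, deployment_id: String },
}

#[allow(dead_code)]
enum ProviderKind {
    OpenAi,
    AzureOpenAi,
}

struct Settings {
    provider: AssistantProvider,
}

impl Settings {
    // Returned as `Result` to mirror the `?` operators used in the hunk.
    fn provider_kind(&self) -> Result<ProviderKind> {
        Ok(match &self.provider {
            AssistantProvider::OpenAi { .. } => ProviderKind::OpenAi,
            AssistantProvider::AzureOpenAi { .. } => ProviderKind::AzureOpenAi,
        })
    }

    fn provider_api_url(&self) -> Result<String> {
        match &self.provider {
            AssistantProvider::OpenAi { api_url, .. }
            | AssistantProvider::AzureOpenAi { api_url, .. } => Ok(api_url.clone()),
        }
    }

    fn provider_model_name(&self) -> Result<String> {
        match &self.provider {
            AssistantProvider::OpenAi { default_model, .. } => Ok(default_model.clone()),
            // With Azure OpenAI, the deployment name identifies the model.
            AssistantProvider::AzureOpenAi { deployment_id, .. } => Ok(deployment_id.clone()),
        }
    }
}

fn main() -> Result<()> {
    let settings = Settings {
        provider: AssistantProvider::AzureOpenAi {
            api_url: "https://my-resource.openai.azure.com".into(),
            deployment_id: "gpt-4".into(),
        },
    };
    let _kind = settings.provider_kind()?;
    println!("model name: {}", settings.provider_model_name()?);
    println!("api url:    {}", settings.provider_api_url()?);
    Ok(())
}
```

The doubled `?` on `})??` then falls out naturally: one `?` for the `Result` that `cx.update` already returned before this change (the old code ended in `})?;`), and one for the new `anyhow::Ok(...)` produced inside the closure.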
```diff
@@ -693,24 +695,29 @@ impl AssistantPanel {
             Task::ready(Ok(Vec::new()))
         };
-        let mut model = AssistantSettings::get_global(cx)
-            .default_open_ai_model
-            .clone();
-        let model_name = model.full_name();
+        let Some(mut model_name) = AssistantSettings::get_global(cx)
+            .provider_model_name()
+            .log_err()
+        else {
+            return;
+        };
-        let prompt = cx.background_executor().spawn(async move {
-            let snippets = snippets.await?;
+        let prompt = cx.background_executor().spawn({
+            let model_name = model_name.clone();
+            async move {
+                let snippets = snippets.await?;
-            let language_name = language_name.as_deref();
-            generate_content_prompt(
-                user_prompt,
-                language_name,
-                buffer,
-                range,
-                snippets,
-                model_name,
-                project_name,
-            )
+                let language_name = language_name.as_deref();
+                generate_content_prompt(
+                    user_prompt,
+                    language_name,
+                    buffer,
+                    range,
+                    snippets,
+                    &model_name,
+                    project_name,
+                )
+            }
         });
         let mut messages = Vec::new();
```
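
A side note on the added `spawn({ ... })` shape: the extra block clones `model_name` and moves only the clone into the `async move` future, so the original binding stays usable after the spawn (the next hunk reassigns it when a conversation supplies its own model). Below is a generic, self-contained sketch of that clone-then-move pattern, with `std::thread::spawn` standing in for the gpui executor:

```rust
// Generic illustration of the clone-then-move pattern from the hunk above;
// `std::thread::spawn` stands in for the gpui background executor.
fn main() {
    let mut model_name = String::from("gpt-4");

    // The outer block clones `model_name` and moves only the clone into the
    // closure, so the original binding is not consumed by the spawn.
    let handle = std::thread::spawn({
        let model_name = model_name.clone();
        move || format!("prompt for {model_name}")
    });

    // Because only the clone was moved, the original can still be read or
    // reassigned afterwards, as the next hunk does with the conversation's model.
    model_name = String::from("gpt-4-turbo");
    println!("current model: {model_name}");
    println!("{}", handle.join().unwrap());
}
```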
```diff
@@ -722,7 +729,7 @@ impl AssistantPanel {
                     .messages(cx)
                     .map(|message| message.to_open_ai_message(buffer)),
             );
-            model = conversation.model.clone();
+            model_name = conversation.model.full_name().to_string();
         }
         cx.spawn(|_, mut cx| async move {
@@ -735,7 +742,7 @@
             });
             let request = Box::new(OpenAiRequest {
-                model: model.full_name().into(),
+                model: model_name,
                 messages,
                 stream: true,
                 stop: vec!["|END|>".to_string()],
@@ -1454,8 +1461,14 @@ impl Conversation {
         });
         let settings = AssistantSettings::get_global(cx);
-        let model = settings.default_open_ai_model.clone();
-        let api_url = settings.openai_api_url.clone();
+        let model = settings
+            .provider_model()
+            .log_err()
+            .unwrap_or(OpenAiModel::FourTurbo);
+        let api_url = settings
+            .provider_api_url()
+            .log_err()
+            .unwrap_or_else(|| OPEN_AI_API_URL.to_string());
         let mut this = Self {
             id: Some(Uuid::new_v4().to_string()),
@@ -3655,9 +3668,9 @@ fn report_assistant_event(
     let client = workspace.read(cx).project().read(cx).client();
     let telemetry = client.telemetry();
-    let model = AssistantSettings::get_global(cx)
-        .default_open_ai_model
-        .clone();
+    let Ok(model_name) = AssistantSettings::get_global(cx).provider_model_name() else {
+        return;
+    };
-    telemetry.report_assistant_event(conversation_id, assistant_kind, model.full_name())
+    telemetry.report_assistant_event(conversation_id, assistant_kind, &model_name)
 }
```