Improve model selection in the assistant (#12472)

https://github.com/zed-industries/zed/assets/482957/3b017850-b7b6-457a-9b2f-324d5533442e


Release Notes:

- Improved the UX for selecting a model in the assistant panel. You can
now switch models using just the keyboard by pressing `alt-m`. Also, when
switching models via the UI, settings will now be updated automatically.
Commit 6ff01b17ca (parent 5a149b970c)
Antonio Scandurra, 2024-05-30 12:36:07 +02:00, committed by GitHub
17 changed files with 517 additions and 295 deletions

```diff
@@ -11,6 +11,7 @@ use open_ai::{stream_completion, Request, RequestMessage, Role as OpenAiRole};
 use settings::Settings;
 use std::time::Duration;
 use std::{env, sync::Arc};
+use strum::IntoEnumIterator;
 use theme::ThemeSettings;
 use ui::prelude::*;
 use util::ResultExt;
@@ -18,7 +19,7 @@ use util::ResultExt;
 pub struct OpenAiCompletionProvider {
     api_key: Option<String>,
     api_url: String,
-    default_model: OpenAiModel,
+    model: OpenAiModel,
     http_client: Arc<dyn HttpClient>,
     low_speed_timeout: Option<Duration>,
     settings_version: usize,
@@ -26,7 +27,7 @@ pub struct OpenAiCompletionProvider {
 
 impl OpenAiCompletionProvider {
     pub fn new(
-        default_model: OpenAiModel,
+        model: OpenAiModel,
         api_url: String,
         http_client: Arc<dyn HttpClient>,
         low_speed_timeout: Option<Duration>,
@@ -35,7 +36,7 @@ impl OpenAiCompletionProvider {
         Self {
             api_key: None,
             api_url,
-            default_model,
+            model,
             http_client,
             low_speed_timeout,
             settings_version,
@@ -44,17 +45,21 @@ impl OpenAiCompletionProvider {
 
     pub fn update(
         &mut self,
-        default_model: OpenAiModel,
+        model: OpenAiModel,
         api_url: String,
         low_speed_timeout: Option<Duration>,
         settings_version: usize,
     ) {
-        self.default_model = default_model;
+        self.model = model;
         self.api_url = api_url;
         self.low_speed_timeout = low_speed_timeout;
         self.settings_version = settings_version;
     }
 
+    pub fn available_models(&self) -> impl Iterator<Item = OpenAiModel> {
+        OpenAiModel::iter()
+    }
+
     pub fn settings_version(&self) -> usize {
         self.settings_version
     }
@@ -104,8 +109,8 @@ impl OpenAiCompletionProvider {
         .into()
     }
 
-    pub fn default_model(&self) -> OpenAiModel {
-        self.default_model.clone()
+    pub fn model(&self) -> OpenAiModel {
+        self.model.clone()
     }
 
     pub fn count_tokens(
@@ -152,7 +157,7 @@ impl OpenAiCompletionProvider {
     fn to_open_ai_request(&self, request: LanguageModelRequest) -> Request {
         let model = match request.model {
             LanguageModel::OpenAi(model) => model,
-            _ => self.default_model(),
+            _ => self.model(),
         };
 
         Request {
```
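
The diff above adds an `available_models()` method backed by `strum`'s derived enum iterator. Below is a minimal, self-contained sketch of that pattern, not Zed's actual code: the enum variants, the `next_model` helper, and the `strum` version are illustrative assumptions. It only shows how an enumerable model list can back a keyboard-driven "switch to the next model" flow like the new `alt-m` binding.

```rust
// Requires (assumed version): strum = { version = "0.26", features = ["derive"] }
use strum::{EnumIter, IntoEnumIterator};

// Illustrative variants; the real enum lives in Zed's `open_ai` crate.
#[derive(Clone, Debug, PartialEq, EnumIter)]
enum OpenAiModel {
    ThreePointFiveTurbo,
    Four,
    FourOmni,
}

// Same shape as the method added in the diff: an iterator over every variant.
fn available_models() -> impl Iterator<Item = OpenAiModel> {
    OpenAiModel::iter()
}

// Hypothetical helper: return the model that follows `current`, wrapping around.
fn next_model(current: &OpenAiModel) -> OpenAiModel {
    let models: Vec<OpenAiModel> = available_models().collect();
    let index = models.iter().position(|m| m == current).unwrap_or(0);
    models[(index + 1) % models.len()].clone()
}

fn main() {
    let mut model = OpenAiModel::ThreePointFiveTurbo;
    // Simulate pressing a "cycle model" shortcut twice.
    for _ in 0..2 {
        model = next_model(&model);
        println!("selected model: {model:?}");
    }
}
```

Deriving the iterator keeps any model picker or cycling shortcut in sync with the enum definition: adding a variant automatically surfaces it in the list of available models.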