Adjust model selector popover design (#15056)

This PR mostly refines the model selector popover design by formatting the
model names and adjusting spacing/alignment in the list items. The list
component changes could've been made in a separate PR, but it was practical
to do them here since I was already in context. Either way, I'm happy to
split them out if that's better!

One thing I couldn't quite figure out, though, is why the provider order
changed (e.g., Anthropic now appears last). I wonder if that's somehow caused
by the separator logic? I'd love guidance here, as I'm new to Rust!
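
For illustration, here's a minimal, standalone sketch (not Zed's actual code) of how separator logic could scramble the provider order: if entries are grouped by provider before separators are inserted, and that grouping goes through a `HashMap`, the iteration order over the groups is arbitrary.

```rust
use std::collections::HashMap;

fn main() {
    // Hypothetical model list, in the order the registry reports it.
    let models = vec![
        ("Anthropic", "Claude 3.5 Sonnet"),
        ("Anthropic", "Claude 3 Haiku"),
        ("OpenAI", "GPT-4o"),
        ("Google", "Gemini 1.5 Pro"),
    ];

    // Group by provider so a separator can be drawn between groups.
    // HashMap iteration order is arbitrary, so the provider groups can
    // come out in a different order than the input (e.g., Anthropic last).
    let mut grouped: HashMap<&str, Vec<&str>> = HashMap::new();
    for &(provider, name) in &models {
        grouped.entry(provider).or_default().push(name);
    }

    for (provider, names) in &grouped {
        println!("---- {provider} ----"); // separator / provider header
        for name in names {
            println!("  {name}");
        }
    }
}
```

If that's the cause, collecting into something order-preserving (a `Vec` of groups, or sorting providers explicitly) would keep the registry order stable.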

| Before | After |
|--------|--------|
| <img width="228" alt="Screenshot 2024-07-23 at 21 02 33" src="https://github.com/user-attachments/assets/3372c6c9-08dc-4d71-9265-26f015e2dbc2"> | <img width="228" alt="Screenshot 2024-07-23 at 21 01 45" src="https://github.com/user-attachments/assets/624cc7db-a3d9-48e3-99d7-c29829501130"> |

---

Release Notes:

- N/A

---------

Co-authored-by: Marshall Bowers <elliott.codes@gmail.com>
Co-authored-by: Bennet Bo Fenner <bennet@zed.dev>
Co-authored-by: Antonio <antonio@zed.dev>
Co-authored-by: Antonio Scandurra <me@as-cii.com>
Danilo Leal authored on 2024-07-24 07:24:54 -03:00, committed by GitHub
commit 912b396e58 (parent 87d93033d1)
13 changed files with 220 additions and 298 deletions


@@ -1,6 +1,6 @@
 use crate::{
-    assistant_settings::AssistantSettings, humanize_token_count, prompts::generate_content_prompt,
-    AssistantPanel, AssistantPanelEvent, Hunk, LanguageModelCompletionProvider, StreamingDiff,
+    humanize_token_count, prompts::generate_content_prompt, AssistantPanel, AssistantPanelEvent,
+    Hunk, LanguageModelCompletionProvider, ModelSelector, StreamingDiff,
 };
 use anyhow::{anyhow, Context as _, Result};
 use client::telemetry::Telemetry;
@@ -27,13 +27,11 @@ use gpui::{
     WindowContext,
 };
 use language::{Buffer, Point, Selection, TransactionId};
-use language_model::{
-    LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
-};
+use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
 use multi_buffer::MultiBufferRow;
 use parking_lot::Mutex;
 use rope::Rope;
-use settings::{update_settings_file, Settings};
+use settings::Settings;
 use similar::TextDiff;
 use smol::future::FutureExt;
 use std::{
@@ -47,7 +45,7 @@ use std::{
     time::{Duration, Instant},
 };
 use theme::ThemeSettings;
-use ui::{prelude::*, ContextMenu, IconButtonShape, PopoverMenu, Tooltip};
+use ui::{prelude::*, IconButtonShape, Tooltip};
 use util::RangeExt;
 use workspace::{notifications::NotificationId, Toast, Workspace};
@@ -1325,8 +1323,6 @@ impl EventEmitter<PromptEditorEvent> for PromptEditor {}
 impl Render for PromptEditor {
     fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
         let gutter_dimensions = *self.gutter_dimensions.lock();
-        let fs = self.fs.clone();
         let buttons = match &self.codegen.read(cx).status {
             CodegenStatus::Idle => {
                 vec![
@@ -1427,74 +1423,27 @@ impl Render for PromptEditor {
                 .w(gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0))
                 .justify_center()
                 .gap_2()
-                .child(
-                    PopoverMenu::new("model-switcher")
-                        .menu(move |cx| {
-                            ContextMenu::build(cx, |mut menu, cx| {
-                                for available_model in
-                                    LanguageModelRegistry::read_global(cx).available_models(cx)
-                                {
-                                    menu = menu.custom_entry(
-                                        {
-                                            let model_name = available_model.name().0.clone();
-                                            let provider =
-                                                available_model.provider_id().0.clone();
-                                            move |_| {
-                                                h_flex()
-                                                    .w_full()
-                                                    .justify_between()
-                                                    .child(Label::new(model_name.clone()))
-                                                    .child(
-                                                        div().ml_4().child(
-                                                            Label::new(provider.clone())
-                                                                .color(Color::Muted),
-                                                        ),
-                                                    )
-                                                    .into_any()
-                                            }
-                                        },
-                                        {
-                                            let fs = fs.clone();
-                                            let model = available_model.clone();
-                                            move |cx| {
-                                                let model = model.clone();
-                                                update_settings_file::<AssistantSettings>(
-                                                    fs.clone(),
-                                                    cx,
-                                                    move |settings, _| {
-                                                        settings.set_model(model)
-                                                    },
-                                                );
-                                            }
-                                        },
-                                    );
-                                }
-                                menu
-                            })
-                            .into()
-                        })
-                        .trigger(
-                            IconButton::new("context", IconName::Settings)
-                                .shape(IconButtonShape::Square)
-                                .icon_size(IconSize::Small)
-                                .icon_color(Color::Muted)
-                                .tooltip(move |cx| {
-                                    Tooltip::with_meta(
-                                        format!(
-                                            "Using {}",
-                                            LanguageModelCompletionProvider::read_global(cx)
-                                                .active_model()
-                                                .map(|model| model.name().0)
-                                                .unwrap_or_else(|| "No model selected".into()),
-                                        ),
-                                        None,
-                                        "Change Model",
-                                        cx,
-                                    )
-                                }),
-                        )
-                        .anchor(gpui::AnchorCorner::BottomRight),
-                )
+                .child(ModelSelector::new(
+                    self.fs.clone(),
+                    IconButton::new("context", IconName::Settings)
+                        .shape(IconButtonShape::Square)
+                        .icon_size(IconSize::Small)
+                        .icon_color(Color::Muted)
+                        .tooltip(move |cx| {
+                            Tooltip::with_meta(
+                                format!(
+                                    "Using {}",
+                                    LanguageModelCompletionProvider::read_global(cx)
+                                        .active_model()
+                                        .map(|model| model.name().0)
+                                        .unwrap_or_else(|| "No model selected".into()),
+                                ),
+                                None,
+                                "Change Model",
+                                cx,
+                            )
+                        }),
+                ))
                 .children(
                     if let CodegenStatus::Error(error) = &self.codegen.read(cx).status {
                         let error_message = SharedString::from(error.to_string());
@@ -2625,6 +2574,7 @@ mod tests {
         language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
         Point,
     };
+    use language_model::LanguageModelRegistry;
     use rand::prelude::*;
     use serde::Serialize;
     use settings::SettingsStore;
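
For readers following the refactor: the call site above now only hands `ModelSelector` a filesystem handle and a trigger button. Below is a hypothetical reconstruction of what such a component could encapsulate, pieced together from the same building blocks the removed inline code used (`PopoverMenu`, `ContextMenu`, `update_settings_file`). It is not the actual `ModelSelector` introduced by this PR; the `Arc<dyn Fs>` field, the `RenderOnce`/`IntoElement` wiring, and the import paths are assumptions.

```rust
use std::sync::Arc;

use fs::Fs;
use gpui::{IntoElement, RenderOnce, WindowContext};
use language_model::LanguageModelRegistry;
use settings::update_settings_file;
use ui::{prelude::*, ContextMenu, PopoverMenu};

use crate::assistant_settings::AssistantSettings;

/// Hypothetical reconstruction; not the component shipped in this PR.
#[derive(IntoElement)]
pub struct ModelSelector {
    fs: Arc<dyn Fs>,
    trigger: IconButton,
}

impl ModelSelector {
    pub fn new(fs: Arc<dyn Fs>, trigger: IconButton) -> Self {
        Self { fs, trigger }
    }
}

impl RenderOnce for ModelSelector {
    fn render(self, _cx: &mut WindowContext) -> impl IntoElement {
        let fs = self.fs.clone();
        PopoverMenu::new("model-selector")
            .menu(move |cx| {
                ContextMenu::build(cx, |mut menu, cx| {
                    for available_model in
                        LanguageModelRegistry::read_global(cx).available_models(cx)
                    {
                        menu = menu.custom_entry(
                            {
                                // Renders one row: model name, plus the provider in a muted color.
                                let name = available_model.name().0.clone();
                                let provider = available_model.provider_id().0.clone();
                                move |_| {
                                    h_flex()
                                        .w_full()
                                        .justify_between()
                                        .child(Label::new(name.clone()))
                                        .child(Label::new(provider.clone()).color(Color::Muted))
                                        .into_any()
                                }
                            },
                            {
                                // Persists the picked model to the assistant settings file.
                                let fs = fs.clone();
                                let model = available_model.clone();
                                move |cx| {
                                    let model = model.clone();
                                    update_settings_file::<AssistantSettings>(
                                        fs.clone(),
                                        cx,
                                        move |settings, _| settings.set_model(model),
                                    );
                                }
                            },
                        );
                    }
                    menu
                })
                .into()
            })
            .trigger(self.trigger)
            .anchor(gpui::AnchorCorner::BottomRight)
    }
}
```

The practical upside of this shape is that the settings-writing logic lives in one place, and each call site only decides what the trigger button looks like.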