
[This PR has been sitting around for a bit](https://github.com/zed-industries/zed/pull/2845). I received a bit of mixed opinions from the team on how this setting should work, if it should use the full model names or some simpler form of it, etc. I went ahead and made the decision to do the following: - Use the full model names in settings - ex: `gpt-4-0613` - Default to `gpt-4-0613` when no setting is present - Save the full model names in the conversation history files (this is how it was prior) - ex: `gpt-4-0613` - Display the shortened model names in the assistant - ex: `gpt-4` - Not worry about adding an option to add custom models (can add in a follow-up PR) - Not query what models are available to the user via their api key (can add in a follow-up PR) Release Notes: - Added a `default_open_ai_model` setting for the assistant (defaults to `gpt-4-0613`). --------- Co-authored-by: Mikayla <mikayla@zed.dev>
75 lines
2 KiB
Rust
use anyhow;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Setting;
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
|
pub enum OpenAIModel {
|
|
#[serde(rename = "gpt-3.5-turbo-0613")]
|
|
ThreePointFiveTurbo,
|
|
#[serde(rename = "gpt-4-0613")]
|
|
Four,
|
|
}
|
|
|
|
impl OpenAIModel {
|
|
pub fn full_name(&self) -> &'static str {
|
|
match self {
|
|
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo-0613",
|
|
OpenAIModel::Four => "gpt-4-0613",
|
|
}
|
|
}
|
|
|
|
pub fn short_name(&self) -> &'static str {
|
|
match self {
|
|
OpenAIModel::ThreePointFiveTurbo => "gpt-3.5-turbo",
|
|
OpenAIModel::Four => "gpt-4",
|
|
}
|
|
}
|
|
|
|
pub fn cycle(&self) -> Self {
|
|
match self {
|
|
OpenAIModel::ThreePointFiveTurbo => OpenAIModel::Four,
|
|
OpenAIModel::Four => OpenAIModel::ThreePointFiveTurbo,
|
|
}
|
|
}
|
|
}
|
|
|
|
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
|
#[serde(rename_all = "snake_case")]
|
|
pub enum AssistantDockPosition {
|
|
Left,
|
|
Right,
|
|
Bottom,
|
|
}
|
|
|
|
/// Fully-resolved assistant settings: every field is present, produced by
/// merging user values over the defaults (see the `Setting` impl in this
/// file).
#[derive(Deserialize, Debug)]
pub struct AssistantSettings {
    /// Whether the assistant toggle button is shown.
    pub button: bool,
    /// Which edge of the workspace the assistant panel docks to.
    pub dock: AssistantDockPosition,
    /// Default width of the assistant panel (presumably used when docked
    /// left/right — confirm against the panel code).
    pub default_width: f32,
    /// Default height of the assistant panel (presumably used when docked at
    /// the bottom — confirm against the panel code).
    pub default_height: f32,
    /// The OpenAI model to use by default (defaults to `gpt-4-0613` via the
    /// default settings file).
    pub default_open_ai_model: OpenAIModel,
}
/// The on-disk (settings-file) representation of assistant settings.
///
/// Every field is optional so users may override any subset; unset fields
/// fall back to the defaults when merged into `AssistantSettings`.
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContent {
    /// Whether the assistant toggle button is shown.
    pub button: Option<bool>,
    /// Which edge of the workspace the assistant panel docks to.
    pub dock: Option<AssistantDockPosition>,
    /// Default width of the assistant panel.
    pub default_width: Option<f32>,
    /// Default height of the assistant panel.
    pub default_height: Option<f32>,
    /// The OpenAI model to use by default, keyed by full model name
    /// (e.g. `"gpt-4-0613"`).
    pub default_open_ai_model: Option<OpenAIModel>,
}
impl Setting for AssistantSettings {
    /// Assistant settings live under the `"assistant"` key of the settings
    /// file.
    const KEY: Option<&'static str> = Some("assistant");

    type FileContent = AssistantSettingsContent;

    /// Resolves the final settings by JSON-merging each user override in
    /// `user_values` over `default_value` (merge behavior is provided by the
    /// `Setting` trait's `load_via_json_merge`).
    fn load(
        default_value: &Self::FileContent,
        user_values: &[&Self::FileContent],
        _: &gpui::AppContext,
    ) -> anyhow::Result<Self> {
        Self::load_via_json_merge(default_value, user_values)
    }
}