settings: Remove version fields (#33372)

This cleans up our settings so that they no longer include any `version` fields, since we
now have an actual settings migrator.

This PR removes `language_models > anthropic > version`,
`language_models > openai > version`, and `agent > version`.

We have had migration paths in the code for a long time, so in practice
almost everyone should already be on the latest version of these settings.
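
For context, the `version` key only existed because the settings content types were versioned enums tagged with `#[serde(tag = "version")]`. The sketch below is a simplified, self-contained illustration of the before/after shapes, not the exact code from this diff: the type names are adjusted so both shapes compile side by side, the field lists are trimmed to `api_url`, and `serde`/`serde_json` are assumed as dependencies.

```rust
use serde::{Deserialize, Serialize};

// Before: `language_models > openai` deserialized into an untagged enum, so a
// `"version": "1"` key was needed to select the versioned arm (simplified).
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum OldOpenAiSettingsContent {
    Versioned(VersionedOpenAiSettingsContent),
    Legacy(LegacyOpenAiSettingsContent),
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "version")]
enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Debug, Serialize, Deserialize)]
struct OpenAiSettingsContentV1 {
    api_url: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
struct LegacyOpenAiSettingsContent {
    api_url: Option<String>,
}

// After: a single flat struct, so settings.json no longer needs a `version` key.
#[derive(Debug, Serialize, Deserialize)]
struct NewOpenAiSettingsContent {
    api_url: Option<String>,
}

fn main() {
    let old = r#"{ "version": "1", "api_url": "https://api.openai.com/v1" }"#;
    let new = r#"{ "api_url": "https://api.openai.com/v1" }"#;

    // The old shape needed the `"version"` tag to hit the V1 arm.
    let old_parsed: OldOpenAiSettingsContent = serde_json::from_str(old).unwrap();
    assert!(matches!(old_parsed, OldOpenAiSettingsContent::Versioned(_)));

    // The new shape parses directly, with no tag to select.
    let new_parsed: NewOpenAiSettingsContent = serde_json::from_str(new).unwrap();
    assert_eq!(new_parsed.api_url.as_deref(), Some("https://api.openai.com/v1"));
}
```

With the flat struct, a plain `{ "api_url": ... }` object deserializes directly, so there is nothing left for a `version` tag to select.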


Release Notes:

- Removed `version` fields in settings for `agent`, `language_models > anthropic`, and
`language_models > openai`. Your settings will be migrated automatically; see the sketch
below for roughly what that amounts to. If you run into issues with this, please open an
issue [here](https://github.com/zed-industries/zed/issues).
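
Since the release note promises an automatic migration, here is a hedged sketch of what that migration amounts to at the settings.json level. This is not Zed's actual migrator code; the function name `strip_version_fields` and the nested keys in the example are made up for illustration, and `serde_json` is assumed as a dependency.

```rust
use serde_json::{Value, json};

// Illustrative only: drop the now-unused `version` keys from the affected
// settings objects. Zed's real migrator lives elsewhere.
fn strip_version_fields(settings: &mut Value) {
    let paths: [&[&str]; 3] = [
        &["agent"],
        &["language_models", "anthropic"],
        &["language_models", "openai"],
    ];
    for path in paths {
        // Walk down to the target object, if it exists in the user's settings.
        let mut node = Some(&mut *settings);
        for key in path {
            node = node.and_then(|value| value.get_mut(*key));
        }
        if let Some(object) = node.and_then(Value::as_object_mut) {
            object.remove("version");
        }
    }
}

fn main() {
    // Hypothetical user settings before migration; the keys other than
    // `version` are examples, not an exhaustive schema.
    let mut settings = json!({
        "agent": { "version": "2", "default_model": { "provider": "anthropic", "model": "claude-sonnet-4" } },
        "language_models": {
            "openai": { "version": "1", "api_url": "https://api.openai.com/v1" }
        }
    });
    strip_version_fields(&mut settings);
    assert!(settings["agent"].get("version").is_none());
    assert!(settings["language_models"]["openai"].get("version").is_none());
    println!("{}", serde_json::to_string_pretty(&settings).unwrap());
}
```

Everything else under `agent` or the provider objects is left untouched; only the obsolete `version` keys are dropped.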
Bennet Bo Fenner 2025-06-25 19:05:29 +02:00 committed by GitHub
parent c0acd8e8b1
commit 224de2ec6c
18 changed files with 331 additions and 972 deletions


@@ -42,7 +42,6 @@ open_ai = { workspace = true, features = ["schemars"] }
open_router = { workspace = true, features = ["schemars"] }
vercel = { workspace = true, features = ["schemars"] }
partial-json-fixer.workspace = true
project.workspace = true
proto.workspace = true
release_channel.workspace = true
schemars.workspace = true


@@ -1,7 +1,6 @@
use std::sync::Arc;
use client::{Client, UserStore};
use fs::Fs;
use gpui::{App, Context, Entity};
use language_model::LanguageModelRegistry;
use provider::deepseek::DeepSeekLanguageModelProvider;
@@ -23,8 +22,8 @@ use crate::provider::open_router::OpenRouterLanguageModelProvider;
use crate::provider::vercel::VercelLanguageModelProvider;
pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, fs: Arc<dyn Fs>, cx: &mut App) {
crate::settings::init(fs, cx);
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
crate::settings::init(cx);
let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| {
register_language_model_providers(registry, user_store, client, cx);


@@ -41,7 +41,6 @@ pub struct AnthropicSettings {
pub api_url: String,
/// Extend Zed's list of Anthropic models.
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]


@@ -36,7 +36,6 @@ const PROVIDER_NAME: &str = "Mistral";
pub struct MistralSettings {
pub api_url: String,
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]


@@ -28,6 +28,7 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
use ui_input::SingleLineInput;
use util::ResultExt;
use crate::OpenAiSettingsContent;
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
const PROVIDER_ID: &str = "openai";
@@ -37,7 +38,6 @@ const PROVIDER_NAME: &str = "OpenAI";
pub struct OpenAiSettings {
pub api_url: String,
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
@@ -803,30 +803,13 @@ impl ConfigurationView {
if !api_url.is_empty() && api_url != effective_current_url {
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
if settings.openai.is_none() {
settings.openai = Some(OpenAiSettingsContent::Versioned(
VersionedOpenAiSettingsContent::V1(
crate::settings::OpenAiSettingsContentV1 {
api_url: Some(api_url.clone()),
available_models: None,
},
),
));
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = Some(api_url.clone());
} else {
if let Some(openai) = settings.openai.as_mut() {
match openai {
OpenAiSettingsContent::Versioned(versioned) => match versioned {
VersionedOpenAiSettingsContent::V1(v1) => {
v1.api_url = Some(api_url.clone());
}
},
OpenAiSettingsContent::Legacy(legacy) => {
legacy.api_url = Some(api_url.clone());
}
}
}
settings.openai = Some(OpenAiSettingsContent {
api_url: Some(api_url.clone()),
available_models: None,
});
}
});
}
@@ -840,19 +823,8 @@ impl ConfigurationView {
});
let fs = <dyn Fs>::global(cx);
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
if let Some(openai) = settings.openai.as_mut() {
match openai {
OpenAiSettingsContent::Versioned(versioned) => match versioned {
VersionedOpenAiSettingsContent::V1(v1) => {
v1.api_url = None;
}
},
OpenAiSettingsContent::Legacy(legacy) => {
legacy.api_url = None;
}
}
if let Some(settings) = settings.openai.as_mut() {
settings.api_url = None;
}
});
cx.notify();


@@ -32,7 +32,6 @@ const PROVIDER_NAME: &str = "Vercel";
pub struct VercelSettings {
pub api_url: String,
pub available_models: Vec<AvailableModel>,
pub needs_setting_migration: bool,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]


@@ -1,12 +1,8 @@
use std::sync::Arc;
use anyhow::Result;
use gpui::App;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, update_settings_file};
use settings::{Settings, SettingsSources};
use crate::provider::{
self,
@@ -24,36 +20,8 @@ use crate::provider::{
};
/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
pub fn init(cx: &mut App) {
AllLanguageModelSettings::register(cx);
if AllLanguageModelSettings::get_global(cx)
.openai
.needs_setting_migration
{
update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
if let Some(settings) = setting.openai.clone() {
let (newest_version, _) = settings.upgrade();
setting.openai = Some(OpenAiSettingsContent::Versioned(
VersionedOpenAiSettingsContent::V1(newest_version),
));
}
});
}
if AllLanguageModelSettings::get_global(cx)
.anthropic
.needs_setting_migration
{
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
if let Some(settings) = setting.anthropic.clone() {
let (newest_version, _) = settings.upgrade();
setting.anthropic = Some(AnthropicSettingsContent::Versioned(
VersionedAnthropicSettingsContent::V1(newest_version),
));
}
});
}
}
#[derive(Default)]
@@ -90,78 +58,7 @@ pub struct AllLanguageModelSettingsContent {
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
Versioned(VersionedAnthropicSettingsContent),
Legacy(LegacyAnthropicSettingsContent),
}
impl AnthropicSettingsContent {
pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
match self {
AnthropicSettingsContent::Legacy(content) => (
AnthropicSettingsContentV1 {
api_url: content.api_url,
available_models: content.available_models.map(|models| {
models
.into_iter()
.filter_map(|model| match model {
anthropic::Model::Custom {
name,
display_name,
max_tokens,
tool_override,
cache_configuration,
max_output_tokens,
default_temperature,
extra_beta_headers,
mode,
} => Some(provider::anthropic::AvailableModel {
name,
display_name,
max_tokens,
tool_override,
cache_configuration: cache_configuration.as_ref().map(
|config| LanguageModelCacheConfiguration {
max_cache_anchors: config.max_cache_anchors,
should_speculate: config.should_speculate,
min_total_token: config.min_total_token,
},
),
max_output_tokens,
default_temperature,
extra_beta_headers,
mode: Some(mode.into()),
}),
_ => None,
})
.collect()
}),
},
true,
),
AnthropicSettingsContent::Versioned(content) => match content {
VersionedAnthropicSettingsContent::V1(content) => (content, false),
},
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
pub api_url: Option<String>,
pub available_models: Option<Vec<anthropic::Model>>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
#[serde(rename = "1")]
V1(AnthropicSettingsContentV1),
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
pub struct AnthropicSettingsContent {
pub api_url: Option<String>,
pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}
@@ -200,64 +97,7 @@ pub struct MistralSettingsContent {
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
Versioned(VersionedOpenAiSettingsContent),
Legacy(LegacyOpenAiSettingsContent),
}
impl OpenAiSettingsContent {
pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
match self {
OpenAiSettingsContent::Legacy(content) => (
OpenAiSettingsContentV1 {
api_url: content.api_url,
available_models: content.available_models.map(|models| {
models
.into_iter()
.filter_map(|model| match model {
open_ai::Model::Custom {
name,
display_name,
max_tokens,
max_output_tokens,
max_completion_tokens,
} => Some(provider::open_ai::AvailableModel {
name,
max_tokens,
max_output_tokens,
display_name,
max_completion_tokens,
}),
_ => None,
})
.collect()
}),
},
true,
),
OpenAiSettingsContent::Versioned(content) => match content {
VersionedOpenAiSettingsContent::V1(content) => (content, false),
},
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
pub api_url: Option<String>,
pub available_models: Option<Vec<open_ai::Model>>,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
#[serde(rename = "1")]
V1(OpenAiSettingsContentV1),
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
pub struct OpenAiSettingsContent {
pub api_url: Option<String>,
pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}
@@ -303,15 +143,7 @@ impl settings::Settings for AllLanguageModelSettings {
for value in sources.defaults_and_customizations() {
// Anthropic
let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
Some((content, upgraded)) => (Some(content), upgraded),
None => (None, false),
};
if upgraded {
settings.anthropic.needs_setting_migration = true;
}
let anthropic = value.anthropic.clone();
merge(
&mut settings.anthropic.api_url,
anthropic.as_ref().and_then(|s| s.api_url.clone()),
@@ -377,15 +209,7 @@ impl settings::Settings for AllLanguageModelSettings {
);
// OpenAI
let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
Some((content, upgraded)) => (Some(content), upgraded),
None => (None, false),
};
if upgraded {
settings.openai.needs_setting_migration = true;
}
let openai = value.openai.clone();
merge(
&mut settings.openai.api_url,
openai.as_ref().and_then(|s| s.api_url.clone()),