ZIm/crates/language_models/src/settings.rs
Julia Ryan f11c749353
VSCode Settings import (#29018)
Things this doesn't currently handle:

- [x] ~testing~
  - ~we really need a snapshot test that takes a vscode settings file with all
    options that we support, and verifies the zed settings file you get from
    importing it, both from an empty starting file and from one with lots of
    conflicts. that way we can open said vscode settings file in vscode to
    ensure that those options all still exist in the future.~
  - We discussed this and don't think it will meaningfully protect us from
    future failures; we'll just do this as a manual validation step before
    merging this PR. Any imports that have meaningfully complex translation
    steps should still be tested.
- [x] confirmation (right now it just clobbers your settings file silently)
  - it'd be really cool if we could show a diff multibuffer of your current
    settings against the result of the vscode import and let you pick "hunks"
    to keep, but that's probably too much effort for this feature, especially
    given that we expect most of the people using it to have an empty/barebones
    zed config when they run the import.
- [x] ~UI in the "welcome" page~
  - we're planning on redoing our welcome/walkthrough experience anyway, but in
    the meantime it'd be nice to conditionally show a button there if we see a
    user-level vscode config
  - we'll add it to the UI when we land the new walkthrough experience; for now
    it'll be accessible through the action
- [ ] project-specific settings
  - handling translation of `.vscode/settings.json` or `.code-workspace`
    settings to `.zed/settings.json` will come in a future PR, along with UI to
    prompt the user for those actions when opening a project with local vscode
    settings for the first time (a rough sketch of what that kind of key
    translation could look like follows this list)
- [ ] extension settings
  - we probably want to do a best-effort pass over popular extensions like Vim
    and GitLens
  - it's also possible to look for installed/enabled extensions with
    `code --list-extensions`, but we'd have to maintain some sort of mapping
    from those to our settings and/or extensions (see the second sketch after
    this list)
- [ ] LSP settings
  - these are tricky without access to the JSON schemas for the various
    language server extensions. we could probably manage to do translations for
    a couple of popular languages and avoid solving the general case.
- [ ] platform-specific settings (`[macos].blah`)
  - this is blocked on #16392, which I'm hoping to address soon
- [ ] language-specific settings (`[rust].foo`)
  - totally doable, just haven't gotten to it yet
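
For concreteness, here's a rough, standalone sketch of the kind of key
translation involved. This is not the actual importer code; it just uses
`serde_json`, and the Zed-side key names (`tab_size`, `buffer_font_size`) are
assumptions to double-check against Zed's settings schema:

```rust
use serde_json::{Map, Value, json};

/// Hypothetical translation of a couple of well-known VS Code keys into their
/// (assumed) Zed equivalents. Unknown keys are simply dropped.
fn translate(vscode: &Map<String, Value>) -> Value {
    let mut zed = Map::new();
    if let Some(tab_size) = vscode.get("editor.tabSize") {
        zed.insert("tab_size".into(), tab_size.clone());
    }
    if let Some(font_size) = vscode.get("editor.fontSize") {
        zed.insert("buffer_font_size".into(), font_size.clone());
    }
    Value::Object(zed)
}

fn main() {
    let vscode = json!({ "editor.tabSize": 2, "editor.fontSize": 14 });
    println!("{}", translate(vscode.as_object().unwrap()));
}
```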
 
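And a sketch of the `code --list-extensions` idea: the extension IDs below are
the published ones for VSCodeVim and GitLens, but the mapping targets are
hypothetical and would need to be maintained by hand:

```rust
use std::collections::HashMap;
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Ask the VS Code CLI for the installed extension IDs, one per line.
    let output = Command::new("code").arg("--list-extensions").output()?;

    // Hypothetical mapping from extension IDs to what we'd enable in Zed.
    let known: HashMap<&str, &str> = HashMap::from([
        ("vscodevim.vim", "enable vim_mode"),
        ("eamodio.gitlens", "built-in git support"),
    ]);

    for id in String::from_utf8_lossy(&output.stdout).lines() {
        if let Some(action) = known.get(id.trim()) {
            println!("{id}: {action}");
        }
    }
    Ok(())
}
```
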
~We may want to put this behind some kind of flag and/or not land it until some
of the above issues are addressed; given that we expect people to only run this
importer once, there's an incentive to get it right the first time. Maybe we
land it alongside a keymap importer so you don't have to go through separate
imports for those?~

We're gonna land this as-is; all the unchecked items above will be addressed in
follow-up PRs, so maybe hold off on running the importer for now if you have a
large and complex VS Code settings file you'd like to import.

Release Notes:

- Added a VS Code settings importer, available via the
  `zed::ImportVsCodeSettings` action

---------

Co-authored-by: Mikayla Maki <mikayla@zed.dev>
Co-authored-by: Kirill Bulatov <kirill@zed.dev>
Co-authored-by: Mikayla Maki <mikayla.c.maki@gmail.com>
Co-authored-by: Marshall Bowers <git@maxdeviant.com>
2025-04-23 20:54:09 +00:00


use std::sync::Arc;

use anyhow::Result;
use gpui::App;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, update_settings_file};

use crate::provider::{
    self,
    anthropic::AnthropicSettings,
    bedrock::AmazonBedrockSettings,
    cloud::{self, ZedDotDevSettings},
    copilot_chat::CopilotChatSettings,
    deepseek::DeepSeekSettings,
    google::GoogleSettings,
    lmstudio::LmStudioSettings,
    mistral::MistralSettings,
    ollama::OllamaSettings,
    open_ai::OpenAiSettings,
};

/// Initializes the language model settings.
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
    AllLanguageModelSettings::register(cx);

    // Rewrite legacy (unversioned) OpenAI and Anthropic settings in the
    // settings file to the latest versioned format.
    if AllLanguageModelSettings::get_global(cx)
        .openai
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
            if let Some(settings) = setting.openai.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.openai = Some(OpenAiSettingsContent::Versioned(
                    VersionedOpenAiSettingsContent::V1(newest_version),
                ));
            }
        });
    }

    if AllLanguageModelSettings::get_global(cx)
        .anthropic
        .needs_setting_migration
    {
        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
            if let Some(settings) = setting.anthropic.clone() {
                let (newest_version, _) = settings.upgrade();
                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
                    VersionedAnthropicSettingsContent::V1(newest_version),
                ));
            }
        });
    }
}

#[derive(Default)]
pub struct AllLanguageModelSettings {
    pub anthropic: AnthropicSettings,
    pub bedrock: AmazonBedrockSettings,
    pub ollama: OllamaSettings,
    pub openai: OpenAiSettings,
    pub zed_dot_dev: ZedDotDevSettings,
    pub google: GoogleSettings,
    pub copilot_chat: CopilotChatSettings,
    pub lmstudio: LmStudioSettings,
    pub deepseek: DeepSeekSettings,
    pub mistral: MistralSettings,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AllLanguageModelSettingsContent {
    pub anthropic: Option<AnthropicSettingsContent>,
    pub bedrock: Option<AmazonBedrockSettingsContent>,
    pub ollama: Option<OllamaSettingsContent>,
    pub lmstudio: Option<LmStudioSettingsContent>,
    pub openai: Option<OpenAiSettingsContent>,
    #[serde(rename = "zed.dev")]
    pub zed_dot_dev: Option<ZedDotDevSettingsContent>,
    pub google: Option<GoogleSettingsContent>,
    pub deepseek: Option<DeepseekSettingsContent>,
    pub copilot_chat: Option<CopilotChatSettingsContent>,
    pub mistral: Option<MistralSettingsContent>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum AnthropicSettingsContent {
    Legacy(LegacyAnthropicSettingsContent),
    Versioned(VersionedAnthropicSettingsContent),
}

impl AnthropicSettingsContent {
    /// Converts the settings to the latest versioned shape. The returned flag
    /// is `true` when the input was in the legacy shape and therefore needs to
    /// be migrated on disk.
    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
        match self {
            AnthropicSettingsContent::Legacy(content) => (
                AnthropicSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                anthropic::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration,
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                    mode,
                                } => Some(provider::anthropic::AvailableModel {
                                    name,
                                    display_name,
                                    max_tokens,
                                    tool_override,
                                    cache_configuration: cache_configuration.as_ref().map(
                                        |config| LanguageModelCacheConfiguration {
                                            max_cache_anchors: config.max_cache_anchors,
                                            should_speculate: config.should_speculate,
                                            min_total_token: config.min_total_token,
                                        },
                                    ),
                                    max_output_tokens,
                                    default_temperature,
                                    extra_beta_headers,
                                    mode: Some(mode.into()),
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            AnthropicSettingsContent::Versioned(content) => match content {
                VersionedAnthropicSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyAnthropicSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<anthropic::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedAnthropicSettingsContent {
    #[serde(rename = "1")]
    V1(AnthropicSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AnthropicSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct AmazonBedrockSettingsContent {
    available_models: Option<Vec<provider::bedrock::AvailableModel>>,
    endpoint_url: Option<String>,
    region: Option<String>,
    profile: Option<String>,
    authentication_method: Option<provider::bedrock::BedrockAuthMethod>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OllamaSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::ollama::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LmStudioSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::lmstudio::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct DeepseekSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::deepseek::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct MistralSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::mistral::AvailableModel>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(untagged)]
pub enum OpenAiSettingsContent {
    Legacy(LegacyOpenAiSettingsContent),
    Versioned(VersionedOpenAiSettingsContent),
}

impl OpenAiSettingsContent {
    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
        match self {
            OpenAiSettingsContent::Legacy(content) => (
                OpenAiSettingsContentV1 {
                    api_url: content.api_url,
                    available_models: content.available_models.map(|models| {
                        models
                            .into_iter()
                            .filter_map(|model| match model {
                                open_ai::Model::Custom {
                                    name,
                                    display_name,
                                    max_tokens,
                                    max_output_tokens,
                                    max_completion_tokens,
                                } => Some(provider::open_ai::AvailableModel {
                                    name,
                                    max_tokens,
                                    max_output_tokens,
                                    display_name,
                                    max_completion_tokens,
                                }),
                                _ => None,
                            })
                            .collect()
                    }),
                },
                true,
            ),
            OpenAiSettingsContent::Versioned(content) => match content {
                VersionedOpenAiSettingsContent::V1(content) => (content, false),
            },
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct LegacyOpenAiSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<open_ai::Model>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
#[serde(tag = "version")]
pub enum VersionedOpenAiSettingsContent {
    #[serde(rename = "1")]
    V1(OpenAiSettingsContentV1),
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct OpenAiSettingsContentV1 {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct GoogleSettingsContent {
    pub api_url: Option<String>,
    pub available_models: Option<Vec<provider::google::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct ZedDotDevSettingsContent {
    available_models: Option<Vec<cloud::AvailableModel>>,
}

#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
pub struct CopilotChatSettingsContent {}

impl settings::Settings for AllLanguageModelSettings {
    const KEY: Option<&'static str> = Some("language_models");
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AllLanguageModelSettingsContent;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
        // Overwrite the target only when the source actually provides a value,
        // so later sources (user customizations) can override earlier ones
        // (defaults).
        fn merge<T>(target: &mut T, value: Option<T>) {
            if let Some(value) = value {
                *target = value;
            }
        }

        let mut settings = AllLanguageModelSettings::default();

        for value in sources.defaults_and_customizations() {
            // Anthropic
            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.anthropic.needs_setting_migration = true;
            }

            merge(
                &mut settings.anthropic.api_url,
                anthropic.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.anthropic.available_models,
                anthropic.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Bedrock
            let bedrock = value.bedrock.clone();
            merge(
                &mut settings.bedrock.profile_name,
                bedrock.as_ref().map(|s| s.profile.clone()),
            );
            merge(
                &mut settings.bedrock.authentication_method,
                bedrock.as_ref().map(|s| s.authentication_method.clone()),
            );
            merge(
                &mut settings.bedrock.region,
                bedrock.as_ref().map(|s| s.region.clone()),
            );
            merge(
                &mut settings.bedrock.endpoint,
                bedrock.as_ref().map(|s| s.endpoint_url.clone()),
            );

            // Ollama
            let ollama = value.ollama.clone();
            merge(
                &mut settings.ollama.api_url,
                value.ollama.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.ollama.available_models,
                ollama.as_ref().and_then(|s| s.available_models.clone()),
            );

            // LM Studio
            let lmstudio = value.lmstudio.clone();
            merge(
                &mut settings.lmstudio.api_url,
                value.lmstudio.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.lmstudio.available_models,
                lmstudio.as_ref().and_then(|s| s.available_models.clone()),
            );

            // DeepSeek
            let deepseek = value.deepseek.clone();
            merge(
                &mut settings.deepseek.api_url,
                value.deepseek.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.deepseek.available_models,
                deepseek.as_ref().and_then(|s| s.available_models.clone()),
            );

            // OpenAI
            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
                Some((content, upgraded)) => (Some(content), upgraded),
                None => (None, false),
            };

            if upgraded {
                settings.openai.needs_setting_migration = true;
            }

            merge(
                &mut settings.openai.api_url,
                openai.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.openai.available_models,
                openai.as_ref().and_then(|s| s.available_models.clone()),
            );

            // Zed.dev
            merge(
                &mut settings.zed_dot_dev.available_models,
                value
                    .zed_dot_dev
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Google
            merge(
                &mut settings.google.api_url,
                value.google.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.google.available_models,
                value
                    .google
                    .as_ref()
                    .and_then(|s| s.available_models.clone()),
            );

            // Mistral
            let mistral = value.mistral.clone();
            merge(
                &mut settings.mistral.api_url,
                mistral.as_ref().and_then(|s| s.api_url.clone()),
            );
            merge(
                &mut settings.mistral.available_models,
                mistral.as_ref().and_then(|s| s.available_models.clone()),
            );
        }

        Ok(settings)
    }

    // Currently a no-op: nothing from the VS Code settings is translated into
    // these settings yet.
    fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}