assistant: Fix formatting in settings (#17172)
This PR fixes some formatting issues in `assistant_settings.rs` caused by long lines.

Release Notes:

- N/A
parent 68ea661711
commit c0731bfa28
1 changed file with 41 additions and 32 deletions
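The change is essentially an import cleanup: the fully qualified `language_model::settings::...` and `language_model::provider::open_ai::...` paths were pushing lines past rustfmt's default line width (100 columns), so the types are now brought into scope with `use` and referenced by their short names. Below is a rough, self-contained sketch of that pattern; the `settings` module, its types, and the example URL are hypothetical stand-ins, not the real `language_model` crate.

```rust
// Hypothetical stand-ins for the deeply nested settings types touched by this
// commit; the real ones live in Zed's `language_model` crate.
mod settings {
    #[derive(Debug)]
    pub struct OpenAiSettingsContentV1 {
        pub api_url: Option<String>,
        pub available_models: Option<Vec<String>>,
    }

    #[derive(Debug)]
    pub enum VersionedOpenAiSettingsContent {
        V1(OpenAiSettingsContentV1),
    }

    #[derive(Debug)]
    pub enum OpenAiSettingsContent {
        Versioned(VersionedOpenAiSettingsContent),
    }
}

// Importing the items keeps each line of the construction below well under the
// 100-column limit; spelling out `settings::VersionedOpenAiSettingsContent::V1(...)`
// inline is what produced the over-long, awkwardly wrapped lines.
use settings::{OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedOpenAiSettingsContent};

fn main() {
    let content = Some(OpenAiSettingsContent::Versioned(
        VersionedOpenAiSettingsContent::V1(OpenAiSettingsContentV1 {
            api_url: Some("https://api.openai.com/v1".into()),
            available_models: None,
        }),
    ));
    println!("{content:?}");
}
```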
assistant_settings.rs

@@ -1,11 +1,17 @@
 use std::sync::Arc;
 
+use ::open_ai::Model as OpenAiModel;
 use anthropic::Model as AnthropicModel;
 use fs::Fs;
 use gpui::{AppContext, Pixels};
+use language_model::provider::open_ai;
+use language_model::settings::{
+    AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
+    OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
+    VersionedOpenAiSettingsContent,
+};
 use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
 use ollama::Model as OllamaModel;
-use open_ai::Model as OpenAiModel;
 use schemars::{schema::Schema, JsonSchema};
 use serde::{Deserialize, Serialize};
 use settings::{update_settings_file, Settings, SettingsSources};
@@ -109,15 +115,14 @@ impl AssistantSettingsContent
                     cx,
                     move |content, _| {
                         if content.anthropic.is_none() {
-                            content.anthropic =
-                                Some(language_model::settings::AnthropicSettingsContent::Versioned(
-                                    language_model::settings::VersionedAnthropicSettingsContent::V1(
-                                        language_model::settings::AnthropicSettingsContentV1 {
+                            content.anthropic = Some(AnthropicSettingsContent::Versioned(
+                                VersionedAnthropicSettingsContent::V1(
+                                    AnthropicSettingsContentV1 {
                                         api_url,
                                         low_speed_timeout_in_seconds,
-                                        available_models: None
-                                        }
-                                    )
+                                        available_models: None,
+                                    },
+                                ),
                             ));
                         }
                     },
@@ -131,8 +136,7 @@ impl AssistantSettingsContent
                     cx,
                     move |content, _| {
                         if content.ollama.is_none() {
-                            content.ollama =
-                                Some(language_model::settings::OllamaSettingsContent {
+                            content.ollama = Some(OllamaSettingsContent {
                                 api_url,
                                 low_speed_timeout_in_seconds,
                                 available_models: None,
@@ -154,22 +158,27 @@ impl AssistantSettingsContent
                                models
                                    .into_iter()
                                    .filter_map(|model| match model {
-                                        open_ai::Model::Custom { name, max_tokens,max_output_tokens } => {
-                                            Some(language_model::provider::open_ai::AvailableModel { name, max_tokens,max_output_tokens })
-                                        }
+                                        OpenAiModel::Custom {
+                                            name,
+                                            max_tokens,
+                                            max_output_tokens,
+                                        } => Some(open_ai::AvailableModel {
+                                            name,
+                                            max_tokens,
+                                            max_output_tokens,
+                                        }),
                                        _ => None,
                                    })
                                    .collect::<Vec<_>>()
                            });
-                            content.openai =
-                                Some(language_model::settings::OpenAiSettingsContent::Versioned(
-                                    language_model::settings::VersionedOpenAiSettingsContent::V1(
-                                        language_model::settings::OpenAiSettingsContentV1 {
+                            content.openai = Some(OpenAiSettingsContent::Versioned(
+                                VersionedOpenAiSettingsContent::V1(
+                                    OpenAiSettingsContentV1 {
                                         api_url,
                                         low_speed_timeout_in_seconds,
-                                        available_models
-                                        }
-                                    )
+                                        available_models,
+                                    },
+                                ),
                            ));
                        }
                    },
@@ -317,7 +326,7 @@ impl AssistantSettingsContent
                    _ => (None, None, None),
                };
                settings.provider = Some(AssistantProviderContentV1::OpenAi {
-                    default_model: open_ai::Model::from_id(&model).ok(),
+                    default_model: OpenAiModel::from_id(&model).ok(),
                    api_url,
                    low_speed_timeout_in_seconds,
                    available_models,
@@ -330,7 +339,7 @@ impl AssistantSettingsContent
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
-                if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
+                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }