ai: Separate model settings for each feature (#28088)
Closes: https://github.com/zed-industries/zed/issues/20582

Allows users to select a specific model for each AI-powered feature:

- Agent panel
- Inline assistant
- Thread summarization
- Commit message generation

If unspecified for a given feature, it will use the `default_model` setting.

Release Notes:

- Added support for configuring a specific model for each AI-powered feature

---------

Co-authored-by: Danilo Leal <daniloleal09@gmail.com>
Co-authored-by: Bennet Bo Fenner <bennetbo@gmx.de>
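The per-feature fallback described above can be illustrated with a small stand-in sketch. The names below (`ModelSelection`, `AiSettings`, and the accessor) are hypothetical and only mirror the behavior the description states; they are not Zed's actual settings types.

```rust
// Hypothetical sketch of the fallback rule: a feature-specific model is used
// if configured, otherwise the default model applies. Stand-in types only.
#[derive(Clone, Debug, PartialEq)]
struct ModelSelection {
    provider: String,
    model: String,
}

#[derive(Default)]
#[allow(dead_code)]
struct AiSettings {
    default_model: Option<ModelSelection>,
    inline_assistant_model: Option<ModelSelection>,
    commit_message_model: Option<ModelSelection>,
    thread_summary_model: Option<ModelSelection>,
}

impl AiSettings {
    // Return the inline assistant's model if one is set, otherwise fall back
    // to `default_model`, matching the behavior described above.
    fn inline_assistant_model(&self) -> Option<ModelSelection> {
        self.inline_assistant_model
            .clone()
            .or_else(|| self.default_model.clone())
    }
}

fn main() {
    let settings = AiSettings {
        default_model: Some(ModelSelection {
            provider: "example-provider".into(),
            model: "example-default-model".into(),
        }),
        ..Default::default()
    };
    // No inline assistant model is configured, so the default is used.
    assert_eq!(settings.inline_assistant_model(), settings.default_model);
}
```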
This commit is contained in:
parent
cf0d1e4229
commit
43cb925a59
27 changed files with 670 additions and 381 deletions
@@ -34,8 +34,8 @@ use gpui::{
 };
 use language::{Buffer, IndentKind, Point, Selection, TransactionId, line_diff};
 use language_model::{
-    LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
-    LanguageModelTextStream, Role, report_assistant_event,
+    ConfiguredModel, LanguageModel, LanguageModelRegistry, LanguageModelRequest,
+    LanguageModelRequestMessage, LanguageModelTextStream, Role, report_assistant_event,
 };
 use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu};
 use multi_buffer::MultiBufferRow;
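The hunks that follow all make the same call-site change: `active_model()` is replaced with an accessor that returns a `ConfiguredModel`, which is then destructured for its `model` handle. A minimal stand-in sketch of that pattern follows; the `provider` field and the trait shape are assumptions, not the real `language_model` API.

```rust
// Stand-in sketch of the destructuring pattern used throughout this diff.
use std::sync::Arc;

trait LanguageModel {
    fn name(&self) -> String;
}

#[allow(dead_code)]
struct ConfiguredModel {
    provider: String, // assumed field, hence the `..` in the patterns below
    model: Arc<dyn LanguageModel>,
}

// Mirrors the shape of the new call sites: destructure the configured model
// and keep only the model handle.
#[allow(dead_code)]
fn model_name(configured: Option<ConfiguredModel>) -> Option<String> {
    if let Some(ConfiguredModel { model, .. }) = configured {
        Some(model.name())
    } else {
        None
    }
}
```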
@@ -312,7 +312,9 @@ impl InlineAssistant {
             start..end,
         ));

-        if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
+        if let Some(ConfiguredModel { model, .. }) =
+            LanguageModelRegistry::read_global(cx).default_model()
+        {
             self.telemetry.report_assistant_event(AssistantEvent {
                 conversation_id: None,
                 kind: AssistantKind::Inline,
@@ -877,7 +879,9 @@ impl InlineAssistant {
         let active_alternative = assist.codegen.read(cx).active_alternative().clone();
         let message_id = active_alternative.read(cx).message_id.clone();

-        if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
+        if let Some(ConfiguredModel { model, .. }) =
+            LanguageModelRegistry::read_global(cx).default_model()
+        {
             let language_name = assist.editor.upgrade().and_then(|editor| {
                 let multibuffer = editor.read(cx).buffer().read(cx);
                 let multibuffer_snapshot = multibuffer.snapshot(cx);
@@ -1629,8 +1633,8 @@ impl Render for PromptEditor {
                         format!(
                             "Using {}",
                             LanguageModelRegistry::read_global(cx)
-                                .active_model()
-                                .map(|model| model.name().0)
+                                .default_model()
+                                .map(|default| default.model.name().0)
                                 .unwrap_or_else(|| "No model selected".into()),
                         ),
                         None,
@@ -2077,7 +2081,7 @@ impl PromptEditor {
         let disabled = matches!(codegen.status(cx), CodegenStatus::Idle);

         let model_registry = LanguageModelRegistry::read_global(cx);
-        let default_model = model_registry.active_model();
+        let default_model = model_registry.default_model().map(|default| default.model);
         let alternative_models = model_registry.inline_alternative_models();

         let get_model_name = |index: usize| -> String {
@@ -2183,7 +2187,9 @@ impl PromptEditor {
     }

     fn render_token_count(&self, cx: &mut Context<Self>) -> Option<impl IntoElement> {
-        let model = LanguageModelRegistry::read_global(cx).active_model()?;
+        let model = LanguageModelRegistry::read_global(cx)
+            .default_model()?
+            .model;
         let token_counts = self.token_counts?;
         let max_token_count = model.max_token_count();

@@ -2638,8 +2644,9 @@ impl Codegen {
         }

         let primary_model = LanguageModelRegistry::read_global(cx)
-            .active_model()
-            .context("no active model")?;
+            .default_model()
+            .context("no active model")?
+            .model;

         for (model, alternative) in iter::once(primary_model)
             .chain(alternative_models)
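The hunk above keeps the existing behavior of trying the primary (now default-configured) model first and then each inline alternative. A minimal sketch of that `iter::once(...).chain(...)` traversal, with illustrative placeholder names rather than real model identifiers:

```rust
use std::iter;

// Visit the primary model followed by the configured alternatives as one
// sequence, mirroring the chaining pattern in the hunk above.
fn models_in_order(primary: &str, alternatives: &[&str]) -> Vec<String> {
    iter::once(primary)
        .chain(alternatives.iter().copied())
        .map(str::to_owned)
        .collect()
}

fn main() {
    let order = models_in_order("primary-model", &["alt-a", "alt-b"]);
    assert_eq!(order, vec!["primary-model", "alt-a", "alt-b"]);
}
```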
@@ -2863,7 +2870,9 @@ impl CodegenAlternative {
         assistant_panel_context: Option<LanguageModelRequest>,
         cx: &App,
     ) -> BoxFuture<'static, Result<TokenCounts>> {
-        if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
+        if let Some(ConfiguredModel { model, .. }) =
+            LanguageModelRegistry::read_global(cx).inline_assistant_model()
+        {
             let request = self.build_request(user_prompt, assistant_panel_context.clone(), cx);
             match request {
                 Ok(request) => {