Add LM Studio support to the Assistant (#23097)
#### Release Notes:

- Added support for [LM Studio](https://lmstudio.ai/) to the Assistant.

#### Quick demo:

https://github.com/user-attachments/assets/af58fc13-1abc-4898-9747-3511016da86a

#### Future enhancements:

- wire up tool calling (new in [LM Studio 0.3.6](https://lmstudio.ai/blog/lmstudio-v0.3.6))

---------

Co-authored-by: Marshall Bowers <elliott.codes@gmail.com>
Parent: 4445679f3c
Commit: c038696aa8

24 changed files with 1153 additions and 2 deletions
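For orientation before the diffs: LM Studio serves its locally loaded models through an OpenAI-compatible HTTP server, which is what the new provider talks to. The sketch below is not part of this PR; it simply lists the models a running LM Studio instance exposes. The `http://localhost:1234` address and `/v1/models` route are LM Studio's documented defaults, and the example assumes `reqwest` (with the `blocking` and `json` features) and `serde_json` as dependencies.

```rust
// Not from this PR: a quick way to see which models a local LM Studio server
// exposes. Assumes LM Studio's default address (http://localhost:1234) and its
// OpenAI-compatible /v1/models endpoint.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let models: serde_json::Value =
        reqwest::blocking::get("http://localhost:1234/v1/models")?.json()?;

    // Each entry's "id" is the identifier a default-model setting would refer to.
    if let Some(entries) = models["data"].as_array() {
        for entry in entries {
            println!("{}", entry["id"]);
        }
    }
    Ok(())
}
```

The optional `api_url` field introduced in the settings changes below covers the case where this server is not running at the default address.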
```diff
@@ -52,6 +52,7 @@ language.workspace = true
 language_model.workspace = true
 language_model_selector.workspace = true
 language_models.workspace = true
+lmstudio = { workspace = true, features = ["schemars"] }
 log.workspace = true
 lsp.workspace = true
 markdown.workspace = true
```
```diff
@@ -5,6 +5,7 @@ use anthropic::Model as AnthropicModel;
 use feature_flags::FeatureFlagAppExt;
 use gpui::{AppContext, Pixels};
 use language_model::{CloudModel, LanguageModel};
+use lmstudio::Model as LmStudioModel;
 use ollama::Model as OllamaModel;
 use schemars::{schema::Schema, JsonSchema};
 use serde::{Deserialize, Serialize};
```
```diff
@@ -40,6 +41,10 @@ pub enum AssistantProviderContentV1 {
         default_model: Option<OllamaModel>,
         api_url: Option<String>,
     },
+    LmStudio {
+        default_model: Option<LmStudioModel>,
+        api_url: Option<String>,
+    },
 }
 
 #[derive(Debug, Default)]
```
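The new `LmStudio` variant mirrors the existing `Ollama` one: an optional default model plus an optional `api_url`. As a rough illustration of how such a variant can round-trip from a settings file, here is a stripped-down stand-in; the internally tagged serde representation, the `String` model field, and the example model name are assumptions made for this sketch rather than details taken from the diff (it requires `serde` with the `derive` feature and `serde_json`).

```rust
// Illustrative stand-in for the versioned provider enum above, showing how an
// "lmstudio" provider entry could deserialize into the new variant. The serde
// attributes and the String model field are assumptions for this sketch only.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(tag = "name", rename_all = "lowercase")]
enum ProviderContent {
    Ollama {
        default_model: Option<String>,
        api_url: Option<String>,
    },
    LmStudio {
        default_model: Option<String>,
        api_url: Option<String>,
    },
}

fn main() -> serde_json::Result<()> {
    // Hypothetical settings fragment; model name and URL are examples only.
    let json = r#"{
        "name": "lmstudio",
        "default_model": "qwen2.5-7b-instruct",
        "api_url": "http://localhost:1234/v1"
    }"#;

    let provider: ProviderContent = serde_json::from_str(json)?;
    println!("{provider:?}");
    Ok(())
}
```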
```diff
@@ -137,6 +142,12 @@ impl AssistantSettingsContent {
                     model: model.id().to_string(),
                 })
             }
+            AssistantProviderContentV1::LmStudio { default_model, .. } => {
+                default_model.map(|model| LanguageModelSelection {
+                    provider: "lmstudio".to_string(),
+                    model: model.id().to_string(),
+                })
+            }
         }),
         inline_alternatives: None,
         enable_experimental_live_diffs: None,
```
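The match arm added here converts a configured LM Studio default model into a `LanguageModelSelection` carrying the `"lmstudio"` provider slug, the same shape the neighbouring providers produce. Below is a minimal, self-contained sketch of that mapping; `LmStudioModel` and `LanguageModelSelection` are simplified stand-ins for the real types, and the model name is just an example.

```rust
// Simplified stand-ins illustrating the mapping performed by the new match arm:
// an optional LM Studio default model becomes an optional provider/model pair.
struct LanguageModelSelection {
    provider: String,
    model: String,
}

struct LmStudioModel {
    name: String,
}

impl LmStudioModel {
    // Stand-in for the real model type's id() accessor.
    fn id(&self) -> &str {
        &self.name
    }
}

fn selection_for(default_model: Option<LmStudioModel>) -> Option<LanguageModelSelection> {
    default_model.map(|model| LanguageModelSelection {
        provider: "lmstudio".to_string(),
        model: model.id().to_string(),
    })
}

fn main() {
    if let Some(selection) = selection_for(Some(LmStudioModel {
        name: "qwen2.5-7b-instruct".to_string(),
    })) {
        println!("{} -> {}", selection.provider, selection.model);
    }
}
```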
```diff
@@ -214,6 +225,18 @@ impl AssistantSettingsContent {
                     api_url,
                 });
             }
+            "lmstudio" => {
+                let api_url = match &settings.provider {
+                    Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
+                        api_url.clone()
+                    }
+                    _ => None,
+                };
+                settings.provider = Some(AssistantProviderContentV1::LmStudio {
+                    default_model: Some(lmstudio::Model::new(&model, None, None)),
+                    api_url,
+                });
+            }
             "openai" => {
                 let (api_url, available_models) = match &settings.provider {
                     Some(AssistantProviderContentV1::OpenAi {
```
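The `"lmstudio"` arm follows the same pattern as the surrounding providers: recover any `api_url` the user already configured, then swap in the new default model without discarding that URL. A simplified, self-contained sketch of that update pattern follows; the types and example values are stand-ins, not the ones from the diff.

```rust
// Simplified sketch of the "preserve api_url, replace default_model" pattern
// used by the new "lmstudio" settings arm. Types are stand-ins for the real ones.
#[derive(Debug)]
enum Provider {
    LmStudio {
        default_model: Option<String>,
        api_url: Option<String>,
    },
}

fn set_lmstudio_model(provider: &mut Option<Provider>, model: &str) {
    // Keep a previously configured api_url, if any.
    let api_url = match provider {
        Some(Provider::LmStudio { api_url, .. }) => api_url.clone(),
        _ => None,
    };
    *provider = Some(Provider::LmStudio {
        default_model: Some(model.to_string()),
        api_url,
    });
}

fn main() {
    // Start with a custom api_url and no default model configured.
    let mut provider = Some(Provider::LmStudio {
        default_model: None,
        api_url: Some("http://localhost:1234/v1".to_string()),
    });

    set_lmstudio_model(&mut provider, "qwen2.5-7b-instruct");

    // The custom api_url survives the model change.
    println!("{provider:?}");
}
```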
```diff
@@ -313,6 +336,7 @@ fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema:
             "anthropic".into(),
             "google".into(),
             "ollama".into(),
+            "lmstudio".into(),
             "openai".into(),
             "zed.dev".into(),
             "copilot_chat".into(),
```
```diff
@@ -355,7 +379,7 @@ pub struct AssistantSettingsContentV1 {
     default_height: Option<f32>,
     /// The provider of the assistant service.
     ///
-    /// This can be "openai", "anthropic", "ollama", "zed.dev"
+    /// This can be "openai", "anthropic", "ollama", "lmstudio", "zed.dev"
     /// each with their respective default models and configurations.
     provider: Option<AssistantProviderContentV1>,
 }
```