agent: Allow customizing temperature by provider/model (#30033)
Adds a new `agent.model_parameters` setting that allows the user to specify a custom temperature for a provider and/or model:

```json5
"model_parameters": [
  // To set parameters for all requests to OpenAI models:
  {
    "provider": "openai",
    "temperature": 0.5
  },
  // To set parameters for all requests in general:
  {
    "temperature": 0
  },
  // To set parameters for a specific provider and model:
  {
    "provider": "zed.dev",
    "model": "claude-3-7-sonnet-latest",
    "temperature": 1.0
  }
],
```

Release Notes:

- agent: Allow customizing temperature by provider/model

---------

Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
Co-authored-by: Marshall Bowers <git@maxdeviant.com>
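For illustration, here is a minimal Rust sketch of how such a `model_parameters` list could be resolved for an individual request, assuming the most specific matching entry wins (provider plus model over provider-only over a catch-all entry). The `ModelParameters` struct and `resolve_temperature` helper are hypothetical names used only for this sketch, not Zed's actual types:

```rust
// Hypothetical sketch: pick the temperature from the most specific matching entry.
#[derive(Clone, Debug)]
struct ModelParameters {
    provider: Option<String>, // e.g. "openai", "zed.dev"; None matches any provider
    model: Option<String>,    // e.g. "claude-3-7-sonnet-latest"; None matches any model
    temperature: Option<f32>,
}

fn resolve_temperature(
    params: &[ModelParameters],
    provider: &str,
    model: &str,
) -> Option<f32> {
    params
        .iter()
        // Keep only entries whose provider/model constraints match this request.
        .filter(|p| {
            p.provider.as_deref().map_or(true, |v| v == provider)
                && p.model.as_deref().map_or(true, |v| v == model)
        })
        // Specificity score: provider + model (2) > provider only (1) > catch-all (0).
        .max_by_key(|p| p.provider.is_some() as u8 + p.model.is_some() as u8)
        .and_then(|p| p.temperature)
}

fn main() {
    let params = vec![
        ModelParameters { provider: Some("openai".into()), model: None, temperature: Some(0.5) },
        ModelParameters { provider: None, model: None, temperature: Some(0.0) },
        ModelParameters {
            provider: Some("zed.dev".into()),
            model: Some("claude-3-7-sonnet-latest".into()),
            temperature: Some(1.0),
        },
    ];
    assert_eq!(resolve_temperature(&params, "openai", "gpt-4"), Some(0.5));
    assert_eq!(resolve_temperature(&params, "zed.dev", "claude-3-7-sonnet-latest"), Some(1.0));
    assert_eq!(resolve_temperature(&params, "anthropic", "claude-3-opus"), Some(0.0));
}
```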
This commit is contained in:
parent 0055a20512 / commit 3cdf5ce947

22 changed files with 348 additions and 106 deletions
```diff
@@ -78,6 +78,7 @@ zed_llm_client.workspace = true

[dev-dependencies]
assistant = { workspace = true, features = ["test-support"] }
assistant_context_editor.workspace = true
assistant_settings.workspace = true
assistant_slash_command.workspace = true
assistant_tool.workspace = true
async-trait.workspace = true
```
```diff
@@ -307,6 +307,7 @@ impl TestServer {
            );
            language_model::LanguageModelRegistry::test(cx);
            assistant_context_editor::init(client.clone(), cx);
            assistant_settings::init(cx);
        });

        client
```