language_models: Pass up mode for completion requests through Zed (#29294)
This PR makes it so we pass up the `mode` for completion requests through the Zed provider.

Release Notes:

- N/A
This commit is contained in:
parent
724c935196
commit
92e810bfec
3 changed files with 7 additions and 4 deletions

Cargo.lock (generated) — 4 changes
@@ -18400,9 +18400,9 @@ dependencies = [

 [[package]]
 name = "zed_llm_client"
-version = "0.6.1"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad17428120f5ca776dc5195e2411a282f5150a26d5536671f8943c622c31274f"
+checksum = "3c1666cd923c5eb4635f3743e69c6920d0ed71f29b26920616a5d220607df7c4"
 dependencies = [
  "anyhow",
  "serde",
@@ -605,7 +605,7 @@ wasmtime-wasi = "29"
 which = "6.0.0"
 wit-component = "0.221"
 workspace-hack = "0.1.0"
-zed_llm_client = "0.6.1"
+zed_llm_client = "0.7.0"
 zstd = "0.11"
 metal = "0.29"
@@ -35,7 +35,7 @@ use strum::IntoEnumIterator;
 use thiserror::Error;
 use ui::{TintColor, prelude::*};
 use zed_llm_client::{
-    CURRENT_PLAN_HEADER_NAME, CompletionBody, EXPIRED_LLM_TOKEN_HEADER_NAME,
+    CURRENT_PLAN_HEADER_NAME, CompletionBody, CompletionMode, EXPIRED_LLM_TOKEN_HEADER_NAME,
     MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME, MODEL_REQUESTS_RESOURCE_HEADER_VALUE,
     SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME,
 };
@@ -748,6 +748,7 @@ impl LanguageModel for CloudLanguageModel {
             CompletionBody {
                 thread_id,
                 prompt_id,
+                mode: Some(CompletionMode::Max),
                 provider: zed_llm_client::LanguageModelProvider::Anthropic,
                 model: request.model.clone(),
                 provider_request: serde_json::to_value(&request)?,
@@ -794,6 +795,7 @@ impl LanguageModel for CloudLanguageModel {
             CompletionBody {
                 thread_id,
                 prompt_id,
+                mode: Some(CompletionMode::Max),
                 provider: zed_llm_client::LanguageModelProvider::OpenAi,
                 model: request.model.clone(),
                 provider_request: serde_json::to_value(&request)?,
@@ -824,6 +826,7 @@ impl LanguageModel for CloudLanguageModel {
             CompletionBody {
                 thread_id,
                 prompt_id,
+                mode: Some(CompletionMode::Max),
                 provider: zed_llm_client::LanguageModelProvider::Google,
                 model: request.model.clone(),
                 provider_request: serde_json::to_value(&request)?,
|
Loading…
Add table
Add a link
Reference in a new issue