diff --git a/crates/copilot/src/copilot_chat.rs b/crates/copilot/src/copilot_chat.rs
index b1fa1565f3..4c91b4fedb 100644
--- a/crates/copilot/src/copilot_chat.rs
+++ b/crates/copilot/src/copilot_chat.rs
@@ -528,6 +528,7 @@ impl CopilotChat {
 
     pub async fn stream_completion(
         request: Request,
+        is_user_initiated: bool,
         mut cx: AsyncApp,
     ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
         let this = cx
@@ -562,7 +563,14 @@ impl CopilotChat {
         };
 
         let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
-        stream_completion(client.clone(), token.api_key, api_url.into(), request).await
+        stream_completion(
+            client.clone(),
+            token.api_key,
+            api_url.into(),
+            request,
+            is_user_initiated,
+        )
+        .await
     }
 
     pub fn set_configuration(
@@ -697,6 +705,7 @@ async fn stream_completion(
     api_key: String,
     completion_url: Arc<str>,
     request: Request,
+    is_user_initiated: bool,
 ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
     let is_vision_request = request.messages.iter().any(|message| match message {
         ChatMessage::User { content }
@@ -707,6 +716,8 @@ async fn stream_completion(
         _ => false,
     });
 
+    let request_initiator = if is_user_initiated { "user" } else { "agent" };
+
     let mut request_builder = HttpRequest::builder()
         .method(Method::POST)
         .uri(completion_url.as_ref())
@@ -719,7 +730,8 @@ async fn stream_completion(
         )
         .header("Authorization", format!("Bearer {}", api_key))
         .header("Content-Type", "application/json")
-        .header("Copilot-Integration-Id", "vscode-chat");
+        .header("Copilot-Integration-Id", "vscode-chat")
+        .header("X-Initiator", request_initiator);
 
     if is_vision_request {
         request_builder =
diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs
index 5411fbc63c..d9a84f1eb7 100644
--- a/crates/language_models/src/provider/copilot_chat.rs
+++ b/crates/language_models/src/provider/copilot_chat.rs
@@ -30,6 +30,7 @@ use settings::SettingsStore;
 use std::time::Duration;
 use ui::prelude::*;
 use util::debug_panic;
+use zed_llm_client::CompletionIntent;
 
 use super::anthropic::count_anthropic_tokens;
 use super::google::count_google_tokens;
@@ -268,6 +269,19 @@ impl LanguageModel for CopilotChatLanguageModel {
             LanguageModelCompletionError,
         >,
     > {
+        let is_user_initiated = request.intent.is_none_or(|intent| match intent {
+            CompletionIntent::UserPrompt
+            | CompletionIntent::ThreadContextSummarization
+            | CompletionIntent::InlineAssist
+            | CompletionIntent::TerminalInlineAssist
+            | CompletionIntent::GenerateGitCommitMessage => true,
+
+            CompletionIntent::ToolResults
+            | CompletionIntent::ThreadSummarization
+            | CompletionIntent::CreateFile
+            | CompletionIntent::EditFile => false,
+        });
+
         let copilot_request = match into_copilot_chat(&self.model, request) {
             Ok(request) => request,
             Err(err) => return futures::future::ready(Err(err.into())).boxed(),
@@ -276,7 +290,8 @@ impl LanguageModel for CopilotChatLanguageModel {
 
         let request_limiter = self.request_limiter.clone();
         let future = cx.spawn(async move |cx| {
-            let request = CopilotChat::stream_completion(copilot_request, cx.clone());
+            let request =
+                CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone());
             request_limiter
                 .stream(async move {
                     let response = request.await?;
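
Note (not part of the patch): the sketch below illustrates the intent-to-initiator mapping this change introduces, i.e. how a request's CompletionIntent is reduced to the "user" or "agent" value sent in the X-Initiator header. The Intent enum here is a hypothetical stand-in for zed_llm_client::CompletionIntent, trimmed to a few variants; the authoritative mapping is the match in the hunk above.

// Standalone sketch, compiles on its own. `Intent` is a placeholder type,
// not the real CompletionIntent enum.
#[derive(Clone, Copy)]
enum Intent {
    UserPrompt,
    InlineAssist,
    ToolResults,
    EditFile,
}

// Requests a person triggered directly are tagged "user"; requests the agent
// issues on its own behalf (tool results, file edits) are tagged "agent".
// A request carrying no intent defaults to "user", mirroring the
// `request.intent.is_none_or(...)` call in the patch.
fn initiator(intent: Option<Intent>) -> &'static str {
    let is_user_initiated = intent.map_or(true, |intent| {
        matches!(intent, Intent::UserPrompt | Intent::InlineAssist)
    });
    if is_user_initiated { "user" } else { "agent" }
}

fn main() {
    assert_eq!(initiator(Some(Intent::UserPrompt)), "user");
    assert_eq!(initiator(Some(Intent::InlineAssist)), "user");
    assert_eq!(initiator(Some(Intent::ToolResults)), "agent");
    assert_eq!(initiator(Some(Intent::EditFile)), "agent");
    assert_eq!(initiator(None), "user");
}

The resulting string is attached as a plain header ("X-Initiator: user" or "X-Initiator: agent") alongside the existing Copilot-Integration-Id header, as shown in the first file's request_builder hunk.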