copilot: Support HTTP/HTTPS proxy for Copilot language server (#24364)

Closes #6701 (one of the top-ranking issues as of writing)

Adds the ability to specify an HTTP/HTTPS proxy through which Copilot code
completion API requests are routed. This should fix Copilot functionality
in restricted network environments (where such a proxy is required), and it
also opens up the ability to point Copilot code completion requests at your
own local LLM (see the example config after this list), using e.g.:
- https://github.com/jjleng/copilot-proxy
- https://github.com/bernardo-bruning/ollama-copilot/tree/master
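
For the local-LLM case, with one of the tools above listening on your machine, a minimal sketch of the configuration could look like this (the host/port are placeholders, not defaults of either tool):

```js
"inline_completions": {
  "copilot": {
    // Placeholder address; point this at wherever your local proxy is listening.
    "proxy": "http://localhost:11435",
    // Usually needed when a MITM proxy re-signs upstream TLS with its own certificate.
    "proxy_no_verify": true
  }
}
```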

External MITM-proxy tools permitting, this can serve as a stop-gap that
allows local LLM code completion in Zed until a proper OpenAI-compatible
local code completions provider is implemented. With this in mind, this PR
adds separate `settings.json` variables to configure a proxy server
_specific to the code completions provider_, rather than reusing the global
`proxy` setting. That way, in cases like the one above, we can proxy _only_
the Copilot requests without routing all other outgoing traffic from the
application through the proxy (see the sketch below).
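
To make the distinction concrete, here is a sketch of the two levels of configuration (the top-level `proxy` key is the existing application-wide setting referred to above, and both URLs are placeholders):

```js
// Existing global setting: proxies all outgoing traffic from Zed.
"proxy": "http://proxy.example.com:3128",

// New in this PR: proxies only the Copilot language server's requests.
"inline_completions": {
  "copilot": {
    "proxy": "http://example.com:15432"
  }
}
```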

Currently, two new settings are added:
- `inline_completions.copilot.proxy`: Proxy server URL (HTTP and HTTPS
schemes are supported)
- `inline_completions.copilot.proxy_no_verify`: Whether to disable TLS
certificate verification for requests made through the proxy

Example:
```js
"features": {
  "inline_completion_provider": "copilot"
},
"show_completions_on_input": true,
// New:
"inline_completions": {
  "copilot": {
    "proxy": "http://example.com:15432",
    "proxy_no_verify": true
  }
}
```


Release Notes:

- Added the ability to specify an HTTP/HTTPS proxy for Copilot.

---------

Co-authored-by: Marshall Bowers <git@maxdeviant.com>
Eli Kaplan 2025-02-24 09:11:00 -08:00 committed by GitHub
parent dd0de3cfa9
commit a8d56877ee
4 changed files with 102 additions and 7 deletions

@@ -38,6 +38,7 @@ gpui.workspace = true
 http_client.workspace = true
 inline_completion.workspace = true
 language.workspace = true
+log.workspace = true
 lsp.workspace = true
 menu.workspace = true
 node_runtime.workspace = true
@@ -62,7 +63,9 @@ async-std = { version = "1.12.0", features = ["unstable"] }
 client = { workspace = true, features = ["test-support"] }
 clock = { workspace = true, features = ["test-support"] }
 collections = { workspace = true, features = ["test-support"] }
+ctor.workspace = true
 editor = { workspace = true, features = ["test-support"] }
+env_logger.workspace = true
 fs = { workspace = true, features = ["test-support"] }
 gpui = { workspace = true, features = ["test-support"] }
 http_client = { workspace = true, features = ["test-support"] }

@@ -16,6 +16,7 @@ use gpui::{
 };
 use http_client::github::get_release_by_tag_name;
 use http_client::HttpClient;
+use language::language_settings::CopilotSettings;
 use language::{
     language_settings::{all_language_settings, language_settings, EditPredictionProvider},
     point_from_lsp, point_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, Language, PointUtf16,
@@ -367,13 +368,13 @@ impl Copilot {
         let server_id = self.server_id;
         let http = self.http.clone();
         let node_runtime = self.node_runtime.clone();
-        if all_language_settings(None, cx).edit_predictions.provider
-            == EditPredictionProvider::Copilot
-        {
+        let language_settings = all_language_settings(None, cx);
+        if language_settings.edit_predictions.provider == EditPredictionProvider::Copilot {
             if matches!(self.server, CopilotServer::Disabled) {
+                let env = self.build_env(&language_settings.edit_predictions.copilot);
                 let start_task = cx
                     .spawn(move |this, cx| {
-                        Self::start_language_server(server_id, http, node_runtime, this, cx)
+                        Self::start_language_server(server_id, http, node_runtime, env, this, cx)
                     })
                     .shared();
                 self.server = CopilotServer::Starting { task: start_task };
@@ -385,6 +386,30 @@
         }
     }
 
+    fn build_env(&self, copilot_settings: &CopilotSettings) -> Option<HashMap<String, String>> {
+        let proxy_url = copilot_settings.proxy.clone()?;
+        let no_verify = copilot_settings.proxy_no_verify;
+        let http_or_https_proxy = if proxy_url.starts_with("http:") {
+            "HTTP_PROXY"
+        } else if proxy_url.starts_with("https:") {
+            "HTTPS_PROXY"
+        } else {
+            log::error!(
+                "Unsupported protocol scheme for language server proxy (must be http or https)"
+            );
+            return None;
+        };
+        let mut env = HashMap::default();
+        env.insert(http_or_https_proxy.to_string(), proxy_url);
+        if let Some(true) = no_verify {
+            env.insert("NODE_TLS_REJECT_UNAUTHORIZED".to_string(), "0".to_string());
+        };
+        Some(env)
+    }
+
     #[cfg(any(test, feature = "test-support"))]
     pub fn fake(cx: &mut gpui::TestAppContext) -> (Entity<Self>, lsp::FakeLanguageServer) {
         use lsp::FakeLanguageServer;
@@ -422,6 +447,7 @@ impl Copilot {
         new_server_id: LanguageServerId,
         http: Arc<dyn HttpClient>,
         node_runtime: NodeRuntime,
+        env: Option<HashMap<String, String>>,
         this: WeakEntity<Self>,
         mut cx: AsyncApp,
     ) {
@@ -432,8 +458,7 @@
         let binary = LanguageServerBinary {
             path: node_path,
             arguments,
-            // TODO: We could set HTTP_PROXY etc here and fix the copilot issue.
-            env: None,
+            env,
         };
 
         let root_path = if cfg!(target_os = "windows") {
@@ -611,6 +636,8 @@
     }
 
     pub fn reinstall(&mut self, cx: &mut Context<Self>) -> Task<()> {
+        let language_settings = all_language_settings(None, cx);
+        let env = self.build_env(&language_settings.edit_predictions.copilot);
         let start_task = cx
             .spawn({
                 let http = self.http.clone();
@@ -618,7 +645,7 @@ impl Copilot {
                 let server_id = self.server_id;
                 move |this, cx| async move {
                     clear_copilot_dir().await;
-                    Self::start_language_server(server_id, http, node_runtime, this, cx).await
+                    Self::start_language_server(server_id, http, node_runtime, env, this, cx).await
                 }
             })
             .shared();
@@ -1279,3 +1306,11 @@ mod tests {
         }
     }
 }
+
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+    if std::env::var("RUST_LOG").is_ok() {
+        env_logger::init();
+    }
+}