Verify downloaded rust-analyzer and clangd binaries by checking the artifact digest (#35642)

Release Notes:

- Added GitHub artifact digest verification for rust-analyzer and clangd
binary downloads; the download is skipped when the cached binary's digest
is already up to date (see the sketch below)
- Added a check that cached rust-analyzer and clangd binaries are
executable; if they are not, they are re-downloaded
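
Below is a minimal sketch of the digest check described above, not the code added by this PR: it assumes the `sha2` crate, and `cached_artifact_matches` plus the paths in `main` are illustrative names only. The actual change hashes the asset while streaming it to disk and records the digest in a sidecar `metadata` file next to the binary, rather than rehashing the cached file on every start.

```rust
// Illustrative only: compare a cached artifact's SHA-256 against the digest
// reported by the GitHub release API (formatted as "sha256:<hex>").
use std::{fs, io::Read, path::Path};

use sha2::{Digest, Sha256};

fn cached_artifact_matches(path: &Path, expected: &str) -> std::io::Result<bool> {
    // GitHub's asset digest comes prefixed with the algorithm name.
    let expected = expected.strip_prefix("sha256:").unwrap_or(expected);
    let mut file = fs::File::open(path)?;
    let mut hasher = Sha256::new();
    let mut buf = [0u8; 8192];
    loop {
        let n = file.read(&mut buf)?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    // Hex-encode and compare; a mismatch means the cached download is stale.
    Ok(format!("{:x}", hasher.finalize()) == expected)
}

fn main() -> std::io::Result<()> {
    // Hypothetical inputs; the real code gets the expected digest from the
    // release metadata and the path from the language server's cache dir.
    let up_to_date = cached_artifact_matches(Path::new("clangd.zip"), "sha256:deadbeef")?;
    println!("cached artifact matches digest: {up_to_date}");
    Ok(())
}
```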

---------

Co-authored-by: Kirill Bulatov <kirill@zed.dev>
Lukas Wirth 2025-08-06 10:32:25 +02:00 committed by GitHub
parent 40129147c6
commit c59c436a11
11 changed files with 354 additions and 123 deletions

Cargo.lock (generated)
View file

@@ -9208,6 +9208,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-compression",
"async-fs",
"async-tar",
"async-trait",
"chrono",
@@ -9239,9 +9240,11 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
"sha2",
"smol",
"snippet_provider",
"task",
"tempfile",
"text",
"theme",
"toml 0.8.20",

View file

@@ -8,6 +8,7 @@ use url::Url;
pub struct GitHubLspBinaryVersion {
pub name: String,
pub url: String,
pub digest: Option<String>,
}
#[derive(Deserialize, Debug)]
@@ -24,6 +25,7 @@ pub struct GithubRelease {
pub struct GithubReleaseAsset {
pub name: String,
pub browser_download_url: String,
pub digest: Option<String>,
}
pub async fn latest_github_release(

View file

@@ -36,6 +36,7 @@ load-grammars = [
[dependencies]
anyhow.workspace = true
async-compression.workspace = true
async-fs.workspace = true
async-tar.workspace = true
async-trait.workspace = true
chrono.workspace = true
@@ -62,6 +63,7 @@ regex.workspace = true
rope.workspace = true
rust-embed.workspace = true
schemars.workspace = true
sha2.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
@@ -69,6 +71,7 @@ settings.workspace = true
smol.workspace = true
snippet_provider.workspace = true
task.workspace = true
tempfile.workspace = true
toml.workspace = true
tree-sitter = { workspace = true, optional = true }
tree-sitter-bash = { workspace = true, optional = true }

View file

@@ -2,14 +2,16 @@ use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use futures::StreamExt;
use gpui::{App, AsyncApp};
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
use http_client::github::{AssetKind, GitHubLspBinaryVersion, latest_github_release};
pub use language::*;
use lsp::{InitializeParams, LanguageServerBinary, LanguageServerName};
use project::lsp_store::clangd_ext;
use serde_json::json;
use smol::fs;
use std::{any::Any, env::consts, path::PathBuf, sync::Arc};
use util::{ResultExt, archive::extract_zip, fs::remove_matching, maybe, merge_json_value_into};
use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into};
use crate::github_download::{GithubBinaryMetadata, download_server_binary};
pub struct CLspAdapter;
@@ -58,6 +60,7 @@ impl super::LspAdapter for CLspAdapter {
let version = GitHubLspBinaryVersion {
name: release.tag_name,
url: asset.browser_download_url.clone(),
digest: asset.digest.clone(),
};
Ok(Box::new(version) as Box<_>)
}
@@ -68,32 +71,67 @@ impl super::LspAdapter for CLspAdapter {
container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let version_dir = container_dir.join(format!("clangd_{}", version.name));
let GitHubLspBinaryVersion { name, url, digest } =
&*version.downcast::<GitHubLspBinaryVersion>().unwrap();
let version_dir = container_dir.join(format!("clangd_{name}"));
let binary_path = version_dir.join("bin/clangd");
if fs::metadata(&binary_path).await.is_err() {
let mut response = delegate
.http_client()
.get(&version.url, Default::default(), true)
.await
.context("error downloading release")?;
anyhow::ensure!(
response.status().is_success(),
"download failed with status {}",
response.status().to_string()
);
extract_zip(&container_dir, response.body_mut())
.await
.with_context(|| format!("unzipping clangd archive to {container_dir:?}"))?;
remove_matching(&container_dir, |entry| entry != version_dir).await;
}
Ok(LanguageServerBinary {
path: binary_path,
let binary = LanguageServerBinary {
path: binary_path.clone(),
env: None,
arguments: Vec::new(),
})
arguments: Default::default(),
};
let metadata_path = version_dir.join("metadata");
let metadata = GithubBinaryMetadata::read_from_file(&metadata_path)
.await
.ok();
if let Some(metadata) = metadata {
let validity_check = async || {
delegate
.try_exec(LanguageServerBinary {
path: binary_path.clone(),
arguments: vec!["--version".into()],
env: None,
})
.await
.inspect_err(|err| {
log::warn!("Unable to run {binary_path:?} asset, redownloading: {err}",)
})
};
if let (Some(actual_digest), Some(expected_digest)) = (&metadata.digest, digest) {
if actual_digest == expected_digest {
if validity_check().await.is_ok() {
return Ok(binary);
}
} else {
log::info!(
"SHA-256 mismatch for {binary_path:?} asset, downloading new asset. Expected: {expected_digest}, Got: {actual_digest}"
);
}
} else if validity_check().await.is_ok() {
return Ok(binary);
}
}
download_server_binary(
delegate,
url,
digest.as_deref(),
&container_dir,
AssetKind::Zip,
)
.await?;
remove_matching(&container_dir, |entry| entry != version_dir).await;
GithubBinaryMetadata::write_to_file(
&GithubBinaryMetadata {
metadata_version: 1,
digest: digest.clone(),
},
&metadata_path,
)
.await?;
Ok(binary)
}
async fn cached_server_binary(

View file

@@ -0,0 +1,190 @@
use std::{path::Path, pin::Pin, task::Poll};
use anyhow::{Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader};
use http_client::github::AssetKind;
use language::LspAdapterDelegate;
use sha2::{Digest, Sha256};
#[derive(serde::Deserialize, serde::Serialize, Debug)]
pub(crate) struct GithubBinaryMetadata {
pub(crate) metadata_version: u64,
pub(crate) digest: Option<String>,
}
impl GithubBinaryMetadata {
pub(crate) async fn read_from_file(metadata_path: &Path) -> Result<GithubBinaryMetadata> {
let metadata_content = async_fs::read_to_string(metadata_path)
.await
.with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
let metadata: GithubBinaryMetadata = serde_json::from_str(&metadata_content)
.with_context(|| format!("parsing metadata file at {metadata_path:?}"))?;
Ok(metadata)
}
pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
let metadata_content = serde_json::to_string(self)
.with_context(|| format!("serializing metadata for {metadata_path:?}"))?;
async_fs::write(metadata_path, metadata_content.as_bytes())
.await
.with_context(|| format!("writing metadata file at {metadata_path:?}"))?;
Ok(())
}
}
pub(crate) async fn download_server_binary(
delegate: &dyn LspAdapterDelegate,
url: &str,
digest: Option<&str>,
destination_path: &Path,
asset_kind: AssetKind,
) -> Result<(), anyhow::Error> {
log::info!("downloading github artifact from {url}");
let mut response = delegate
.http_client()
.get(url, Default::default(), true)
.await
.with_context(|| format!("downloading release from {url}"))?;
let body = response.body_mut();
match digest {
Some(expected_sha_256) => {
let temp_asset_file = tempfile::NamedTempFile::new()
.with_context(|| format!("creating a temporary file for {url}"))?;
let (temp_asset_file, _temp_guard) = temp_asset_file.into_parts();
let mut writer = HashingWriter {
writer: async_fs::File::from(temp_asset_file),
hasher: Sha256::new(),
};
futures::io::copy(&mut BufReader::new(body), &mut writer)
.await
.with_context(|| {
format!("saving archive contents into the temporary file for {url}",)
})?;
let asset_sha_256 = format!("{:x}", writer.hasher.finalize());
anyhow::ensure!(
asset_sha_256 == expected_sha_256,
"{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
);
writer
.writer
.seek(std::io::SeekFrom::Start(0))
.await
.with_context(|| format!("seeking temporary file {destination_path:?}",))?;
stream_file_archive(&mut writer.writer, url, destination_path, asset_kind)
.await
.with_context(|| {
format!("extracting downloaded asset for {url} into {destination_path:?}",)
})?;
}
None => stream_response_archive(body, url, destination_path, asset_kind)
.await
.with_context(|| {
format!("extracting response for asset {url} into {destination_path:?}",)
})?,
}
Ok(())
}
async fn stream_response_archive(
response: impl AsyncRead + Unpin,
url: &str,
destination_path: &Path,
asset_kind: AssetKind,
) -> Result<()> {
match asset_kind {
AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?,
AssetKind::Gz => extract_gz(destination_path, url, response).await?,
AssetKind::Zip => {
util::archive::extract_zip(&destination_path, response).await?;
}
};
Ok(())
}
async fn stream_file_archive(
file_archive: impl AsyncRead + AsyncSeek + Unpin,
url: &str,
destination_path: &Path,
asset_kind: AssetKind,
) -> Result<()> {
match asset_kind {
AssetKind::TarGz => extract_tar_gz(destination_path, url, file_archive).await?,
AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?,
#[cfg(not(windows))]
AssetKind::Zip => {
util::archive::extract_seekable_zip(&destination_path, file_archive).await?;
}
#[cfg(windows)]
AssetKind::Zip => {
util::archive::extract_zip(&destination_path, file_archive).await?;
}
};
Ok(())
}
async fn extract_tar_gz(
destination_path: &Path,
url: &str,
from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
let decompressed_bytes = GzipDecoder::new(BufReader::new(from));
let archive = async_tar::Archive::new(decompressed_bytes);
archive
.unpack(&destination_path)
.await
.with_context(|| format!("extracting {url} to {destination_path:?}"))?;
Ok(())
}
async fn extract_gz(
destination_path: &Path,
url: &str,
from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
let mut decompressed_bytes = GzipDecoder::new(BufReader::new(from));
let mut file = smol::fs::File::create(&destination_path)
.await
.with_context(|| {
format!("creating a file {destination_path:?} for a download from {url}")
})?;
futures::io::copy(&mut decompressed_bytes, &mut file)
.await
.with_context(|| format!("extracting {url} to {destination_path:?}"))?;
Ok(())
}
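/// Wraps an `AsyncWrite` and feeds every byte written through it into a
/// SHA-256 hasher, so the digest is computed while the download streams to
/// disk rather than by re-reading the file afterwards.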
struct HashingWriter<W: AsyncWrite + Unpin> {
writer: W,
hasher: Sha256,
}
impl<W: AsyncWrite + Unpin> AsyncWrite for HashingWriter<W> {
fn poll_write(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
buf: &[u8],
) -> Poll<std::result::Result<usize, std::io::Error>> {
match Pin::new(&mut self.writer).poll_write(cx, buf) {
Poll::Ready(Ok(n)) => {
self.hasher.update(&buf[..n]);
Poll::Ready(Ok(n))
}
other => other,
}
}
fn poll_flush(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Result<(), std::io::Error>> {
Pin::new(&mut self.writer).poll_flush(cx)
}
fn poll_close(
mut self: Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<std::result::Result<(), std::io::Error>> {
Pin::new(&mut self.writer).poll_close(cx)
}
}

View file

@@ -517,6 +517,7 @@ impl LspAdapter for NodeVersionAdapter {
Ok(Box::new(GitHubLspBinaryVersion {
name: release.tag_name,
url: asset.browser_download_url.clone(),
digest: asset.digest.clone(),
}))
}

View file

@@ -17,6 +17,7 @@ use crate::{json::JsonTaskProvider, python::BasedPyrightLspAdapter};
mod bash;
mod c;
mod css;
mod github_download;
mod go;
mod json;
mod package_json;

View file

@@ -1,8 +1,7 @@
use anyhow::{Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_trait::async_trait;
use collections::HashMap;
use futures::{StreamExt, io::BufReader};
use futures::StreamExt;
use gpui::{App, AppContext, AsyncApp, SharedString, Task};
use http_client::github::AssetKind;
use http_client::github::{GitHubLspBinaryVersion, latest_github_release};
@@ -23,14 +22,11 @@ use std::{
sync::{Arc, LazyLock},
};
use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName};
use util::archive::extract_zip;
use util::fs::make_file_executable;
use util::merge_json_value_into;
use util::{
ResultExt,
fs::{make_file_executable, remove_matching},
maybe,
};
use util::{ResultExt, maybe};
use crate::github_download::{GithubBinaryMetadata, download_server_binary};
use crate::language_settings::language_settings;
pub struct RustLspAdapter;
@@ -163,7 +159,6 @@ impl LspAdapter for RustLspAdapter {
)
.await?;
let asset_name = Self::build_asset_name();
let asset = release
.assets
.iter()
@@ -172,6 +167,7 @@ impl LspAdapter for RustLspAdapter {
Ok(Box::new(GitHubLspBinaryVersion {
name: release.tag_name,
url: asset.browser_download_url.clone(),
digest: asset.digest.clone(),
}))
}
@@ -181,58 +177,76 @@ impl LspAdapter for RustLspAdapter {
container_dir: PathBuf,
delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
let GitHubLspBinaryVersion { name, url, digest } =
&*version.downcast::<GitHubLspBinaryVersion>().unwrap();
let expected_digest = digest
.as_ref()
.and_then(|digest| digest.strip_prefix("sha256:"));
let destination_path = container_dir.join(format!("rust-analyzer-{name}"));
let server_path = match Self::GITHUB_ASSET_KIND {
AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place.
AssetKind::Zip => destination_path.clone().join("rust-analyzer.exe"), // zip contains a .exe
};
if fs::metadata(&server_path).await.is_err() {
remove_matching(&container_dir, |entry| entry != destination_path).await;
let binary = LanguageServerBinary {
path: server_path.clone(),
env: None,
arguments: Default::default(),
};
let mut response = delegate
.http_client()
.get(&version.url, Default::default(), true)
.await
.with_context(|| format!("downloading release from {}", version.url))?;
match Self::GITHUB_ASSET_KIND {
AssetKind::TarGz => {
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
let archive = async_tar::Archive::new(decompressed_bytes);
archive.unpack(&destination_path).await.with_context(|| {
format!("extracting {} to {:?}", version.url, destination_path)
})?;
}
AssetKind::Gz => {
let mut decompressed_bytes =
GzipDecoder::new(BufReader::new(response.body_mut()));
let mut file =
fs::File::create(&destination_path).await.with_context(|| {
format!(
"creating a file {:?} for a download from {}",
destination_path, version.url,
)
})?;
futures::io::copy(&mut decompressed_bytes, &mut file)
.await
.with_context(|| {
format!("extracting {} to {:?}", version.url, destination_path)
})?;
}
AssetKind::Zip => {
extract_zip(&destination_path, response.body_mut())
.await
.with_context(|| {
format!("unzipping {} to {:?}", version.url, destination_path)
})?;
}
let metadata_path = destination_path.with_extension("metadata");
let metadata = GithubBinaryMetadata::read_from_file(&metadata_path)
.await
.ok();
if let Some(metadata) = metadata {
let validity_check = async || {
delegate
.try_exec(LanguageServerBinary {
path: server_path.clone(),
arguments: vec!["--version".into()],
env: None,
})
.await
.inspect_err(|err| {
log::warn!("Unable to run {server_path:?} asset, redownloading: {err}",)
})
};
// todo("windows")
make_file_executable(&server_path).await?;
if let (Some(actual_digest), Some(expected_digest)) =
(&metadata.digest, expected_digest)
{
if actual_digest == expected_digest {
if validity_check().await.is_ok() {
return Ok(binary);
}
} else {
log::info!(
"SHA-256 mismatch for {destination_path:?} asset, downloading new asset. Expected: {expected_digest}, Got: {actual_digest}"
);
}
} else if validity_check().await.is_ok() {
return Ok(binary);
}
}
_ = fs::remove_dir_all(&destination_path).await;
download_server_binary(
delegate,
url,
expected_digest,
&destination_path,
Self::GITHUB_ASSET_KIND,
)
.await?;
make_file_executable(&server_path).await?;
GithubBinaryMetadata::write_to_file(
&GithubBinaryMetadata {
metadata_version: 1,
digest: expected_digest.map(ToString::to_string),
},
&metadata_path,
)
.await?;
Ok(LanguageServerBinary {
path: server_path,
env: None,

View file

@@ -1,6 +1,4 @@
use anyhow::{Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use chrono::{DateTime, Local};
use collections::HashMap;
@@ -15,7 +13,7 @@ use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
use node_runtime::NodeRuntime;
use project::{Fs, lsp_store::language_server_settings};
use serde_json::{Value, json};
use smol::{fs, io::BufReader, lock::RwLock, stream::StreamExt};
use smol::{fs, lock::RwLock, stream::StreamExt};
use std::{
any::Any,
borrow::Cow,
@@ -24,11 +22,10 @@ use std::{
sync::Arc,
};
use task::{TaskTemplate, TaskTemplates, VariableName};
use util::archive::extract_zip;
use util::merge_json_value_into;
use util::{ResultExt, fs::remove_matching, maybe};
use crate::{PackageJson, PackageJsonData};
use crate::{PackageJson, PackageJsonData, github_download::download_server_binary};
#[derive(Debug)]
pub(crate) struct TypeScriptContextProvider {
@@ -897,6 +894,7 @@ impl LspAdapter for EsLintLspAdapter {
Ok(Box::new(GitHubLspBinaryVersion {
name: Self::CURRENT_VERSION.into(),
digest: None,
url,
}))
}
@@ -914,43 +912,14 @@ impl LspAdapter for EsLintLspAdapter {
if fs::metadata(&server_path).await.is_err() {
remove_matching(&container_dir, |entry| entry != destination_path).await;
let mut response = delegate
.http_client()
.get(&version.url, Default::default(), true)
.await
.context("downloading release")?;
match Self::GITHUB_ASSET_KIND {
AssetKind::TarGz => {
let decompressed_bytes = GzipDecoder::new(BufReader::new(response.body_mut()));
let archive = Archive::new(decompressed_bytes);
archive.unpack(&destination_path).await.with_context(|| {
format!("extracting {} to {:?}", version.url, destination_path)
})?;
}
AssetKind::Gz => {
let mut decompressed_bytes =
GzipDecoder::new(BufReader::new(response.body_mut()));
let mut file =
fs::File::create(&destination_path).await.with_context(|| {
format!(
"creating a file {:?} for a download from {}",
destination_path, version.url,
)
})?;
futures::io::copy(&mut decompressed_bytes, &mut file)
.await
.with_context(|| {
format!("extracting {} to {:?}", version.url, destination_path)
})?;
}
AssetKind::Zip => {
extract_zip(&destination_path, response.body_mut())
.await
.with_context(|| {
format!("unzipping {} to {:?}", version.url, destination_path)
})?;
}
}
download_server_binary(
delegate,
&version.url,
None,
&destination_path,
Self::GITHUB_ASSET_KIND,
)
.await?;
let mut dir = fs::read_dir(&destination_path).await?;
let first = dir.next().await.context("missing first file")??;

View file

@@ -2,6 +2,8 @@ use std::path::Path;
use anyhow::{Context as _, Result};
use async_zip::base::read;
#[cfg(not(windows))]
use futures::AsyncSeek;
use futures::{AsyncRead, io::BufReader};
#[cfg(windows)]
@@ -62,7 +64,15 @@ pub async fn extract_zip<R: AsyncRead + Unpin>(destination: &Path, reader: R) ->
futures::io::copy(&mut BufReader::new(reader), &mut file)
.await
.context("saving archive contents into the temporary file")?;
let mut reader = read::seek::ZipFileReader::new(BufReader::new(file))
extract_seekable_zip(destination, file).await
}
#[cfg(not(windows))]
pub async fn extract_seekable_zip<R: AsyncRead + AsyncSeek + Unpin>(
destination: &Path,
reader: R,
) -> Result<()> {
let mut reader = read::seek::ZipFileReader::new(BufReader::new(reader))
.await
.context("reading the zip archive")?;
let destination = &destination

View file

@@ -95,9 +95,9 @@ pub async fn move_folder_files_to_folder<P: AsRef<Path>>(
#[cfg(unix)]
/// Set the permissions for the given path so that the file becomes executable.
/// This is a noop for non-unix platforms.
pub async fn make_file_executable(path: &PathBuf) -> std::io::Result<()> {
pub async fn make_file_executable(path: &Path) -> std::io::Result<()> {
fs::set_permissions(
&path,
path,
<fs::Permissions as fs::unix::PermissionsExt>::from_mode(0o755),
)
.await
@@ -107,6 +107,6 @@ pub async fn make_file_executable(path: &PathBuf) -> std::io::Result<()> {
#[allow(clippy::unused_async)]
/// Set the permissions for the given path so that the file becomes executable.
/// This is a noop for non-unix platforms.
pub async fn make_file_executable(_path: &PathBuf) -> std::io::Result<()> {
pub async fn make_file_executable(_path: &Path) -> std::io::Result<()> {
Ok(())
}