Verify downloaded rust-analyzer and clangd binaries by checking the artifact digest (#35642)
Release Notes:

- Added GitHub artifact digest verification for rust-analyzer and clangd binary downloads, skipping the download when the cached binary's digest is already up to date
- Added verification that cached rust-analyzer and clangd binaries are executable; if not, they are redownloaded

Co-authored-by: Kirill Bulatov <kirill@zed.dev>
commit c59c436a11 (parent 40129147c6)
11 changed files with 354 additions and 123 deletions
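The cache-validation step the release notes describe lives in the adapter code among the other changed files, not in the new module below. As a minimal sketch of that flow (names like `cached_binary_is_valid`, `metadata_path`, and `binary_path` are illustrative, not the actual adapter code), a caller might reuse a cached binary only when both checks pass:

    // Hypothetical caller-side check mirroring the release notes: reuse the
    // cached binary only if its recorded digest matches the expected one and
    // the file on disk is still executable.
    async fn cached_binary_is_valid(
        metadata_path: &std::path::Path,
        binary_path: &std::path::Path,
        expected_digest: Option<&str>,
    ) -> bool {
        let Ok(metadata) = GithubBinaryMetadata::read_from_file(metadata_path).await else {
            return false;
        };
        if metadata.digest.as_deref() != expected_digest {
            return false;
        }
        #[cfg(unix)]
        {
            use std::os::unix::fs::PermissionsExt;
            // On Unix, "executable" means at least one execute bit is set.
            match async_fs::metadata(binary_path).await {
                Ok(stat) => stat.permissions().mode() & 0o111 != 0,
                Err(_) => false,
            }
        }
        #[cfg(not(unix))]
        {
            async_fs::metadata(binary_path).await.is_ok()
        }
    }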
crates/languages/src/github_download.rs (new file, +190)
@@ -0,0 +1,190 @@
use std::{path::Path, pin::Pin, task::Poll};

use anyhow::{Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader};
use http_client::github::AssetKind;
use language::LspAdapterDelegate;
use sha2::{Digest, Sha256};

#[derive(serde::Deserialize, serde::Serialize, Debug)]
pub(crate) struct GithubBinaryMetadata {
    pub(crate) metadata_version: u64,
    pub(crate) digest: Option<String>,
}

impl GithubBinaryMetadata {
    pub(crate) async fn read_from_file(metadata_path: &Path) -> Result<GithubBinaryMetadata> {
        let metadata_content = async_fs::read_to_string(metadata_path)
            .await
            .with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
        let metadata: GithubBinaryMetadata = serde_json::from_str(&metadata_content)
            .with_context(|| format!("parsing metadata file at {metadata_path:?}"))?;
        Ok(metadata)
    }

    pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
        let metadata_content = serde_json::to_string(self)
            .with_context(|| format!("serializing metadata for {metadata_path:?}"))?;
        async_fs::write(metadata_path, metadata_content.as_bytes())
            .await
            .with_context(|| format!("writing metadata file at {metadata_path:?}"))?;
        Ok(())
    }
}

pub(crate) async fn download_server_binary(
    delegate: &dyn LspAdapterDelegate,
    url: &str,
    digest: Option<&str>,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<(), anyhow::Error> {
    log::info!("downloading github artifact from {url}");
    let mut response = delegate
        .http_client()
        .get(url, Default::default(), true)
        .await
        .with_context(|| format!("downloading release from {url}"))?;
    let body = response.body_mut();
    match digest {
        Some(expected_sha_256) => {
            let temp_asset_file = tempfile::NamedTempFile::new()
                .with_context(|| format!("creating a temporary file for {url}"))?;
            let (temp_asset_file, _temp_guard) = temp_asset_file.into_parts();
            let mut writer = HashingWriter {
                writer: async_fs::File::from(temp_asset_file),
                hasher: Sha256::new(),
            };
            futures::io::copy(&mut BufReader::new(body), &mut writer)
                .await
                .with_context(|| {
                    format!("saving archive contents into the temporary file for {url}")
                })?;
            let asset_sha_256 = format!("{:x}", writer.hasher.finalize());
            anyhow::ensure!(
                asset_sha_256 == expected_sha_256,
                "{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
            );
            writer
                .writer
                .seek(std::io::SeekFrom::Start(0))
                .await
                .with_context(|| format!("seeking temporary file {destination_path:?}"))?;
            stream_file_archive(&mut writer.writer, url, destination_path, asset_kind)
                .await
                .with_context(|| {
                    format!("extracting downloaded asset for {url} into {destination_path:?}")
                })?;
        }
        None => stream_response_archive(body, url, destination_path, asset_kind)
            .await
            .with_context(|| {
                format!("extracting response for asset {url} into {destination_path:?}")
            })?,
    }
    Ok(())
}

async fn stream_response_archive(
    response: impl AsyncRead + Unpin,
    url: &str,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<()> {
    match asset_kind {
        AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?,
        AssetKind::Gz => extract_gz(destination_path, url, response).await?,
        AssetKind::Zip => {
            util::archive::extract_zip(&destination_path, response).await?;
        }
    };
    Ok(())
}

async fn stream_file_archive(
    file_archive: impl AsyncRead + AsyncSeek + Unpin,
    url: &str,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<()> {
    match asset_kind {
        AssetKind::TarGz => extract_tar_gz(destination_path, url, file_archive).await?,
        AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?,
        #[cfg(not(windows))]
        AssetKind::Zip => {
            util::archive::extract_seekable_zip(&destination_path, file_archive).await?;
        }
        #[cfg(windows)]
        AssetKind::Zip => {
            util::archive::extract_zip(&destination_path, file_archive).await?;
        }
    };
    Ok(())
}

async fn extract_tar_gz(
    destination_path: &Path,
    url: &str,
    from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
    let decompressed_bytes = GzipDecoder::new(BufReader::new(from));
    let archive = async_tar::Archive::new(decompressed_bytes);
    archive
        .unpack(&destination_path)
        .await
        .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
    Ok(())
}

async fn extract_gz(
    destination_path: &Path,
    url: &str,
    from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
    let mut decompressed_bytes = GzipDecoder::new(BufReader::new(from));
    let mut file = smol::fs::File::create(&destination_path)
        .await
        .with_context(|| {
            format!("creating a file {destination_path:?} for a download from {url}")
        })?;
    futures::io::copy(&mut decompressed_bytes, &mut file)
        .await
        .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
    Ok(())
}

struct HashingWriter<W: AsyncWrite + Unpin> {
    writer: W,
    hasher: Sha256,
}

impl<W: AsyncWrite + Unpin> AsyncWrite for HashingWriter<W> {
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> Poll<std::result::Result<usize, std::io::Error>> {
        match Pin::new(&mut self.writer).poll_write(cx, buf) {
            Poll::Ready(Ok(n)) => {
                self.hasher.update(&buf[..n]);
                Poll::Ready(Ok(n))
            }
            other => other,
        }
    }

    fn poll_flush(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Pin::new(&mut self.writer).poll_flush(cx)
    }

    fn poll_close(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<std::result::Result<(), std::io::Error>> {
        Pin::new(&mut self.writer).poll_close(cx)
    }
}
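For context, a sketch of how a language adapter might drive this module; the URL, digest, destination path, and `metadata_version` value are placeholders, and `delegate` stands for whatever `LspAdapterDelegate` the caller already holds:

    // Illustrative usage only: URL, digest, and destination are placeholders,
    // not the actual rust-analyzer adapter code.
    async fn fetch_rust_analyzer(delegate: &dyn LspAdapterDelegate) -> anyhow::Result<()> {
        let url = "https://github.com/rust-lang/rust-analyzer/releases/download/<tag>/<asset>.gz";
        // SHA-256 reported by the GitHub release API for this asset, when available.
        let digest: Option<&str> = None;
        let destination = std::path::Path::new("<server dir>/rust-analyzer");

        download_server_binary(delegate, url, digest, destination, AssetKind::Gz).await?;

        // Persist what was downloaded so the next launch can compare digests
        // and skip the download when nothing changed.
        GithubBinaryMetadata {
            metadata_version: 1,
            digest: digest.map(str::to_string),
        }
        .write_to_file(&destination.with_extension("metadata"))
        .await?;

        Ok(())
    }

When a digest is available, `download_server_binary` buffers the asset in a temporary file so it can verify the SHA-256 before extracting; without one, it streams the response straight into the archive extractor.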