use std::{path::Path, pin::Pin, task::Poll};

use anyhow::{Context, Result};
use async_compression::futures::bufread::GzipDecoder;
use futures::{AsyncRead, AsyncSeek, AsyncSeekExt, AsyncWrite, io::BufReader};
use http_client::github::AssetKind;
use language::LspAdapterDelegate;
use sha2::{Digest, Sha256};

#[derive(serde::Deserialize, serde::Serialize, Debug)]
pub(crate) struct GithubBinaryMetadata {
    pub(crate) metadata_version: u64,
    pub(crate) digest: Option<String>,
}

impl GithubBinaryMetadata {
    pub(crate) async fn read_from_file(metadata_path: &Path) -> Result<GithubBinaryMetadata> {
        let metadata_content = async_fs::read_to_string(metadata_path)
            .await
            .with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
        serde_json::from_str(&metadata_content)
            .with_context(|| format!("parsing metadata file at {metadata_path:?}"))
    }

    pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
        let metadata_content = serde_json::to_string(self)
            .with_context(|| format!("serializing metadata for {metadata_path:?}"))?;
        async_fs::write(metadata_path, metadata_content.as_bytes())
            .await
            .with_context(|| format!("writing metadata file at {metadata_path:?}"))?;
        Ok(())
    }
}

pub(crate) async fn download_server_binary(
    delegate: &dyn LspAdapterDelegate,
    url: &str,
    digest: Option<&str>,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<(), anyhow::Error> {
    log::info!("downloading github artifact from {url}");
    let mut response = delegate
        .http_client()
        .get(url, Default::default(), true)
        .await
        .with_context(|| format!("downloading release from {url}"))?;
    let body = response.body_mut();
    match digest {
        Some(expected_sha_256) => {
            // Stream the asset into a temporary file while hashing it, so the
            // digest can be verified before the archive is extracted.
            let temp_asset_file = tempfile::NamedTempFile::new()
                .with_context(|| format!("creating a temporary file for {url}"))?;
            let (temp_asset_file, _temp_guard) = temp_asset_file.into_parts();
            let mut writer = HashingWriter {
                writer: async_fs::File::from(temp_asset_file),
                hasher: Sha256::new(),
            };
            futures::io::copy(&mut BufReader::new(body), &mut writer)
                .await
                .with_context(|| {
                    format!("saving archive contents into the temporary file for {url}")
                })?;
            let asset_sha_256 = format!("{:x}", writer.hasher.finalize());

            anyhow::ensure!(
                asset_sha_256 == expected_sha_256,
                "{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
            );
            writer
                .writer
                .seek(std::io::SeekFrom::Start(0))
                .await
                .with_context(|| format!("seeking temporary file {destination_path:?}"))?;
            stream_file_archive(&mut writer.writer, url, destination_path, asset_kind)
                .await
                .with_context(|| {
                    format!("extracting downloaded asset for {url} into {destination_path:?}")
                })?;
        }
        None => stream_response_archive(body, url, destination_path, asset_kind)
            .await
            .with_context(|| {
                format!("extracting response for asset {url} into {destination_path:?}")
            })?,
    }
    Ok(())
}

async fn stream_response_archive(
    response: impl AsyncRead + Unpin,
    url: &str,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<()> {
    match asset_kind {
        AssetKind::TarGz => extract_tar_gz(destination_path, url, response).await?,
        AssetKind::Gz => extract_gz(destination_path, url, response).await?,
        AssetKind::Zip => {
            util::archive::extract_zip(destination_path, response).await?;
        }
    };
    Ok(())
}

async fn stream_file_archive(
    file_archive: impl AsyncRead + AsyncSeek + Unpin,
    url: &str,
    destination_path: &Path,
    asset_kind: AssetKind,
) -> Result<()> {
    match asset_kind {
        AssetKind::TarGz => extract_tar_gz(destination_path, url, file_archive).await?,
        AssetKind::Gz => extract_gz(destination_path, url, file_archive).await?,
        #[cfg(not(windows))]
        AssetKind::Zip => {
            util::archive::extract_seekable_zip(destination_path, file_archive).await?;
        }
        #[cfg(windows)]
        AssetKind::Zip => {
            util::archive::extract_zip(destination_path, file_archive).await?;
        }
    };
    Ok(())
}

async fn extract_tar_gz(
    destination_path: &Path,
    url: &str,
    from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
    let decompressed_bytes = GzipDecoder::new(BufReader::new(from));
    let archive = async_tar::Archive::new(decompressed_bytes);
    archive
        .unpack(&destination_path)
        .await
        .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
    Ok(())
}

async fn extract_gz(
    destination_path: &Path,
    url: &str,
    from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
    let mut decompressed_bytes = GzipDecoder::new(BufReader::new(from));
    let mut file = smol::fs::File::create(&destination_path)
        .await
        .with_context(|| {
            format!("creating a file {destination_path:?} for a download from {url}")
        })?;
    futures::io::copy(&mut decompressed_bytes, &mut file)
        .await
        .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
    Ok(())
}

/// An `AsyncWrite` adapter that feeds every byte accepted by the inner writer
/// into a SHA-256 hasher, so the digest of a downloaded asset can be computed
/// while it is being persisted.
struct HashingWriter<W: AsyncWrite + Unpin> {
    writer: W,
    hasher: Sha256,
}

impl<W: AsyncWrite + Unpin> AsyncWrite for HashingWriter<W> {
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &[u8],
    ) -> Poll<std::io::Result<usize>> {
        match Pin::new(&mut self.writer).poll_write(cx, buf) {
            Poll::Ready(Ok(n)) => {
                // Only hash the bytes the inner writer actually accepted.
                self.hasher.update(&buf[..n]);
                Poll::Ready(Ok(n))
            }
            other => other,
        }
    }

    fn poll_flush(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.writer).poll_flush(cx)
    }

    fn poll_close(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.writer).poll_close(cx)
    }
}