languages: Don't remove old artifacts on download failure (#35967)
Release Notes:

- N/A
Parent: c82cd0c6b1
Commit: 8d332da4c5
4 changed files with 35 additions and 33 deletions
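For context, the first hunk below normalizes GitHub asset digests once, at the point where the release list is fetched: GitHub reports digests as "sha256:<hex>", and stripping that prefix there lets every downstream comparison be plain string equality. A minimal, self-contained sketch of that normalization, using a simplified Asset stand-in rather than the real release-asset type:

// Sketch of the digest normalization performed in `latest_github_release`.
// `Asset` is a simplified stand-in; only the `digest` field matters here.
struct Asset {
    digest: Option<String>,
}

fn normalize_digests(assets: &mut [Asset]) {
    for asset in assets.iter_mut() {
        if let Some(digest) = &mut asset.digest {
            // GitHub reports digests as "sha256:<hex>"; keep only the hex part
            // so later comparisons are plain string equality.
            if let Some(stripped) = digest.strip_prefix("sha256:") {
                *digest = stripped.to_owned();
            }
        }
    }
}

fn main() {
    let mut assets = vec![
        Asset { digest: Some("sha256:deadbeef".into()) },
        Asset { digest: None },
    ];
    normalize_digests(&mut assets);
    assert_eq!(assets[0].digest.as_deref(), Some("deadbeef"));
}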
@@ -71,11 +71,19 @@ pub async fn latest_github_release(
         }
     };

-    releases
+    let mut release = releases
         .into_iter()
         .filter(|release| !require_assets || !release.assets.is_empty())
         .find(|release| release.pre_release == pre_release)
-        .context("finding a prerelease")
+        .context("finding a prerelease")?;
+    release.assets.iter_mut().for_each(|asset| {
+        if let Some(digest) = &mut asset.digest {
+            if let Some(stripped) = digest.strip_prefix("sha256:") {
+                *digest = stripped.to_owned();
+            }
+        }
+    });
+    Ok(release)
 }

 pub async fn get_release_by_tag_name(
@@ -71,13 +71,13 @@ impl super::LspAdapter for CLspAdapter {
         container_dir: PathBuf,
         delegate: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
-        let GitHubLspBinaryVersion { name, url, digest } =
-            &*version.downcast::<GitHubLspBinaryVersion>().unwrap();
+        let GitHubLspBinaryVersion {
+            name,
+            url,
+            digest: expected_digest,
+        } = *version.downcast::<GitHubLspBinaryVersion>().unwrap();
         let version_dir = container_dir.join(format!("clangd_{name}"));
         let binary_path = version_dir.join("bin/clangd");
-        let expected_digest = digest
-            .as_ref()
-            .and_then(|digest| digest.strip_prefix("sha256:"));

         let binary = LanguageServerBinary {
             path: binary_path.clone(),
@@ -103,7 +103,7 @@ impl super::LspAdapter for CLspAdapter {
             })
         };
         if let (Some(actual_digest), Some(expected_digest)) =
-            (&metadata.digest, expected_digest)
+            (&metadata.digest, &expected_digest)
         {
             if actual_digest == expected_digest {
                 if validity_check().await.is_ok() {
@@ -120,8 +120,8 @@ impl super::LspAdapter for CLspAdapter {
         }
         download_server_binary(
             delegate,
-            url,
-            digest.as_deref(),
+            &url,
+            expected_digest.as_deref(),
             &container_dir,
             AssetKind::Zip,
         )
@@ -130,7 +130,7 @@ impl super::LspAdapter for CLspAdapter {
         GithubBinaryMetadata::write_to_file(
             &GithubBinaryMetadata {
                 metadata_version: 1,
-                digest: digest.clone(),
+                digest: expected_digest,
             },
             &metadata_path,
         )
@@ -18,9 +18,8 @@ impl GithubBinaryMetadata {
         let metadata_content = async_fs::read_to_string(metadata_path)
             .await
             .with_context(|| format!("reading metadata file at {metadata_path:?}"))?;
-        let metadata: GithubBinaryMetadata = serde_json::from_str(&metadata_content)
-            .with_context(|| format!("parsing metadata file at {metadata_path:?}"))?;
-        Ok(metadata)
+        serde_json::from_str(&metadata_content)
+            .with_context(|| format!("parsing metadata file at {metadata_path:?}"))
     }

     pub(crate) async fn write_to_file(&self, metadata_path: &Path) -> Result<()> {
@@ -63,11 +62,6 @@ pub(crate) async fn download_server_binary(
         })?;
         let asset_sha_256 = format!("{:x}", writer.hasher.finalize());
-
-        // Strip "sha256:" prefix for comparison
-        let expected_sha_256 = expected_sha_256
-            .strip_prefix("sha256:")
-            .unwrap_or(expected_sha_256);

         anyhow::ensure!(
             asset_sha_256 == expected_sha_256,
             "{url} asset got SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
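Because the prefix is now stripped upstream, the comparison in download_server_binary (hunk above) can work with bare hex digests. A rough sketch of that comparison, assuming the sha2 and anyhow crates; the real code hashes incrementally while streaming the download to disk, whereas hashing a byte slice here keeps the example short:

// Hypothetical helper: verify a downloaded payload against a bare-hex SHA-256.
use sha2::{Digest, Sha256};

fn verify_sha256(bytes: &[u8], expected_sha_256: &str) -> anyhow::Result<()> {
    let mut hasher = Sha256::new();
    hasher.update(bytes);
    let asset_sha_256 = format!("{:x}", hasher.finalize());
    // `expected_sha_256` is assumed to be bare hex: the "sha256:" prefix is
    // stripped when the release is listed, so no prefix handling is needed here.
    anyhow::ensure!(
        asset_sha_256 == expected_sha_256,
        "SHA-256 mismatch. Expected: {expected_sha_256}, Got: {asset_sha_256}",
    );
    Ok(())
}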
@@ -23,7 +23,7 @@ use std::{
     sync::{Arc, LazyLock},
 };
 use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName};
-use util::fs::make_file_executable;
+use util::fs::{make_file_executable, remove_matching};
 use util::merge_json_value_into;
 use util::{ResultExt, maybe};

@@ -162,13 +162,13 @@ impl LspAdapter for RustLspAdapter {
         let asset_name = Self::build_asset_name();
         let asset = release
             .assets
-            .iter()
+            .into_iter()
             .find(|asset| asset.name == asset_name)
             .with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
         Ok(Box::new(GitHubLspBinaryVersion {
             name: release.tag_name,
-            url: asset.browser_download_url.clone(),
-            digest: asset.digest.clone(),
+            url: asset.browser_download_url,
+            digest: asset.digest,
         }))
     }

@@ -178,11 +178,11 @@ impl LspAdapter for RustLspAdapter {
         container_dir: PathBuf,
         delegate: &dyn LspAdapterDelegate,
     ) -> Result<LanguageServerBinary> {
-        let GitHubLspBinaryVersion { name, url, digest } =
-            &*version.downcast::<GitHubLspBinaryVersion>().unwrap();
-        let expected_digest = digest
-            .as_ref()
-            .and_then(|digest| digest.strip_prefix("sha256:"));
+        let GitHubLspBinaryVersion {
+            name,
+            url,
+            digest: expected_digest,
+        } = *version.downcast::<GitHubLspBinaryVersion>().unwrap();
         let destination_path = container_dir.join(format!("rust-analyzer-{name}"));
         let server_path = match Self::GITHUB_ASSET_KIND {
             AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place.
@@ -213,7 +213,7 @@ impl LspAdapter for RustLspAdapter {
             })
         };
         if let (Some(actual_digest), Some(expected_digest)) =
-            (&metadata.digest, expected_digest)
+            (&metadata.digest, &expected_digest)
         {
             if actual_digest == expected_digest {
                 if validity_check().await.is_ok() {
@@ -229,20 +229,20 @@ impl LspAdapter for RustLspAdapter {
             }
         }

-        _ = fs::remove_dir_all(&destination_path).await;
         download_server_binary(
             delegate,
-            url,
-            expected_digest,
+            &url,
+            expected_digest.as_deref(),
             &destination_path,
             Self::GITHUB_ASSET_KIND,
         )
         .await?;
         make_file_executable(&server_path).await?;
+        remove_matching(&container_dir, |path| server_path == path).await;
         GithubBinaryMetadata::write_to_file(
             &GithubBinaryMetadata {
                 metadata_version: 1,
-                digest: expected_digest.map(ToString::to_string),
+                digest: expected_digest,
             },
             &metadata_path,
         )
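The rust-analyzer hunk above is where the title change lands: the unconditional remove_dir_all before the download is gone, and clean-up now happens only after the new binary is downloaded and made executable, so a failed download leaves the previously installed version in place. An illustrative std-only sketch of that ordering; update_server, download_and_install, and the clean-up loop are simplified stand-ins for the real download_server_binary / remove_matching pair, not the crate's API:

use std::fs;
use std::io;
use std::path::Path;

// Hypothetical update flow: on a failed download we return early, leaving any
// previously installed version inside `container_dir` untouched.
fn update_server(
    container_dir: &Path,
    destination_path: &Path,
    download_and_install: impl Fn(&Path) -> io::Result<()>,
) -> io::Result<()> {
    // 1. Download into the new version directory first.
    download_and_install(destination_path)?;

    // 2. Only after success, remove stale siblings of the fresh install.
    for entry in fs::read_dir(container_dir)? {
        let path = entry?.path();
        if path != destination_path {
            if path.is_dir() {
                let _ = fs::remove_dir_all(&path);
            } else {
                let _ = fs::remove_file(&path);
            }
        }
    }
    Ok(())
}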