diff --git a/MODULE.bazel b/MODULE.bazel
index 31dc1e6..f4264d3 100644
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -7,7 +7,7 @@ module(
 )
 
 bazel_dep(name = "bazel_skylib", version = "1.5.0")
-bazel_dep(name = "aspect_bazel_lib", version = "2.7.3")
+bazel_dep(name = "aspect_bazel_lib", version = "2.7.9")
 
 bazel_lib_toolchains = use_extension("@aspect_bazel_lib//lib:extensions.bzl", "toolchains")
 use_repo(bazel_lib_toolchains, "zstd_toolchains")
diff --git a/apt/private/package_index.bzl b/apt/private/package_index.bzl
index c633a4d..b1c5c7c 100644
--- a/apt/private/package_index.bzl
+++ b/apt/private/package_index.bzl
@@ -5,31 +5,53 @@ load(":util.bzl", "util")
 def _fetch_package_index(rctx, url, dist, comp, arch, integrity):
     target_triple = "{dist}/{comp}/{arch}".format(dist = dist, comp = comp, arch = arch)
 
-    file_types = {"xz": ["xz", "--decompress"], "gz": ["gzip", "-d"]}
-    r = {"success": False, "integrity": None}
-
-    decompression_successful = False
-    for file_type, tool in file_types.items():
-        output = "{}/Packages.{}".format(target_triple, file_type)
-        r = rctx.download(
-            url = "{}/dists/{}/{}/binary-{}/Packages.{}".format(url, dist, comp, arch, file_type),
+    # See https://linux.die.net/man/1/xz and https://linux.die.net/man/1/gzip
+    # --keep -> keep the original file (Bazel might still be committing the output to the cache)
+    # --force -> overwrite the output if it exists
+    # --decompress -> decompress
+    supported_extensions = {
+        "xz": ["xz", "--decompress", "--keep", "--force"],
+        "gz": ["gzip", "--decompress", "--keep", "--force"],
+    }
+
+    failed_attempts = []
+
+    for (ext, cmd) in supported_extensions.items():
+        output = "{}/Packages.{}".format(target_triple, ext)
+        dist_url = "{}/dists/{}/{}/binary-{}/Packages.{}".format(url, dist, comp, arch, ext)
+        download = rctx.download(
+            url = dist_url,
             output = output,
             integrity = integrity,
             allow_fail = True,
         )
-        if r.success:
-            re = rctx.execute(tool + [output])
-            if re.return_code == 0:
-                decompression_successful = True
+        decompress_r = None
+        if download.success:
+            decompress_r = rctx.execute(cmd + [output])
+            if decompress_r.return_code == 0:
+                integrity = download.integrity
                 break
 
-    if not r.success:
-        fail("unable to download package index")
+        failed_attempts.append((dist_url, download, decompress_r))
 
-    if not decompression_successful:
-        fail("unable to decompress package index")
+    if len(failed_attempts) == len(supported_extensions):
+        attempt_messages = []
+        for (url, download, decompress) in failed_attempts:
+            reason = "unknown"
+            if not download.success:
+                reason = "Download failed. See warning above for details."
+            elif decompress.return_code != 0:
+                reason = "Decompression failed with non-zero exit code.\n\n{}\n{}".format(decompress.stderr, decompress.stdout)
 
-    return ("{}/Packages".format(target_triple), r.integrity)
+            attempt_messages.append("""\n*) Failed '{}'\n\n{}""".format(url, reason))
+
+        fail("""
+** Tried to download {} different package indices and all failed.
+
+{}
+        """.format(len(failed_attempts), "\n".join(attempt_messages)))
+
+    return ("{}/Packages".format(target_triple), integrity)
 
 def _parse_package_index(state, contents, arch, root):
     last_key = ""
@@ -91,11 +113,11 @@ def _create(rctx, sources, archs):
             # on misconfigured HTTP servers)
             url = url.rstrip("/")
 
-            rctx.report_progress("Fetching package index: {}/{}".format(dist, arch))
+            rctx.report_progress("Fetching package index: {}/{} for {}".format(dist, comp, arch))
             (output, _) = _fetch_package_index(rctx, url, dist, comp, arch, "")
 
             # TODO: this is expensive to perform.
-            rctx.report_progress("Parsing package index: {}/{}".format(dist, arch))
+            rctx.report_progress("Parsing package index: {}/{} for {}".format(dist, comp, arch))
             _parse_package_index(state, rctx.read(output), arch, url)
 
     return struct(
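
Note: as a rough, standalone illustration of the fallback pattern the new _fetch_package_index follows (try each supported compression format with allow_fail = True and only fail once every format has been attempted), a minimal sketch could look like the code below. The name _download_with_fallback and the base_url/out_prefix parameters are made up for this example; rctx.download, rctx.execute, and fail are the real repository-context APIs already used in the diff above.

    # Minimal sketch only -- not part of the change above.
    # Try each supported compression format in turn; give up only after
    # every format has been attempted.
    def _download_with_fallback(rctx, base_url, out_prefix):
        formats = {
            "xz": ["xz", "--decompress", "--keep", "--force"],
            "gz": ["gzip", "--decompress", "--keep", "--force"],
        }
        failed = []
        for ext, cmd in formats.items():
            output = "{}/Packages.{}".format(out_prefix, ext)
            download = rctx.download(
                url = "{}/Packages.{}".format(base_url, ext),
                output = output,
                allow_fail = True,  # fall through to the next format instead of failing hard
            )
            if download.success and rctx.execute(cmd + [output]).return_code == 0:
                return "{}/Packages".format(out_prefix)
            failed.append(ext)
        fail("could not fetch a package index; tried: {}".format(", ".join(failed)))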