Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions backends/arm/scripts/fvp_utils.sh
Original file line number Diff line number Diff line change
Expand Up @@ -67,10 +67,9 @@ function install_fvp() {
log_step "fvp" "Downloading FVP ${fvp}"
url_variable=${fvp}_url
fvp_url=${!url_variable}
curl --output "FVP_${fvp}.tgz" "${fvp_url}"
md5_variable=${fvp}_md5_checksum
fvp_md5_checksum=${!md5_variable}
verify_md5 ${fvp_md5_checksum} FVP_${fvp}.tgz || exit 1
download_with_retry "fvp" "${fvp_url}" "FVP_${fvp}.tgz" "${fvp_md5_checksum}" || exit 1
fi

log_step "fvp" "Installing FVP ${fvp}"
Expand Down
3 changes: 1 addition & 2 deletions backends/arm/scripts/toolchain_utils.sh
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,7 @@ function setup_toolchain() {

if [[ ! -e "${toolchain_archive}" ]]; then
log_step "toolchain" "Downloading ${toolchain_dir} toolchain"
curl --output "${toolchain_archive}" -L "${toolchain_url}"
verify_md5 ${toolchain_md5_checksum} "${toolchain_archive}" || exit 1
download_with_retry "toolchain" "${toolchain_url}" "${toolchain_archive}" "${toolchain_md5_checksum}" || exit 1
fi

log_step "toolchain" "Installing ${toolchain_dir} toolchain"
Expand Down
48 changes: 45 additions & 3 deletions backends/arm/scripts/utils.sh
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,10 @@ function verify_md5() {
# Arg 1: Expected checksum for file
# Arg 2: Path to file
# Exits with return code 1 if the number of arguments is incorrect.
# Exits with return code 2 if the calculated md5 does not match the given.
# Returns 2 if the calculated md5 does not match the given. Returning
# rather than exiting lets callers like download_with_retry treat a bad
# checksum as a retryable failure (e.g. truncated download) instead of
# tearing down the whole script.

[[ $# -ne 2 ]] \
&& { echo "[${FUNCNAME[0]}] Invalid number of args, expecting 2, but got $#"; exit 1; }
Expand All @@ -60,11 +63,50 @@ function verify_md5() {
local file_checksum="$(md5sum $file | awk '{print $1}')"
fi
if [[ ${ref_checksum} != ${file_checksum} ]]; then
echo "Mismatched MD5 checksum for file: ${file}. Expecting ${ref_checksum} but got ${file_checksum}. Exiting."
exit 2
echo "Mismatched MD5 checksum for file: ${file}. Expecting ${ref_checksum} but got ${file_checksum}."
return 2
fi
}

function download_with_retry() {
    # Download a URL to a path and validate its MD5, retrying on transport
    # or checksum errors. developer.arm.com's CDN intermittently aborts the
    # download mid-stream with HTTP/2 INTERNAL_ERROR (curl exit 92), and in
    # rare cases returns a short error body that curl treats as success;
    # both are caught here. --fail rejects HTTP error responses,
    # --retry-all-errors makes curl itself retry transport-level failures,
    # and verify_md5 catches truncation / wrong-content via the published
    # archive checksum.
    #
    # Arg 1: log context (passed to log_step)
    # Arg 2: URL to download
    # Arg 3: Output path
    # Arg 4: Expected MD5 checksum
    # Returns 0 on success, 1 if all attempts fail.
    # Exits with return code 1 if the number of arguments is incorrect.

    [[ $# -ne 4 ]] \
        && { echo "[${FUNCNAME[0]}] Invalid number of args, expecting 4, but got $#"; exit 1; }
    local context="${1}"
    local url="${2}"
    local output="${3}"
    local expected_md5="${4}"

    local max_attempts=5
    local attempt
    # Arithmetic for-loop avoids spawning `seq`; `attempt` is local so it
    # does not leak into the caller's scope.
    for (( attempt = 1; attempt <= max_attempts; attempt++ )); do
        # Remove any partial file from a previous attempt so verify_md5
        # never sees stale data.
        rm -f "${output}"
        if curl --fail --retry 3 --retry-delay 5 --retry-connrefused --retry-all-errors \
                -L --output "${output}" "${url}" \
            && verify_md5 "${expected_md5}" "${output}"; then
            return 0
        fi
        # Log the size of whatever was (partially) downloaded to aid
        # debugging; tolerate the file not existing at all.
        ls -l "${output}" 2>&1 || true
        # Numeric comparison (original used string `=`; equivalent here but
        # arithmetic states the intent).
        if (( attempt == max_attempts )); then
            log_step "${context}" "ERROR: download of ${url} failed after ${attempt} attempts"
            return 1
        fi
        # Linear backoff: 10s, 20s, 30s, 40s between attempts.
        log_step "${context}" "download attempt ${attempt} failed; retrying in $((attempt * 10))s..."
        sleep $((attempt * 10))
    done
}

function patch_repo() {
# Patch git repo found in $repo_dir, starting from patch $base_rev and applying patches found in $patch_dir/$name.

Expand Down
Loading