fc180191de
Clean up leftover for commit cd13136f03 ("fetchurl: use __structuredAttrs = true and pass curlOptsList directly"). Continue the work of commit 23236b331d ("fetchurl: fix handling of fallback URLs"), addressing a Bash array re-assignment quirk: when assigning a Bash array variable as if it were a plain variable, the value goes to the first element, and the rest of the array stays the same. ```console $ foo=(a b) $ declare -p foo declare -a foo=([0]="a" [1]="b") $ foo="c d" $ declare -p foo declare -a foo=([0]="c d" [1]="b") ``` Don't rewrite the `${urls[@]}` with resolved URLs, but hold them with `${resolvedUrls[@]}` instead. Co-authored-by: Matt Sturgeon <matt@sturgeon.me.uk> Co-authored-by: Wolfgang Walther <walther@technowledgy.de>
192 lines
5.0 KiB
Bash
192 lines
5.0 KiB
Bash
# Load the structured-attrs environment and the mirror definitions.
source "$NIX_ATTRS_SH_FILE"

source $mirrorsFile

# Normalize `curlOpts` as a string.
# If defined as a list (deprecated), it would be a bash array.
#
# FIX: the stderr redirection was written as `2&>/dev/null`, which bash parses
# as `declare -p curlOpts 2 &>/dev/null` — "2" becomes an extra argument and
# stdout is discarded along with stderr, so the command substitution was
# always empty and an array-valued curlOpts was never flattened.
# `2>/dev/null` silences only stderr, as intended.
if [[ "$(declare -p curlOpts 2>/dev/null || true)" =~ ^"declare -a" ]]; then
    unset _temp
    # "${curlOpts[*]}" joins all array elements into one space-separated word.
    _temp="${curlOpts[*]}"
    # Unset first: plain assignment to an array variable would only replace
    # element 0 and keep the rest of the array (see commit message).
    unset curlOpts
    curlOpts=$_temp
    unset _temp
fi
# Extract curl's version number (first line of `curl -V`, second field) for
# the User-Agent header below.
curlVersion=$(curl -V | head -1 | cut -d' ' -f2)

# Curl flags to handle redirects, not use EPSV, handle cookies for
# servers to need them during redirects, and work on SSL without a
# certificate (this isn't a security problem because we check the
# cryptographic hash of the output anyway).
curl=(
    curl
    --location
    --max-redirs 20
    --retry 3
    --retry-all-errors
    --continue-at -
    --disable-epsv
    --cookie-jar cookies
    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
)

# Without a CA bundle, certificate verification cannot succeed; fall back to
# --insecure (acceptable here because the output hash is verified afterwards).
if ! [ -f "$SSL_CERT_FILE" ]; then
    curl+=(--insecure)
fi

# NOTE:
# `netrcPhase` should not attempt to access builder.sh implementation details (e.g., the `${curl[@]}` array),
# The implementation detail could change in any Nixpkgs revision, including backports.
if [[ -n "${netrcPhase-}" ]]; then
    runPhase netrcPhase
    curl+=(--netrc-file "$PWD/netrc")
fi

# User-supplied options come last so they can override the defaults above.
# $curlOpts and $NIX_CURL_FLAGS are intentionally unquoted: they are
# whitespace-separated option strings that must word-split into arguments.
curl+=(
    "${curlOptsList[@]}"
    $curlOpts
    $NIX_CURL_FLAGS
)

# Download destination: $out directly, or a temporary file when
# downloadToTemp is set (presumably so postFetch can transform the
# download before producing $out — confirm against fetchurl callers).
downloadedFile="$out"
if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
tryDownload() {
|
||
local url="$1"
|
||
local target="$2"
|
||
echo
|
||
echo "trying $url"
|
||
local curlexit=18;
|
||
|
||
success=
|
||
|
||
# if we get error code 18, resume partial download
|
||
while [ $curlexit -eq 18 ]; do
|
||
# keep this inside an if statement, since on failure it doesn't abort the script
|
||
if "${curl[@]}" -C - --fail "$url" --output "$target"; then
|
||
success=1
|
||
break
|
||
else
|
||
curlexit=$?;
|
||
fi
|
||
done
|
||
}
|
||
|
||
|
||
# Finalize a successful download and exit the builder with status 0.
#
# $1 - when non-empty, skip the postFetch hook (used by hashed mirrors,
#      whose contents are already content-addressed; re-running postFetch
#      would apply its changes a second time).
# Globals read: executable, downloadedFile.
finish() {
    local skipPostFetch="$1"

    # Re-enable globbing that the URL loops disabled before user hooks run.
    set +o noglob

    if [[ $executable == "1" ]]; then
        # FIX: quote the path — globbing was just re-enabled above, so an
        # unquoted $downloadedFile containing spaces or glob characters
        # would word-split or expand before reaching chmod.
        chmod +x "$downloadedFile"
    fi

    if [ -z "$skipPostFetch" ]; then
        runHook postFetch
    fi

    exit 0
}
# Probe every hashed mirror for $outputHashAlgo/$outputHash and, on a hit,
# download the pre-built output straight into $out.
# Globals read: NIX_HASHED_MIRRORS, hashedMirrors, outputHashAlgo,
# outputHash, curl[@], out, success. Writes files `code` and `log` in $PWD.
tryHashedMirrors() {
    # An explicit NIX_HASHED_MIRRORS overrides the default mirror list.
    if [ -n "$NIX_HASHED_MIRRORS" ]; then
        hashedMirrors="$NIX_HASHED_MIRRORS"
    fi

    for mirror in $hashedMirrors; do
        url="$mirror/$outputHashAlgo/$outputHash"
        # Cheap HEAD request first to check existence before downloading.
        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
               --fail --silent --show-error --head "$url" \
               --write-out "%{http_code}" --output /dev/null > code 2> log; then
            # Download directly to $out, bypassing postFetch: hashed mirrors
            # serve content-addressed outputs, so postFetch's changes are
            # already baked in and running it again would apply them twice.
            tryDownload "$url" "$out"
            if [ -n "$success" ]; then finish skipPostFetch; fi
        elif [ "$(cat code)" != 404 ]; then
            # 404 merely means "not on this mirror" and is kept quiet;
            # any other failure is reported along with curl's stderr.
            echo "error checking the existence of $url:"
            cat log
        fi
    done
}
# URL list may contain ?. No glob expansion for that, please
set -o noglob

# Expand mirror:// pseudo-URLs into one concrete URL per mirror. Results are
# accumulated in resolvedUrls instead of rewriting urls[@] in place: assigning
# a string to an array variable would only replace its first element and keep
# the rest of the array unchanged.
resolvedUrls=()
for url in "${urls[@]}"; do
    if test "${url:0:9}" != "mirror://"; then
        resolvedUrls+=("$url")
    else
        # Split "mirror://site/path" into the site name and the file path.
        url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split
        #varName="mirror_$site"
        varName="$site" # !!! danger of name clash, fix this
        if test -z "${!varName}"; then
            echo "warning: unknown mirror:// site \`$site'"
        else
            mirrors=${!varName}

            # Allow command-line override by setting NIX_MIRRORS_$site.
            varName="NIX_MIRRORS_$site"
            if test -n "${!varName}"; then mirrors="${!varName}"; fi

            # Each mirror base URL yields one candidate download URL.
            for url3 in $mirrors; do
                resolvedUrls+=("$url3$fileName");
            done
        fi
    fi
done

# Restore globbing settings
set +o noglob

# When only the URL list is requested, write it to $out and stop.
# FIX: quote "$out" — an unquoted redirection target is subject to
# word-splitting and pathname expansion.
if test -n "$showURLs"; then
    echo "${resolvedUrls[*]}" > "$out"
    exit 0
fi

if test -n "$preferHashedMirrors"; then
    tryHashedMirrors
fi
# URL list may contain ?. No glob expansion for that, please
set -o noglob

success=
for url in "${resolvedUrls[@]}"; do
    # Warn about GitHub/GitLab revision archives, which have dedicated
    # fetchers; the warning is suppressed when postFetch is set, presumably
    # because such derivations process the archive deliberately — confirm
    # against fetchurl's callers.
    if [ -z "$postFetch" ]; then
        case "$url" in
            https://github.com/*/archive/*)
                echo "warning: archives from GitHub revisions should use fetchFromGitHub"
                ;;
            https://gitlab.com/*/-/archive/*)
                echo "warning: archives from GitLab revisions should use fetchFromGitLab"
                ;;
        esac
    fi
    tryDownload "$url" "$downloadedFile"
    # `finish` is called with no argument here, so the postFetch hook runs.
    if test -n "$success"; then finish; fi
done

# Restore globbing settings
set +o noglob

# Hashed mirrors as a last resort (unless they were already tried first,
# before the plain URLs, when preferHashedMirrors was set).
if test -z "$preferHashedMirrors"; then
    tryHashedMirrors
fi

# Every candidate URL and mirror failed: abort the build.
echo "error: cannot download $name from any mirror"
exit 1