diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index b7b1bfa5eb99..560b912d414f 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -35,6 +35,7 @@ if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
 
 tryDownload() {
     local url="$1"
+    local target="$2"
     echo
     echo "trying $url"
     local curlexit=18;
@@ -44,7 +45,7 @@ tryDownload() {
     # if we get error code 18, resume partial download
     while [ $curlexit -eq 18 ]; do
         # keep this inside an if statement, since on failure it doesn't abort the script
-        if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
+        if "${curl[@]}" -C - --fail "$url" --output "$target"; then
            success=1
            break
         else
@@ -81,7 +82,9 @@ tryHashedMirrors() {
         if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
            --fail --silent --show-error --head "$url" \
            --write-out "%{http_code}" --output /dev/null > code 2> log; then
-            tryDownload "$url"
+            # Directly download to $out, because postFetch doesn't need to run,
+            # since hashed mirrors provide pre-built derivation outputs.
+            tryDownload "$url" "$out"
 
             # We skip postFetch here, because hashed-mirrors are
             # already content addressed. So if $outputHash is in the
@@ -156,7 +159,7 @@ for url in $urls; do
             ;;
         esac
     fi
-    tryDownload "$url"
+    tryDownload "$url" "$downloadedFile"
     if test -n "$success"; then finish; fi
 done
 
diff --git a/pkgs/build-support/fetchurl/tests.nix b/pkgs/build-support/fetchurl/tests.nix
index 8d9064141f23..6d214a74d18a 100644
--- a/pkgs/build-support/fetchurl/tests.nix
+++ b/pkgs/build-support/fetchurl/tests.nix
@@ -22,4 +22,24 @@
       ${jq}/bin/jq -r '.headers.Hello' $out | ${moreutils}/bin/sponge $out
     '';
   };
+  # Tests that downloadToTemp works with hashedMirrors
+  no-skipPostFetch = testers.invalidateFetcherByDrvHash fetchurl {
+    # Make sure that we can only download from hashed mirrors
+    url = "http://broken";
+    # A file with this hash is definitely on tarballs.nixos.org
+    sha256 = "1j1y3cq6ys30m734axc0brdm2q9n2as4h32jws15r7w5fwr991km";
+
+    # No chance
+    curlOptsList = [
+      "--retry"
+      "0"
+    ];
+
+    downloadToTemp = true;
+    # Usually postFetch is needed with downloadToTemp to populate $out from
+    # $downloadedFile, but here we know that because the URL is broken, it will
+    # have to fallback to fetching the previously-built derivation from
+    # tarballs.nixos.org, which provides pre-built derivation outputs.
+
+  };
 }
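
For context, the pattern the new test deliberately leaves out is the usual downloadToTemp + postFetch pairing, where postFetch is responsible for producing $out from $downloadedFile. Below is a hypothetical sketch of that pattern; the { fetchurl }: wrapper, the example.org URL, and the all-A hash are placeholders for illustration, not values from this patch:

{ fetchurl }:

fetchurl {
  # Placeholder URL and fake hash, for illustration only.
  url = "https://example.org/data.bin";
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";

  downloadToTemp = true;
  # With downloadToTemp set, the fetch is written to $downloadedFile and
  # postFetch must copy or transform it into $out. On the hashed-mirror
  # path the builder skips postFetch and, with this patch, downloads
  # straight to $out, which is the case the no-skipPostFetch test covers.
  postFetch = ''
    cp "$downloadedFile" "$out"
  '';
}

Assuming the existing wiring of pkgs/build-support/fetchurl/tests.nix under the top-level tests attribute set (not shown in this diff), the new case should be buildable from a nixpkgs checkout with something like: nix-build -A tests.fetchurl.no-skipPostFetch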