tree-wide: Indent .bash files like .sh

This commit is contained in:
Victor Engmark
2024-11-06 13:00:05 +13:00
parent 54b3f3213e
commit 106ba63900
5 changed files with 70 additions and 70 deletions

View File

@@ -35,15 +35,15 @@ Synopsis:
This is usually done in the following cases: This is usually done in the following cases:
1. Single target fix: current bootstrap files for a single target 1. Single target fix: current bootstrap files for a single target
are problematic for some reason (target-specific bug). In this are problematic for some reason (target-specific bug). In this
case we can refresh just that target as: case we can refresh just that target as:
\$ $0 --commit --targets=i686-unknown-linux-gnu \$ $0 --commit --targets=i686-unknown-linux-gnu
2. Routine refresh: all bootstrap files should be refreshed to avoid 2. Routine refresh: all bootstrap files should be refreshed to avoid
debugging problems that only occur on very old binaries. debugging problems that only occur on very old binaries.
\$ $0 --commit --all-targets \$ $0 --commit --all-targets
To get help on uploading refreshed binaries to 'tarballs.nixos.org' To get help on uploading refreshed binaries to 'tarballs.nixos.org'
please have a look at <maintainers/scripts/bootstrap-files/README.md>. please have a look at <maintainers/scripts/bootstrap-files/README.md>.
@@ -232,50 +232,50 @@ for target in "${targets[@]}"; do
# - build time: ${build_time} # - build time: ${build_time}
{ {
EOF EOF
for p in "${outpath}/on-server"/*; do for p in "${outpath}/on-server"/*; do
fname=$(basename "$p") fname=$(basename "$p")
fnames+=("$fname") fnames+=("$fname")
case "$fname" in case "$fname" in
bootstrap-tools.tar.xz) attr=bootstrapTools ;; bootstrap-tools.tar.xz) attr=bootstrapTools ;;
busybox) attr=$fname ;; busybox) attr=$fname ;;
unpack.nar.xz) attr=unpack ;; unpack.nar.xz) attr=unpack ;;
*) die "Don't know how to map '$fname' to attribute name. Please update me." *) die "Don't know how to map '$fname' to attribute name. Please update me."
esac esac
executable_arg= executable_arg=
executable_nix= executable_nix=
if [[ -x "$p" ]]; then if [[ -x "$p" ]]; then
executable_arg="--executable" executable_arg="--executable"
executable_nix="executable = true;" executable_nix="executable = true;"
fi fi
unpack_nix= unpack_nix=
name_nix= name_nix=
if [[ $fname = *.nar.xz ]]; then if [[ $fname = *.nar.xz ]]; then
unpack_nix="unpack = true;" unpack_nix="unpack = true;"
name_nix="name = \"${fname%.nar.xz}\";" name_nix="name = \"${fname%.nar.xz}\";"
sri=$(nar_sri_get "$p" "${fname%.nar.xz}") sri=$(nar_sri_get "$p" "${fname%.nar.xz}")
[[ $? -ne 0 ]] && die "Failed to get hash of '$p'" [[ $? -ne 0 ]] && die "Failed to get hash of '$p'"
else else
sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p") sha256=$(nix-prefetch-url $executable_arg --name "$fname" "file://$p")
[[ $? -ne 0 ]] && die "Failed to get the hash for '$p'" [[ $? -ne 0 ]] && die "Failed to get the hash for '$p'"
sri=$(nix-hash --to-sri "sha256:$sha256") sri=$(nix-hash --to-sri "sha256:$sha256")
[[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form" [[ $? -ne 0 ]] && die "Failed to convert '$sha256' hash to an SRI form"
fi fi
# individual file entries # individual file entries
cat <<EOF cat <<EOF
$attr = import <nix/fetchurl.nix> { $attr = import <nix/fetchurl.nix> {
url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname"; url = "http://tarballs.nixos.org/${s3_prefix}/${nixpkgs_revision}/$fname";
hash = "${sri}";$( hash = "${sri}";$(
[[ -n ${executable_nix} ]] && printf "\n %s" "${executable_nix}" [[ -n ${executable_nix} ]] && printf "\n %s" "${executable_nix}"
[[ -n ${name_nix} ]] && printf "\n %s" "${name_nix}" [[ -n ${name_nix} ]] && printf "\n %s" "${name_nix}"
[[ -n ${unpack_nix} ]] && printf "\n %s" "${unpack_nix}" [[ -n ${unpack_nix} ]] && printf "\n %s" "${unpack_nix}"
) )
}; };
EOF EOF
done done
# footer # footer
cat <<EOF cat <<EOF
} }
EOF EOF
} > "${target_file}" } > "${target_file}"

View File

@@ -24,9 +24,9 @@ checkComposerValidate() {
setComposerRootVersion setComposerRootVersion
if [ "1" == "${composerGlobal-}" ]; then if [ "1" == "${composerGlobal-}" ]; then
global="global"; global="global";
else else
global=""; global="";
fi fi
command="composer ${global} validate --strict --quiet --no-interaction --no-check-all --no-check-lock" command="composer ${global} validate --strict --quiet --no-interaction --no-check-all --no-check-lock"

View File

@@ -22,9 +22,9 @@ setComposerEnvVariables() {
checkComposerValidate() { checkComposerValidate() {
if [ "1" == "${composerGlobal-}" ]; then if [ "1" == "${composerGlobal-}" ]; then
global="global"; global="global";
else else
global=""; global="";
fi fi
command="composer ${global} validate --strict --quiet --no-interaction --no-check-all --no-check-lock" command="composer ${global} validate --strict --quiet --no-interaction --no-check-all --no-check-lock"

View File

@@ -2,12 +2,12 @@
function _dotnet_bash_complete() function _dotnet_bash_complete()
{ {
local cur="${COMP_WORDS[COMP_CWORD]}" IFS=$'\n' # On Windows you may need to use IFS=$'\r\n' local cur="${COMP_WORDS[COMP_CWORD]}" IFS=$'\n' # On Windows you may need to use IFS=$'\r\n'
local candidates local candidates
read -d '' -ra candidates < <(dotnet complete --position "${COMP_POINT}" "${COMP_LINE}" 2>/dev/null) read -d '' -ra candidates < <(dotnet complete --position "${COMP_POINT}" "${COMP_LINE}" 2>/dev/null)
read -d '' -ra COMPREPLY < <(compgen -W "${candidates[*]:-}" -- "$cur") read -d '' -ra COMPREPLY < <(compgen -W "${candidates[*]:-}" -- "$cur")
} }
complete -f -F _dotnet_bash_complete dotnet complete -f -F _dotnet_bash_complete dotnet

View File

@@ -4,8 +4,8 @@ set -o errexit
set -o nounset set -o nounset
if test "$#" != 1; then if test "$#" != 1; then
printf >&2 'usage: update-test-samples.bash /path/to/PyAV/source\n' printf >&2 'usage: update-test-samples.bash /path/to/PyAV/source\n'
exit 2 exit 2
fi fi
pyav_source=$1 pyav_source=$1
@@ -13,31 +13,31 @@ pyav_source=$1
exec > "$(dirname "$(readlink -f "$0")")/test-samples.toml" exec > "$(dirname "$(readlink -f "$0")")/test-samples.toml"
fetch() { fetch() {
path=$1 path=$1
url=$2 url=$2
prefetch_json=$(nix store prefetch-file --json "${url}") prefetch_json=$(nix store prefetch-file --json "${url}")
sri_hash=$(jq -r .hash <<< "${prefetch_json}") sri_hash=$(jq -r .hash <<< "${prefetch_json}")
printf '"%s" = { url = "%s", hash = "%s" }\n' "${path}" "${url}" "${sri_hash}" printf '"%s" = { url = "%s", hash = "%s" }\n' "${path}" "${url}" "${sri_hash}"
} }
fetch_all() { fetch_all() {
function=$1 function=$1
base_path=$2 base_path=$2
base_url=$3 base_url=$3
samples=$( samples=$(
rg \ rg \
--only-matching \ --only-matching \
--no-filename \ --no-filename \
"\\b${function}\\([\"']([^\"']+)[\"']\\)" \ "\\b${function}\\([\"']([^\"']+)[\"']\\)" \
--replace '$1' \ --replace '$1' \
"${pyav_source}" "${pyav_source}"
) )
unique_samples=$(sort -u <<< "${samples}") unique_samples=$(sort -u <<< "${samples}")
while IFS= read -r sample; do while IFS= read -r sample; do
fetch "${base_path}/${sample}" "${base_url}/${sample}" fetch "${base_path}/${sample}" "${base_url}/${sample}"
done <<< "${unique_samples}" done <<< "${unique_samples}"
} }
fetch_all fate_suite fate-suite "http://fate.ffmpeg.org/fate-suite" fetch_all fate_suite fate-suite "http://fate.ffmpeg.org/fate-suite"