spark: initialize 4.0.1 as spark_4_0 and update default vers. (#441064)

Author: Sandro
Date: 2025-11-01 17:28:33 +00:00
Committed by: GitHub
2 changed files with 19 additions and 2 deletions

pkgs/applications/networking/cluster/spark/default.nix

@@ -8,6 +8,8 @@
   RSupport ? true,
   R,
   nixosTests,
+  # needed in situations where hadoop's jdk version is too old
+  jdk21_headless,
 }:
 let
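
Because jdk21_headless is now a function argument, callPackage fills it in automatically and consumers can swap the fallback JDK without patching this file. A minimal sketch, assuming the usual .override attribute that callPackages attaches (pkgs.jdk21 is the full, non-headless JDK attribute, not part of this commit):

  # Hypothetical call-site override: prefer the full JDK 21 over the
  # headless variant for Spark 4.x builds.
  spark_4_0.override { jdk21_headless = pkgs.jdk21; }
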
@@ -28,7 +30,15 @@ let
     R
     pysparkPython
     ;
-  inherit (finalAttrs.hadoop) jdk;
+  jdk =
+    if
+      (
+        (lib.versionAtLeast finalAttrs.version "4") && (lib.versionOlder finalAttrs.hadoop.jdk.version "21")
+      )
+    then
+      jdk21_headless
+    else
+      finalAttrs.hadoop.jdk;
   src = fetchzip {
     url =
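
The new guard picks jdk21_headless only when both conditions hold: the package is a Spark 4.x release and Hadoop's bundled JDK predates 21. An illustrative evaluation, with an assumed Hadoop JDK version of 11.0.25 (not taken from this commit):

  # lib.versionAtLeast a b is true when a >= b; lib.versionOlder a b when a < b.
  lib.versionAtLeast "4.0.1" "4"    # => true: this is a Spark 4.x release
  lib.versionOlder "11.0.25" "21"   # => true: the assumed Hadoop JDK is older than 21
  # Both true, so the derivation builds with jdk21_headless;
  # for spark_3_5 the first predicate is false and hadoop's JDK is kept.
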
@@ -96,6 +106,11 @@ in
   # we strictly adhere to the EOL timeline, despite 3.3.4 being released one day before (2023-12-08).
   # A better policy is to keep these versions around, and clean up EOL versions just before
   # a new NixOS release.
+  spark_4_0 = spark {
+    pname = "spark";
+    version = "4.0.1";
+    hash = "sha256-AW+EQ83b4orJO3+dUPPDlTRAH/D94U7KQBKvKjguChY=";
+  };
   spark_3_5 = spark {
     pname = "spark";
     version = "3.5.5";

pkgs/top-level/all-packages.nix

@@ -5969,11 +5969,13 @@ with pkgs;
   rubyPackages_3_5 = recurseIntoAttrs ruby_3_5.gems;
 
   inherit (callPackages ../applications/networking/cluster/spark { })
+    spark_4_0
     spark_3_5
     spark_3_4
     ;
   spark3 = spark_3_5;
-  spark = spark3;
+  spark4 = spark_4_0;
+  spark = spark_4_0;
 
   inherit
     ({
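
With the default flipped, pkgs.spark now resolves to 4.0.1, while spark3 / spark_3_5 keep the 3.x series reachable. A minimal consumer sketch, assuming a standard <nixpkgs> channel (the mkShell usage is illustrative, not from this commit):

  { pkgs ? import <nixpkgs> { } }:
  pkgs.mkShell {
    # pkgs.spark is now spark_4_0 (4.0.1); use pkgs.spark_3_5 to stay on 3.x
    packages = [ pkgs.spark ];
  }
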