nixos/hadoop: fix, hadoop: 3.4.0 -> 3.4.1 (#364992)

Sandro
2024-12-17 23:56:12 +01:00
committed by GitHub
6 changed files with 47 additions and 19 deletions

View File

@@ -188,22 +188,30 @@ in
     services.hadoop.yarnSiteInternal =
       with cfg.yarn.nodemanager;
-      mkMerge [
+      lib.mkMerge [
         ({
-          "yarn.nodemanager.local-dirs" = mkIf (localDir != null) (concatStringsSep "," localDir);
+          "yarn.nodemanager.local-dirs" = lib.mkIf (localDir != null) (concatStringsSep "," localDir);
           "yarn.scheduler.maximum-allocation-vcores" = resource.maximumAllocationVCores;
           "yarn.scheduler.maximum-allocation-mb" = resource.maximumAllocationMB;
           "yarn.nodemanager.resource.cpu-vcores" = resource.cpuVCores;
           "yarn.nodemanager.resource.memory-mb" = resource.memoryMB;
         })
-        (mkIf useCGroups {
-          "yarn.nodemanager.linux-container-executor.cgroups.hierarchy" = "/hadoop-yarn";
-          "yarn.nodemanager.linux-container-executor.resources-handler.class" =
-            "org.apache.hadoop.yarn.server.nodemanager.util.CgroupsLCEResourcesHandler";
-          "yarn.nodemanager.linux-container-executor.cgroups.mount" = "true";
-          "yarn.nodemanager.linux-container-executor.cgroups.mount-path" =
-            "/run/wrappers/yarn-nodemanager/cgroup";
-        })
+        (lib.mkIf useCGroups (
+          lib.warnIf (lib.versionOlder cfg.package.version "3.5.0")
+            ''
+              hadoop < 3.5.0 does not support cgroup v2
+              setting `services.hadoop.yarn.nodemanager.useCGroups = false` is recommended
+              see: https://issues.apache.org/jira/browse/YARN-11669
+            ''
+            {
+              "yarn.nodemanager.linux-container-executor.cgroups.hierarchy" = "/hadoop-yarn";
+              "yarn.nodemanager.linux-container-executor.resources-handler.class" =
+                "org.apache.hadoop.yarn.server.nodemanager.util.CgroupsLCEResourcesHandler";
+              "yarn.nodemanager.linux-container-executor.cgroups.mount" = "true";
+              "yarn.nodemanager.linux-container-executor.cgroups.mount-path" =
+                "/run/wrappers/yarn-nodemanager/cgroup";
+            }
+        ))
       ];
     networking.firewall.allowedTCPPortRanges = [
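
The new warning steers deployments toward the module's useCGroups option; a minimal sketch of the recommended opt-out in a host configuration (the option path comes from the warning text, the surrounding attributes are illustrative):

{
  services.hadoop.yarn.nodemanager = {
    enable = true;
    openFirewall = true;
    # hadoop < 3.5.0 cannot drive cgroup v2 (YARN-11669), so skip cgroup setup
    useCGroups = false;
  };
}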

View File

@@ -1,8 +1,16 @@
 { handleTestOn, package, ... }:
 {
-  all = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop.nix { inherit package; };
-  hdfs = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hdfs.nix { inherit package; };
-  yarn = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./yarn.nix { inherit package; };
-  hbase = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hbase.nix { inherit package; };
+  all = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hadoop.nix {
+    inherit package;
+  };
+  hdfs = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hdfs.nix {
+    inherit package;
+  };
+  yarn = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./yarn.nix {
+    inherit package;
+  };
+  hbase = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./hbase.nix {
+    inherit package;
+  };
 }
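
handleTestOn restricts each of these test attributes to the listed platforms; a minimal sketch of that gating idea (runTestOn is a hypothetical stand-in, not the real nixpkgs helper, which also imports and wraps the test file):

let
  # only expose the test on the platforms it is expected to work on
  runTestOn =
    supportedSystems: test:
    if builtins.elem builtins.currentSystem supportedSystems then test else { };
in
runTestOn [ "x86_64-linux" "aarch64-linux" ] { name = "yarn"; }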

View File

@@ -173,6 +173,7 @@ import ../make-test-python.nix (
         yarn.nodemanager = {
           enable = true;
           openFirewall = true;
+          useCGroups = false;
         };
       };
     };

View File

@@ -36,6 +36,7 @@ import ../make-test-python.nix (
       datanode =
         { pkgs, ... }:
         {
+          virtualisation.diskSize = 4096;
           services.hadoop = {
             inherit package;
             hdfs.datanode = {

View File

@@ -24,6 +24,7 @@ import ../make-test-python.nix (
       yarn.nodemanager = {
         enable = true;
         openFirewall = true;
+        useCGroups = false;
       };
       yarnSite = {
         "yarn.resourcemanager.hostname" = "resourcemanager";

View File

@@ -46,7 +46,9 @@ let
"mirror://apache/hadoop/common/hadoop-${finalAttrs.version}/hadoop-${finalAttrs.version}"
+ lib.optionalString stdenv.hostPlatform.isAarch64 "-aarch64"
+ ".tar.gz";
inherit (platformAttrs.${stdenv.system} or (throw "Unsupported system: ${stdenv.system}")) hash;
inherit (platformAttrs.${stdenv.system} or (throw "Unsupported system: ${stdenv.system}"))
hash
;
};
doCheck = true;
@@ -101,7 +103,12 @@ let
       # hadoop 3.3+ depends on protobuf 3.18, 3.2 depends on 3.8
       find $out/lib/native -name 'libhdfspp.so*' | \
         xargs -r -n1 patchelf --replace-needed libprotobuf.so.${
-          if (lib.versionAtLeast finalAttrs.version "3.3") then "18" else "8"
+          if (lib.versionAtLeast finalAttrs.version "3.4.1") then
+            "32"
+          else if (lib.versionAtLeast finalAttrs.version "3.3") then
+            "18"
+          else
+            "8"
         } libprotobuf.so
       patchelf --replace-needed libcrypto.so.1.1 libcrypto.so \
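
The replacement turns the two-way protobuf soname switch into a three-way cascade so a 3.4.1 build patches in libprotobuf.so.32. A self-contained sketch of how the cascade resolves for a few versions (soSuffix and the sample inputs are illustrative, and <nixpkgs> is assumed to be on NIX_PATH):

let
  lib = (import <nixpkgs> { }).lib;
  # same three-way cascade as in the build script interpolation above
  soSuffix =
    version:
    if lib.versionAtLeast version "3.4.1" then
      "32"
    else if lib.versionAtLeast version "3.3" then
      "18"
    else
      "8";
in
{
  "3.4.1" = soSuffix "3.4.1"; # -> "32" (libprotobuf.so.32)
  "3.4.0" = soSuffix "3.4.0"; # -> "18"
  "3.2.4" = soSuffix "3.2.4"; # -> "8"
}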
@@ -163,13 +170,15 @@ in
   pname = "hadoop";
   platformAttrs = rec {
     x86_64-linux = {
-      version = "3.4.0";
-      hash = "sha256-4xGnhIBBQDD57GNUml1oXmnibyBxA9mr8hpIud0DyGw=";
-      srcHash = "sha256-viDF3LdRCZHqFycOYfN7nUQBPHiMCIjmu7jgIAaaK9E=";
+      version = "3.4.1";
+      hash = "sha256-mtVIeDOZbf5VFOdW9DkQKckFKf0i6NAC/T3QwUwEukY=";
+      srcHash = "sha256-lE9uSohy6GWXprFEYbEin2ITqTms2h6EWXe4nEd3U4Y=";
     };
     x86_64-darwin = x86_64-linux;
     aarch64-linux = x86_64-linux // {
       version = "3.4.0";
       hash = "sha256-QWxzKtNyw/AzcHMv0v7kj91pw1HO7VAN9MHO84caFk8=";
       srcHash = "sha256-viDF3LdRCZHqFycOYfN7nUQBPHiMCIjmu7jgIAaaK9E=";
     };
     aarch64-darwin = aarch64-linux;
   };
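
Only the x86_64-linux entry moves to 3.4.1; aarch64-linux is built from it with the // update operator, so its explicitly listed version and hashes keep the aarch64 binary tarball pinned at 3.4.0. A tiny sketch of that update semantics (values shortened for illustration):

let
  x86_64-linux = {
    version = "3.4.1";
    hash = "sha256-new...";
  };
  # `//` keeps the left-hand attrs except those restated on the right
  aarch64-linux = x86_64-linux // {
    version = "3.4.0";
    hash = "sha256-old...";
  };
in
aarch64-linux.version # evaluates to "3.4.0"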