spark: remove untarDir

commit ef9732e11f, parent 392bc54225
@@ -69,8 +69,8 @@ with lib;
     confDir = mkOption {
       type = types.path;
       description = lib.mdDoc "Spark configuration directory. Spark will use the configuration files (spark-defaults.conf, spark-env.sh, log4j.properties, etc) from this directory.";
-      default = "${cfg.package}/lib/${cfg.package.untarDir}/conf";
-      defaultText = literalExpression ''"''${package}/lib/''${package.untarDir}/conf"'';
+      default = "${cfg.package}/conf";
+      defaultText = literalExpression ''"''${package}/conf"'';
     };
     logDir = mkOption {
       type = types.path;
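The option's default now points straight at the package's conf/ directory instead of the old lib/${untarDir}/conf path; anyone using a custom configuration directory keeps doing so through the same option. A minimal sketch, assuming the option lives at services.spark.confDir as in this module (the directory path is illustrative):

    { ... }:
    {
      # Read spark-defaults.conf, spark-env.sh, log4j*.properties from a
      # system-managed directory instead of the package's bundled conf/.
      services.spark.confDir = "/etc/spark";
    }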
@@ -113,9 +113,9 @@ with lib;
         Type = "forking";
         User = "spark";
         Group = "spark";
-        WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
-        ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-master.sh";
-        ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-master.sh";
+        WorkingDirectory = "${cfg.package}/";
+        ExecStart = "${cfg.package}/sbin/start-master.sh";
+        ExecStop = "${cfg.package}/sbin/stop-master.sh";
         TimeoutSec = 300;
         StartLimitBurst=10;
         Restart = "always";
@@ -136,9 +136,9 @@ with lib;
       serviceConfig = {
         Type = "forking";
         User = "spark";
-        WorkingDirectory = "${cfg.package}/lib/${cfg.package.untarDir}";
-        ExecStart = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/start-worker.sh spark://${cfg.worker.master}";
-        ExecStop = "${cfg.package}/lib/${cfg.package.untarDir}/sbin/stop-worker.sh";
+        WorkingDirectory = "${cfg.package}/";
+        ExecStart = "${cfg.package}/sbin/start-worker.sh spark://${cfg.worker.master}";
+        ExecStop = "${cfg.package}/sbin/stop-worker.sh";
         TimeoutSec = 300;
         StartLimitBurst=10;
         Restart = "always";
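For context, a usage sketch of the module these master/worker units belong to; the option paths (services.spark.master.enable, services.spark.worker.*) are assumed from this module and the values are illustrative:

    { ... }:
    {
      # Master node
      services.spark.master.enable = true;

      # Worker node, pointing at the master's spark:// endpoint
      services.spark.worker = {
        enable = true;
        master = "master:7077";
      };
    }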
@@ -41,6 +41,7 @@ let
       worker.wait_for_unit("spark-worker.service")
       worker.copy_from_host( "${./spark_sample.py}", "/spark_sample.py" )
       assert "<title>Spark Master at spark://" in worker.succeed("curl -sSfkL http://master:8080/")
+      worker.succeed("spark-submit --version | systemd-cat")
       worker.succeed("spark-submit --master spark://master:7077 --executor-memory 512m --executor-cores 1 /spark_sample.py")
     '';
   });
@@ -3,10 +3,9 @@
 , fetchzip
 , makeWrapper
 , jdk8
-, python3Packages
-, extraPythonPackages ? [ ]
+, python3
+, python310
 , coreutils
-, hadoopSupport ? true
 , hadoop
 , RSupport ? true
 , R
@@ -14,55 +13,43 @@
 }:
 
 let
-  spark = { pname, version, hash, extraMeta ? {} }:
+  spark = { pname, version, hash, extraMeta ? {}, pysparkPython ? python3 }:
     stdenv.mkDerivation (finalAttrs: {
-      inherit pname version hash;
-      jdk = if hadoopSupport then hadoop.jdk else jdk8;
+      inherit pname version hash hadoop R pysparkPython;
+      inherit (finalAttrs.hadoop) jdk;
       src = fetchzip {
         url = with finalAttrs; "mirror://apache/spark/${pname}-${version}/${pname}-${version}-bin-without-hadoop.tgz";
         inherit (finalAttrs) hash;
       };
       nativeBuildInputs = [ makeWrapper ];
-      buildInputs = [ finalAttrs.jdk python3Packages.python ]
-        ++ extraPythonPackages
+      buildInputs = [ finalAttrs.jdk finalAttrs.pysparkPython ]
         ++ lib.optional RSupport R;
 
-      untarDir = with finalAttrs; "${pname}-${version}";
       installPhase = with finalAttrs; ''
-        mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java}
-        mv * $out/lib/${untarDir}
-
-        cp $out/lib/${untarDir}/conf/log4j.properties{.template,} || \
-          cp $out/lib/${untarDir}/conf/log4j2.properties{.template,}
-
-        cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
-        export JAVA_HOME="${jdk}"
-        export SPARK_HOME="$out/lib/${untarDir}"
-      '' + lib.optionalString hadoopSupport ''
-        export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath)
-      '' + ''
-        export PYSPARK_PYTHON="${python3Packages.python}/bin/${python3Packages.python.executable}"
-        export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
-        ${lib.optionalString RSupport ''
-          export SPARKR_R_SHELL="${R}/bin/R"
-          export PATH="\$PATH:${R}/bin"''}
-        EOF
-
-        for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
-          makeWrapper "$n" "$out/bin/$(basename $n)"
-          substituteInPlace "$n" --replace dirname ${coreutils.out}/bin/dirname
+        mkdir -p "$out/opt"
+        mv * $out/
+        for n in $(find $out/bin -type f -executable ! -name "find-spark-home"); do
+          wrapProgram "$n" --set JAVA_HOME "${jdk}" \
+            --run "[ -z SPARK_DIST_CLASSPATH ] && export SPARK_DIST_CLASSPATH=$(${finalAttrs.hadoop}/bin/hadoop classpath)" \
+            ${lib.optionalString RSupport ''--set SPARKR_R_SHELL "${R}/bin/R"''} \
+            --prefix PATH : "${
              lib.makeBinPath (
                [ pysparkPython ] ++
                (lib.optionals RSupport [ R ])
              )}"
         done
-        for n in $(find $out/lib/${untarDir}/sbin -type f); do
-          # Spark deprecated scripts with "slave" in the name.
-          # This line adds forward compatibility with the nixos spark module for
-          # older versions of spark that don't have the new "worker" scripts.
-          ln -s "$n" $(echo "$n" | sed -r 's/slave(s?).sh$/worker\1.sh/g') || true
-        done
-        ln -s $out/lib/${untarDir}/lib/spark-assembly-*.jar $out/share/java
+        ln -s ${finalAttrs.hadoop} "$out/opt/hadoop"
+        ${lib.optionalString RSupport ''ln -s ${finalAttrs.R} "$out/opt/R"''}
       '';
 
-      passthru.tests = nixosTests.spark.default.passthru.override {
-        sparkPackage = finalAttrs.finalPackage;
+      passthru = {
+        tests = nixosTests.spark.default.passthru.override {
+          sparkPackage = finalAttrs.finalPackage;
+        };
+        # Add python packages to PYSPARK_PYTHON
+        withPythonPackages = f: finalAttrs.finalPackage.overrideAttrs (old: {
+          pysparkPython = old.pysparkPython.withPackages f;
+        });
       };
 
       meta = {
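The new passthru.withPythonPackages helper re-wraps pysparkPython via Python's withPackages. A usage sketch, assuming the package is exposed as pkgs.spark; the chosen libraries are illustrative:

    let
      pkgs = import <nixpkgs> { };
      # PySpark with NumPy and pandas available on the wrapped interpreter
      sparkWithPy = pkgs.spark.withPythonPackages (ps: [ ps.numpy ps.pandas ]);
    in
    pkgs.mkShell { packages = [ sparkWithPy ]; }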
@@ -90,11 +77,13 @@ in
     pname = "spark";
     version = "3.3.3";
     hash = "sha256-YtHxRYTwrwSle3UpFjRSwKcnLFj2m9/zLBENH/HVzuM=";
+    pysparkPython = python310;
   };
   spark_3_2 = spark rec {
     pname = "spark";
     version = "3.2.4";
     hash = "sha256-xL4W+dTWbvmmncq3/8iXmhp24rp5SftvoRfkTyxCI8E=";
+    pysparkPython = python310;
     extraMeta.knownVulnerabilities = [ "CVE-2023-22946" ];
   };
 }
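Because pysparkPython is now a plain derivation attribute rather than a fixed python3Packages reference, the interpreter itself can also be swapped with overrideAttrs; a sketch assuming spark_3_2 from this file is re-exported at the top level, with python311 as an illustrative replacement:

    final: prev: {
      spark_3_2 = prev.spark_3_2.overrideAttrs (old: {
        pysparkPython = final.python311;
      });
    }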