# Apache Spark, using the upstream "-bin-without-hadoop" binary distribution.
# Hadoop classes are supplied at runtime from the `hadoop` input via
# SPARK_DIST_CLASSPATH (see the generated spark-env.sh below).
{ stdenv, fetchzip, makeWrapper, jre, pythonPackages, coreutils, hadoop
, RSupport ? true, R
, mesosSupport ? true, mesos
, version
}:

let
  # Tarball hashes for the supported releases, keyed by `version`.
  # An unsupported version fails evaluation with a missing-attribute error.
  sha256 = {
    "1.6.3" = "142hw73wf20d846l83ydx0yg7qj5qxywm4h7qrhwnd7lsy2sbnjf";
    "2.2.1" = "10nxsf9a6hj1263sxv0cbdqxdb8mb4cl6iqq32ljq9ydvk32s99c";
  }.${version};
in

with stdenv.lib;

stdenv.mkDerivation rec {
  name = "spark-${version}";

  src = fetchzip {
    inherit sha256;
    url = "mirror://apache/spark/${name}/${name}-bin-without-hadoop.tgz";
  };

  buildInputs = [ makeWrapper jre pythonPackages.python pythonPackages.numpy ]
    ++ optional RSupport R
    ++ optional mesosSupport mesos;

  # Top-level directory name inside the upstream tarball.
  untarDir = "${name}-bin-without-hadoop";

  installPhase = ''
    mkdir -p $out/{lib/${untarDir}/conf,bin,/share/java}
    mv * $out/lib/${untarDir}

    # Quieten Spark's default console logging from INFO to WARN.
    sed -e 's/INFO, console/WARN, console/' < \
       $out/lib/${untarDir}/conf/log4j.properties.template > \
       $out/lib/${untarDir}/conf/log4j.properties

    # Generate spark-env.sh so the runtime environment (JVM, Hadoop
    # classpath, Python, and optional R/Mesos support) is pinned to
    # store paths.  Note: $out and $(...) are escaped from Nix here and
    # expand at build/run time in the shell.
    cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
    export JAVA_HOME="${jre}"
    export SPARK_HOME="$out/lib/${untarDir}"
    export SPARK_DIST_CLASSPATH=$(${hadoop}/bin/hadoop classpath)
    export PYSPARK_PYTHON="${pythonPackages.python}/bin/${pythonPackages.python.executable}"
    export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
    ${optionalString RSupport ''
    export SPARKR_R_SHELL="${R}/bin/R"
    export PATH=$PATH:"${R}/bin"''}
    ${optionalString mesosSupport ''
    export MESOS_NATIVE_LIBRARY="$MESOS_NATIVE_LIBRARY"''}
    EOF

    # Wrap every extensionless executable in bin/ and pin its use of
    # `dirname` to coreutils so the wrappers work on a bare system.
    for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
      makeWrapper "$n" "$out/bin/$(basename $n)"
      substituteInPlace "$n" --replace dirname ${coreutils.out}/bin/dirname
    done
    ln -s $out/lib/${untarDir}/lib/spark-assembly-*.jar $out/share/java
  '';

  meta = {
    description = "Apache Spark is a fast and general engine for large-scale data processing";
    homepage = "http://spark.apache.org";
    license = stdenv.lib.licenses.asl20;
    platforms = stdenv.lib.platforms.all;
    maintainers = with maintainers; [ thoughtpolice offline kamilchm ];
    repositories.git = git://git.apache.org/spark.git;
  };
}