Merge pull request #5221 from offlinehacker/pkgs/spark/update
spark: update to 1.1.1, simplify a lot
commit e66ad71535
pkgs/applications/networking/cluster/spark
@@ -1,128 +1,46 @@
-{ stdenv, fetchurl, jre, bash, simpleBuildTool, python27Packages }:
+{ stdenv, fetchurl, makeWrapper, jre, pythonPackages
+, mesosSupport ? true, mesos
+}:
+
+with stdenv.lib;
 
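Review note: the new mesosSupport ? true argument makes the Mesos bindings optional. A minimal sketch of flipping it off, assuming the package is exposed through callPackage as a top-level spark attribute (the attribute name is an assumption):

    # .override re-invokes the package function with modified arguments
    sparkWithoutMesos = spark.override { mesosSupport = false; };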
 stdenv.mkDerivation rec {
   name = "spark-${version}";
-  version = "0.9.1";
+  version = "1.1.1";
 
   src = fetchurl {
     url = "http://d3kbcqa49mib13.cloudfront.net/${name}-bin-cdh4.tgz";
-    sha256 = "1k3954srx3km3ckmfi6wn8rldrljxc039g0pf5m3azgkmaz0gld5";
+    sha256 = "1k0lw8aq5is3gnsrm8q24s0knga6kb3c9xsca20g11fy8b0y4nvk";
   };
 
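Review note: the updated sha256 should be reproducible with nix-prefetch-url against the CDN URL above, with spark-1.1.1 substituted for ${name}:

    $ nix-prefetch-url http://d3kbcqa49mib13.cloudfront.net/spark-1.1.1-bin-cdh4.tgz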
-  unpackPhase = ''tar zxf $src'';
+  buildInputs = [ makeWrapper jre pythonPackages.python pythonPackages.numpy ]
+    ++ optional mesosSupport [ mesos ];
 
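Review note: optional already wraps its second argument in a list, so "++ optional mesosSupport [ mesos ]" appends a nested list ([ [ mesos ] ]) to buildInputs. A sketch of the two idiomatic spellings:

    ++ optional  mesosSupport mesos       # optional cond x: if cond then [ x ] else [ ]
    ++ optionals mesosSupport [ mesos ]   # optionals cond xs: if cond then xs else [ ]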
untarDir = "${name}-bin-cdh4";
|
untarDir = "${name}-bin-cdh4";
|
||||||
installPhase = ''
|
installPhase = ''
|
||||||
set -x
|
mkdir -p $out/{lib/${untarDir}/conf,bin}
|
||||||
mkdir -p $out/lib $out/bin
|
mv * $out/lib/${untarDir}
|
||||||
mv ${untarDir} $out/lib
|
|
||||||
|
|
||||||
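Review note: with the custom unpackPhase gone, stdenv's generic unpackPhase extracts the tarball and cds into it, so "mv *" relocates the whole unpacked tree. The brace expansion creates both target directories in one call; an equivalent sketch with untarDir expanded:

    # mkdir -p $out/{lib/${untarDir}/conf,bin} expands to:
    mkdir -p $out/lib/spark-1.1.1-bin-cdh4/conf $out/bin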
-    cat > $out/bin/spark-class <<EOF
-    #!${bash}/bin/bash
-    export JAVA_HOME=${jre}
-    export SPARK_HOME=$out/lib/${untarDir}
-    if [ -z "\$1" ]; then
-      echo "Usage: spark-class <class> [<args>]" >&2
-      exit 1
-    fi
-
-    export SPARK_MEM=\''${SPARK_MEM:-1024m}
-
-    JAVA_OPTS=""
-    JAVA_OPTS="\$JAVA_OPTS -Djava.library.path=\"\$SPARK_LIBRARY_PATH\""
-    JAVA_OPTS="\$JAVA_OPTS -Xms\$SPARK_MEM -Xmx\$SPARK_MEM"
-    export JAVA_OPTS
-
-    CLASSPATH=\`$out/lib/${untarDir}/bin/compute-classpath.sh\`
-    export CLASSPATH
-
-    exec ${jre}/bin/java -cp "\$CLASSPATH" \$JAVA_OPTS "\$@"
-    EOF
-    chmod +x $out/bin/spark-class
+    cat > $out/lib/${untarDir}/conf/spark-env.sh <<- EOF
+    export JAVA_HOME="${jre}"
+    export SPARK_HOME="$out/lib/${untarDir}"
+    export PYSPARK_PYTHON="${pythonPackages.python}/bin/${pythonPackages.python.executable}"
+    export PYTHONPATH="\$PYTHONPATH:$PYTHONPATH"
+    ${optionalString mesosSupport
+      ''export MESOS_NATIVE_LIBRARY="$MESOS_NATIVE_LIBRARY"''}
+    EOF
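Review note: conf/spark-env.sh is sourced by Spark's stock launch scripts, which is what lets the hand-rolled spark-class wrapper above be dropped. One caveat worth flagging: <<- strips leading tabs only, so the space-indented heredoc body keeps its indentation in the generated file (harmless for a sourced shell script). A minimal sketch of the behaviour:

    cat <<- EOF
    	tab-indented: the leading tab is stripped
        space-indented: the four spaces survive
    EOF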
-    cat > $out/bin/spark-shell <<EOF
-    #!${bash}/bin/bash
-    set -o posix
-    export JAVA_HOME=${jre}
-    export SPARK_HOME=$out/lib/${untarDir}
-    for o in "\$@"; do
-      if [ "\$1" = "-c" -o "\$1" = "--cores" ]; then
-        shift
-        if [ -n "\$1" ]; then
-          OPTIONS="-Dspark.cores.max=\$1"
-          shift
-        fi
-      fi
-    done
-
-    exit_status=127
-    saved_stty=""
-
-    function restoreSttySettings() {
-      stty \$saved_stty
-      saved_stty=""
-    }
-
-    function onExit() {
-      if [[ "\$saved_stty" != "" ]]; then
-        restoreSttySettings
-      fi
-      exit \$exit_status
-    }
-
-    trap onExit INT
-
-    saved_stty=\$(stty -g 2>/dev/null)
-    if [[ ! \$? ]]; then
-      saved_stty=""
-    fi
-
-    $out/bin/spark-class \$OPTIONS org.apache.spark.repl.Main "\$@"
-
-    exit_status=\$?
-    onExit
-    EOF
-    chmod +x $out/bin/spark-shell
-
-    cat > $out/bin/pyspark <<EOF
-    #!${bash}/bin/bash
-    export JAVA_HOME=${jre}
-    export SPARK_HOME=$out/lib/${untarDir}
-    export PYTHONPATH=$out/lib/${untarDir}/python:\$PYTHONPATH
-    export OLD_PYTHONSTARTUP=\$PYTHONSTARTUP
-    export PYTHONSTARTUP=$out/lib/${untarDir}/python/pyspark/shell.py
-    export SPARK_MEM=\''${SPARK_MEM:-1024m}
-    exec ${python27Packages.ipythonLight}/bin/ipython \$@
-    EOF
-    chmod +x $out/bin/pyspark
-
-    cat > $out/bin/spark-upload-scala <<EOF
-    #!${bash}/bin/bash
-    export JAVA_HOME=${jre}
-    export SPARK_HOME=$out/lib/${untarDir}
-    export SPARK_MEM=\''${SPARK_MEM:-1024m}
-
-    CLASS=\$1; shift
-    exec ${simpleBuildTool}/bin/sbt package "run-main \$CLASS \$@"
-    EOF
-    chmod +x $out/bin/spark-upload-scala
-
-    cat > $out/bin/spark-upload-python <<EOF
-    #!${bash}/bin/bash
-    exec $out/bin/pyspark \$@
-    EOF
-    chmod +x $out/bin/spark-upload-python
+    for n in $(find $out/lib/${untarDir}/bin -type f ! -name "*.*"); do
+      makeWrapper "$n" "$out/bin/$(basename $n)"
+    done
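Review note: makeWrapper TARGET WRAPPER generates an executable wrapper script in $out/bin; the find filter (! -name "*.*") picks up the extensionless launchers shipped in bin/ (spark-shell, spark-submit, pyspark, ...) while skipping the Windows *.cmd files. Extra environment could also be baked into the wrappers here instead of spark-env.sh, e.g. (a sketch, not what this commit does):

    makeWrapper "$n" "$out/bin/$(basename $n)" \
      --set JAVA_HOME "${jre}"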
   '';
 
-  phases = "unpackPhase installPhase";
-
   meta = {
     description = "Lightning-fast cluster computing";
     homepage = "http://spark.apache.org";
     license = stdenv.lib.licenses.asl20;
     platforms = stdenv.lib.platforms.all;
-    maintainers = [ stdenv.lib.maintainers.thoughtpolice ];
+    maintainers = with maintainers; [ thoughtpolice offline ];
     repositories.git = git://git.apache.org/spark.git;
   };
 }
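To smoke-test the bump (a sketch, assuming the package is reachable as the top-level attribute spark):

    $ nix-build '<nixpkgs>' -A spark
    $ ./result/bin/spark-shell   # one of the wrapped launchers in $out/bin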