diff --git a/pkgs/applications/networking/cluster/hadoop/default.nix b/pkgs/applications/networking/cluster/hadoop/default.nix
index a9e7325d181..6f4bc17f8b5 100644
--- a/pkgs/applications/networking/cluster/hadoop/default.nix
+++ b/pkgs/applications/networking/cluster/hadoop/default.nix
@@ -1,76 +1,155 @@
-{ stdenv, fetchurl, makeWrapper, which, jre, bash }:
+{ stdenv, fetchurl, makeWrapper, pkgconfig, which, maven, cmake, jre, bash, coreutils, glibc, protobuf2_5, fuse, snappy, zlib, bzip2, openssl }:
 
-let
-  hadoopDerivation = { version, sha256 }: stdenv.mkDerivation rec {
+let
+  common = { version, sha256, dependencies-sha256, tomcat }:
+    let
+      # compile the hadoop tarball from source; it requires some patches
+      binary-distribution = stdenv.mkDerivation rec {
+        name = "hadoop-${version}-bin";
+        src = fetchurl {
+          url = "mirror://apache/hadoop/common/hadoop-${version}/hadoop-${version}-src.tar.gz";
+          inherit sha256;
+        };
 
-    name = "hadoop-${version}";
+        # perform a fake build to make a fixed-output derivation of the dependencies downloaded from Maven Central (~100MB in ~3000 files)
+        fetched-maven-deps = stdenv.mkDerivation {
+          name = "hadoop-${version}-maven-deps";
+          inherit src nativeBuildInputs buildInputs configurePhase;
+          buildPhase = ''
+            while mvn package -Dmaven.repo.local=$out/.m2 ${mavenFlags} -Dmaven.wagon.rto=5000; [ $? = 1 ]; do
+              echo "timeout, restart maven to continue downloading"
+            done
+          '';
+          # keep only *.{pom,jar,xml,sha1,so,dll,dylib} and delete all ephemeral files with lastModified timestamps inside
+          installPhase = ''find $out/.m2 -type f -regex '.+\(\.lastUpdated\|resolver-status\.properties\|_remote\.repositories\)' -delete'';
+          outputHashAlgo = "sha256";
+          outputHashMode = "recursive";
+          outputHash = dependencies-sha256;
+        };
+        nativeBuildInputs = [ maven cmake pkgconfig ];
+        buildInputs = [ fuse snappy zlib bzip2 openssl protobuf2_5 ];
+        # most of the hardcoded paths are fixed in 2.9.x and 3.0.0; this list of patched files can be reduced once 2.7.x and 2.8.x are deprecated
+        postPatch = ''
+          for file in hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java \
+                      hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java \
+                      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DefaultContainerExecutor.java \
+                      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/DockerContainerExecutor.java \
+                      hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainerLaunch.java \
+                      hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java; do
+            if [ -f "$file" ]; then
+              substituteInPlace "$file" \
+                --replace '/usr/bin/stat' 'stat' \
+                --replace '/bin/bash' 'bash' \
+                --replace '/bin/ls' 'ls' \
+                --replace '/bin/mv' 'mv'
+            fi
+          done
+        '';
+        configurePhase = "true"; # do not trigger the cmake hook
+        mavenFlags = "-Drequire.snappy -Drequire.bzip2 -DskipTests -Pdist,native -e";
+        # prevent downloading tomcat during the build
+        preBuild = stdenv.lib.optionalString (tomcat != null) ''
+          install -D ${tomcat.src} hadoop-hdfs-project/hadoop-hdfs-httpfs/downloads/apache-tomcat-${tomcat.version}.tar.gz
+          install -D ${tomcat.src} hadoop-common-project/hadoop-kms/downloads/apache-tomcat-${tomcat.version}.tar.gz
+        '';
+        buildPhase = ''
+          # 'maven.repo.local' must be writable
+          mvn package --offline -Dmaven.repo.local=$(cp -dpR ${fetched-maven-deps}/.m2 ./ && chmod +w -R .m2 && pwd)/.m2 ${mavenFlags}
+          # remove runtime dependency on $jdk/jre/lib/amd64/server/libjvm.so
+          patchelf --set-rpath ${stdenv.lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhadoop.so.1.0.0
+          patchelf --set-rpath ${stdenv.lib.makeLibraryPath [glibc]} hadoop-dist/target/hadoop-${version}/lib/native/libhdfs.so.0.0.0
+        '';
+        installPhase = "mv hadoop-dist/target/hadoop-${version} $out";
+      };
+    in
+    stdenv.mkDerivation rec {
+      name = "hadoop-${version}";
+
+      src = binary-distribution;
+
+      nativeBuildInputs = [ makeWrapper ];
+
+      installPhase = ''
+        mkdir -p $out/share/doc/hadoop
+        cp -dpR * $out/
+        mv $out/*.txt $out/share/doc/hadoop/
+
+        #
+        # Do not use `wrapProgram` here; script renaming may result in weird things: http://i.imgur.com/0Xee013.png
+        #
+        mkdir -p $out/bin.wrapped
+        for n in $out/bin/*; do
+          if [ -f "$n" ]; then # only regular files
+            mv $n $out/bin.wrapped/
+            makeWrapper $out/bin.wrapped/$(basename $n) $n \
+              --prefix PATH : "${stdenv.lib.makeBinPath [ which jre bash coreutils ]}" \
+              --prefix JAVA_LIBRARY_PATH : "${stdenv.lib.makeLibraryPath [ openssl snappy zlib bzip2 ]}" \
+              --set JAVA_HOME "${jre}" \
+              --set HADOOP_PREFIX "$out"
+          fi
+        done
+      '';
+
+      meta = with stdenv.lib; {
+        homepage = "http://hadoop.apache.org/";
+        description = "Framework for distributed processing of large data sets across clusters of computers";
+        license = licenses.asl20;
+
+        longDescription = ''
+          The Apache Hadoop software library is a framework that allows for
+          the distributed processing of large data sets across clusters of
+          computers using a simple programming model. It is designed to
+          scale up from single servers to thousands of machines, each
+          offering local computation and storage. Rather than rely on
+          hardware to deliver high availability, the library itself is
+          designed to detect and handle failures at the application layer,
+          thus delivering a highly available service on top of a cluster of
+          computers, each of which may be prone to failures.
+        '';
+        maintainers = with maintainers; [ volth ];
+        platforms = [ "x86_64-linux" ];
+      };
+    };
+
+  tomcat_6_0_48 = rec {
+    version = "6.0.48";
     src = fetchurl {
-      url = "mirror://apache/hadoop/common/${name}/${name}.tar.gz";
-      sha256 = "${sha256}";
-    };
-
-    buildInputs = [ makeWrapper ];
-
-    buildPhase = ''
-      for n in bin/{hadoop,hdfs,mapred,yarn} sbin/*.sh; do
-        sed -i $n -e "s|#!/usr/bin/env bash|#! ${bash}/bin/bash|"
-      done
-    '' + stdenv.lib.optionalString (!stdenv.isDarwin) ''
-      patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" bin/container-executor;
-      patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" bin/test-container-executor;
-    '';
-
-    installPhase = ''
-      mkdir -p $out
-      mv *.txt share/doc/hadoop/
-      mv * $out
-
-      for n in $out/bin/{hadoop,hdfs,mapred,yarn} $out/sbin/*.sh; do
-        wrapProgram $n --prefix PATH : "${stdenv.lib.makeBinPath [ which jre bash ]}" --set JAVA_HOME "${jre}" --set HADOOP_HOME "$out"
-      done
-    '';
-
-    meta = {
-      homepage = http://hadoop.apache.org/;
-      description = "Framework for distributed processing of large data sets across clusters of computers";
-      license = stdenv.lib.licenses.asl20;
-
-      longDescription = ''
-        The Apache Hadoop software library is a framework that allows for
-        the distributed processing of large data sets across clusters of
-        computers using a simple programming model. It is designed to
-        scale up from single servers to thousands of machines, each
-        offering local computation and storage. Rather than rely on
-        hardware to deliver high-avaiability, the library itself is
-        designed to detect and handle failures at the application layer,
-        so delivering a highly-availabile service on top of a cluster of
-        computers, each of which may be prone to failures.
-      '';
-
-      platforms = stdenv.lib.platforms.linux;
+      # do not use "mirror://apache/" here; tomcat-6 is legacy and has been removed from the mirrors
+      url = "https://archive.apache.org/dist/tomcat/tomcat-6/v${version}/bin/apache-tomcat-${version}.tar.gz";
+      sha256 = "1w4jf28g8p25fmijixw6b02iqlagy2rvr57y3n90hvz341kb0bbc";
     };
   };
-in
-{
-  hadoop_2_7 = hadoopDerivation {
+
+in {
+  hadoop_2_7 = common {
     version = "2.7.6";
-    sha256 = "0sanwam0k2m40pfsf9l5zxvklv8rvq78xvhd2pbsbiab7ylpwcpj";
+    sha256 = "0wmg0iy0qxrf43fzajzmx03gxp4yx197vxacqwkxaj45clqwl010";
+    dependencies-sha256 = "1lsr9nvrynzspxqcamb10d596zlnmnfpxhkd884gdiva0frm0b1r";
+    tomcat = tomcat_6_0_48;
   };
-  hadoop_2_8 = hadoopDerivation {
+  hadoop_2_8 = common {
     version = "2.8.4";
-    sha256 = "05dik4qnazhf5aldwkljf610cwncsg5y3hyvgj476cfpzmr5jm3b";
+    sha256 = "16c3ljhrzibkjn3y1bmjxdgf0kn60l23ay5hqpp7vpbnqx52x68w";
+    dependencies-sha256 = "1j4f461487fydgr5978nnm245ksv4xbvskfr8pbmfhcyss6b7w03";
+    tomcat = tomcat_6_0_48;
   };
-  hadoop_2_9 = hadoopDerivation {
+  hadoop_2_9 = common {
     version = "2.9.1";
-    sha256 = "1z22v46mmq9hfjc229x61ws332sa1rvmib3v4jsd6i1n29d03mpf";
+    sha256 = "0qgmpfbpv7f521fkjy5ldzdb4lwiblhs0hyl8qy041ws17y5x7d7";
+    dependencies-sha256 = "1d5i8jj5y746rrqb9lscycnd7acmxlkz64ydsiyqsh5cdqgy2x7x";
+    tomcat = tomcat_6_0_48;
   };
-  hadoop_3_0 = hadoopDerivation {
-    version = "3.0.2";
-    sha256 = "10ig3rrcaizvs5bnni15fvm942mr5hfc2hr355g6ich722kpll0d";
+  hadoop_3_0 = common {
+    version = "3.0.3";
+    sha256 = "1vvkci0kx4b48dg0niifn2d3r4wwq8pb3c5z20wy8pqsqrqhlci5";
+    dependencies-sha256 = "1kzkna9ywacm2m1cirj9cyip66bgqjhid2xf9rrhq6g10lhr8j9m";
+    tomcat = null;
   };
-  hadoop_3_1 = hadoopDerivation {
+  hadoop_3_1 = common {
     version = "3.1.0";
-    sha256 = "1rs3a752is1y2vgxjlqmmln00iwzncwlwg59l6gjv92zb7njq3b7";
+    sha256 = "0lig25jkffkzc2bfgyrnm3wymapgyw9fkai8sk9fnmp7cljia314";
+    dependencies-sha256 = "1ri6a7lrijh538vy7v0fzgvkw603pf8jkh3ldl1kl7l0dvszd70d";
+    tomcat = null;
   };
 }
diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix
index bc98e97588f..c18826c038d 100644
--- a/pkgs/top-level/all-packages.nix
+++ b/pkgs/top-level/all-packages.nix
@@ -7220,7 +7220,7 @@ with pkgs;
 
   guile = guile_2_2;
 
-  inherit (callPackage ../applications/networking/cluster/hadoop { })
+  inherit (callPackages ../applications/networking/cluster/hadoop { })
     hadoop_2_7 hadoop_2_8 hadoop_2_9