commit 061758490f

Merge branch 'master' into staging
... to get the openssl mass rebuild: 942dbf89c.
@@ -11,8 +11,8 @@ let
     else throw "ImageMagick is not supported on this platform.";

   cfg = {
-    version = "6.9.5-10";
-    sha256 = "0cxjzqzca80vf6sfx4z9zq4wq2w0vy9ajp9kf88jb4na8mwsn198";
+    version = "6.9.6-2";
+    sha256 = "139h9lycxw3lszn052m34xm0rqyanin4nb529vxjcrkkzqilh91r";
     patches = [];
   }
   # Freeze version on mingw so we don't need to port the patch too often.
@@ -2,7 +2,7 @@

 let

-  version = "0.9.12";
+  version = "0.9.14";

 in
@@ -10,8 +10,8 @@ stdenv.mkDerivation rec {
   name = "pythonmagick-${version}";

   src = fetchurl {
-    url = "http://www.imagemagick.org/download/python/releases/PythonMagick-${version}.tar.xz";
-    sha256 = "1l1kr3d7l40fkxgs6mrlxj65alv2jizm9hhgg9i9g90a8qj8642b";
+    url = "mirror://imagemagick/python/releases/PythonMagick-${version}.tar.xz";
+    sha256 = "1flkdfi3c19wy2qcfzax1cqvmmri10rvmhc2y85gmagqvv01zz22";
   };

   buildInputs = [python boost pkgconfig imagemagick];
@@ -57,6 +57,7 @@ rec {
      sed -e 's,^\(GIMP_PLUGIN_DIR=\).*,\1'"$out/${gimp.name}-plugins", \
          -e 's,^\(GIMP_DATA_DIR=\).*,\1'"$out/share/${gimp.name}", -i configure
    '';
+   hardeningDisable = [ "format" ];
    meta = with stdenv.lib; {
      description = "The GIMP Animation Package";
      homepage = http://www.gimp.org;
@@ -1,6 +1,6 @@
-{stdenv, fetchurl, bzip2, freetype, graphviz, ghostscript
-, libjpeg, libpng, libtiff, libxml2, zlib, libtool, xz
-, libX11, libwebp, quantumdepth ? 8}:
+{ stdenv, fetchurl, fetchpatch, bzip2, freetype, graphviz, ghostscript
+, libjpeg, libpng, libtiff, libxml2, zlib, libtool, xz, libX11
+, libwebp, quantumdepth ? 8 }:

 let version = "1.3.25"; in

@@ -12,7 +12,21 @@ stdenv.mkDerivation {
     sha256 = "17xcc7pfcmiwpfr1g8ys5a7bdnvqzka53vg3kkzhwwz0s99gljyn";
   };

-  patches = [ ./disable-popen.patch ];
+  patches = [
+    ./disable-popen.patch
+    (fetchpatch {
+      url = "https://sources.debian.net/data/main/g/graphicsmagick/1.3.25-4/debian/patches/CVE-2016-7996_CVE-2016-7997.patch";
+      sha256 = "0xsby2z8n7cnnln7szjznq7iaabq323wymvdjra59yb41aix74r2";
+    })
+    (fetchpatch {
+      url = "https://sources.debian.net/data/main/g/graphicsmagick/1.3.25-4/debian/patches/CVE-2016-7800_part1.patch";
+      sha256 = "02s0x9bkbnm5wrd0d2x9ld4d9z5xqpfk310lyylyr5zlnhqxmwgn";
+    })
+    (fetchpatch {
+      url = "https://sources.debian.net/data/main/g/graphicsmagick/1.3.25-4/debian/patches/CVE-2016-7800_part2.patch";
+      sha256 = "1h4xv3i1aq5avsd584rwa5sa7ca8f7w9ggmh7j2llqq5kymwsv5f";
+    })
+  ];

   configureFlags = [
     "--enable-shared"
@@ -1,11 +1,14 @@
 diff -ru git-2.7.4-orig/http.c git-2.7.4/http.c
 --- git-2.7.4-orig/http.c	2016-03-17 21:47:59.000000000 +0100
 +++ git-2.7.4/http.c	2016-04-12 11:38:33.187070848 +0200
-@@ -544,6 +544,7 @@
+@@ -544,6 +544,10 @@
  #if LIBCURL_VERSION_NUM >= 0x070908
  	set_from_env(&ssl_capath, "GIT_SSL_CAPATH");
  #endif
-+	set_from_env(&ssl_cainfo, "SSL_CERT_FILE");
++	if (getenv("NIX_SSL_CERT_FILE"))
++		set_from_env(&ssl_cainfo, "NIX_SSL_CERT_FILE");
++	else
++		set_from_env(&ssl_cainfo, "SSL_CERT_FILE");
  	set_from_env(&ssl_cainfo, "GIT_SSL_CAINFO");

  	set_from_env(&user_agent, "GIT_HTTP_USER_AGENT");
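This git patch, like the new OpenSSL `nix-ssl-cert-file.patch` further down, makes the tool consult `NIX_SSL_CERT_FILE` first and fall back to `SSL_CERT_FILE` (with `GIT_SSL_CAINFO` still taking precedence). As a minimal sketch of how an environment might wire that variable to the Nix CA bundle, assuming a nixpkgs tree containing this commit; the derivation name and the use of `cacert` here are illustrative, not part of this change:

```nix
# Sketch only: a nix-shell environment that points the patched git at the
# Nix CA bundle via NIX_SSL_CERT_FILE. Names below are placeholders.
{ pkgs ? import <nixpkgs> {} }:

pkgs.stdenv.mkDerivation {
  name = "git-https-env";
  buildInputs = [ pkgs.git pkgs.cacert ];
  # The patched git prefers NIX_SSL_CERT_FILE and falls back to SSL_CERT_FILE;
  # GIT_SSL_CAINFO, when set, still overrides both.
  NIX_SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
}
```

Entering this with `nix-shell` exports the attribute as an environment variable, so an `https://` clone verifies certificates against the Nix-provided bundle.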
@@ -8,17 +8,16 @@
 stdenv.mkDerivation rec {
   name = "smartgithg-${version}";
-  version = "7_1_2";
+  version = "8_0_3";

   src = fetchurl {
     url = "http://www.syntevo.com/static/smart/download/smartgit/smartgit-linux-${version}.tar.gz";
-    sha256 = "18jw4g2akhj6h9w8378kacv7ws35ndcnc3kkhci9iypwy432ak8d";
+    sha256 = "1ghxjg5dm22kwfrq26nqp4qhh6h7f4l4fnf1cx9cksd30ypwy223";
   };

-  buildInputs = [
-    makeWrapper
-    jre
-  ];
+  nativeBuildInputs = [ makeWrapper ];
+
+  buildInputs = [ jre ];

   buildCommand = let
     pkg_path = "$out/${name}";
@ -1,12 +1,36 @@
|
||||
{ stdenv, lib, callPackage, runCommand, writeReferencesToFile, writeText, vmTools, writeScript
|
||||
, docker, shadow, utillinux, coreutils, jshon, e2fsprogs, go, pigz, findutils }:
|
||||
{
|
||||
callPackage,
|
||||
coreutils,
|
||||
docker,
|
||||
e2fsprogs,
|
||||
findutils,
|
||||
go,
|
||||
jshon,
|
||||
lib,
|
||||
pkgs,
|
||||
pigz,
|
||||
runCommand,
|
||||
rsync,
|
||||
shadow,
|
||||
stdenv,
|
||||
storeDir ? builtins.storeDir,
|
||||
utillinux,
|
||||
vmTools,
|
||||
writeReferencesToFile,
|
||||
writeScript,
|
||||
writeText,
|
||||
}:
|
||||
|
||||
# WARNING: this API is unstable and may be subject to backwards-incompatible changes in the future.
|
||||
|
||||
|
||||
rec {
|
||||
|
||||
examples = import ./examples.nix {
|
||||
inherit pkgs buildImage pullImage shadowSetup;
|
||||
};
|
||||
|
||||
pullImage = callPackage ./pull.nix {};
|
||||
|
||||
|
||||
# We need to sum layer.tar, not a directory, hence tarsum instead of nix-hash.
|
||||
# And we cannot untar it, because then we cannot preserve permissions ecc.
|
||||
tarsum = runCommand "tarsum" {
|
||||
@ -23,110 +47,138 @@ rec {
|
||||
|
||||
cp tarsum $out
|
||||
'';
|
||||
|
||||
|
||||
# buildEnv creates symlinks to dirs, which is hard to edit inside the overlay VM
|
||||
mergeDrvs = { drvs, onlyDeps ? false }:
|
||||
mergeDrvs = {
|
||||
derivations,
|
||||
onlyDeps ? false
|
||||
}:
|
||||
runCommand "merge-drvs" {
|
||||
inherit drvs onlyDeps;
|
||||
inherit derivations onlyDeps;
|
||||
} ''
|
||||
if [ -n "$onlyDeps" ]; then
|
||||
echo $drvs > $out
|
||||
if [[ -n "$onlyDeps" ]]; then
|
||||
echo $derivations > $out
|
||||
exit 0
|
||||
fi
|
||||
|
||||
|
||||
mkdir $out
|
||||
for drv in $drvs; do
|
||||
echo Merging $drv
|
||||
if [ -d "$drv" ]; then
|
||||
cp -drf --preserve=mode -f $drv/* $out/
|
||||
for derivation in $derivations; do
|
||||
echo "Merging $derivation..."
|
||||
if [[ -d "$derivation" ]]; then
|
||||
# If it's a directory, copy all of its contents into $out.
|
||||
cp -drf --preserve=mode -f $derivation/* $out/
|
||||
else
|
||||
# Otherwise treat the derivation as a tarball and extract it
|
||||
# into $out.
|
||||
tar -C $out -xpf $drv || true
|
||||
fi
|
||||
done
|
||||
'';
|
||||
|
||||
shellScript = text:
|
||||
writeScript "script.sh" ''
|
||||
#!${stdenv.shell}
|
||||
set -e
|
||||
export PATH=${coreutils}/bin:/bin
|
||||
|
||||
${text}
|
||||
'';
|
||||
|
||||
# Helper for setting up the base files for managing users and
|
||||
# groups, only if such files don't exist already. It is suitable for
|
||||
# being used in a runAsRoot script.
|
||||
shadowSetup = ''
|
||||
export PATH=${shadow}/bin:$PATH
|
||||
mkdir -p /etc/pam.d
|
||||
if [ ! -f /etc/passwd ]; then
|
||||
if [[ ! -f /etc/passwd ]]; then
|
||||
echo "root:x:0:0::/root:/bin/sh" > /etc/passwd
|
||||
echo "root:!x:::::::" > /etc/shadow
|
||||
fi
|
||||
if [ ! -f /etc/group ]; then
|
||||
if [[ ! -f /etc/group ]]; then
|
||||
echo "root:x:0:" > /etc/group
|
||||
echo "root:x::" > /etc/gshadow
|
||||
fi
|
||||
if [ ! -f /etc/pam.d/other ]; then
|
||||
if [[ ! -f /etc/pam.d/other ]]; then
|
||||
cat > /etc/pam.d/other <<EOF
|
||||
account sufficient pam_unix.so
|
||||
auth sufficient pam_rootok.so
|
||||
password requisite pam_unix.so nullok sha512
|
||||
session required pam_unix.so
|
||||
EOF
|
||||
account sufficient pam_unix.so
|
||||
auth sufficient pam_rootok.so
|
||||
password requisite pam_unix.so nullok sha512
|
||||
session required pam_unix.so
|
||||
EOF
|
||||
fi
|
||||
if [ ! -f /etc/login.defs ]; then
|
||||
if [[ ! -f /etc/login.defs ]]; then
|
||||
touch /etc/login.defs
|
||||
fi
|
||||
'';
|
||||
|
||||
runWithOverlay = { name , fromImage ? null, fromImageName ? null, fromImageTag ? null
|
||||
, diskSize ? 1024, preMount ? "", postMount ? "", postUmount ? "" }:
|
||||
# Run commands in a virtual machine.
|
||||
runWithOverlay = {
|
||||
name,
|
||||
fromImage ? null,
|
||||
fromImageName ? null,
|
||||
fromImageTag ? null,
|
||||
diskSize ? 1024,
|
||||
preMount ? "",
|
||||
postMount ? "",
|
||||
postUmount ? ""
|
||||
}:
|
||||
vmTools.runInLinuxVM (
|
||||
runCommand name {
|
||||
preVM = vmTools.createEmptyImage { size = diskSize; fullName = "docker-run-disk"; };
|
||||
|
||||
preVM = vmTools.createEmptyImage {
|
||||
size = diskSize;
|
||||
fullName = "docker-run-disk";
|
||||
};
|
||||
inherit fromImage fromImageName fromImageTag;
|
||||
|
||||
buildInputs = [ utillinux e2fsprogs jshon ];
|
||||
|
||||
buildInputs = [ utillinux e2fsprogs jshon rsync ];
|
||||
} ''
|
||||
rm -rf $out
|
||||
|
||||
|
||||
mkdir disk
|
||||
mkfs /dev/${vmTools.hd}
|
||||
mount /dev/${vmTools.hd} disk
|
||||
cd disk
|
||||
|
||||
if [ -n "$fromImage" ]; then
|
||||
echo Unpacking base image
|
||||
if [[ -n "$fromImage" ]]; then
|
||||
echo "Unpacking base image..."
|
||||
mkdir image
|
||||
tar -C image -xpf "$fromImage"
|
||||
|
||||
if [ -z "$fromImageName" ]; then
|
||||
fromImageName=$(jshon -k < image/repositories|head -n1)
|
||||
# If the image name isn't set, read it from the image repository json.
|
||||
if [[ -z "$fromImageName" ]]; then
|
||||
fromImageName=$(jshon -k < image/repositories | head -n 1)
|
||||
echo "From-image name wasn't set. Read $fromImageName."
|
||||
fi
|
||||
if [ -z "$fromImageTag" ]; then
|
||||
fromImageTag=$(jshon -e $fromImageName -k < image/repositories|head -n1)
|
||||
|
||||
# If the tag isn't set, use the name as an index into the json
|
||||
# and read the first key found.
|
||||
if [[ -z "$fromImageTag" ]]; then
|
||||
fromImageTag=$(jshon -e $fromImageName -k < image/repositories \
|
||||
| head -n1)
|
||||
echo "From-image tag wasn't set. Read $fromImageTag."
|
||||
fi
|
||||
parentID=$(jshon -e $fromImageName -e $fromImageTag -u < image/repositories)
|
||||
|
||||
# Use the name and tag to get the parent ID field.
|
||||
parentID=$(jshon -e $fromImageName -e $fromImageTag -u \
|
||||
< image/repositories)
|
||||
fi
|
||||
|
||||
# Unpack all of the parent layers into the image.
|
||||
lowerdir=""
|
||||
while [ -n "$parentID" ]; do
|
||||
echo Unpacking layer $parentID
|
||||
while [[ -n "$parentID" ]]; do
|
||||
echo "Unpacking layer $parentID"
|
||||
mkdir -p image/$parentID/layer
|
||||
tar -C image/$parentID/layer -xpf image/$parentID/layer.tar
|
||||
rm image/$parentID/layer.tar
|
||||
|
||||
find image/$parentID/layer -name ".wh.*" -exec bash -c 'name="$(basename {}|sed "s/^.wh.//")"; mknod "$(dirname {})/$name" c 0 0; rm {}' \;
|
||||
|
||||
# Get the next lower directory and continue the loop.
|
||||
lowerdir=$lowerdir''${lowerdir:+:}image/$parentID/layer
|
||||
parentID=$(cat image/$parentID/json|(jshon -e parent -u 2>/dev/null || true))
|
||||
parentID=$(cat image/$parentID/json \
|
||||
| (jshon -e parent -u 2>/dev/null || true))
|
||||
done
|
||||
|
||||
mkdir work
|
||||
mkdir layer
|
||||
mkdir mnt
|
||||
|
||||
${preMount}
|
||||
${lib.optionalString (preMount != "") ''
|
||||
# Execute pre-mount steps
|
||||
echo "Executing pre-mount steps..."
|
||||
${preMount}
|
||||
''}
|
||||
|
||||
if [ -n "$lowerdir" ]; then
|
||||
mount -t overlay overlay -olowerdir=$lowerdir,workdir=work,upperdir=layer mnt
|
||||
@ -134,13 +186,19 @@ EOF
|
||||
mount --bind layer mnt
|
||||
fi
|
||||
|
||||
${postMount}
|
||||
|
||||
${lib.optionalString (postMount != "") ''
|
||||
# Execute post-mount steps
|
||||
echo "Executing post-mount steps..."
|
||||
${postMount}
|
||||
''}
|
||||
|
||||
umount mnt
|
||||
|
||||
pushd layer
|
||||
find . -type c -exec bash -c 'name="$(basename {})"; touch "$(dirname {})/.wh.$name"; rm "{}"' \;
|
||||
popd
|
||||
(
|
||||
cd layer
|
||||
cmd='name="$(basename {})"; touch "$(dirname {})/.wh.$name"; rm "{}"'
|
||||
find . -type c -exec bash -c "$cmd" \;
|
||||
)
|
||||
|
||||
${postUmount}
|
||||
'');
|
||||
@ -150,76 +208,148 @@ EOF
|
||||
inherit name fromImage fromImageName fromImageTag diskSize;
|
||||
|
||||
postMount = ''
|
||||
echo Packing raw image
|
||||
echo "Packing raw image..."
|
||||
tar -C mnt --mtime=0 -cf $out .
|
||||
'';
|
||||
};
|
||||
|
||||
mkPureLayer = { baseJson, contents ? null, extraCommands ? "" }:
|
||||
runCommand "docker-layer" {
|
||||
inherit baseJson contents extraCommands;
|
||||
|
||||
buildInputs = [ jshon ];
|
||||
} ''
|
||||
mkdir layer
|
||||
if [ -n "$contents" ]; then
|
||||
echo Adding contents
|
||||
for c in $contents; do
|
||||
cp -drf $c/* layer/
|
||||
chmod -R ug+w layer/
|
||||
done
|
||||
fi
|
||||
|
||||
pushd layer
|
||||
${extraCommands}
|
||||
popd
|
||||
|
||||
echo Packing layer
|
||||
mkdir $out
|
||||
tar -C layer --mtime=0 -cf $out/layer.tar .
|
||||
ts=$(${tarsum} < $out/layer.tar)
|
||||
cat ${baseJson} | jshon -s "$ts" -i checksum > $out/json
|
||||
echo -n "1.0" > $out/VERSION
|
||||
# Create an executable shell script which has the coreutils in its
|
||||
# PATH. Since root scripts are executed in a blank environment, even
|
||||
# things like `ls` or `echo` will be missing.
|
||||
shellScript = name: text:
|
||||
writeScript name ''
|
||||
#!${stdenv.shell}
|
||||
set -e
|
||||
export PATH=${coreutils}/bin:/bin
|
||||
${text}
|
||||
'';
|
||||
|
||||
mkRootLayer = { runAsRoot, baseJson, fromImage ? null, fromImageName ? null, fromImageTag ? null
|
||||
, diskSize ? 1024, contents ? null, extraCommands ? "" }:
|
||||
let runAsRootScript = writeScript "run-as-root.sh" runAsRoot;
|
||||
# Create a "layer" (set of files).
|
||||
mkPureLayer = {
|
||||
# Name of the layer
|
||||
name,
|
||||
# JSON containing configuration and metadata for this layer.
|
||||
baseJson,
|
||||
# Files to add to the layer.
|
||||
contents ? null,
|
||||
# Additional commands to run on the layer before it is tar'd up.
|
||||
extraCommands ? ""
|
||||
}:
|
||||
runCommand "docker-layer-${name}" {
|
||||
inherit baseJson contents extraCommands;
|
||||
|
||||
buildInputs = [ jshon rsync ];
|
||||
}
|
||||
''
|
||||
mkdir layer
|
||||
if [[ -n "$contents" ]]; then
|
||||
echo "Adding contents..."
|
||||
for item in $contents; do
|
||||
echo "Adding $item"
|
||||
rsync -a $item/ layer/
|
||||
done
|
||||
else
|
||||
echo "No contents to add to layer."
|
||||
fi
|
||||
|
||||
if [[ -n $extraCommands ]]; then
|
||||
(cd layer; eval "$extraCommands")
|
||||
fi
|
||||
|
||||
# Tar up the layer and throw it into 'layer.tar'.
|
||||
echo "Packing layer..."
|
||||
mkdir $out
|
||||
tar -C layer --mtime=0 -cf $out/layer.tar .
|
||||
|
||||
# Compute a checksum of the tarball.
|
||||
echo "Computing layer checksum..."
|
||||
tarsum=$(${tarsum} < $out/layer.tar)
|
||||
|
||||
# Add a 'checksum' field to the JSON, with the value set to the
|
||||
# checksum of the tarball.
|
||||
cat ${baseJson} | jshon -s "$tarsum" -i checksum > $out/json
|
||||
|
||||
# Indicate to docker that we're using schema version 1.0.
|
||||
echo -n "1.0" > $out/VERSION
|
||||
|
||||
echo "Finished building layer '${name}'"
|
||||
'';
|
||||
|
||||
# Make a "root" layer; required if we need to execute commands as a
|
||||
# privileged user on the image. The commands themselves will be
|
||||
# performed in a virtual machine sandbox.
|
||||
mkRootLayer = {
|
||||
# Name of the image.
|
||||
name,
|
||||
# Script to run as root. Bash.
|
||||
runAsRoot,
|
||||
# Files to add to the layer. If null, an empty layer will be created.
|
||||
contents ? null,
|
||||
# JSON containing configuration and metadata for this layer.
|
||||
baseJson,
|
||||
# Existing image onto which to append the new layer.
|
||||
fromImage ? null,
|
||||
# Name of the image we're appending onto.
|
||||
fromImageName ? null,
|
||||
# Tag of the image we're appending onto.
|
||||
fromImageTag ? null,
|
||||
# How much disk to allocate for the temporary virtual machine.
|
||||
diskSize ? 1024,
|
||||
# Commands (bash) to run on the layer; these do not require sudo.
|
||||
extraCommands ? ""
|
||||
}:
|
||||
# Generate an executable script from the `runAsRoot` text.
|
||||
let runAsRootScript = shellScript "run-as-root.sh" runAsRoot;
|
||||
in runWithOverlay {
|
||||
name = "docker-layer";
|
||||
|
||||
name = "docker-layer-${name}";
|
||||
|
||||
inherit fromImage fromImageName fromImageTag diskSize;
|
||||
|
||||
preMount = lib.optionalString (contents != null) ''
|
||||
echo Adding contents
|
||||
for c in ${builtins.toString contents}; do
|
||||
cp -drf $c/* layer/
|
||||
chmod -R ug+w layer/
|
||||
preMount = lib.optionalString (contents != null && contents != []) ''
|
||||
echo "Adding contents..."
|
||||
for item in ${toString contents}; do
|
||||
echo "Adding $item..."
|
||||
rsync -a $item/ layer/
|
||||
done
|
||||
'';
|
||||
|
||||
postMount = ''
|
||||
mkdir -p mnt/{dev,proc,sys,nix/store}
|
||||
mkdir -p mnt/{dev,proc,sys} mnt${storeDir}
|
||||
|
||||
# Mount /dev, /sys and the nix store as shared folders.
|
||||
mount --rbind /dev mnt/dev
|
||||
mount --rbind /sys mnt/sys
|
||||
mount --rbind /nix/store mnt/nix/store
|
||||
mount --rbind ${storeDir} mnt${storeDir}
|
||||
|
||||
# Execute the run as root script. See 'man unshare' for
|
||||
# details on what's going on here; basically this command
|
||||
# means that the runAsRootScript will be executed in a nearly
|
||||
# completely isolated environment.
|
||||
unshare -imnpuf --mount-proc chroot mnt ${runAsRootScript}
|
||||
umount -R mnt/dev mnt/sys mnt/nix/store
|
||||
rmdir --ignore-fail-on-non-empty mnt/dev mnt/proc mnt/sys mnt/nix/store mnt/nix
|
||||
'';
|
||||
|
||||
postUmount = ''
|
||||
pushd layer
|
||||
${extraCommands}
|
||||
popd
|
||||
|
||||
echo Packing layer
|
||||
# Unmount directories and remove them.
|
||||
umount -R mnt/dev mnt/sys mnt${storeDir}
|
||||
rmdir --ignore-fail-on-non-empty \
|
||||
mnt/dev mnt/proc mnt/sys mnt${storeDir} \
|
||||
mnt$(dirname ${storeDir})
|
||||
'';
|
||||
|
||||
postUmount = ''
|
||||
(cd layer; eval "${extraCommands}")
|
||||
|
||||
echo "Packing layer..."
|
||||
mkdir $out
|
||||
tar -C layer --mtime=0 -cf $out/layer.tar .
|
||||
|
||||
# Compute the tar checksum and add it to the output json.
|
||||
echo "Computing checksum..."
|
||||
ts=$(${tarsum} < $out/layer.tar)
|
||||
cat ${baseJson} | jshon -s "$ts" -i checksum > $out/json
|
||||
# Indicate to docker that we're using schema version 1.0.
|
||||
echo -n "1.0" > $out/VERSION
|
||||
|
||||
echo "Finished building layer '${name}'"
|
||||
'';
|
||||
};
|
||||
|
||||
@ -229,116 +359,144 @@ EOF
|
||||
# 4. compute the layer id
|
||||
# 5. put the layer in the image
|
||||
# 6. repack the image
|
||||
buildImage = args@{ name, tag ? "latest"
|
||||
, fromImage ? null, fromImageName ? null, fromImageTag ? null
|
||||
, contents ? null, config ? null, runAsRoot ? null
|
||||
, diskSize ? 1024, extraCommands ? "" }:
|
||||
buildImage = args@{
|
||||
# Image name.
|
||||
name,
|
||||
# Image tag.
|
||||
tag ? "latest",
|
||||
# Parent image, to append to.
|
||||
fromImage ? null,
|
||||
# Name of the parent image; will be read from the image otherwise.
|
||||
fromImageName ? null,
|
||||
# Tag of the parent image; will be read from the image otherwise.
|
||||
fromImageTag ? null,
|
||||
# Files to put on the image (a nix store path or list of paths).
|
||||
contents ? null,
|
||||
# Docker config; e.g. what command to run on the container.
|
||||
config ? null,
|
||||
# Optional bash script to run on the files prior to fixturizing the layer.
|
||||
extraCommands ? "",
|
||||
# Optional bash script to run as root on the image when provisioning.
|
||||
runAsRoot ? null,
|
||||
# Size of the virtual machine disk to provision when building the image.
|
||||
diskSize ? 1024,
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
baseName = baseNameOf name;
|
||||
|
||||
# Create a JSON blob of the configuration. Set the date to unix zero.
|
||||
baseJson = writeText "${baseName}-config.json" (builtins.toJSON {
|
||||
created = "1970-01-01T00:00:01Z";
|
||||
architecture = "amd64";
|
||||
os = "linux";
|
||||
config = config;
|
||||
created = "1970-01-01T00:00:01Z";
|
||||
architecture = "amd64";
|
||||
os = "linux";
|
||||
config = config;
|
||||
});
|
||||
|
||||
layer = (if runAsRoot == null
|
||||
then mkPureLayer { inherit baseJson contents extraCommands; }
|
||||
else mkRootLayer { inherit baseJson fromImage fromImageName fromImageTag contents runAsRoot diskSize extraCommands; });
|
||||
result = runCommand "${baseName}.tar.gz" {
|
||||
layer =
|
||||
if runAsRoot == null
|
||||
then mkPureLayer { inherit name baseJson contents extraCommands; }
|
||||
else mkRootLayer { inherit name baseJson fromImage fromImageName
|
||||
fromImageTag contents runAsRoot diskSize
|
||||
extraCommands; };
|
||||
result = runCommand "docker-image-${baseName}.tar.gz" {
|
||||
buildInputs = [ jshon pigz coreutils findutils ];
|
||||
|
||||
imageName = name;
|
||||
imageTag = tag;
|
||||
inherit fromImage baseJson;
|
||||
|
||||
layerClosure = writeReferencesToFile layer;
|
||||
|
||||
passthru = {
|
||||
buildArgs = args;
|
||||
};
|
||||
passthru.buildArgs = args;
|
||||
passthru.layer = layer;
|
||||
} ''
|
||||
# Print tar contents:
|
||||
# 1: Interpreted as relative to the root directory
|
||||
# 2: With no trailing slashes on directories
|
||||
# This is useful for ensuring that the output matches the values generated by the "find" command
|
||||
# This is useful for ensuring that the output matches the
|
||||
# values generated by the "find" command
|
||||
ls_tar() {
|
||||
for f in $(tar -tf $1 | xargs realpath -ms --relative-to=.); do
|
||||
if [ "$f" != "." ]; then
|
||||
echo "/$f"
|
||||
fi
|
||||
done
|
||||
for f in $(tar -tf $1 | xargs realpath -ms --relative-to=.); do
|
||||
if [[ "$f" != "." ]]; then
|
||||
echo "/$f"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
mkdir image
|
||||
touch baseFiles
|
||||
if [ -n "$fromImage" ]; then
|
||||
echo Unpacking base image
|
||||
if [[ -n "$fromImage" ]]; then
|
||||
echo "Unpacking base image..."
|
||||
tar -C image -xpf "$fromImage"
|
||||
|
||||
if [ -z "$fromImageName" ]; then
|
||||
|
||||
if [[ -z "$fromImageName" ]]; then
|
||||
fromImageName=$(jshon -k < image/repositories|head -n1)
|
||||
fi
|
||||
if [ -z "$fromImageTag" ]; then
|
||||
fromImageTag=$(jshon -e $fromImageName -k < image/repositories|head -n1)
|
||||
if [[ -z "$fromImageTag" ]]; then
|
||||
fromImageTag=$(jshon -e $fromImageName -k \
|
||||
< image/repositories|head -n1)
|
||||
fi
|
||||
parentID=$(jshon -e $fromImageName -e $fromImageTag -u < image/repositories)
|
||||
|
||||
parentID=$(jshon -e $fromImageName -e $fromImageTag -u \
|
||||
< image/repositories)
|
||||
|
||||
for l in image/*/layer.tar; do
|
||||
ls_tar $l >> baseFiles
|
||||
ls_tar image/*/layer.tar >> baseFiles
|
||||
done
|
||||
fi
|
||||
|
||||
chmod -R ug+rw image
|
||||
|
||||
|
||||
mkdir temp
|
||||
cp ${layer}/* temp/
|
||||
chmod ug+w temp/*
|
||||
|
||||
echo "$(dirname ${storeDir})" >> layerFiles
|
||||
echo '${storeDir}' >> layerFiles
|
||||
for dep in $(cat $layerClosure); do
|
||||
find $dep -path "${layer}" -prune -o -print >> layerFiles
|
||||
find $dep >> layerFiles
|
||||
done
|
||||
|
||||
if [ -s layerFiles ]; then
|
||||
# FIXME: might not be /nix/store
|
||||
echo '/nix' >> layerFiles
|
||||
echo '/nix/store' >> layerFiles
|
||||
fi
|
||||
|
||||
echo Adding layer
|
||||
echo "Adding layer..."
|
||||
# Record the contents of the tarball with ls_tar.
|
||||
ls_tar temp/layer.tar >> baseFiles
|
||||
comm <(sort -u baseFiles) <(sort -u layerFiles) -1 -3 > newFiles
|
||||
tar -rpf temp/layer.tar --mtime=0 --no-recursion --files-from newFiles 2>/dev/null || true
|
||||
|
||||
echo Adding meta
|
||||
|
||||
if [ -n "$parentID" ]; then
|
||||
# Get the files in the new layer which were *not* present in
|
||||
# the old layer, and record them as newFiles.
|
||||
comm <(sort -n baseFiles|uniq) \
|
||||
<(sort -n layerFiles|uniq|grep -v ${layer}) -1 -3 > newFiles
|
||||
# Append the new files to the layer.
|
||||
tar -rpf temp/layer.tar --mtime=0 --no-recursion --files-from newFiles
|
||||
|
||||
echo "Adding meta..."
|
||||
|
||||
# If we have a parentID, add it to the json metadata.
|
||||
if [[ -n "$parentID" ]]; then
|
||||
cat temp/json | jshon -s "$parentID" -i parent > tmpjson
|
||||
mv tmpjson temp/json
|
||||
fi
|
||||
|
||||
|
||||
# Take the sha256 sum of the generated json and use it as the layer ID.
|
||||
# Compute the size and add it to the json under the 'Size' field.
|
||||
layerID=$(sha256sum temp/json|cut -d ' ' -f 1)
|
||||
size=$(stat --printf="%s" temp/layer.tar)
|
||||
cat temp/json | jshon -s "$layerID" -i id -n $size -i Size > tmpjson
|
||||
mv tmpjson temp/json
|
||||
|
||||
# Use the temp folder we've been working on to create a new image.
|
||||
mv temp image/$layerID
|
||||
|
||||
|
||||
# Store the json under the name image/repositories.
|
||||
jshon -n object \
|
||||
-n object -s "$layerID" -i "$imageTag" \
|
||||
-i "$imageName" > image/repositories
|
||||
|
||||
# Make the image read-only.
|
||||
chmod -R a-w image
|
||||
|
||||
echo Cooking the image
|
||||
echo "Cooking the image..."
|
||||
tar -C image --mtime=0 -c . | pigz -nT > $out
|
||||
|
||||
echo "Finished."
|
||||
'';
|
||||
|
||||
in
|
||||
|
||||
result;
|
||||
|
||||
result;
|
||||
}
|
||||
|
||||
pkgs/build-support/docker/examples.nix (new file, 108 lines)
@ -0,0 +1,108 @@
|
||||
# Examples of using the docker tools to build packages.
|
||||
#
|
||||
# This file defines several docker images. In order to use an image,
|
||||
# build its derivation with `nix-build`, and then load the result with
|
||||
# `docker load`. For example:
|
||||
#
|
||||
# $ nix-build '<nixpkgs>' -A dockerTools.examples.redis
|
||||
# $ docker load < result
|
||||
|
||||
{ pkgs, buildImage, pullImage, shadowSetup }:
|
||||
|
||||
rec {
|
||||
# 1. basic example
|
||||
bash = buildImage {
|
||||
name = "bash";
|
||||
contents = pkgs.bashInteractive;
|
||||
};
|
||||
|
||||
# 2. service example, layered on another image
|
||||
redis = buildImage {
|
||||
name = "redis";
|
||||
tag = "latest";
|
||||
|
||||
# for example's sake, we can layer redis on top of bash or debian
|
||||
fromImage = bash;
|
||||
# fromImage = debian;
|
||||
|
||||
contents = pkgs.redis;
|
||||
runAsRoot = ''
|
||||
mkdir -p /data
|
||||
'';
|
||||
|
||||
config = {
|
||||
Cmd = [ "/bin/redis-server" ];
|
||||
WorkingDir = "/data";
|
||||
Volumes = {
|
||||
"/data" = {};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# 3. another service example
|
||||
nginx = let
|
||||
nginxPort = "80";
|
||||
nginxConf = pkgs.writeText "nginx.conf" ''
|
||||
user nginx nginx;
|
||||
daemon off;
|
||||
error_log /dev/stdout info;
|
||||
pid /dev/null;
|
||||
events {}
|
||||
http {
|
||||
access_log /dev/stdout;
|
||||
server {
|
||||
listen ${nginxPort};
|
||||
index index.html;
|
||||
location / {
|
||||
root ${nginxWebRoot};
|
||||
}
|
||||
}
|
||||
}
|
||||
'';
|
||||
nginxWebRoot = pkgs.writeTextDir "index.html" ''
|
||||
<html><body><h1>Hello from NGINX</h1></body></html>
|
||||
'';
|
||||
in
|
||||
buildImage {
|
||||
name = "nginx-container";
|
||||
contents = pkgs.nginx;
|
||||
|
||||
runAsRoot = ''
|
||||
#!${pkgs.stdenv.shell}
|
||||
${shadowSetup}
|
||||
groupadd --system nginx
|
||||
useradd --system --gid nginx nginx
|
||||
'';
|
||||
|
||||
config = {
|
||||
Cmd = [ "nginx" "-c" nginxConf ];
|
||||
ExposedPorts = {
|
||||
"${nginxPort}/tcp" = {};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
# 4. example of pulling an image. could be used as a base for other images
|
||||
#
|
||||
# ***** Currently broken, getting 404s. Perhaps the docker API has changed?
|
||||
#
|
||||
#
|
||||
# debian = pullImage {
|
||||
# imageName = "debian";
|
||||
# imageTag = "jessie";
|
||||
# # this hash will need change if the tag is updated at docker hub
|
||||
# sha256 = "18kd495lc2k35h03bpcbdjnix17nlqbwf6nmq3sb161blf0dk14q";
|
||||
# };
|
||||
|
||||
# 5. example of multiple contents, emacs and vi happily coexisting
|
||||
editors = buildImage {
|
||||
name = "editors";
|
||||
contents = [
|
||||
pkgs.coreutils
|
||||
pkgs.bash
|
||||
pkgs.emacs
|
||||
pkgs.vim
|
||||
pkgs.nano
|
||||
];
|
||||
};
|
||||
}
|
||||
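The examples above exercise the reworked `dockerTools` interface end to end. A minimal sketch of the same pattern follows; the `hello` package and the image and tag names are placeholders chosen for illustration, not part of this commit:

```nix
# Minimal sketch of dockerTools.buildImage as used in examples.nix above.
# pkgs.hello and the image/tag names are illustrative placeholders.
{ pkgs ? import <nixpkgs> {} }:

pkgs.dockerTools.buildImage {
  name = "hello";            # Docker repository name for the resulting image
  tag = "latest";
  contents = pkgs.hello;     # store paths copied into the image layer
  config = {
    Cmd = [ "/bin/hello" ];  # default command of the container
  };
}
```

As the header comment in `examples.nix` notes, building such an expression with `nix-build` produces a compressed image tarball that can be imported with `docker load < result`.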
@ -1,6 +1,7 @@
|
||||
{ pkgs
|
||||
, kernel ? pkgs.linux
|
||||
, img ? "bzImage"
|
||||
, storeDir ? builtins.storeDir
|
||||
, rootModules ?
|
||||
[ "virtio_pci" "virtio_blk" "virtio_balloon" "virtio_rng" "ext4" "unix" "9p" "9pnet_virtio" "rtc_cmos" ]
|
||||
}:
|
||||
@ -128,8 +129,8 @@ rec {
|
||||
mount -t devpts none /fs/dev/pts
|
||||
|
||||
echo "mounting Nix store..."
|
||||
mkdir -p /fs/nix/store
|
||||
mount -t 9p store /fs/nix/store -o trans=virtio,version=9p2000.L,cache=loose
|
||||
mkdir -p /fs${storeDir}
|
||||
mount -t 9p store /fs${storeDir} -o trans=virtio,version=9p2000.L,cache=loose
|
||||
|
||||
mkdir -p /fs/tmp /fs/run /fs/var
|
||||
mount -t tmpfs -o "mode=1777" none /fs/tmp
|
||||
@ -172,7 +173,7 @@ rec {
|
||||
# apparent KVM > 1.5.2 bug.
|
||||
${pkgs.utillinux}/bin/hwclock -s
|
||||
|
||||
export NIX_STORE=/nix/store
|
||||
export NIX_STORE=${storeDir}
|
||||
export NIX_BUILD_TOP=/tmp
|
||||
export TMPDIR=/tmp
|
||||
export PATH=/empty
|
||||
@ -220,7 +221,7 @@ rec {
|
||||
${lib.optionalString (pkgs.stdenv.system == "x86_64-linux") "-cpu kvm64"} \
|
||||
-nographic -no-reboot \
|
||||
-device virtio-rng-pci \
|
||||
-virtfs local,path=/nix/store,security_model=none,mount_tag=store \
|
||||
-virtfs local,path=${storeDir},security_model=none,mount_tag=store \
|
||||
-virtfs local,path=$TMPDIR/xchg,security_model=none,mount_tag=xchg \
|
||||
-drive file=$diskImage,if=virtio,cache=unsafe,werror=report \
|
||||
-kernel ${kernel}/${img} \
|
||||
@ -298,7 +299,7 @@ rec {
|
||||
|
||||
/* Run a derivation in a Linux virtual machine (using Qemu/KVM). By
|
||||
default, there is no disk image; the root filesystem is a tmpfs,
|
||||
and /nix/store is shared with the host (via the 9P protocol).
|
||||
and the nix store is shared with the host (via the 9P protocol).
|
||||
Thus, any pure Nix derivation should run unmodified, e.g. the
|
||||
call
|
||||
|
||||
@ -434,8 +435,8 @@ rec {
|
||||
chroot=$(type -tP chroot)
|
||||
|
||||
# Make the Nix store available in /mnt, because that's where the RPMs live.
|
||||
mkdir -p /mnt/nix/store
|
||||
${utillinux}/bin/mount -o bind /nix/store /mnt/nix/store
|
||||
mkdir -p /mnt${storeDir}
|
||||
${utillinux}/bin/mount -o bind ${storeDir} /mnt${storeDir}
|
||||
|
||||
# Newer distributions like Fedora 18 require /lib etc. to be
|
||||
# symlinked to /usr.
|
||||
@ -474,7 +475,7 @@ rec {
|
||||
|
||||
rm /mnt/.debug
|
||||
|
||||
${utillinux}/bin/umount /mnt/nix/store /mnt/tmp ${lib.optionalString unifiedSystemDir "/mnt/proc"}
|
||||
${utillinux}/bin/umount /mnt${storeDir} /mnt/tmp ${lib.optionalString unifiedSystemDir "/mnt/proc"}
|
||||
${utillinux}/bin/umount /mnt
|
||||
'';
|
||||
|
||||
@ -604,8 +605,8 @@ rec {
|
||||
done
|
||||
|
||||
# Make the Nix store available in /mnt, because that's where the .debs live.
|
||||
mkdir -p /mnt/inst/nix/store
|
||||
${utillinux}/bin/mount -o bind /nix/store /mnt/inst/nix/store
|
||||
mkdir -p /mnt/inst${storeDir}
|
||||
${utillinux}/bin/mount -o bind ${storeDir} /mnt/inst${storeDir}
|
||||
${utillinux}/bin/mount -o bind /proc /mnt/proc
|
||||
${utillinux}/bin/mount -o bind /dev /mnt/dev
|
||||
|
||||
@ -653,7 +654,7 @@ rec {
|
||||
|
||||
rm /mnt/.debug
|
||||
|
||||
${utillinux}/bin/umount /mnt/inst/nix/store
|
||||
${utillinux}/bin/umount /mnt/inst${storeDir}
|
||||
${utillinux}/bin/umount /mnt/proc
|
||||
${utillinux}/bin/umount /mnt/dev
|
||||
${utillinux}/bin/umount /mnt
|
||||
|
||||
pkgs/development/compilers/pakcs/case-insensitive.patch (new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
--- www/Makefile.orig 2016-10-10 21:04:36.000000000 +0300
|
||||
+++ pakcs-1.14.0/www/Makefile 2016-10-10 21:07:56.000000000 +0300
|
||||
@@ -6,7 +6,7 @@ all: submitform Registry
|
||||
submitform: SubmitForm.curry $(LIBDIR)/HtmlCgi.curry \
|
||||
$(LIBDIR)/NamedSocket.curry $(LIBDIR)/CPNS.curry
|
||||
$(REPL) $(REPL_OPTS) :load SubmitForm :save :q
|
||||
- mv SubmitForm submitform
|
||||
+ mv SubmitForm submitform.orig && mv submitform.orig submitform
|
||||
|
||||
Registry: Registry.curry $(LIBDIR)/HtmlCgi.curry
|
||||
$(REPL) $(REPL_OPTS) :load Registry :save :q
|
||||
--- currytools/erd2curry/Makefile.orig 2016-10-10 21:13:49.000000000 +0300
|
||||
+++ pakcs-1.14.0/currytools/erd2curry/Makefile 2016-10-10 21:21:14.000000000 +0300
|
||||
@@ -32,4 +32,4 @@ uninstall: clean
|
||||
erd2curry: $(DEPS)
|
||||
# create saved state for top-level function "main":
|
||||
$(REPL) $(REPL_OPTS) :load ERD2Curry :save "main \"$(CURDIR)\"" :q
|
||||
- mv ERD2Curry $@
|
||||
+ mv ERD2Curry $@.orig && mv $@.orig $@
|
||||
@ -82,7 +82,10 @@ stdenv.mkDerivation rec {
|
||||
|
||||
buildInputs = [ swiPrologLocked makeWrapper glibcLocales rlwrap tk which ];
|
||||
|
||||
patches = [ ./adjust-buildsystem.patch ];
|
||||
patches = [
|
||||
./adjust-buildsystem.patch
|
||||
./case-insensitive.patch
|
||||
];
|
||||
|
||||
configurePhase = ''
|
||||
# Phony HOME.
|
||||
@ -151,6 +154,6 @@ stdenv.mkDerivation rec {
|
||||
'';
|
||||
|
||||
maintainers = [ stdenv.lib.maintainers.gnidorah ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
platforms = stdenv.lib.platforms.unix;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
{ stdenv, fetchurl, gmp, readline, openssl, libjpeg, unixODBC, zlib
|
||||
, libXinerama, libXft, libXpm, libSM, libXt, freetype, pkgconfig
|
||||
, fontconfig
|
||||
, fontconfig, makeWrapper ? stdenv.isDarwin
|
||||
}:
|
||||
|
||||
let
|
||||
@ -15,7 +15,8 @@ stdenv.mkDerivation {
|
||||
};
|
||||
|
||||
buildInputs = [ gmp readline openssl libjpeg unixODBC libXinerama
|
||||
libXft libXpm libSM libXt zlib freetype pkgconfig fontconfig ];
|
||||
libXft libXpm libSM libXt zlib freetype pkgconfig fontconfig ]
|
||||
++ stdenv.lib.optional stdenv.isDarwin makeWrapper;
|
||||
|
||||
hardeningDisable = [ "format" ];
|
||||
|
||||
@ -23,12 +24,24 @@ stdenv.mkDerivation {
|
||||
|
||||
buildFlags = "world";
|
||||
|
||||
# For macOS: still not fixed in upstream: "abort trap 6" when called
|
||||
# through symlink, so wrap binary.
|
||||
# We reinvent wrapProgram here but omit argv0 pass in order to not
|
||||
# break PAKCS package build. This is also safe for SWI-Prolog, since
|
||||
# there is no wrapping environment and hence no need to spoof $0
|
||||
postInstall = stdenv.lib.optionalString stdenv.isDarwin ''
|
||||
local prog="$out/bin/swipl"
|
||||
local hidden="$(dirname "$prog")/.$(basename "$prog")"-wrapped
|
||||
mv $prog $hidden
|
||||
makeWrapper $hidden $prog
|
||||
'';
|
||||
|
||||
meta = {
|
||||
homepage = http://www.swi-prolog.org/;
|
||||
description = "A Prolog compiler and interpreter";
|
||||
license = "LGPL";
|
||||
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
platforms = stdenv.lib.platforms.unix;
|
||||
maintainers = [ stdenv.lib.maintainers.peti ];
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
 { stdenv, fetchgit, clang }:

 stdenv.mkDerivation rec {
-  name = "mujs-2016-02-22";
+  name = "mujs-2016-09-21";

   src = fetchgit {
     url = git://git.ghostscript.com/mujs.git;
-    rev = "624f975aae6b451e35406d8cdde808626052ce2c";
-    sha256 = "0cab7x73v380wklpkbrc1k4iyh4q2jyx3zxbymlfi1spmrpn6skl";
+    rev = "5c337af4b3df80cf967e4f9f6a21522de84b392a";
+    sha256 = "1x5g6nycggc83md2dbr2nahjbkkmmn64bg25a8hih7z72sw41dgw";
   };

   buildInputs = [ clang ];
@@ -1,11 +1,11 @@
 { stdenv, fetchurl }:

 stdenv.mkDerivation rec {
-  name = "libsodium-1.0.10";
+  name = "libsodium-1.0.11";

   src = fetchurl {
     url = "https://download.libsodium.org/libsodium/releases/${name}.tar.gz";
-    sha256 = "1gn45g956lyz8l6iq187yc6l627vyivyp8qc5dkr6dnhdnlqddvi";
+    sha256 = "0rf7z6bgpnf8lyz8sph4h43fbb28pmj4dgybf0hsxxj97kdljid1";
   };

   outputs = [ "out" "dev" ];
pkgs/development/libraries/libuecc/default.nix (new file, 24 lines)
@ -0,0 +1,24 @@
|
||||
{ stdenv, fetchgit, cmake }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "7";
|
||||
name = "libuecc-${version}";
|
||||
|
||||
src = fetchgit {
|
||||
url = "git://git.universe-factory.net/libuecc";
|
||||
rev = "refs/tags/v${version}";
|
||||
sha256 = "1sm05aql75sh13ykgsv3ns4x4zzw9lvzid6misd22gfgf6r9n5fs";
|
||||
};
|
||||
|
||||
buildInputs = [ cmake ];
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
meta = with stdenv.lib; {
|
||||
description = "Very small Elliptic Curve Cryptography library";
|
||||
homepage = https://git.universe-factory.net/libuecc;
|
||||
license = licenses.bsd2;
|
||||
platforms = platforms.unix;
|
||||
maintainers = with maintainers; [ fpletz ];
|
||||
};
|
||||
}
|
||||
@ -19,6 +19,7 @@ let
|
||||
|
||||
patches =
|
||||
(args.patches or [])
|
||||
++ [ ./nix-ssl-cert-file.patch ]
|
||||
++ optional (versionOlder version "1.1.0") ./use-etc-ssl-certs.patch
|
||||
++ optional stdenv.isCygwin ./1.0.1-cygwin64.patch
|
||||
++ optional
|
||||
|
||||
pkgs/development/libraries/openssl/nix-ssl-cert-file.patch (new file, 15 lines)
@ -0,0 +1,15 @@
|
||||
diff -ru -x '*~' openssl-1.0.2j-orig/crypto/x509/by_file.c openssl-1.0.2j/crypto/x509/by_file.c
|
||||
--- openssl-1.0.2j-orig/crypto/x509/by_file.c 2016-09-26 11:49:07.000000000 +0200
|
||||
+++ openssl-1.0.2j/crypto/x509/by_file.c 2016-10-13 16:54:31.400288302 +0200
|
||||
@@ -97,7 +97,10 @@
|
||||
switch (cmd) {
|
||||
case X509_L_FILE_LOAD:
|
||||
if (argl == X509_FILETYPE_DEFAULT) {
|
||||
- file = (char *)getenv(X509_get_default_cert_file_env());
|
||||
+ file = (char *)getenv("NIX_SSL_CERT_FILE");
|
||||
+ if (!file)
|
||||
+ file = (char *)getenv(X509_get_default_cert_file_env());
|
||||
+ fprintf(stderr, "OPEN %s", file);
|
||||
if (file)
|
||||
ok = (X509_load_cert_crl_file(ctx, file,
|
||||
X509_FILETYPE_PEM) != 0);
|
||||
@ -15,6 +15,6 @@ stdenv.mkDerivation rec {
|
||||
description = "ODBC driver manager for Unix";
|
||||
homepage = http://www.unixodbc.org/;
|
||||
license = licenses.lgpl2;
|
||||
platforms = platforms.linux;
|
||||
platforms = platforms.unix;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -29,6 +29,30 @@ profile. The set of available libraries can be discovered by running the
 command `nix-env -f "<nixpkgs>" -qaP -A rPackages`. The first column from that
 output is the name that has to be passed to rWrapper in the code snipped above.

+However, if you'd like to add a file to your project source to make the
+environment available for other contributors, you can create a `default.nix`
+file like so:
+```nix
+let
+  pkgs = import <nixpkgs> {};
+  stdenv = pkgs.stdenv;
+in with pkgs; {
+  myProject = stdenv.mkDerivation {
+    name = "myProject";
+    version = "1";
+    src = if pkgs.lib.inNixShell then null else nix;
+
+    buildInputs = with rPackages; [
+      R
+      ggplot2
+      knitr
+    ];
+  };
+}
+```
+and then run `nix-shell .` to be dropped into a shell with those packages
+available.
+
 ## Updating the package set

 ```bash
@ -6,7 +6,14 @@
|
||||
, tree
|
||||
}@defs:
|
||||
|
||||
{ name, gemset, gemfile, lockfile, ruby ? defs.ruby, gemConfig ? defaultGemConfig
|
||||
{ name ? null
|
||||
, pname ? null
|
||||
, gemdir ? null
|
||||
, gemfile ? null
|
||||
, lockfile ? null
|
||||
, gemset ? null
|
||||
, ruby ? defs.ruby
|
||||
, gemConfig ? defaultGemConfig
|
||||
, postBuild ? null
|
||||
, document ? []
|
||||
, meta ? {}
|
||||
@ -16,54 +23,95 @@
|
||||
}@args:
|
||||
|
||||
let
|
||||
importedGemset = import gemset;
|
||||
drvName =
|
||||
if name != null then name
|
||||
else if pname != null then "${toString pname}-${mainGem.version}"
|
||||
else throw "bundlerEnv: either pname or name must be set";
|
||||
|
||||
mainGem =
|
||||
if pname == null then null
|
||||
else gems."${pname}" or (throw "bundlerEnv: gem ${pname} not found");
|
||||
|
||||
gemfile' =
|
||||
if gemfile == null then gemdir + "/Gemfile"
|
||||
else gemfile;
|
||||
|
||||
lockfile' =
|
||||
if lockfile == null then gemdir + "/Gemfile.lock"
|
||||
else lockfile;
|
||||
|
||||
gemset' =
|
||||
if gemset == null then gemdir + "/gemset.nix"
|
||||
else gemset;
|
||||
|
||||
importedGemset = import gemset';
|
||||
|
||||
filteredGemset = (lib.filterAttrs (name: attrs:
|
||||
if (builtins.hasAttr "groups" attrs)
|
||||
then (builtins.any (gemGroup: builtins.any (group: group == gemGroup) groups) attrs.groups)
|
||||
else true
|
||||
) importedGemset);
|
||||
|
||||
applyGemConfigs = attrs:
|
||||
(if gemConfig ? "${attrs.gemName}"
|
||||
then attrs // gemConfig."${attrs.gemName}" attrs
|
||||
else attrs);
|
||||
|
||||
configuredGemset = lib.flip lib.mapAttrs filteredGemset (name: attrs:
|
||||
applyGemConfigs (attrs // { inherit ruby; gemName = name; })
|
||||
);
|
||||
|
||||
hasBundler = builtins.hasAttr "bundler" filteredGemset;
|
||||
bundler = if hasBundler then gems.bundler else defs.bundler.override (attrs: { inherit ruby; });
|
||||
|
||||
bundler =
|
||||
if hasBundler then gems.bundler
|
||||
else defs.bundler.override (attrs: { inherit ruby; });
|
||||
|
||||
gems = lib.flip lib.mapAttrs configuredGemset (name: attrs:
|
||||
buildRubyGem ((removeAttrs attrs ["source"]) // attrs.source // {
|
||||
inherit ruby;
|
||||
gemName = name;
|
||||
gemPath = map (gemName: gems."${gemName}") (attrs.dependencies or []);
|
||||
}));
|
||||
|
||||
# We have to normalize the Gemfile.lock, otherwise bundler tries to be
|
||||
# helpful by doing so at run time, causing executables to immediately bail
|
||||
# out. Yes, I'm serious.
|
||||
confFiles = runCommand "gemfile-and-lockfile" {} ''
|
||||
mkdir -p $out
|
||||
cp ${gemfile} $out/Gemfile
|
||||
cp ${lockfile} $out/Gemfile.lock
|
||||
cp ${gemfile'} $out/Gemfile
|
||||
cp ${lockfile'} $out/Gemfile.lock
|
||||
'';
|
||||
|
||||
envPaths = lib.attrValues gems ++ lib.optional (!hasBundler) bundler;
|
||||
|
||||
binPaths = if mainGem != null then [ mainGem ] else envPaths;
|
||||
|
||||
bundlerEnv = buildEnv {
|
||||
inherit name ignoreCollisions;
|
||||
inherit ignoreCollisions;
|
||||
|
||||
name = drvName;
|
||||
|
||||
paths = envPaths;
|
||||
pathsToLink = [ "/lib" ];
|
||||
|
||||
postBuild = ''
|
||||
${ruby}/bin/ruby ${./gen-bin-stubs.rb} \
|
||||
"${ruby}/bin/ruby" \
|
||||
"${confFiles}/Gemfile" \
|
||||
"$out/${ruby.gemPath}" \
|
||||
"${bundler}/${ruby.gemPath}" \
|
||||
${lib.escapeShellArg envPaths} \
|
||||
${lib.escapeShellArg binPaths} \
|
||||
${lib.escapeShellArg groups}
|
||||
'' + lib.optionalString (postBuild != null) postBuild;
|
||||
|
||||
meta = { platforms = ruby.meta.platforms; } // meta;
|
||||
|
||||
passthru = rec {
|
||||
inherit ruby bundler meta gems;
|
||||
inherit ruby bundler gems;
|
||||
|
||||
wrappedRuby = stdenv.mkDerivation {
|
||||
name = "wrapped-ruby-${name}";
|
||||
name = "wrapped-ruby-${drvName}";
|
||||
nativeBuildInputs = [ makeWrapper ];
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
@ -87,7 +135,7 @@ let
|
||||
require 'bundler/setup'
|
||||
'';
|
||||
in stdenv.mkDerivation {
|
||||
name = "interactive-${name}-environment";
|
||||
name = "interactive-${drvName}-environment";
|
||||
nativeBuildInputs = [ wrappedRuby bundlerEnv ];
|
||||
shellHook = ''
|
||||
export OLD_IRBRC="$IRBRC"
|
||||
@ -102,7 +150,5 @@ let
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
bundlerEnv
|
||||
bundlerEnv
|
||||
|
||||
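With the `pname` and `gemdir` arguments added above, a package can now point `bundlerEnv` at a directory containing `Gemfile`, `Gemfile.lock`, and `gemset.nix` instead of passing each file explicitly; the `travis`, `riemann-dash`, and `fluentd` changes below do exactly that. A minimal sketch, where the gem name `some-tool` is a placeholder and `./.` is assumed to hold the three bundler files:

```nix
# Sketch of the new bundlerEnv calling convention introduced above.
# "some-tool" is a placeholder gem name; ./. must contain Gemfile,
# Gemfile.lock and gemset.nix.
{ bundlerEnv, ruby }:

bundlerEnv {
  inherit ruby;
  pname = "some-tool";   # main gem; also used to derive the derivation name
  gemdir = ./.;          # directory providing Gemfile, Gemfile.lock, gemset.nix
}
```

Per the implementation above, naming a main gem via `pname` limits the generated `bin/` stubs to that gem, while the environment can still be extended with `meta` and other attributes as the call sites below show.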
@ -43,6 +43,9 @@ stdenv.mkDerivation rec {
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
# darwin build fails with format hardening since v7.12
|
||||
hardeningDisable = stdenv.lib.optionals stdenv.isDarwin [ "format" ];
|
||||
|
||||
configureFlags = with stdenv.lib;
|
||||
[ "--with-gmp=${gmp.dev}" "--with-mpfr=${mpfr.dev}" "--with-system-readline"
|
||||
"--with-system-zlib" "--with-expat" "--with-libexpat-prefix=${expat.dev}"
|
||||
|
||||
@ -13,7 +13,7 @@ stdenv.mkDerivation rec {
|
||||
patches = [(fetchpatch {
|
||||
name = "perl-5.22.patch";
|
||||
url = "https://anonscm.debian.org/viewvc/pkg-gnome/desktop/unstable/intltool"
|
||||
+ "/debian/patches/perl5.22-regex-fixes?revision=47255&view=co";
|
||||
+ "/debian/patches/perl5.22-regex-fixes?revision=47258&view=co";
|
||||
sha256 = "17clqczb9fky7hp8czxa0fy82b5478irvz4f3fnans3sqxl95hx3";
|
||||
})];
|
||||
|
||||
|
||||
@ -1,29 +1,14 @@
|
||||
{ stdenv, lib, bundlerEnv, ruby }:
|
||||
{ lib, bundlerEnv, ruby }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
name = "travis-${version}";
|
||||
version = env.gems.travis.version;
|
||||
|
||||
env = bundlerEnv {
|
||||
inherit ruby;
|
||||
name = "${name}-gems";
|
||||
gemset = ./gemset.nix;
|
||||
gemfile = ./Gemfile;
|
||||
lockfile = ./Gemfile.lock;
|
||||
};
|
||||
|
||||
phases = ["installPhase"];
|
||||
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
ln -s ${env}/bin/travis $out/bin/travis
|
||||
'';
|
||||
bundlerEnv {
|
||||
inherit ruby;
|
||||
pName = "travis";
|
||||
gemdir = ./.;
|
||||
|
||||
meta = with lib; {
|
||||
description = "CLI and Ruby client library for Travis CI";
|
||||
homepage = https://github.com/travis-ci/travis.rb;
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ zimbatm ];
|
||||
platforms = ruby.meta.platforms;
|
||||
};
|
||||
}
|
||||
|
||||
@ -6,7 +6,8 @@
|
||||
, preBuild ? ""
|
||||
, extraConfigFlags ? []
|
||||
, extraBuildInputs ? []
|
||||
, ...
|
||||
, patches ? [],
|
||||
...
|
||||
}:
|
||||
|
||||
assert stdenv.system != "armv5tel-linux";
|
||||
@ -44,7 +45,7 @@ in stdenv.mkDerivation {
|
||||
PATH=$out/bin:$PATH patchShebangs $out
|
||||
'';
|
||||
|
||||
patches = stdenv.lib.optionals stdenv.isDarwin [ ./no-xcode.patch ];
|
||||
patches = patches ++ stdenv.lib.optionals stdenv.isDarwin [ ./no-xcode.patch ];
|
||||
|
||||
buildInputs = extraBuildInputs
|
||||
++ [ python which zlib libuv openssl ]
|
||||
|
||||
@ -4,9 +4,9 @@
|
||||
}@args:
|
||||
|
||||
import ./nodejs.nix (args // rec {
|
||||
version = "4.4.6";
|
||||
version = "4.6.0";
|
||||
src = fetchurl {
|
||||
url = "http://nodejs.org/dist/v${version}/node-v${version}.tar.xz";
|
||||
sha256 = "0f6bbfbea525469c91932b1aac35e0810e6bcda96f1c720e42a433942ee66106";
|
||||
sha256 = "1566q1kkv8j30fgqx8sm2h8323f38wwpa1hfb10gr6z46jyhv4a2";
|
||||
};
|
||||
})
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
{ stdenv, fetchurl, openssl, python, zlib, libuv, v8, utillinux, http-parser
|
||||
, pkgconfig, runCommand, which, libtool
|
||||
, pkgconfig, runCommand, which, libtool, fetchpatch
|
||||
, callPackage
|
||||
, darwin ? null
|
||||
}@args:
|
||||
@ -8,12 +8,18 @@ let
|
||||
inherit (darwin.apple_sdk.frameworks) CoreServices ApplicationServices;
|
||||
|
||||
in import ./nodejs.nix (args // rec {
|
||||
version = "6.7.0";
|
||||
sha256 = "1r9vvnczjczqs29ja8gmbqgsfgkg0dph4qkaxb3yh7mb98r2ic6f";
|
||||
version = "6.8.0";
|
||||
sha256 = "13arzwki13688hr1lh871y06lrk019g4hkasmg11arm8j1dcwcpq";
|
||||
extraBuildInputs = stdenv.lib.optionals stdenv.isDarwin
|
||||
[ CoreServices ApplicationServices ];
|
||||
preBuild = stdenv.lib.optionalString stdenv.isDarwin ''
|
||||
sed -i -e "s|tr1/type_traits|type_traits|g" \
|
||||
-e "s|std::tr1|std|" src/util.h
|
||||
'';
|
||||
patches = [
|
||||
(fetchpatch {
|
||||
url = "https://github.com/nodejs/node/commit/fc164acbbb700fd50ab9c04b47fc1b2687e9c0f4.patch";
|
||||
sha256 = "1rms3n09622xmddn013yvf5c6p3s8w8s0d2h813zs8c1l15k4k1i";
|
||||
})
|
||||
];
|
||||
})
|
||||
|
||||
@ -234,8 +234,6 @@ __os_assert_log_ctx
|
||||
__os_assumes_log
|
||||
__os_assumes_log_ctx
|
||||
__os_avoid_tail_call
|
||||
__os_crash
|
||||
__os_crash_callback
|
||||
__os_debug_log
|
||||
__os_debug_log_error_str
|
||||
__putenvp
|
||||
@ -267,7 +265,6 @@ __unsetenvp
|
||||
__utmpxname
|
||||
_a64l
|
||||
_abort
|
||||
_abort_report_np
|
||||
_abs
|
||||
_acl_add_flag_np
|
||||
_acl_add_perm
|
||||
|
||||
@ -57,7 +57,6 @@ ___ioctl
|
||||
___iopolicysys
|
||||
___kdebug_trace
|
||||
___kdebug_trace64
|
||||
___kdebug_trace_string
|
||||
___kernelVersionNumber
|
||||
___kernelVersionString
|
||||
___kill
|
||||
@ -82,7 +81,6 @@ ___mac_set_file
|
||||
___mac_set_link
|
||||
___mac_set_proc
|
||||
___mac_syscall
|
||||
___microstackshot
|
||||
___mkdir_extended
|
||||
___mkfifo_extended
|
||||
___mmap
|
||||
@ -107,8 +105,6 @@ ___posix_spawn
|
||||
___pread_nocancel
|
||||
___proc_info
|
||||
___process_policy
|
||||
___pselect
|
||||
___pselect_nocancel
|
||||
___psynch_cvbroad
|
||||
___psynch_cvclrprepost
|
||||
___psynch_cvsignal
|
||||
@ -181,7 +177,6 @@ ___sigsuspend
|
||||
___sigsuspend_nocancel
|
||||
___sigwait
|
||||
___socketpair
|
||||
___stack_snapshot_with_config
|
||||
___stat64_extended
|
||||
___stat_extended
|
||||
___syscall
|
||||
@ -198,7 +193,6 @@ ___vfork
|
||||
___wait4
|
||||
___wait4_nocancel
|
||||
___waitid_nocancel
|
||||
___work_interval_ctl
|
||||
___workq_kernreturn
|
||||
___workq_open
|
||||
___write_nocancel
|
||||
@ -418,7 +412,6 @@ _getsockopt
|
||||
_getuid
|
||||
_getwgroups_np
|
||||
_getxattr
|
||||
_grab_pgo_data
|
||||
_guarded_close_np
|
||||
_guarded_kqueue_np
|
||||
_guarded_open_dprotected_np
|
||||
@ -429,7 +422,6 @@ _guarded_writev_np
|
||||
_host_create_mach_voucher
|
||||
_host_default_memory_manager
|
||||
_host_get_UNDServer
|
||||
_host_get_atm_diagnostic_flag
|
||||
_host_get_boot_info
|
||||
_host_get_clock_control
|
||||
_host_get_clock_service
|
||||
@ -454,7 +446,6 @@ _host_security_set_task_token
|
||||
_host_self
|
||||
_host_self_trap
|
||||
_host_set_UNDServer
|
||||
_host_set_atm_diagnostic_flag
|
||||
_host_set_exception_ports
|
||||
_host_set_special_port
|
||||
_host_statistics
|
||||
@ -470,10 +461,8 @@ _ioctl
|
||||
_issetugid
|
||||
_kas_info
|
||||
_kdebug_trace
|
||||
_kdebug_trace_string
|
||||
_kevent
|
||||
_kevent64
|
||||
_kevent_qos
|
||||
_kext_request
|
||||
_kill
|
||||
_kmod_control
|
||||
@ -510,7 +499,6 @@ _mach_host_self
|
||||
_mach_init
|
||||
_mach_make_memory_entry
|
||||
_mach_make_memory_entry_64
|
||||
_mach_memory_info
|
||||
_mach_memory_object_memory_entry
|
||||
_mach_memory_object_memory_entry_64
|
||||
_mach_msg
|
||||
@ -647,7 +635,6 @@ _munlock
|
||||
_munlockall
|
||||
_munmap
|
||||
_necp_match_policy
|
||||
_netagent_trigger
|
||||
_netname_check_in
|
||||
_netname_check_out
|
||||
_netname_look_up
|
||||
@ -686,7 +673,6 @@ _posix_spawn_file_actions_addopen
|
||||
_posix_spawn_file_actions_destroy
|
||||
_posix_spawn_file_actions_init
|
||||
_posix_spawnattr_destroy
|
||||
_posix_spawnattr_get_darwin_role_np
|
||||
_posix_spawnattr_get_qos_clamp_np
|
||||
_posix_spawnattr_getbinpref_np
|
||||
_posix_spawnattr_getcpumonitor
|
||||
@ -698,7 +684,6 @@ _posix_spawnattr_getprocesstype_np
|
||||
_posix_spawnattr_getsigdefault
|
||||
_posix_spawnattr_getsigmask
|
||||
_posix_spawnattr_init
|
||||
_posix_spawnattr_set_darwin_role_np
|
||||
_posix_spawnattr_set_importancewatch_port_np
|
||||
_posix_spawnattr_set_qos_clamp_np
|
||||
_posix_spawnattr_setauditsessionport_np
|
||||
@ -734,10 +719,8 @@ _proc_importance_assertion_begin_with_msg
|
||||
_proc_importance_assertion_complete
|
||||
_proc_kmsgbuf
|
||||
_proc_libversion
|
||||
_proc_list_uptrs
|
||||
_proc_listallpids
|
||||
_proc_listchildpids
|
||||
_proc_listcoalitions
|
||||
_proc_listpgrppids
|
||||
_proc_listpids
|
||||
_proc_listpidspath
|
||||
@ -898,15 +881,6 @@ _sigsuspend$NOCANCEL
|
||||
_socket
|
||||
_socket_delegate
|
||||
_socketpair
|
||||
_stackshot_capture_with_config
|
||||
_stackshot_config_create
|
||||
_stackshot_config_dealloc
|
||||
_stackshot_config_dealloc_buffer
|
||||
_stackshot_config_get_stackshot_buffer
|
||||
_stackshot_config_get_stackshot_size
|
||||
_stackshot_config_set_flags
|
||||
_stackshot_config_set_pid
|
||||
_stackshot_config_set_size_hint
|
||||
_stat
|
||||
_stat$INODE64
|
||||
_stat64
|
||||
@ -973,7 +947,6 @@ _thread_depress_abort
|
||||
_thread_get_assignment
|
||||
_thread_get_exception_ports
|
||||
_thread_get_mach_voucher
|
||||
_thread_get_register_pointer_values
|
||||
_thread_get_special_port
|
||||
_thread_get_state
|
||||
_thread_info
|
||||
@ -1040,10 +1013,6 @@ _waitevent
|
||||
_waitid
|
||||
_waitid$NOCANCEL
|
||||
_watchevent
|
||||
_work_interval_create
|
||||
_work_interval_destroy
|
||||
_work_interval_notify
|
||||
_work_interval_notify_simple
|
||||
_write
|
||||
_write$NOCANCEL
|
||||
_writev
|
||||
|
||||
@ -8,17 +8,19 @@ stdenv.mkDerivation rec {
|
||||
sha256 = "0nlwazxbnn0k6q5f5b09wdhw0f194lpzkp3l7vxansqhfczmcyx8";
|
||||
};
|
||||
|
||||
buildInputs = [ gettext libnl ncurses pciutils pkgconfig zlib ];
|
||||
nativeBuildInputs = [ pkgconfig ];
|
||||
buildInputs = [ gettext libnl ncurses pciutils zlib ];
|
||||
|
||||
postPatch = ''
|
||||
substituteInPlace src/main.cpp --replace "/sbin/modprobe" "modprobe"
|
||||
substituteInPlace src/calibrate/calibrate.cpp --replace "/usr/bin/xset" "xset"
|
||||
'';
|
||||
|
||||
meta = {
|
||||
meta = with stdenv.lib; {
|
||||
description = "Analyze power consumption on Intel-based laptops";
|
||||
license = stdenv.lib.licenses.gpl2;
|
||||
maintainers = [ stdenv.lib.maintainers.chaoflow ];
|
||||
platforms = stdenv.lib.platforms.linux;
|
||||
homepage = https://01.org/powertop;
|
||||
license = licenses.gpl2;
|
||||
maintainers = with maintainers; [ chaoflow fpletz ];
|
||||
platforms = platforms.linux;
|
||||
};
|
||||
}
|
||||
|
||||
@ -1,21 +1,9 @@
|
||||
{ bundlerEnv, lib, stdenv }:
|
||||
{ bundlerEnv, lib, ruby }:
|
||||
|
||||
let
|
||||
name = "riemann-dash-${env.gems.riemann-dash.version}";
|
||||
|
||||
env = bundlerEnv {
|
||||
inherit name;
|
||||
gemfile = ./Gemfile;
|
||||
lockfile = ./Gemfile.lock;
|
||||
gemset = ./gemset.nix;
|
||||
};
|
||||
|
||||
in stdenv.mkDerivation {
|
||||
inherit name;
|
||||
buildCommand = ''
|
||||
mkdir -p $out/bin
|
||||
ln -s ${env}/bin/riemann-dash $out/bin/riemann-dash
|
||||
'';
|
||||
bundlerEnv {
|
||||
inherit ruby;
|
||||
pName = "riemann-dash";
|
||||
gemdir = ./.;
|
||||
|
||||
meta = with lib; {
|
||||
description = "A javascript, websockets-powered dashboard for Riemann";
|
||||
|
||||
@ -1,8 +1,10 @@
{ system ? builtins.currentSystem }:
{ pkgspath ? ../../.., test-pkgspath ? pkgspath, system ? builtins.currentSystem }:

with import ../../.. { inherit system; };
with import pkgspath { inherit system; };

rec {
let
llvmPackages = llvmPackages_37;
in rec {
coreutils_ = coreutils.override (args: {
# We want coreutils without ACL support.
aclSupport = false;
@ -19,17 +21,15 @@ rec {
buildInputs = [nukeReferences cpio];

buildCommand = ''
mkdir -p $out/bin $out/lib
mkdir -p $out/bin $out/lib $out/lib/system

# Our (fake) loader
cp -d ${darwin.dyld}/lib/dyld $out/lib/

# C standard library stuff
cp -d ${darwin.Libsystem}/lib/*.o $out/lib/
cp -d ${darwin.Libsystem}/lib/*.dylib $out/lib/
# We're not going to bundle the actual libSystem.dylib; instead we reconstruct it on
# the other side. See the notes in stdenv/darwin/default.nix for more information.
# We also need the .o files for various low-level boot stuff.
cp -d ${darwin.Libsystem}/lib/*.o $out/lib
cp -d ${darwin.Libsystem}/lib/system/*.dylib $out/lib/system

# Resolv is actually a link to another package, so let's copy it properly
rm $out/lib/libresolv.9.dylib
cp -L ${darwin.Libsystem}/lib/libresolv.9.dylib $out/lib

cp -rL ${darwin.Libsystem}/include $out
@ -78,11 +78,11 @@ rec {

cp -rL ${llvmPackages.clang-unwrapped}/lib/clang $out/lib

cp -d ${libcxx}/lib/libc++*.dylib $out/lib
cp -d ${libcxxabi}/lib/libc++abi*.dylib $out/lib
cp -d ${llvmPackages.libcxx}/lib/libc++*.dylib $out/lib
cp -d ${llvmPackages.libcxxabi}/lib/libc++abi*.dylib $out/lib

mkdir $out/include
cp -rd ${libcxx}/include/c++ $out/include
cp -rd ${llvmPackages.libcxx}/include/c++ $out/include

cp -d ${icu.out}/lib/libicu*.dylib $out/lib
cp -d ${zlib.out}/lib/libz.* $out/lib
@ -107,33 +107,26 @@ rec {
done
}

fix_dyld() {
# This is clearly a hack. Once we have an install_name_tool-alike that can patch dyld, this will be nicer.
${perl}/bin/perl -i -0777 -pe 's/\/nix\/store\/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-dyld-239\.4\/lib\/dyld/\/usr\/lib\/dyld\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00/sg' "$1"
}

# Strip executables even further
for i in $out/bin/*; do
if test -x $i -a ! -L $i; then
chmod +w $i

fix_dyld $i
strip $i || true
fi
done

for i in $out/bin/* $out/lib/*.dylib $out/lib/clang/*/lib/darwin/*.dylib $out/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation; do
if test -x $i -a ! -L $i; then
if test -x "$i" -a ! -L "$i"; then
echo "Adding rpath to $i"
rpathify $i
fi
done

nuke-refs $out/lib/*
nuke-refs $out/lib/system/*
nuke-refs $out/lib/clang/*/lib/darwin/*
nuke-refs $out/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation

set -x
mkdir $out/.pack
mv $out/* $out/.pack
mv $out/.pack $out/pack
@ -148,10 +141,6 @@ rec {
strip $out/on-server/*
nuke-refs $out/on-server/*

for i in $out/on-server/*; do
fix_dyld $i
done

(cd $out/pack && (find | cpio -o -H newc)) | bzip2 > $out/on-server/bootstrap-tools.cpio.bz2
'';

@ -294,8 +283,8 @@ rec {

# The ultimate test: bootstrap a whole stdenv from the tools specified above and get a package set out of it
test-pkgs = let
stdenv = import ./. { inherit system bootstrapFiles; };
in import ../../.. {
stdenv = import (test-pkgspath + "/pkgs/stdenv/darwin") { inherit system bootstrapFiles; };
in import test-pkgspath {
inherit system;
bootStdenv = stdenv.stdenvDarwin;
};

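The new pkgspath / test-pkgspath arguments let the Darwin bootstrap tools be built from one nixpkgs tree while the test bootstrap runs against another. A minimal usage sketch, assuming the file changed above is the Darwin make-bootstrap-tools expression (the second path is purely illustrative):

# Hypothetical invocation; the arguments are the ones introduced by this diff.
import ./make-bootstrap-tools.nix {
  pkgspath = ../../..;                       # tree the bootstrap tools are built from
  test-pkgspath = /home/user/nixpkgs-next;   # hypothetical checkout used for the test bootstrap
  system = "x86_64-darwin";
}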
@ -1,6 +1,6 @@
{ stdenv, lib, fetchurl, intltool, pkgconfig, pythonPackages, bluez, polkit, gtk3
, obex_data_server, xdg_utils, libnotify, dconf, gsettings_desktop_schemas, dnsmasq, dhcp
, withPulseAudio ? true, libpulseaudio }:
, hicolor_icon_theme , withPulseAudio ? true, libpulseaudio }:

let
binPath = lib.makeBinPath [ xdg_utils dnsmasq dhcp ];
@ -16,7 +16,8 @@ in stdenv.mkDerivation rec {

nativeBuildInputs = [ intltool pkgconfig pythonPackages.wrapPython pythonPackages.cython ];

buildInputs = [ bluez gtk3 pythonPackages.python libnotify dconf gsettings_desktop_schemas ]
buildInputs = [ bluez gtk3 pythonPackages.python libnotify dconf
gsettings_desktop_schemas hicolor_icon_theme ]
++ pythonPath
++ lib.optional withPulseAudio libpulseaudio;


@ -1,12 +1,10 @@
{ stdenv, lib, bundlerEnv, ruby, curl }:

bundlerEnv {
name = "fluentd-0.14.0";

inherit ruby;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;

pname = "fluentd";
gemdir = ./.;

meta = with lib; {
description = "A data collector";

@ -2,12 +2,10 @@
, pkgconfig, which }:

bundlerEnv {
name = "lolcat-42.1.0";

inherit ruby;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;

pname = "lolcat";
gemdir = ./.;

meta = with lib; {
description = "A rainbow version of cat";

@ -25,6 +25,8 @@ stdenv.mkDerivation rec {
sha256 = "1v6q83qsrf7dgp3y5fa5vkppgqyy82pnsk8z9b4047b6fvclfwvv";
};

patches = [ ./nix-ssl-cert-file.patch ];

outputs = [ "bin" "dev" "out" "man" "devdoc" ];

nativeBuildInputs = [ pkgconfig perl ];

pkgs/tools/networking/curl/nix-ssl-cert-file.patch
@ -0,0 +1,14 @@
diff -ru -x '*~' curl-7.50.3-orig/src/tool_operate.c curl-7.50.3/src/tool_operate.c
--- curl-7.50.3-orig/src/tool_operate.c 2016-09-06 23:25:06.000000000 +0200
+++ curl-7.50.3/src/tool_operate.c 2016-10-14 11:51:48.999943142 +0200
@@ -269,7 +269,9 @@
capath_from_env = true;
}
else {
- env = curlx_getenv("SSL_CERT_FILE");
+ env = curlx_getenv("NIX_SSL_CERT_FILE");
+ if(!env)
+ env = curlx_getenv("SSL_CERT_FILE");
if(env) {
config->cacert = strdup(env);
if(!config->cacert) {
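With this patch the curl tool consults NIX_SSL_CERT_FILE before falling back to SSL_CERT_FILE, mirroring the git http.c change earlier in this merge. A hedged sketch of how a package might point curl at a CA bundle through the new variable (the wrapper name and attribute choices are illustrative, not part of the diff):

# Sketch only: wrap curl so NIX_SSL_CERT_FILE points at the cacert bundle.
{ runCommand, makeWrapper, curl, cacert }:

runCommand "curl-with-nix-certs" { nativeBuildInputs = [ makeWrapper ]; } ''
  mkdir -p $out/bin
  makeWrapper ${curl.bin}/bin/curl $out/bin/curl \
    --set NIX_SSL_CERT_FILE ${cacert}/etc/ssl/certs/ca-bundle.crt
''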
pkgs/tools/networking/fastd/default.nix
@ -0,0 +1,26 @@
{ stdenv, fetchgit, cmake, bison, pkgconfig
, libuecc, libsodium, libcap, json_c }:

stdenv.mkDerivation rec {
version = "18";
name = "fastd-${version}";

src = fetchgit {
url = "git://git.universe-factory.net/fastd";
rev = "refs/tags/v${version}";
sha256 = "0c9v3igv3812b3jr7jk75a2np658yy00b3i4kpbpdjgvqzc1jrq8";
};

nativeBuildInputs = [ pkgconfig bison cmake ];
buildInputs = [ libuecc libsodium libcap json_c ];

enableParallelBuilding = true;

meta = with stdenv.lib; {
description = "Fast and Secure Tunneling Daemon";
homepage = https://projects.universe-factory.net/projects/fastd/wiki;
license = with licenses; [ bsd2 bsd3 ];
platforms = platforms.linux;
maintainers = with maintainers; [ fpletz ];
};
}
@ -2,13 +2,13 @@

stdenv.mkDerivation rec {
name = "sshpass-${version}";
version = "1.05";

version = "1.06";

src = fetchurl {
url = "mirror://sourceforge/sshpass/sshpass-${version}.tar.gz";
sha256 = "0gj8r05h1hy01vh4csygyw21z2hcxb72qcxkxxi3y34alr98gxy3";
sha256 = "0q7fblaczb7kwbsz0gdy9267z0sllzgmf0c7z5c9mf88wv74ycn6";
};


meta = {
homepage = http://sourceforge.net/projects/sshpass/;
description = "Non-interactive ssh password auth";

@ -1,13 +1,9 @@
{ lib, bundlerEnv, ruby }:

bundlerEnv rec {
name = "fpm-${version}";

version = (import gemset).fpm.version;
inherit ruby;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;
pname = "fpm";
gemdir = ./.;

meta = with lib; {
description = "Tool to build packages for multiple platforms with ease";

@ -1,24 +1,9 @@
{ stdenv, lib, ruby, bundlerEnv, makeWrapper }:
{ bundlerEnv, lib, ruby }:

stdenv.mkDerivation rec {
name = "foreman-${env.gems.foreman.version}";

env = bundlerEnv {
inherit ruby;
name = "${name}-gems";
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;
};

phases = ["installPhase"];

nativeBuildInputs = [ makeWrapper ];

installPhase = ''
mkdir -p $out/bin
makeWrapper ${env}/bin/foreman $out/bin/foreman
'';
bundlerEnv {
inherit ruby;
pName = "foreman";
gemdir = ./.;

meta = with lib; {
description = "Process manager for applications with multiple components";

@ -1,22 +1,9 @@
{ lib, bundlerEnv, stdenv }:
{ lib, bundlerEnv, ruby }:

let
name = "hiera-eyaml-${env.gems.hiera-eyaml.version}";

env = bundlerEnv {
inherit name;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;
};

in stdenv.mkDerivation {
inherit name;

buildCommand = ''
mkdir -p $out/bin
ln -s ${env}/bin/eyaml $out/bin/eyaml
'';
bundlerEnv {
inherit ruby;
pName = "hiera-eyaml";
gemdir = ./.;

meta = with lib; {
description = "Per-value asymmetric encryption of sensitive data for Hiera";

@ -2,8 +2,8 @@

buildGoPackage rec {
name = "shfmt-${version}";
version = "2016-06-16";
rev = "8add0072d6abdc892e4617c95e8bba21ebe0beeb";
version = "0.2.0";
rev = "v${version}";

goPackagePath = "github.com/mvdan/sh";

@ -11,7 +11,7 @@ buildGoPackage rec {
owner = "mvdan";
repo = "sh";
inherit rev;
sha256 = "1m2lkcw6m5gdqjp17m01d822cj1p04qk6hm9m94ni2x19f16qs8m";
sha256 = "07jf9v6583vvmk07fp7xdlnh7rvgl6f06ib2588g3xf1wk9vrq3d";
};

meta = with stdenv.lib; {

@ -1,19 +1,9 @@
{ stdenv, lib, bundlerEnv, ruby_2_2, curl }:

bundlerEnv rec {
name = "asciidoctor-${version}";
version = "1.5.4";

pname = "asciidoctor";
ruby = ruby_2_2;
gemfile = ./Gemfile;
lockfile = ./Gemfile.lock;
gemset = ./gemset.nix;

# Delete dependencies' executables
postBuild = ''
find $out/bin -type f -not -wholename '*bin/asciidoctor*' -print0 \
| xargs -0 rm
'';
gemdir = ./.;

meta = with lib; {
description = "A faster Asciidoc processor written in Ruby";

@ -1567,6 +1567,8 @@ in
pillow;
};

fastd = callPackage ../tools/networking/fastd { };

fatsort = callPackage ../tools/filesystems/fatsort { };

fcitx = callPackage ../tools/inputmethods/fcitx {
@ -2306,7 +2308,7 @@ in
less = callPackage ../tools/misc/less { };

lf = callPackage ../tools/misc/lf {};


lhasa = callPackage ../tools/compression/lhasa {};

libcpuid = callPackage ../tools/misc/libcpuid { };
@ -2418,10 +2420,6 @@ in
libtool = darwin.cctools;
};

nodejs-5_x = callPackage ../development/web/nodejs/v5.nix {
libtool = darwin.cctools;
};

nodejs-6_x = callPackage ../development/web/nodejs/v6.nix {
libtool = darwin.cctools;
};
@ -2435,10 +2433,6 @@ in
nodejs = pkgs.nodejs-6_x;
};

nodePackages_5_x = callPackage ../development/node-packages/default-v5.nix {
nodejs = pkgs.nodejs-5_x;
};

nodePackages_4_x = callPackage ../development/node-packages/default-v4.nix {
nodejs = pkgs.nodejs-4_x;
};
@ -5280,7 +5274,10 @@ in
erlangR16 = callPackage ../development/interpreters/erlang/R16.nix {
inherit (darwin.apple_sdk.frameworks) Carbon Cocoa;
};
erlangR16_odbc = callPackage ../development/interpreters/erlang/R16.nix { odbcSupport = true; };
erlangR16_odbc = callPackage ../development/interpreters/erlang/R16.nix {
inherit (darwin.apple_sdk.frameworks) Carbon Cocoa;
odbcSupport = true;
};
erlangR17 = callPackage ../development/interpreters/erlang/R17.nix {
inherit (darwin.apple_sdk.frameworks) Carbon Cocoa;
};
@ -8165,6 +8162,8 @@ in

libu2f-server = callPackage ../development/libraries/libu2f-server { };

libuecc = callPackage ../development/libraries/libuecc { };

libui = callPackage ../development/libraries/libui { };

libunity = callPackage ../development/libraries/libunity { };
@ -15043,7 +15042,9 @@ in
gtk = gtk2;
};

kodiPlain = callPackage ../applications/video/kodi { };
kodiPlain = callPackage ../applications/video/kodi {
libva = libva-full;
};
xbmcPlain = kodiPlain;

kodiPlugins = recurseIntoAttrs (callPackage ../applications/video/kodi/plugins.nix {

@ -23,20 +23,11 @@ let
sha256 = "0r5pfbjbmdj46h20jm3iqmy969qd27ajyf0phjhgykv6j0cqjlgd";
};

imagick = if isPhp7 then imagick34 else imagick31;

imagick31 = assert !isPhp7; buildPecl {
name = "imagick-3.1.2";
sha256 = "14vclf2pqcgf3w8nzqbdw0b9v30q898344c84jdbw2sa62n6k1sj";
imagick = buildPecl {
name = "imagick-3.4.3RC1";
sha256 = "0siyxpszjz6s095s2g2854bhprjq49rf22v6syjiwvndg1pc9fsh";
configureFlags = "--with-imagick=${pkgs.imagemagick.dev}";
buildInputs = [ pkgs.pkgconfig ];
};

imagick34 = buildPecl {
name = "imagick-3.4.0RC4";
sha256 = "0fdkzdv3r8sm6y1x11kp3rxsimq6zf15xvi0mhn57svmnan4zh0i";
configureFlags = "--with-imagick=${pkgs.imagemagick.dev}";
buildInputs = [ pkgs.pkgconfig ];
nativeBuildInputs = [ pkgs.pkgconfig ];
};

# No support for PHP 7 yet

@ -21,57 +21,52 @@ with import ./release-lib.nix { inherit supportedSystems scrubJobs; };

let

unstable = pkgs.releaseTools.aggregate
{ name = "nixpkgs-${jobs.tarball.version}";
meta.description = "Release-critical builds for the Nixpkgs unstable channel";
constituents =
[ jobs.tarball
jobs.metrics
jobs.manual
jobs.lib-tests
jobs.stdenv.x86_64-linux
jobs.stdenv.i686-linux
jobs.stdenv.x86_64-darwin
jobs.linux.x86_64-linux
jobs.linux.i686-linux
jobs.python.x86_64-linux
jobs.python.i686-linux
jobs.python.x86_64-darwin
jobs.python3.x86_64-linux
jobs.python3.i686-linux
jobs.python3.x86_64-darwin
# Many developers use nix-repl
jobs.nix-repl.x86_64-linux
jobs.nix-repl.i686-linux
jobs.nix-repl.x86_64-darwin
# Needed by travis-ci to test PRs
jobs.nox.i686-linux
jobs.nox.x86_64-linux
jobs.nox.x86_64-darwin
# Ensure that X11/GTK+ are in order.
jobs.thunderbird.x86_64-linux
jobs.thunderbird.i686-linux
# Ensure that basic stuff works on darwin
jobs.git.x86_64-darwin
jobs.mysql.x86_64-darwin
jobs.vim.x86_64-darwin
] ++ lib.collect lib.isDerivation jobs.stdenvBootstrapTools;
};

lib = pkgs.lib;

jobs =
rec { tarball = import ./make-tarball.nix { inherit pkgs nixpkgs officialRelease; };
{ tarball = import ./make-tarball.nix { inherit pkgs nixpkgs officialRelease; };

metrics = import ./metrics.nix { inherit pkgs nixpkgs; };

manual = import ../../doc;
lib-tests = import ../../lib/tests/release.nix { inherit nixpkgs; };

# for consistency with NixOS tested job
tested = unstable;
inherit unstable;

unstable = pkgs.releaseTools.aggregate
{ name = "nixpkgs-${jobs.tarball.version}";
meta.description = "Release-critical builds for the Nixpkgs unstable channel";
constituents =
[ jobs.tarball
jobs.metrics
jobs.manual
jobs.lib-tests
jobs.stdenv.x86_64-linux
jobs.stdenv.i686-linux
jobs.stdenv.x86_64-darwin
jobs.linux.x86_64-linux
jobs.linux.i686-linux
jobs.python.x86_64-linux
jobs.python.i686-linux
jobs.python.x86_64-darwin
jobs.python3.x86_64-linux
jobs.python3.i686-linux
jobs.python3.x86_64-darwin
# Many developers use nix-repl
jobs.nix-repl.x86_64-linux
jobs.nix-repl.i686-linux
jobs.nix-repl.x86_64-darwin
# Needed by travis-ci to test PRs
jobs.nox.i686-linux
jobs.nox.x86_64-linux
jobs.nox.x86_64-darwin
# Ensure that X11/GTK+ are in order.
jobs.thunderbird.x86_64-linux
jobs.thunderbird.i686-linux
# Ensure that basic stuff works on darwin
jobs.git.x86_64-darwin
jobs.mysql.x86_64-darwin
jobs.vim.x86_64-darwin
] ++ lib.collect lib.isDerivation jobs.stdenvBootstrapTools;
};

stdenvBootstrapTools.i686-linux =
{ inherit (import ../stdenv/linux/make-bootstrap-tools.nix { system = "i686-linux"; }) dist test; };
