- fetchdarcs now supports a partial argument (it controls darcs get --partial; see the sketch after this list)

- fetchdarcs_2pre added
- flapjax added
- annotatedDerivations is no longer used
- added bleeding edge repos with a tiny nix repository manager which downloads and
  updates repositories, then creates tar.gz dist files which are used by bleeding_edge_source
  (only darcs has been tested so far)
- added experimental my_environment with example
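
(Illustration only, not part of the commit: a minimal sketch of how the new partial argument of fetchdarcs might be used from a package expression. The url and md5 are taken from the HAppS-Data entry further down in this commit; partial defaults to true, so passing false asks darcs for a complete checkout instead of a --partial one.)

    src = fetchdarcs {
      url = "http://happs.org/repos/HAppS-Data";
      md5 = "10c505dd687e9dc999cb187090af9ba7";
      partial = false;   # default is true, which makes the builder pass --partial to darcs get
    };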

svn path=/nixpkgs/trunk/; revision=10974
Marc Weber 2008-03-06 02:46:29 +00:00
parent 64652c5dfc
commit 1b51b70317
8 changed files with 290 additions and 111 deletions

View File

@ -7,9 +7,9 @@ if test -n "$tag"; then
     tagflags="--tag=$tag"
 fi
-header "getting $url ${tagtext}into $out"
-darcs get --no-pristine-tree --partial $tagflags "$url" "$out"
+header "getting $url $partial ${tagtext} into $out"
+darcs get --no-pristine-tree $partial $tagflags "$url" "$out"
 # remove metadata, because it can change
 rm -rf "$out/_darcs"

View File

@ -1,9 +1,10 @@
-{stdenv, darcs, nix}: {url, tag ? null, md5}:
+{stdenv, darcs, nix}: {url, tag ? null, md5, partial ? true}:
 stdenv.mkDerivation {
   name = "fetchdarcs";
   builder = ./builder.sh;
   buildInputs = [darcs nix];
+  partial = if partial then "--partial" else "";
   # Nix <= 0.7 compatibility.
   id = md5;

View File

@ -0,0 +1,25 @@
args:
args.stdenv.mkDerivation {
name = "flapjax-source-20070514";
src = args.fetchurl {
url = http://www.flapjax-lang.org/download/20070514/flapjax-source.tar.gz;
sha256 = "188dafpggbfdyciqhrjaq12q0q01z1rp3mpm2iixb0mvrci14flc";
};
phases = "unpackPhase buildPhase";
buildPhase = "
ensureDir \$out/bin
cd compiler;
ghc --make Fjc.hs -o \$out/bin/fjc
";
buildInputs =(with args; [ghc] ++ libs);
meta = {
description = "programming language designed around the demands of modern, client-based Web applications";
homepage = http://www.flapjax-lang.org/;
license = "BSD";
};
}

View File

@ -1,15 +1,11 @@
-args: with args; with lib; with annotatedDerivations;
+args: with args; with lib;
 stdenv.mkDerivation {
-  inherit suffix name ghc ;
-  buildInputs = map delAnnotation (libraries ++ [ghcPkgUtil]);
-  #tags = if (installSourceAndTags == true) then
-  #   map sourceWithTagsDerivation ( uniqList { inputList =
-  #         ( filterAnnotated ( concatLists (map uniqAnnotatedDeps libraries ) ) ) ; } )
-  #   else [];
-  tags = map (x : sourceWithTagsDerivation (x.sourceWithTags))
-    (uniqList { inputList= filter annotatedWithSourceAndTagInfo libraries; } );
+  inherit suffix name ghc readline ncurses;
+  buildInputs = (libraries ++ [ghcPkgUtil]);
+  tags = map (x : sourceWithTagsDerivation ((lib.traceWhatis x).passthru.sourceWithTags))
+    (uniqList { inputList= filter annotatedWithSourceAndTagInfo libraries; } );
   phases="installPhase";
@ -23,16 +19,15 @@ stdenv.mkDerivation {
     app=$(ls -al $ghc/bin/$a | sed -n 's%.*-> \\(.*\\)%\\1%p');
     cat > \"\$out/bin/\$a$suffix\" << EOF
 #!`type -f sh | gawk '{ print $3; }'`
+export LIBRARY_PATH=\$readline/lib:\$ncurses/lib
 GHC_PACKAGE_PATH=\${GHC_PACKAGE_PATH}\${g} \$ghc/bin/$app \"\\\$@\"
 EOF
     chmod +x \"\$out/bin/\$a$suffix\"
   done
-  ensureDir \$out/{src,tags}
+  ensureDir \$out/src
   for i in \$tags; do
-    for path in src tags; do
-      ln -s \$i/\$path/* \$out/\$path
-    done
+    ln -s \$i/src/* \$out/src
   done
 ";
 }

View File

@ -1,13 +1,11 @@
-{ ghcPkgUtil, gnum4, perl, ghcboot, stdenv, fetchurl, recurseIntoAttrs, gmp, readline, lib, annotatedDerivations, hasktags, ctags } :
-with annotatedDerivations;
+{ ghcPkgUtil, gnum4, perl, ghcboot, stdenv, fetchurl, recurseIntoAttrs, gmp, readline, lib, hasktags, ctags } :
 rec {
   /* What's in here?
      Goal: really pure GHC. This means put every library into its each package.conf
      and add all together using GHC_PACKAGE_PATH
-     First I've tried separating the build of ghc from it's lib. It hase been to painful. I've failed.
-     Now there is nix_ghc_pkg_tool.hs which just takes the installed package.conf
+     First I've tried separating the build of ghc from it's lib. It hase been to painful. I've failed. Now there is nix_ghc_pkg_tool.hs which just takes the installed package.conf
      and creates a new package db file for each contained package.
      The final attribute set looks similar to this:
@ -111,7 +109,7 @@ rec {
       deps = [ x.base x.old_locale x.old_time x.filepath ];};
     filepath = { name = "filepath-1.1.0.0"; srcDir = "libraries/filepath";
       deps = [ x.base ];};
-    ghc = { name = "ghc-${version}"; srcDir = "libraries/Cabal";
+    ghc = { name = "ghc-${version}"; srcDir = "compiler";
       deps = [ x.base x.old_locale x.old_time x.filepath
         x.directory x.array x.containers x.hpc x.bytestring
         x.pretty x.packedstring x.template_haskell x.unix
@ -142,7 +140,7 @@ rec {
       name = "rts-1.0"; srcDir = "rts"; # TODO: Doesn't have .hs files so I should use ctags if creating tags at all
       deps = [];
       createTagFiles = [
-        { name = "${name}_haskell_tags";
+        { name = "${name}_haskell";
           tagCmd = "${toString ctags}/bin/ctags -R .;mv tags \$TAG_FILE"; }
       ];
     };
@ -153,30 +151,24 @@ rec {
   };
   toDerivation = attrs : with attrs;
-    rec {
-      inherit name;
-      #aDeps = concatLists ( catAttrs ( subsetmap id args [ "buildInputs" "propagatedBuildInputs" ] ) );
-      aDeps = deps;
-      # dummy derivation, only creates setup-hook for package database located in the ghc derivation
-      aDeriv = stdenv.mkDerivation {
-        inherit name;
-        phases = "buildPhase fixupPhase";
-        buildInputs = [ ghcPkgUtil ];
-        propagatedBuildInputs = [ ghc ] ++ map delAnnotation attrs.deps;
-        buildPhase = "setupHookRegisteringPackageDatabase \"${ghc}/lib/ghc-${ghc.version}/${name}.conf\"";
-      };
-      sourceWithTags = {
-        src = ghc.src;
-        inherit srcDir;
-        name = name + "-src-with-tags";
-        createTagFiles = lib.maybeAttr "createTagFiles" [
-          { name = "${name}_haskell_tags";
-            tagCmd = "${toString hasktags}/bin/hasktags-modified --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
-        ] attrs;
-      };
-    };
+    stdenv.mkDerivation {
+      inherit (attrs) name;
+      phases = "buildPhase fixupPhase";
+      buildInputs = [ ghcPkgUtil ];
+      propagatedBuildInputs = [ ghc ] ++ attrs.deps;
+      buildPhase = "setupHookRegisteringPackageDatabase \"${ghc}/lib/ghc-${ghc.version}/${attrs.name}.conf\"";
+      passthru = {
+        sourceWithTags = {
+          src = ghc.src;
+          inherit srcDir;
+          name = attrs.name + "-src-with-tags";
+          createTagFiles = lib.maybeAttr "createTagFiles" [
+            { name = "${attrs.name}_haskell";
+              tagCmd = "${toString hasktags}/bin/hasktags-modified --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
+          ] attrs;
+        };
+      };
+    };
   derivations = with lib; builtins.listToAttrs (lib.concatLists ( lib.mapRecordFlatten
     ( n : attrs : let d = (toDerivation attrs); in [ (nv n d) (nv attrs.name d) ] ) pkgs ) );

View File

@ -0,0 +1,16 @@
{
# each repository has
# a type, url and maybe a tag
# you can add group names to update some repositories at once
# see nix_repository_manager expression in all-packages.nix
http = { type= "darcs"; url="http://darcs.haskell.org/http/"; group="happs"; };
syb_with_class = { type="darcs"; url="http://happs.org/HAppS/syb-with-class"; group="happs"; };
happs_data = { type="darcs"; url=http://happs.org/repos/HAppS-Data; group="happs"; };
happs_util = { type="darcs"; url=http://happs.org/repos/HAppS-Util; group="happs"; };
happs_state = { type="darcs"; url=http://happs.org/repos/HAppS-State; group="happs"; };
happs_plugins = { type="darcs"; url=http://happs.org/repos/HAppS-Plugins; group="happs"; };
happs_ixset = { type="darcs"; url=http://happs.org/repos/HAppS-IxSet; group="happs"; };
happs_server = { type="darcs"; url=http://happs.org/repos/HAppS-HTTP; group="happs"; };
cabal = { type="darcs"; url=http://darcs.haskell.org/cabal; };
}
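
(Illustration only, not part of the file above: these entries are consumed by the bleeding_edge_source helper added to all-packages.nix in this commit. A package points its src at a managed repository by name; the helper prefers the tar.gz created by the nix repository manager under repoDir/dist and otherwise falls back to fetching the repository itself, e.g. with fetchdarcs_2pre for type = "darcs".)

    # sketch: using one of the repositories listed above as a package source
    src = bleeding_edge_source "happs_data";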

View File

@ -0,0 +1,42 @@
args: with args; with lib;
let
repoDir = builtins.getEnv "HOME" + "/managed_repos";
toConfigLine = name : set :
"[(\"name\",\"${name}\")," + ( concatStringsSep "," (map (a: "(\"${a}\",\"${__getAttr a set}\")" ) (__attrNames set)))+"]";
config = writeText "nix_repository_manager_config"
(repoDir+"\n" +
concatStringsSep "\n" (mapRecordFlatten toConfigLine bleeding_edge_repos));
in
args.stdenv.mkDerivation {
inherit repoDir; # amend repoDir so that you know which one to take when installing bleeding edge packages
name = "nix_repository_manager";
#src = args.fetchdarcs {
# url = http://mawercer.de/~marc/repos/nix_repository_manager;
# md5 = "b33ba7a5b756eda00a79ba34505ea7ee";
#};
source = /pr/haskell/nix_repository_manager/nix_repository_manager.hs;
phases = "buildPhase";
buildPhase = "
s=\$out/share/nix_repository_manager
ensureDir \$out/bin \$s
#ghc --make nix_repository_manager.hs -o \$s/nix_repository_manager
ghc --make \$source -o \$s/nix_repository_manager
b=\$out/bin/nix_repository_manager
echo -e \"#!/bin/sh\\n\$s/nix_repository_manager --config ${config} \\\$@\" > \$b
chmod +x \$b
";
buildInputs = [ghc];
meta = {
description = "makes it easy to keep some packages up to date";
homepage = http://mawercer.de/repos/nix_repository_manager;
license = "do with it what you want";
};
}
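
(Illustration only, not part of the expression above: roughly what the generated nix_repository_manager_config file looks like. The first line is repoDir (assuming $HOME is /home/someuser, a made-up path); toConfigLine then emits one line per repository, with the name pair prepended and the remaining attributes in whatever order __attrNames returns, assumed alphabetical here.)

    /home/someuser/managed_repos
    [("name","http"),("group","happs"),("type","darcs"),("url","http://darcs.haskell.org/http/")]
    [("name","cabal"),("type","darcs"),("url","http://darcs.haskell.org/cabal")]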

View File

@ -127,37 +127,47 @@ rec {
   annotatedDerivations = (import ../lib/annotatedDerivations.nix) { inherit lib; };
   # optional srcDir
-  annotatedWithSourceAndTagInfo = x : (x ? sourceWithTags);
-  # example arguments see annotatedGhcCabalDerivation
+  annotatedWithSourceAndTagInfo = x : (x ? passthru && x.passthru ? sourceWithTags);
+  # createTagFiles = [ { name = "my_tag_name_without_suffix", tagCmd = "ctags -R . -o \$TAG_FILE"; } ]
   # tag command must create file named $TAG_FILE
-  sourceWithTagsDerivation = args: with args;
-    let createTagFiles = (lib.maybeAttr "createTagFiles" [] args ); in
-    stdenv.mkDerivation {
+  sourceWithTagsDerivation = {name, src, srcDir ? ".", tagSuffix ? "_tags", createTagFiles ? []} :
+    stdenv.mkDerivation {
      phases = "unpackPhase buildPhase";
-     inherit (args) src name;
-     srcDir = (lib.maybeAttr "srcDir" "." args);
+     inherit src srcDir tagSuffix;
+     name = "${name}-source-with-tags";
     # using separate tag directory so that you don't have to glob that much files when starting your editor
     # is this a good choice?
     buildPhase = "
       SRC_DEST=\$out/src/\$name
-      t=\$out/tags/\$name
-      ensureDir \$SRC_DEST \$t
+      ensureDir \$SRC_DEST
       cp -r \$srcDir \$SRC_DEST"
     + lib.defineShList "sh_list_names" (lib.catAttrs "name" createTagFiles)
     + lib.defineShList "sh_list_cmds" (lib.catAttrs "tagCmd" createTagFiles)
     + "cd \$SRC_DEST
       for a in `seq 0 \${#sh_list}`; do
-        TAG_FILE=\"\$SRC_DEST/\"\${sh_list_names[\$a]}
+        TAG_FILE=\"\$SRC_DEST/\"\${sh_list_names[\$a]}\$tagSuffix
         cmd=\"\${sh_list_cmds[\$a]}\"
        echo running tag cmd \"\$cmd\" in `pwd`
        eval \"\$cmd\";
-       ln -s \$TAG_FILE \"\$t/\"\${sh_list_names[\$a]}
      done
    ";
   };
   # example usage
-  testSourceWithTags = sourceWithTagsDerivation (ghc68_extra_libs ghcsAndLibs.ghc68).mtl.sourceWithTags;
+  testSourceWithTags = sourceWithTagsDerivation (ghc68_extra_libs ghcsAndLibs.ghc68).happs_server_darcs.passthru.sourceWithTags;
+  addCTaggingInfo = deriv :
+    deriv // {
+      passthru = {
+        sourceWithTags = {
+          inherit (deriv) src;
+          name = "${deriv.name}-source-ctags";
+          createTagFiles = [
+            { inherit (deriv) name;
+              tagCmd = "${toString ctags}/bin/ctags --sort=yes -o \$TAG_FILE -R ."; }
+          ];
+        };
+      };
+    };
   # Return an attribute from the Nixpkgs configuration file, or
   # a default value if the attribute doesn't exist.
@ -234,7 +244,7 @@ rec {
     let co = lib.chooseOptionsByFlags { inherit args flagConfig optionals defaults collectExtraPhaseActions; }; in
     args.stdenv.mkDerivation (
     {
-      inherit (co) configureFlags buildInputs /*flags*/;
+      inherit (co) configureFlags buildInputs propagatedBuildInputs /*flags*/;
     } // extraAttrs co // co.pass // co.flags_prefixed );
@ -311,6 +321,13 @@ rec {
     inherit stdenv darcs nix;
   };
+  # only temporarely / don't know yet wether it's save to switch
+  # but I have trouble getting HAppS repos
+  fetchdarcs_2pre = import ../build-support/fetchdarcs {
+    inherit stdenv nix;
+    darcs = darcs_2_pre;
+  };
   fetchsvn = import ../build-support/fetchsvn {
     inherit stdenv subversion openssh;
     sshSupport = true;
@ -1103,6 +1120,12 @@ rec {
     inherit fetchurl stdenv gawk;
   };
+  flapjax = import ../development/compilers/flapjax {
+    inherit fetchurl stdenv;
+    ghc = ghcsAndLibs.ghc68.ghc;
+    libs = with (ghc68_extra_libs ghcsAndLibs.ghc68 // ghcsAndLibs.ghc68.core_libs); [ mtl parsec random ];
+  };
   g77 = import ../build-support/gcc-wrapper {
     name = "g77";
     nativeTools = false;
@ -1204,14 +1227,17 @@ rec {
     assert builtins ? listToAttrs;
     recurseIntoAttrs (import ../development/compilers/ghcs {
       inherit ghcboot fetchurl stdenv recurseIntoAttrs perl gnum4 gmp readline lib;
-      inherit ghcPkgUtil annotatedDerivations hasktags ctags;
+      inherit ghcPkgUtil hasktags ctags;
     });
   # creates ghc-X-wl wich adds the passed libraries to the env var GHC_PACKAGE_PATH
   createGhcWrapper = { ghcPackagedLibs ? false, ghc, libraries, name, suffix ? "ghc_wrapper_${ghc.name}" } :
     import ../development/compilers/ghc/createGhcWrapper {
-      inherit stdenv ghcPackagedLibs ghc name suffix libraries ghcPkgUtil
-        annotatedDerivations lib sourceWithTagsDerivation annotatedWithSourceAndTagInfo;
+      inherit ghcPackagedLibs ghc name suffix libraries ghcPkgUtil
+        lib sourceWithTagsDerivation annotatedWithSourceAndTagInfo
+        readline ncurses stdenv;
+      #inherit stdenv ghcPackagedLibs ghc name suffix libraries ghcPkgUtil
+      #  annotatedDerivations lib sourceWithTagsDerivation annotatedWithSourceAndTagInfo;
       installSourceAndTags = true;
     };
@ -1224,10 +1250,10 @@ rec {
   # classic expression style.. seems to work fine
   # used now
   # goSrc contains source directory (containing the .cabal file)
-  ghcCabalDerivation = args : null_ : with lib; with args;
+  ghcCabalDerivation = args : with args;
     stdenv.mkDerivation ({
       goSrcDir = "cd ${srcDir}";
-      inherit name src propagatedBuildInputs;
+      inherit (args) name src propagatedBuildInputs;
       phases = "unpackPhase patchPhase buildPhase";
       buildInputs = (if (args ? buildInputs) then args.buildInputs else [])
         ++ [ ghcPkgUtil ];
@ -1256,28 +1282,21 @@ rec {
       echo \"\$propagatedBuildInputs\" > \"\$out/nix-support/propagated-build-inputs\"
     ";
-    } // (subsetmap id args [ "patchPhase" ]));
+    } // ( if args ? pass then args.pass else {} ) );
-  # creates annotated derivation (comments see above
-  annotatedGhcCabalDerivation = args : null_ : with lib; with args;
-    rec {
-      inherit name;
-      #aDeps = concatLists ( catAttrs ( subsetmap id args [ "buildInputs" "propagatedBuildInputs" ] ) );
-      aDeps = []; #TODO
-      aDeriv = ghcCabalDerivation (args // (annotatedDerivations.delAnnotationsFromInputs args) ) null;
-      # annotation data
-      sourceWithTags = {
-        inherit src srcDir;
-        name = name + "-src-with-tags";
-        createTagFiles = [
-          { name = "${name}_haskell_tags";
-            # tagCmd = "${toString ghcsAndLibs.ghc68.ghc}/bin/hasktags --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
-            tagCmd = "${toString hasktags}/bin/hasktags-modified --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
-        ];
-      };
-    };
+  # creates annotated derivation (comments see above)
+  addHasktagsTaggingInfo = deriv : deriv // {
+    passthru = {
+      sourceWithTags = {
+        inherit (deriv) src;
+        srcDir = if deriv ? srcDir then deriv.srcDir else ".";
+        name = deriv.name + "-src-with-tags";
+        createTagFiles = [
+          { name = "${deriv.name}_haskell";
+            # tagCmd = "${toString ghcsAndLibs.ghc68.ghc}/bin/hasktags --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
+            tagCmd = "${toString hasktags}/bin/hasktags-modified --ctags `find . -type f -name \"*.*hs\"`; sort tags > \$TAG_FILE"; }
+        ];
+      };
+    };
+  };
@ -1309,6 +1328,33 @@ rec {
       src = fetchurl { url = "http://hackage.haskell.org/packages/archive/binary/0.4.1/binary-0.4.1.tar.gz";
         sha256 = "0jg5i1k5fz0xp1piaaf5bzhagqvfl3i73hlpdmgs4gc40r1q4x5v"; };
     };
+    # using different name to not clash with postgresql
+    postgresql_bindings = rec { name = "PostgreSQL-0.2"; p_deps = [x.base x.mtl postgresql x.haskell98];
+      src = fetchurl { url = "http://hackage.haskell.org/packages/archive/PostgreSQL/0.2/PostgreSQL-0.2.tar.gz";
+        sha256 = "0p5q3yc8ymgzzlc600h4mb9w86ncrgjdbpqfi49b2jqvkcx5bwrr"; };
+      pass = {
+        inherit postgresql;
+        patchPhase = "echo 'extensions: MultiParamTypeClasses ForeignFunctionInterface EmptyDataDecls GeneralizedNewtypeDeriving FlexibleInstances UndecidableInstances' >> PostgreSQL.cabal
+          echo \"extra-lib-dirs: \$postgresql/lib\" >> PostgreSQL.cabal
+          echo \"extra-libraries: pq\" >> PostgreSQL.cabal
+        ";
+      };
+    };
+    #wash = rec { name = "WashNGo-2.12"; p_deps = [x.base x.mtl x.haskell98 ];
+    #  src = fetchurl { url = "http://www.informatik.uni-freiburg.de/~thiemann/WASH/WashNGo-2.12.tgz";
+    #    sha256 = "1dyc2062jpl3xdlm0n7xkz620h060g2i5ghnb32cn95brcj9fgrz"; };
+    #  patches = ../misc/WASHNGo_Patch_ghc682;
+    #  };
+    #hsql = rec { name = "hsql-1.7"; p_deps = [x.base x.mtl x.haskell98 x.old_time ];
+    #  src = fetchurl { url = "http://hackage.haskell.org/packages/archive/hsql/1.7/hsql-1.7.tar.gz";
+    #    sha256 = "0j2lkvg5c0x5gf2sy7zmmgrda0c3l73i9d6hyka2f15d5n1rfjc9"; };
+    #  patchPhase = "echo \"extra-lib-dirs: \$postgresql/lib\" >> *.cabal
+    #    echo 'build-depends: old-locale, old-time' >> *.cabal";
+    #  };
     # 1.13 is stable. There are more recent non stable versions
     haxml = rec { name = "HaXml-1.13.3"; p_deps = [ x.base x.rts x.directory x.process x.pretty x.containers x.filepath x.haskell98 ];
       src = fetchurl { url = "http://www.haskell.org/HaXml/${name}.tar.gz";
@ -1333,33 +1379,40 @@ rec {
     parsep = { name = "parsep-0.1"; p_deps = [ x.base x.mtl x.bytestring ];
       src = fetchurl { url = "http://twan.home.fmf.nl/parsep/parsep-0.1.tar.gz";
         sha256 = "1y5pbs5mzaa21127cixsamahlbvmqzyhzpwh6x0nznsgmg2dpc9q"; };
-      patchPhase = "pwd; sed -i 's/fps/bytestring/' *.cabal";
+      pass = { patchPhase = "pwd; sed -i 's/fps/bytestring/' *.cabal"; };
     };
+    time = { name = "time-1.1.2.0"; p_deps = [ x.base x.old_locale ];
+      src = fetchurl { url = "http://hackage.haskell.org/packages/archive/time/1.1.2.0/time-1.1.2.0.tar.gz";
+        sha256 = "0zm4qqczwbqzy2pk7wz5p1virgylwyzd9zxp0406s5zvp35gvl89"; };
+    };
     # HAPPS - Libraries
     http_darcs = { name="http-darcs"; p_deps = [x.network x.parsec];
-      src = fetchdarcs { url = "http://darcs.haskell.org/http/"; md5 = "4475f858cf94f4551b77963d08d7257c"; };
+      src = bleeding_edge_source "http_darcs";
+      #src = fetchdarcs { url = "http://darcs.haskell.org/http/"; md5 = "4475f858cf94f4551b77963d08d7257c"; };
     };
     syb_with_class_darcs = { name="syb-with-class-darcs"; p_deps = [x.template_haskell x.bytestring ];
-      src = fetchdarcs { url = "http://happs.org/HAppS/syb-with-class"; md5 = "b42336907f7bfef8bea73bc36282d6ac"; };
+      src =
+        # fetchdarcs { url = "http://happs.org/HAppS/syb-with-class"; md5 = "b42336907f7bfef8bea73bc36282d6ac"; };
+        bleeding_edge_source "syb_with_class"; # { url = "http://happs.org/HAppS/syb-with-class"; md5 = "b42336907f7bfef8bea73bc36282d6ac"; };
     };
-    happs_data_darcs = { name="HAppS-Data-darcs"; p_deps=[ x.base x.mtl x.template_haskell x.syb_with_class_darcs x.haxml x.happs_util_darcs x.regex_compat x.bytestring x.pretty ];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-Data"; md5 = "10c505dd687e9dc999cb187090af9ba7"; };
+    happs_data_darcs = { name="HAppS-Data-darcs"; p_deps=[ x.base x.mtl x.template_haskell x.syb_with_class_darcs x.haxml x.happs_util_darcs x.regex_compat x.bytestring x.pretty x.binary ];
+      src = bleeding_edge_source "happs_data"; # fetchdarcs { url = "http://happs.org/repos/HAppS-Data"; md5 = "10c505dd687e9dc999cb187090af9ba7"; };
     };
     happs_util_darcs = { name="HAppS-Util-darcs"; p_deps=[ x.base x.mtl x.hslogger x.template_haskell x.array x.bytestring x.old_time x.process x.directory ];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-Util"; md5 = "693cb79017e522031c307ee5e59fc250"; };
+      src = bleeding_edge_source "happs_util"; # fetchdarcs { url = "http://happs.org/repos/HAppS-Util"; md5 = "693cb79017e522031c307ee5e59fc250"; };
     };
     happs_state_darcs = { name="HAppS-State-darcs"; p_deps=[ x.base x.haxml
       x.mtl x.network x.stm x.template_haskell x.hslogger
       x.happs_util_darcs x.happs_data_darcs x.bytestring x.containers
       x.random x.old_time x.old_locale x.unix x.directory x.binary ];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-State";
-        md5 = "956e5c293b60f4a98148fedc5fa38acc";
-      };
-    };
-    happs_plugins_darcs = { name="HAppS-plugins-darcs"; p_deps=[ x.base x.mtl x.hslogger x.happs_util_darcs x.happs_data_darcs x.happs_state_darcs ];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-Util"; md5 = "693cb79017e522031c307ee5e59fc250"; };
+      src = bleeding_edge_source "happs_state";
+      #src = fetchdarcs { url = "http://happs.org/repos/HAppS-State";
+      #  md5 = "956e5c293b60f4a98148fedc5fa38acc";
+      #};
     };
     # there is no .cabal yet
     #happs_smtp_darcs = { name="HAppS-smtp-darcs"; p_deps=[];
@ -1369,35 +1422,40 @@ rec {
     happs_ixset_darcs = { name="HAppS-IxSet-darcs"; p_deps=[ x.base x.mtl
       x.hslogger x.happs_util_darcs x.happs_state_darcs x.happs_data_darcs
       x.template_haskell x.syb_with_class_darcs x.containers ];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-IxSet";
+      src = bleeding_edge_source "happs_ixset";
+      #src = fetchdarcs { url = "http://happs.org/repos/HAppS-IxSet";
        #md5 = "fa6b24517f09aa16e972f087430967fd";
        #tag = "0.9.2";
        # no tag
-       md5 = "fa6b24517f09aa16e972f087430967fd";
-      };
+       #md5 = "fa6b24517f09aa16e972f087430967fd";
+      #};
     };
     happs_server_darcs = { name="HAppS-Server-darcs"; p_deps=[x.haxml x.parsec x.mtl
       x.network x.regex_compat x.hslogger x.happs_data_darcs
       x.happs_util_darcs x.happs_state_darcs x.happs_ixset_darcs x.http_darcs
       x.template_haskell x.xhtml x.html x.bytestring x.random
       x.containers x.old_time x.old_locale x.directory x.unix];
-      src = fetchdarcs { url = "http://happs.org/repos/HAppS-HTTP"; md5 = "e1bb17eb30a39d30b8c34dffbf80edc2"; };
+      #src = fetchdarcs { url = "http://happs.org/repos/HAppS-HTTP"; md5 = "e1bb17eb30a39d30b8c34dffbf80edc2"; };
+      src = bleeding_edge_source "happs_server_darcs";
     };
     # we need recent version of cabal (because only this supports --pkg-config propably) Thu Feb 7 14:54:07 CET 2008
     # is be added to buildInputs automatically
-    cabal_darcs = { name=cabal_darcs_name; p_deps = with ghc.core_libs; [base rts directory process pretty containers filepath];
-      src = fetchdarcs { url = "http://darcs.haskell.org/cabal"; md5 = "8b0bc3c7f2676ce642f98b1568794cd6"; };
-    };
+    cabal_darcs =
+      { name=cabal_darcs_name; p_deps = with ghc.core_libs; [base rts directory process pretty containers filepath];
+        src = bleeding_edge_source "cabal";
+          #fetchdarcs { url = "http://darcs.haskell.org/cabal"; md5 = "8b0bc3c7f2676ce642f98b1568794cd6"; };
+      };
   };
   toDerivation = attrs : with attrs;
   # result is { mtl = <deriv>;
-    annotatedGhcCabalDerivation ({
-      inherit name src;
+    addHasktagsTaggingInfo (ghcCabalDerivation {
+      inherit (attrs) name src;
       propagatedBuildInputs = p_deps ++ (lib.optional (attrs.name != cabal_darcs_name) derivations.cabal_darcs );
       srcDir = if attrs ? srcDir then attrs.srcDir else ".";
       patches = if attrs ? patches then attrs.patches else [];
       # add cabal, take deps either from this list or from ghc.core_libs
-    }//( lib.subsetmap lib.id attrs [ "patchPhase" ] )) null;
+      pass = if attrs ? pass then attrs.pass else {};
+    });
   derivations = with lib; builtins.listToAttrs (lib.concatLists ( lib.mapRecordFlatten
     ( n : attrs : let d = (toDerivation attrs); in [ (nv n d) (nv attrs.name d) ] ) pkgs ) );
 }.derivations;
@ -1415,14 +1473,14 @@ rec {
     ghcPackagedLibs = true;
     name = "ghc${ghc.version}_wrapper";
     suffix = "${ghc.version}wrapper";
-    libraries = # map ( a : __getAttr a (ghc68_extra_libs ghcsAndLibs.ghc68 ) ) [ "mtl" ];
+    libraries = map ( a : __getAttr a (ghc68_extra_libs ghcsAndLibs.ghc68 ) ) [ "mtl" ]
       # core_libs distributed with this ghc version
       #(lib.flattenAttrs ghcsAndLibs.ghc68.core_libs)
-      map ( a : __getAttr a ghcsAndLibs.ghc68.core_libs ) [
-        "cabal" "array" "base" "bytestring" "containers" "containers" "directory"
-        "filepath" "ghc-${ghc.version}" "haskell98" "hpc" "old_locale" "old_time"
-        "old_time" "packedstring" "pretty" "process" "random" "readline" "rts"
-        "template_haskell" "unix" "template_haskell" ]
+      #map ( a : __getAttr a ghcsAndLibs.ghc68.core_libs ) [
+      #  "cabal" "array" "base" "bytestring" "containers" "containers" "directory"
+      #  "filepath" "ghc-${ghc.version}" "haskell98" "hpc" "old_locale" "old_time"
+      #  "old_time" "packedstring" "pretty" "process" "random" "readline" "rts"
+      #  "template_haskell" "unix" "template_haskell" ];
       # some extra libs
       ++ (lib.flattenAttrs (ghc68_extra_libs ghcsAndLibs.ghc68) );
@ -1796,6 +1854,17 @@ rec {
   };
   */
+  bleeding_edge_repos = import ../development/misc/bleeding_edge_repos;
+  # name must be foudn in bleeding_edge_repos attr set
+  bleeding_edge_source = name : (
+    let targz = nixRepositoryManager.repoDir+"/dist/${name}.tar.gz"; in
+    if builtins.pathExists targz
+      then targz
+      else let attr = __getAttr name bleeding_edge_repos;
+           in if (attr.type == "darcs")
+              then fetchdarcs_2pre { inherit (attr) url md5; }
+              else throw "TODO");
   ecj = import ../development/eclipse/ecj {
     inherit fetchurl stdenv unzip jre ant;
   };
@ -5937,6 +6006,12 @@ rec {
     db4 = db45;
   };
+  nixRepositoryManager = import ../tools/package-management/nixRepositoryManager {
+    inherit fetchurl stdenv bleeding_edge_repos lib writeText;
+    ghc = ghcsAndLibs.ghc68.ghc;
+    fetchdarcs = fetchdarcs_2pre;
+  };
   nixStatic = import ../tools/package-management/nix-static {
     inherit fetchurl stdenv perl curl autoconf automake libtool;
     aterm = aterm242fixes;
@ -6079,5 +6154,38 @@ rec {
     inherit (xlibs) libX11;
   };
+  # idea: provide environment so that you can use let nix assemble all dependencies
+  # while keeping the same source base when developping
+  # experimental
+  my_environment = args: stdenv.mkDerivation (
+    { userCmds =""; } // {
+      phases = "buildPhase";
+      buildPhase = "
+        ensureDir \$out/bin
+        name=${args.name}
+        o=\$out/bin/$name
+        echo -e \"#!/bin/sh --login\\n\" >> \$o
+        export | grep -v HOME= | grep -v PATH= >> \$o
+        echo \"export PATH=\$PATH:\\\$PATH entering $name\" >> \$o
+        echo \"echo entering $name\" >> \$o
+        echo \"$userCmds\" >> \$o
+        echo \"/bin/sh\" >> $o
+        echo \"echo leaving $name\" >> \$o
+        chmod +x $o
+      ";
+    } //args);
+  # example for nix itself adding glibc tag file to an env var.
+  # experimental
+  env_nix = my_environment rec {
+    buildInputs = [perl curl bzip2 aterm242fixes db4]
+      ++ map (x : sourceWithTagsDerivation ( (addCTaggingInfo x ).passthru.sourceWithTags ) ) [ glibc ];
+    db4 = db44;
+    aterm = aterm242fixes;
+    name = "env_nix";
+    userCmds = ". ~/.bashrc
+      PS1='\033]2;\h:\u:\w\007\\nenv ${name} \[\033[1;32m\][\u@\h: \w ]$\[\033[0m\] '
+    ";
+  };
 }
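
(Illustration only, not from the commit: a rough sketch of how the experimental my_environment helper above might be used. It builds a single script $out/bin/<name> that re-exports the build environment, appends the wrapper's PATH, runs userCmds and then drops into /bin/sh; env_nix above is the concrete example shipped with this commit. The attribute name and inputs below are made up.)

    my_haskell_env = my_environment rec {
      name = "env_haskell";                      # the wrapper ends up as $out/bin/env_haskell
      buildInputs = [ ghcsAndLibs.ghc68.ghc ];   # whatever should be visible inside the environment
      userCmds = "echo ghc environment ready";
    };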