diff --git a/.version b/.version index 7bc03e791d4..1c1d9713015 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -15.05 \ No newline at end of file +16.03 \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000000..b92308622ee --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,12 @@ +# How to contribute + +## Opening issues + +* Make sure you have a [GitHub account](https://github.com/signup/free) +* [Submit an issue](https://github.com/NixOS/nixpkgs/issues) - assuming one does not already exist. + * Clearly describe the issue including steps to reproduce when it is a bug. + * Include information what version of nixpkgs and Nix are you using (nixos-version or git revision). + +## Submitting changes + +See the nixpkgs manual for details on how to [Submit changes to nixpkgs](http://hydra.nixos.org/job/nixpkgs/trunk/manual/latest/download-by-type/doc/manual#chap-submitting-changes). diff --git a/README.md b/README.md index 86a5568727e..991d90dd97d 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,31 @@ -[logo -](https://nixos.org/nixos) +[logo](https://nixos.org/nixos) -[![Build Status](https://travis-ci.org/NixOS/nixpkgs.svg?branch=master)](https://travis-ci.org/NixOS/nixpkgs) [![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/pr)](http://www.issuestats.com/github/nixos/nixpkgs) [![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/issue)](http://www.issuestats.com/github/nixos/nixpkgs) +[![Build Status](https://travis-ci.org/NixOS/nixpkgs.svg?branch=master)](https://travis-ci.org/NixOS/nixpkgs) +[![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/pr)](http://www.issuestats.com/github/nixos/nixpkgs) +[![Issue Stats](http://www.issuestats.com/github/nixos/nixpkgs/badge/issue)](http://www.issuestats.com/github/nixos/nixpkgs) -Nixpkgs is a collection of packages for [Nix](https://nixos.org/nix/) package -manager. +Nixpkgs is a collection of packages for the [Nix](https://nixos.org/nix/) package +manager. It is periodically built and tested by the [hydra](http://hydra.nixos.org/) +build daemon as so-called channels. To get channel information via git, add +[nixpkgs-channels](https://github.com/NixOS/nixpkgs-channels.git) as a remote: -[NixOS](https://nixos.org/nixos/) linux distribution source code is located inside `nixos/` folder. +``` +% git remote add channels git://github.com/NixOS/nixpkgs-channels.git +``` + +For stability and maximum binary package support, it is recommended to maintain +custom changes on top of one of the channels, e.g. `nixos-14.12` for the latest +release and `nixos-unstable` for the latest successful build of master: + +``` +% git remote update channels +% git rebase channels/nixos-14.12 +``` + +For pull-requests, please rebase onto nixpkgs `master`. + +[NixOS](https://nixos.org/nixos/) linux distribution source code is located inside +`nixos/` folder. * [NixOS installation instructions](https://nixos.org/nixos/manual/#ch-installation) * [Documentation (Nix Expression Language chapter)](https://nixos.org/nix/manual/#ch-expression-language) @@ -14,13 +33,12 @@ manager. 
* [Manual (NixOS)](https://nixos.org/nixos/manual/) * [Continuous package builds for unstable/master](https://hydra.nixos.org/jobset/nixos/trunk-combined) * [Continuous package builds for 14.12 release](https://hydra.nixos.org/jobset/nixos/release-14.12) +* [Continuous package builds for 15.09 release](https://hydra.nixos.org/jobset/nixos/release-15.09) * [Tests for unstable/master](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents) * [Tests for 14.12 release](https://hydra.nixos.org/job/nixos/release-14.12/tested#tabs-constituents) +* [Tests for 15.09 release](https://hydra.nixos.org/job/nixos/release-15.09/tested#tabs-constituents) Communication: * [Mailing list](http://lists.science.uu.nl/mailman/listinfo/nix-dev) * [IRC - #nixos on freenode.net](irc://irc.freenode.net/#nixos) - ---- -[![Throughput Graph](https://graphs.waffle.io/nixos/nixpkgs/throughput.svg)](https://waffle.io/nixos/nixpkgs/metrics) diff --git a/doc/coding-conventions.xml b/doc/coding-conventions.xml index 61d373738f9..e7166a64919 100644 --- a/doc/coding-conventions.xml +++ b/doc/coding-conventions.xml @@ -5,7 +5,7 @@ Coding conventions -
Syntax +
Syntax @@ -169,8 +169,8 @@ stdenv.mkDerivation { ... args: with args; ... - or - + or + { stdenv, fetchurl, perl, ... }: ... @@ -207,7 +207,7 @@ args.stdenv.mkDerivation (args // {
-
Package naming +
Package naming In Nixpkgs, there are generally three different names associated with a package: @@ -256,6 +256,12 @@ bound to the variable name e2fsprogs in a package named hello-svn by nix-env. + If a package is fetched from a specific git commit, then + the version part of the name must be the date of that + (fetched) commit. The date must be in "YYYY-MM-DD" format. + Also add "git" to the name, e.g. + "pkgname-git-2014-09-23". + Dashes in the package name should be preserved in new variable names, rather than converted to underscores (which was convention up to around 2013 and most names @@ -286,7 +292,7 @@ dashes between words — not in camel case. For instance, it should be allPackages.nix or AllPackages.nix. -
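For illustration, a derivation packaging such a git snapshot might be named as follows. This is only a sketch: the owner, repository, commit and hash below are placeholders, not a real package.

```nix
stdenv.mkDerivation {
  # package name + "git" + date of the fetched commit, as described above
  name = "pkgname-git-2014-09-23";

  src = fetchFromGitHub {
    owner  = "example";                                              # placeholder
    repo   = "pkgname";                                              # placeholder
    rev    = "0000000000000000000000000000000000000000";             # placeholder commit
    sha256 = "0000000000000000000000000000000000000000000000000000"; # placeholder hash
  };
}
```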
Hierarchy +
Hierarchy Each package should be stored in its own directory somewhere in the pkgs/ tree, i.e. in @@ -445,12 +451,17 @@ splitting up an existing category. - If it’s a desktop environment - (including window managers): + If it’s a desktop environment: desktops (e.g. kde, gnome, enlightenment) + + If it’s a window manager: + + applications/window-managers (e.g. awesome, compiz, stumpwm) + + If it’s an application: @@ -598,6 +609,57 @@ evaluate correctly.
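As a small illustration of this hierarchy, a window manager such as awesome lives under applications/window-managers and is wired up in pkgs/top-level/all-packages.nix with an entry roughly like the following (a sketch; the arguments actually passed to the package are omitted here):

```nix
awesome = callPackage ../applications/window-managers/awesome { };
```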
- - +
Fetching Sources + There are multiple ways to fetch a package source in nixpkgs. The + general guideline is that you should package sources with a high degree of + availability. Right now there is only one fetcher which has mirroring + support and that is fetchurl. Note that you should also + prefer protocols which have a corresponding proxy environment variable. + + You can find many source fetch helpers in pkgs/build-support/fetch*. + + In the file pkgs/top-level/all-packages.nix you can + find fetch helpers; these have names of the form + fetchFrom*. The intention of these is to provide + snapshot fetches using the same API as some of the version-controlled + fetchers from pkgs/build-support/. As an example, going + from bad to good: + + + Uses git://, which won't be proxied. + +src = fetchgit { + url = "git://github.com/NixOS/nix.git"; + rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae"; + sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg"; +}; + + + + + This is OK, but an archive fetch will still be faster. + +src = fetchgit { + url = "https://github.com/NixOS/nix.git"; + rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae"; + sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg"; +}; + + + + + Fetches a snapshot archive and you get the rev you want. + +src = fetchFromGitHub { + owner = "NixOS"; + repo = "nix"; + rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae"; + sha256 = "04yri911rj9j19qqqn6m82266fl05pz98inasni0vxr1cf1gdgv9"; +}; + + + + + +
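Since fetchurl is currently the only fetcher with mirroring support, release tarballs are ideally fetched through one of its predefined mirror:// schemes. A minimal sketch (the hash is a placeholder; compute the real one with nix-prefetch-url):

```nix
src = fetchurl {
  url = "mirror://gnu/hello/hello-2.10.tar.gz";
  sha256 = "0000000000000000000000000000000000000000000000000000"; # placeholder
};
```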
diff --git a/doc/contributing.xml b/doc/contributing.xml index f622845bf1a..a83059aa36e 100644 --- a/doc/contributing.xml +++ b/doc/contributing.xml @@ -2,18 +2,19 @@ xmlns:xlink="http://www.w3.org/1999/xlink" xml:id="chap-contributing"> -Contributing +Contributing to this documentation -If you make modifications to the manual, it's important to build the manual before contributing: +The DocBook sources of the Nixpkgs manual are in the doc +subdirectory of the Nixpkgs repository. If you make modifications to +the manual, it's important to build it before committing. You can do that as follows: - + +$ cd /path/to/nixpkgs +$ nix-build doc + - $ git clone git://github.com/NixOS/nixpkgs.git - - $ nix-build -A manual nixpkgs/pkgs/top-level/release.nix - - Inside the built derivation you shall see manual/index.html file. - - +If the build succeeds, the manual will be in +./result/share/doc/nixpkgs/manual.html. diff --git a/doc/default.nix b/doc/default.nix index 1e8974d6026..b8dac00eb65 100644 --- a/doc/default.nix +++ b/doc/default.nix @@ -6,7 +6,7 @@ stdenv.mkDerivation { sources = sourceFilesBySuffices ./. [".xml"]; - buildInputs = [ libxml2 libxslt ]; + buildInputs = [ pandoc libxml2 libxslt ]; xsltFlags = '' --param section.autolabel 1 @@ -19,7 +19,23 @@ stdenv.mkDerivation { ''; buildCommand = '' - ln -s $sources/*.xml . # */ + { + echo "" + echo "" + echo "User's Guide to the Haskell Infrastructure" + echo "" + pandoc ${./haskell-users-guide.md} -w docbook | \ + sed -e 's|||' \ + -e 's||
|' + echo "" + echo "" + } >haskell-users-guide.xml + + ln -s "$sources/"*.xml . echo ${nixpkgsVersion} > .version @@ -36,6 +52,9 @@ stdenv.mkDerivation { cp ${./style.css} $dst/style.css + mkdir -p $dst/images/callouts + cp "${docbook5_xsl}/xml/xsl/docbook/images/callouts/"*.gif $dst/images/callouts/ + mkdir -p $out/nix-support echo "doc manual $dst manual.html" >> $out/nix-support/hydra-build-products ''; diff --git a/doc/functions.xml b/doc/functions.xml new file mode 100644 index 00000000000..5378b59abcb --- /dev/null +++ b/doc/functions.xml @@ -0,0 +1,273 @@ + + +Functions reference + + + The nixpkgs repository has several utility functions to manipulate Nix expressions. + + +
+ pkgs.overridePackages + + + This function inside the nixpkgs expression (pkgs) + can be used to override the set of packages itself. + + + Warning: this function is expensive and must not be used from within + the nixpkgs repository. + + + Example usage: + + let + pkgs = import <nixpkgs> {}; + newpkgs = pkgs.overridePackages (self: super: { + foo = super.foo.override { ... }; + }; +in ... + + + + The resulting newpkgs will have the new foo + expression, and all other expressions depending on foo will also + use the new foo expression. + + + + The behavior of this function is similar to config.packageOverrides. + + + + The self parameter refers to the final package set with the + applied overrides. Using this parameter may lead to infinite recursion if not + used consciously. + + + + The super parameter refers to the old package set. + It's equivalent to pkgs in the above example. + + +
+ +
+ <pkg>.override + + + The function override is usually available for all the + derivations in the nixpkgs expression (pkgs). + + + It is used to override the arguments passed to a function. + + + Example usages: + + pkgs.foo.override { arg1 = val1; arg2 = val2; ... } + pkgs.overridePackages (self: super: { + foo = super.foo.override { barSupport = true ; }; +}) + mypkg = pkgs.callPackage ./mypkg.nix { + mydep = pkgs.mydep.override { ... }; +}) + + + + In the first example, pkgs.foo is the result of a function call + with some default arguments, usually a derivation. + Using pkgs.foo.override will call the same function with + the given new arguments. + + +
+ +
+ <pkg>.overrideDerivation + + + The function overrideDerivation is usually available for all the + derivations in the nixpkgs expression (pkgs). + + + It is used to create a new derivation by overriding the attributes of + the original derivation according to the given function. + + + + Example usage: + + mySed = pkgs.gnused.overrideDerivation (oldAttrs: { + name = "sed-4.2.2-pre"; + src = fetchurl { + url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2; + sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k"; + }; + patches = []; +}); + + + + In the above example, the name, src and patches of the derivation + will be overridden, while all other attributes will be retained from the + original derivation. + + + + The argument oldAttrs is used to refer to the attribute set of + the original derivation. + + +
+ +
+ lib.makeOverridable + + + The function lib.makeOverridable is used to make the result + of a function easily customizable. This utility only makes sense for functions + that accept an argument set and return an attribute set. + + + + Example usage: + + f = { a, b }: { result = a+b; } +c = lib.makeOverridable f { a = 1; b = 2; } + + + + + The variable c is the value of the f function + applied with some default arguments. Hence the value of c.result + is 3, in this example. + + + + The variable c however also has some additional functions, like + c.override which can be used to + override the default arguments. In this example the value of + (c.override { a = 4; }).result is 6. + + +
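Putting this together, a self-contained expression along these lines (a sketch assuming lib is taken from <nixpkgs>) evaluates to the values described above:

```nix
let
  lib = (import <nixpkgs> {}).lib;

  f = { a, b }: { result = a + b; };
  c = lib.makeOverridable f { a = 1; b = 2; };
in {
  plain      = c.result;                       # 3
  overridden = (c.override { a = 4; }).result; # 6
}
```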
+ + +
+ buildFHSChrootEnv/buildFHSUserEnv + + + buildFHSChrootEnv and + buildFHSUserEnv provide a way to build and run + FHS-compatible lightweight sandboxes. They get their own isolated root with + binded /nix/store, so their footprint in terms of disk + space needed is quite small. This allows one to run software which is hard or + unfeasible to patch for NixOS -- 3rd-party source trees with FHS assumptions, + games distributed as tarballs, software with integrity checking and/or external + self-updated binaries. + + + + buildFHSChrootEnv allows to create persistent + environments, which can be constructed, deconstructed and entered by + multiple users at once. A downside is that it requires + root access for both those who create and destroy and + those who enter it. It can be useful to create environments for daemons that + one can enter and observe. + + + + buildFHSUserEnv uses Linux namespaces feature to create + temporary lightweight environments which are destroyed after all child + processes exit. It does not require root access, and can be useful to create + sandboxes and wrap applications. + + + + Those functions both rely on buildFHSEnv, which creates + an actual directory structure given a list of necessary packages and extra + build commands. + buildFHSChrootEnv and buildFHSUserEnv + both accept those arguments which are passed to + buildFHSEnv: + + + + + name + + Environment name. + + + + targetPkgs + + Packages to be installed for the main host's architecture + (i.e. x86_64 on x86_64 installations). + + + + multiPkgs + + Packages to be installed for all architectures supported by + a host (i.e. i686 and x86_64 on x86_64 installations). + + + + extraBuildCommands + + Additional commands to be executed for finalizing the + directory structure. + + + + extraBuildCommandsMulti + + Like extraBuildCommandsMulti, but + executed only on multilib architectures. + + + + + Additionally, buildFHSUserEnv accepts + runScript parameter, which is a command that would be + executed inside the sandbox and passed all the command line arguments. It + default to bash. + One can create a simple environment using a shell.nix + like that: + + + {} }: + +(pkgs.buildFHSUserEnv { + name = "simple-x11-env"; + targetPkgs = pkgs: (with pkgs; + [ udev + alsaLib + ]) ++ (with pkgs.xorg; + [ libX11 + libXcursor + libXrandr + ]); + multiPkgs = pkgs: (with pkgs; + [ udev + alsaLib + ]) ++ (with []; + runScript = "bash"; +}).env +]]> + + + Running nix-shell would then drop you into a shell with + these libraries and binaries available. You can use this to run + closed-source applications which expect FHS structure without hassles: + simply change runScript to the application path, + e.g. ./bin/start.sh -- relative paths are supported. + +
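Written out in full, a minimal shell.nix for such an environment might look like this (a sketch assuming pkgs comes from <nixpkgs>):

```nix
{ pkgs ? import <nixpkgs> {} }:

(pkgs.buildFHSUserEnv {
  name = "simple-x11-env";
  # packages built for the host architecture
  targetPkgs = pkgs: (with pkgs; [ udev alsaLib ])
    ++ (with pkgs.xorg; [ libX11 libXcursor libXrandr ]);
  # packages built for every architecture the host supports
  multiPkgs = pkgs: (with pkgs; [ udev alsaLib ]);
  # command executed inside the sandbox
  runScript = "bash";
}).env
```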
+ +
diff --git a/doc/haskell-users-guide.md b/doc/haskell-users-guide.md new file mode 100644 index 00000000000..b06a81e5b36 --- /dev/null +++ b/doc/haskell-users-guide.md @@ -0,0 +1,698 @@ +--- +title: User's Guide for Haskell in Nixpkgs +author: Peter Simons +date: 2015-06-01 +--- + +# How to install Haskell packages + +Nixpkgs distributes build instructions for all Haskell packages registered on +[Hackage](http://hackage.haskell.org/), but strangely enough normal Nix package +lookups don't seem to discover any of them, except for the default version of ghc, cabal-install, and stack: + + $ nix-env -i alex + error: selector ‘alex’ matches no derivations + $ nix-env -qa ghc + ghc-7.10.2 + +The Haskell package set is not registered in the top-level namespace because it +is *huge*. If all Haskell packages were visible to these commands, then +name-based search/install operations would be much slower than they are now. We +avoided that by keeping all Haskell-related packages in a separate attribute +set called `haskellPackages`, which the following command will list: + + $ nix-env -f "" -qaP -A haskellPackages + haskellPackages.a50 a50-0.5 + haskellPackages.abacate haskell-abacate-0.0.0.0 + haskellPackages.abcBridge haskell-abcBridge-0.12 + haskellPackages.afv afv-0.1.1 + haskellPackages.alex alex-3.1.4 + haskellPackages.Allure Allure-0.4.101.1 + haskellPackages.alms alms-0.6.7 + [... some 8000 entries omitted ...] + +To install any of those packages into your profile, refer to them by their +attribute path (first column): + + $ nix-env -f "" -iA haskellPackages.Allure ... + +The attribute path of any Haskell packages corresponds to the name of that +particular package on Hackage: the package `cabal-install` has the attribute +`haskellPackages.cabal-install`, and so on. (Actually, this convention causes +trouble with packages like `3dmodels` and `4Blocks`, because these names are +invalid identifiers in the Nix language. The issue of how to deal with these +rare corner cases is currently unresolved.) + +Haskell packages who's Nix name (second column) begins with a `haskell-` prefix +are packages that provide a library whereas packages without that prefix +provide just executables. Libraries may provide executables too, though: the +package `haskell-pandoc`, for example, installs both a library and an +application. You can install and use Haskell executables just like any other +program in Nixpkgs, but using Haskell libraries for development is a bit +trickier and we'll address that subject in great detail in section [How to +create a development environment]. + +Attribute paths are deterministic inside of Nixpkgs, but the path necessary to +reach Nixpkgs varies from system to system. We dodged that problem by giving +`nix-env` an explicit `-f ""` parameter, but if you call `nix-env` +without that flag, then chances are the invocation fails: + + $ nix-env -iA haskellPackages.cabal-install + error: attribute ‘haskellPackages’ in selection path + ‘haskellPackages.cabal-install’ not found + +On NixOS, for example, Nixpkgs does *not* exist in the top-level namespace by +default. To figure out the proper attribute path, it's easiest to query for the +path of a well-known Nixpkgs package, i.e.: + + $ nix-env -qaP coreutils + nixos.coreutils coreutils-8.23 + +If your system responds like that (most NixOS installations will), then the +attribute path to `haskellPackages` is `nixos.haskellPackages`. 
Thus, if you +want to use `nix-env` without giving an explicit `-f` flag, then that's the way +to do it: + + $ nix-env -qaP -A nixos.haskellPackages + $ nix-env -iA nixos.haskellPackages.cabal-install + +Our current default compiler is GHC 7.10.x and the `haskellPackages` set +contains packages built with that particular version. Nixpkgs contains the +latest major release of every GHC since 6.10.4, however, and there is a whole +family of package sets available that defines Hackage packages built with each +of those compilers, too: + + $ nix-env -f "" -qaP -A haskell.packages.ghc6123 + $ nix-env -f "" -qaP -A haskell.packages.ghc763 + +The name `haskellPackages` is really just a synonym for +`haskell.packages.ghc7102`, because we prefer that package set internally and +recommend it to our users as their default choice, but ultimately you are free +to compile your Haskell packages with any GHC version you please. The following +command displays the complete list of available compilers: + + $ nix-env -f "" -qaP -A haskell.compiler + haskell.compiler.ghc6104 ghc-6.10.4 + haskell.compiler.ghc6123 ghc-6.12.3 + haskell.compiler.ghc704 ghc-7.0.4 + haskell.compiler.ghc722 ghc-7.2.2 + haskell.compiler.ghc742 ghc-7.4.2 + haskell.compiler.ghc763 ghc-7.6.3 + haskell.compiler.ghc784 ghc-7.8.4 + haskell.compiler.ghc7102 ghc-7.10.2 + haskell.compiler.ghcHEAD ghc-7.11.20150402 + haskell.compiler.ghcNokinds ghc-nokinds-7.11.20150704 + haskell.compiler.ghcjs ghcjs-0.1.0 + haskell.compiler.jhc jhc-0.8.2 + haskell.compiler.uhc uhc-1.1.9.0 + +We have no package sets for `jhc` or `uhc` yet, unfortunately, but for every +version of GHC listed above, there exists a package set based on that compiler. +Also, the attributes `haskell.compiler.ghcXYC` and +`haskell.packages.ghcXYC.ghc` are synonymous for the sake of convenience. + +# How to create a development environment + +## How to install a compiler + +A simple development environment consists of a Haskell compiler and the tool +`cabal-install`, and we saw in section [How to install Haskell packages] how +you can install those programs into your user profile: + + $ nix-env -f "" -iA haskellPackages.ghc haskellPackages.cabal-install + +Instead of the default package set `haskellPackages`, you can also use the more +precise name `haskell.compiler.ghc7102`, which has the advantage that it refers +to the same GHC version regardless of what Nixpkgs considers "default" at any +given time. + +Once you've made those tools available in `$PATH`, it's possible to build +Hackage packages the same way people without access to Nix do it all the time: + + $ cabal get lens-4.11 && cd lens-4.11 + $ cabal install -j --dependencies-only + $ cabal configure + $ cabal build + +If you enjoy working with Cabal sandboxes, then that's entirely possible too: +just execute the command + + $ cabal sandbox init + +before installing the required dependencies. + +The `nix-shell` utility makes it easy to switch to a different compiler +version; just enter the Nix shell environment with the command + + $ nix-shell -p haskell.compiler.ghc784 + +to bring GHC 7.8.4 into `$PATH`. Re-running `cabal configure` switches your +build to use that compiler instead. 
If you're working on a project that doesn't +depend on any additional system libraries outside of GHC, then it's sufficient +even to run the `cabal configure` command inside of the shell: + + $ nix-shell -p haskell.compiler.ghc784 --command "cabal configure" + +Afterwards, all other commands like `cabal build` work just fine in any shell +environment, because the configure phase recorded the absolute paths to all +required tools like GHC in its build configuration inside of the `dist/` +directory. Please note, however, that `nix-collect-garbage` can break such an +environment because the Nix store paths created by `nix-shell` aren't "alive" +anymore once `nix-shell` has terminated. If you find that your Haskell builds +no longer work after garbage collection, then you'll have to re-run `cabal +configure` inside of a new `nix-shell` environment. + +## How to install a compiler with libraries + +GHC expects to find all installed libraries inside of its own `lib` directory. +This approach works fine on traditional Unix systems, but it doesn't work for +Nix, because GHC's store path is immutable once it's built. We cannot install +additional libraries into that location. As a consequence, our copies of GHC +don't know any packages except their own core libraries, like `base`, +`containers`, `Cabal`, etc. + +We can register additional libraries to GHC, however, using a special build +function called `ghcWithPackages`. That function expects one argument: a +function that maps from an attribute set of Haskell packages to a list of +packages, which determines the libraries known to that particular version of +GHC. For example, the Nix expression `ghcWithPackages (pkgs: [pkgs.mtl])` +generates a copy of GHC that has the `mtl` library registered in addition to +its normal core packages: + + $ nix-shell -p "haskellPackages.ghcWithPackages (pkgs: [pkgs.mtl])" + + [nix-shell:~]$ ghc-pkg list mtl + /nix/store/zy79...-ghc-7.10.2/lib/ghc-7.10.2/package.conf.d: + mtl-2.2.1 + +This function allows users to define their own development environment by means +of an override. After adding the following snippet to `~/.nixpkgs/config.nix`, + + { + packageOverrides = super: let self = super.pkgs; in + { + myHaskellEnv = self.haskell.packages.ghc7102.ghcWithPackages + (haskellPackages: with haskellPackages; [ + # libraries + arrows async cgi criterion + # tools + cabal-install haskintex + ]); + }; + } + +it's possible to install that compiler with `nix-env -f "" -iA +myHaskellEnv`. If you'd like to switch that development environment to a +different version of GHC, just replace the `ghc7102` bit in the previous +definition with the appropriate name. Of course, it's also possible to define +any number of these development environments! (You can't install two of them +into the same profile at the same time, though, because that would result in +file conflicts.) + +The generated `ghc` program is a wrapper script that re-directs the real +GHC executable to use a new `lib` directory --- one that we specifically +constructed to contain all those packages the user requested: + + $ cat $(type -p ghc) + #! /nix/store/xlxj...-bash-4.3-p33/bin/bash -e + export NIX_GHC=/nix/store/19sm...-ghc-7.10.2/bin/ghc + export NIX_GHCPKG=/nix/store/19sm...-ghc-7.10.2/bin/ghc-pkg + export NIX_GHC_DOCDIR=/nix/store/19sm...-ghc-7.10.2/share/doc/ghc/html + export NIX_GHC_LIBDIR=/nix/store/19sm...-ghc-7.10.2/lib/ghc-7.10.2 + exec /nix/store/j50p...-ghc-7.10.2/bin/ghc "-B$NIX_GHC_LIBDIR" "$@" + +The variables `$NIX_GHC`, `$NIX_GHCPKG`, etc. 
point to the *new* store path +`ghcWithPackages` constructed specifically for this environment. The last line +of the wrapper script then executes the real `ghc`, but passes the path to the +new `lib` directory using GHC's `-B` flag. + +The purpose of those environment variables is to work around an impurity in the +popular [ghc-paths](http://hackage.haskell.org/package/ghc-paths) library. That +library promises to give its users access to GHC's installation paths. Only, +the library can't possible know that path when it's compiled, because the path +GHC considers its own is determined only much later, when the user configures +it through `ghcWithPackages`. So we [patched +ghc-paths](https://github.com/NixOS/nixpkgs/blob/master/pkgs/development/haskell-modules/ghc-paths-nix.patch) +to return the paths found in those environment variables at run-time rather +than trying to guess them at compile-time. + +To make sure that mechanism works properly all the time, we recommend that you +set those variables to meaningful values in your shell environment, too, i.e. +by adding the following code to your `~/.bashrc`: + + if type >/dev/null 2>&1 -p ghc; then + eval "$(egrep ^export "$(type -p ghc)")" + fi + +If you are certain that you'll use only one GHC environment which is located in +your user profile, then you can use the following code, too, which has the +advantage that it doesn't contain any paths from the Nix store, i.e. those +settings always remain valid even if a `nix-env -u` operation updates the GHC +environment in your profile: + + if [ -e ~/.nix-profile/bin/ghc ]; then + export NIX_GHC="$HOME/.nix-profile/bin/ghc" + export NIX_GHCPKG="$HOME/.nix-profile/bin/ghc-pkg" + export NIX_GHC_DOCDIR="$HOME/.nix-profile/share/doc/ghc/html" + export NIX_GHC_LIBDIR="$HOME/.nix-profile/lib/ghc-$($NIX_GHC --numeric-version)" + fi + +## How to install a compiler with libraries, hoogle and documentation indexes + +If you plan to use your environment for interactive programming, not just +compiling random Haskell code, you might want to replace `ghcWithPackages` in +all the listings above with `ghcWithHoogle`. + +This environment generator not only produces an environment with GHC and all +the specified libraries, but also generates a `hoogle` and `haddock` indexes +for all the packages, and provides a wrapper script around `hoogle` binary that +uses all those things. A precise name for this thing would be +"`ghcWithPackagesAndHoogleAndDocumentationIndexes`", which is, regrettably, too +long and scary. + +For example, installing the following environment + + { + packageOverrides = super: let self = super.pkgs; in + { + myHaskellEnv = self.haskellPackages.ghcWithHoogle + (haskellPackages: with haskellPackages; [ + # libraries + arrows async cgi criterion + # tools + cabal-install haskintex + ]); + }; + } + +allows one to browse module documentation index [not too dissimilar to +this](https://downloads.haskell.org/~ghc/latest/docs/html/libraries/index.html) +for all the specified packages and their dependencies by directing a browser of +choice to `~/.nix-profiles/share/doc/hoogle/index.html` (or +`/run/current-system/sw/share/doc/hoogle/index.html` in case you put it in +`environment.systemPackages` in NixOS). + +After you've marveled enough at that try adding the following to your +`~/.ghc/ghci.conf` + + :def hoogle \s -> return $ ":! hoogle search -cl --count=15 \"" ++ s ++ "\"" + :def doc \s -> return $ ":! 
hoogle search -cl --info \"" ++ s ++ "\"" + +and test it by typing into `ghci`: + + :hoogle a -> a + :doc a -> a + +Be sure to note the links to `haddock` files in the output. With any modern and +properly configured terminal emulator you can just click those links to +navigate there. + +Finally, you can run + + hoogle server -p 8080 + +and navigate to http://localhost:8080/ for your own local +[Hoogle](https://www.haskell.org/hoogle/). Note, however, that Firefox and +possibly other browsers disallow navigation from `http:` to `file:` URIs for +security reasons, which might be quite an inconvenience. See [this +page](http://kb.mozillazine.org/Links_to_local_pages_do_not_work) for +workarounds. + + +## How to create ad hoc environments for `nix-shell` + +The easiest way to create an ad hoc development environment is to run +`nix-shell` with the appropriate GHC environment given on the command-line: + + nix-shell -p "haskellPackages.ghcWithPackages (pkgs: with pkgs; [mtl pandoc])" + +For more sophisticated use-cases, however, it's more convenient to save the +desired configuration in a file called `shell.nix` that looks like this: + + { nixpkgs ? import {}, compiler ? "ghc7102" }: + let + inherit (nixpkgs) pkgs; + ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages (ps: with ps; [ + monad-par mtl + ]); + in + pkgs.stdenv.mkDerivation { + name = "my-haskell-env-0"; + buildInputs = [ ghc ]; + shellHook = "eval $(egrep ^export ${ghc}/bin/ghc)"; + } + +Now run `nix-shell` --- or even `nix-shell --pure` --- to enter a shell +environment that has the appropriate compiler in `$PATH`. If you use `--pure`, +then add all other packages that your development environment needs into the +`buildInputs` attribute. If you'd like to switch to a different compiler +version, then pass an appropriate `compiler` argument to the expression, i.e. +`nix-shell --argstr compiler ghc784`. + +If you need such an environment because you'd like to compile a Hackage package +outside of Nix --- i.e. because you're hacking on the latest version from Git +---, then the package set provides suitable nix-shell environments for you +already! Every Haskell package has an `env` attribute that provides a shell +environment suitable for compiling that particular package. If you'd like to +hack the `lens` library, for example, then you just have to check out the +source code and enter the appropriate environment: + + $ cabal get lens-4.11 && cd lens-4.11 + Downloading lens-4.11... + Unpacking to lens-4.11/ + + $ nix-shell "" -A haskellPackages.lens.env + [nix-shell:/tmp/lens-4.11]$ + +At point, you can run `cabal configure`, `cabal build`, and all the other +development commands. Note that you need `cabal-install` installed in your +`$PATH` already to use it here --- the `nix-shell` environment does not provide +it. + +# How to create Nix builds for your own private Haskell packages + +If your own Haskell packages have build instructions for Cabal, then you can +convert those automatically into build instructions for Nix using the +`cabal2nix` utility, which you can install into your profile by running +`nix-env -i cabal2nix`. + +## How to build a stand-alone project + +For example, let's assume that you're working on a private project called +`foo`. To generate a Nix build expression for it, change into the project's +top-level directory and run the command: + + $ cabal2nix . >foo.nix + +Then write the following snippet into a file called `default.nix`: + + { nixpkgs ? import {}, compiler ? 
"ghc7102" }: + nixpkgs.pkgs.haskell.packages.${compiler}.callPackage ./foo.nix { } + +Finally, store the following code in a file called `shell.nix`: + + { nixpkgs ? import {}, compiler ? "ghc7102" }: + (import ./default.nix { inherit nixpkgs compiler; }).env + +At this point, you can run `nix-build` to have Nix compile your project and +install it into a Nix store path. The local directory will contain a symlink +called `result` after `nix-build` returns that points into that location. Of +course, passing the flag `--argstr compiler ghc763` allows switching the build +to any version of GHC currently supported. + +Furthermore, you can call `nix-shell` to enter an interactive development +environment in which you can use `cabal configure` and `cabal build` to develop +your code. That environment will automatically contain a proper GHC derivation +with all the required libraries registered as well as all the system-level +libraries your package might need. + +If your package does not depend on any system-level libraries, then it's +sufficient to run + + $ nix-shell --command "cabal configure" + +once to set up your build. `cabal-install` determines the absolute paths to all +resources required for the build and writes them into a config file in the +`dist/` directory. Once that's done, you can run `cabal build` and any other +command for that project even outside of the `nix-shell` environment. This +feature is particularly nice for those of us who like to edit their code with +an IDE, like Emacs' `haskell-mode`, because it's not necessary to start Emacs +inside of nix-shell just to make it find out the necessary settings for +building the project; `cabal-install` has already done that for us. + +If you want to do some quick-and-dirty hacking and don't want to bother setting +up a `default.nix` and `shell.nix` file manually, then you can use the +`--shell` flag offered by `cabal2nix` to have it generate a stand-alone +`nix-shell` environment for you. With that feature, running + + $ cabal2nix --shell . >shell.nix + $ nix-shell --command "cabal configure" + +is usually enough to set up a build environment for any given Haskell package. +You can even use that generated file to run `nix-build`, too: + + $ nix-build shell.nix + +## How to build projects that depend on each other + +If you have multiple private Haskell packages that depend on each other, then +you'll have to register those packages in the Nixpkgs set to make them visible +for the dependency resolution performed by `callPackage`. First of all, change +into each of your projects top-level directories and generate a `default.nix` +file with `cabal2nix`: + + $ cd ~/src/foo && cabal2nix . >default.nix + $ cd ~/src/bar && cabal2nix . 
>default.nix + +Then edit your `~/.nixpkgs/config.nix` file to register those builds in the +default Haskell package set: + + { + packageOverrides = super: let self = super.pkgs; in + { + haskellPackages = super.haskellPackages.override { + overrides = self: super: { + foo = self.callPackage ../src/foo {}; + bar = self.callPackage ../src/bar {}; + }; + }; + }; + } + +Once that's accomplished, `nix-env -f "" -qA haskellPackages` will +show your packages like any other package from Hackage, and you can build them + + $ nix-build "" -A haskellPackages.foo + +or enter an interactive shell environment suitable for building them: + + $ nix-shell "" -A haskellPackages.bar.env + +# Miscellaneous Topics + +## How to build with profiling enabled + +Every Haskell package set takes a function called `overrides` that you can use +to manipulate the package as much as you please. One useful application of this +feature is to replace the default `mkDerivation` function with one that enables +library profiling for all packages. To accomplish that, add configure the +following snippet in your `~/.nixpkgs/config.nix` file: + + { + packageOverrides = super: let self = super.pkgs; in + { + profiledHaskellPackages = self.haskellPackages.override { + overrides = self: super: { + mkDerivation = args: super.mkDerivation (args // { + enableLibraryProfiling = true; + }); + }; + }; + }; + } + +Then, replace instances of `haskellPackages` in the `cabal2nix`-generated +`default.nix` or `shell.nix` files with `profiledHaskellPackages`. + +## How to override package versions in a compiler-specific package set + +Nixpkgs provides the latest version of +[`ghc-events`](http://hackage.haskell.org/package/ghc-events), which is 0.4.4.0 +at the time of this writing. This is fine for users of GHC 7.10.x, but GHC +7.8.4 cannot compile that binary. Now, one way to solve that problem is to +register an older version of `ghc-events` in the 7.8.x-specific package set. +The first step is to generate Nix build instructions with `cabal2nix`: + + $ cabal2nix cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix + +Then add the override in `~/.nixpkgs/config.nix`: + + { + packageOverrides = super: let self = super.pkgs; in + { + haskell = super.haskell // { + packages = super.haskell.packages // { + ghc784 = super.haskell.packages.ghc784.override { + overrides = self: super: { + ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {}; + }; + }; + }; + }; + }; + } + +This code is a little crazy, no doubt, but it's necessary because the intuitive +version + + haskell.packages.ghc784 = super.haskell.packages.ghc784.override { + overrides = self: super: { + ghc-events = self.callPackage ./ghc-events-0.4.3.0.nix {}; + }; + }; + +doesn't do what we want it to: that code replaces the `haskell` package set in +Nixpkgs with one that contains only one entry,`packages`, which contains only +one entry `ghc784`. This override loses the `haskell.compiler` set, and it +loses the `haskell.packages.ghcXYZ` sets for all compilers but GHC 7.8.4. To +avoid that problem, we have to perform the convoluted little dance from above, +iterating over each step in hierarchy. + +Once it's accomplished, however, we can install a variant of `ghc-events` +that's compiled with GHC 7.8.4: + + nix-env -f "" -iA haskell.packages.ghc784.ghc-events + +Unfortunately, it turns out that this build fails again while executing the +test suite! Apparently, the release archive on Hackage is missing some data +files that the test suite requires, so we cannot run it. 
We accomplish that by +re-generating the Nix expression with the `--no-check` flag: + + $ cabal2nix --no-check cabal://ghc-events-0.4.3.0 >~/.nixpkgs/ghc-events-0.4.3.0.nix + +Now the builds succeeds. + +Of course, in the concrete example of `ghc-events` this whole exercise is not +an ideal solution, because `ghc-events` can analyze the output emitted by any +version of GHC later than 6.12 regardless of the compiler version that was used +to build the `ghc-events' executable, so strictly speaking there's no reason to +prefer one built with GHC 7.8.x in the first place. However, for users who +cannot use GHC 7.10.x at all for some reason, the approach of downgrading to an +older version might be useful. + +## How to recover from GHC's infamous non-deterministic library ID bug + +GHC and distributed build farms don't get along well: + + https://ghc.haskell.org/trac/ghc/ticket/4012 + +When you see an error like this one + + package foo-0.7.1.0 is broken due to missing package + text-1.2.0.4-98506efb1b9ada233bb5c2b2db516d91 + +then you have to download and re-install `foo` and all its dependents from +scratch: + + # nix-store -q --referrers /nix/store/*-haskell-text-1.2.0.4 \ + | xargs -L 1 nix-store --repair-path --option binary-caches http://hydra.nixos.org + +If you're using additional Hydra servers other than `hydra.nixos.org`, then it +might be necessary to purge the local caches that store data from those +machines to disable these binary channels for the duration of the previous +command, i.e. by running: + + rm /nix/var/nix/binary-cache-v3.sqlite + rm /nix/var/nix/manifests/* + rm /nix/var/nix/channel-cache/* + +## Builds on Darwin fail with `math.h` not found + +Users of GHC on Darwin have occasionally reported that builds fail, because the +compiler complains about a missing include file: + + fatal error: 'math.h' file not found + +The issue has been discussed at length in [ticket +6390](https://github.com/NixOS/nixpkgs/issues/6390), and so far no good +solution has been proposed. As a work-around, users who run into this problem +can configure the environment variables + + export NIX_CFLAGS_COMPILE="-idirafter /usr/include" + export NIX_CFLAGS_LINK="-L/usr/lib" + +in their `~/.bashrc` file to avoid the compiler error. + +## Using Stack together with Nix + + -- While building package zlib-0.5.4.2 using: + runhaskell -package=Cabal-1.22.4.0 -clear-package-db [... lots of flags ...] + Process exited with code: ExitFailure 1 + Logs have been written to: /home/foo/src/stack-ide/.stack-work/logs/zlib-0.5.4.2.log + + Configuring zlib-0.5.4.2... + Setup.hs: Missing dependency on a foreign library: + * Missing (or bad) header file: zlib.h + This problem can usually be solved by installing the system package that + provides this library (you may need the "-dev" version). If the library is + already installed but in a non-standard location then you can use the flags + --extra-include-dirs= and --extra-lib-dirs= to specify where it is. + If the header file does exist, it may contain errors that are caught by the C + compiler at the preprocessing stage. In this case you can re-run configure + with the verbosity flag -v3 to see the error messages. + +When you run the build inside of the nix-shell environment, the system +is configured to find libz.so without any special flags -- the compiler +and linker "just know" how to find it. 
Consequently, Cabal won't record +any search paths for libz.so in the package description, which means +that the package works fine inside of nix-shell, but once you leave the +shell the shared object can no longer be found. That issue is by no +means specific to Stack: you'll have that problem with any other +Haskell package that's built inside of nix-shell but run outside of that +environment. + +I suppose we could try to remedy the issue by wrapping `stack` or +`cabal` with a script that tries to find those kind of implicit search +paths and makes them explicit on the "cabal configure" command line. I +don't think anyone is working on that subject yet, though, because the +problem doesn't seem so bad in practice. + +You can remedy that issue in several ways. First of all, run + + $ nix-build --no-out-link "" -A zlib + /nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8 + +to find out the store path of the system's zlib library. Now, you can + +1) add that path (plus a "/lib" suffix) to your $LD_LIBRARY_PATH + environment variable to make sure your system linker finds libz.so + automatically. It's no pretty solution, but it will work. + +2) As a variant of (1), you can also install any number of system + libraries into your user's profile (or some other profile) and point + $LD_LIBRARY_PATH to that profile instead, so that you don't have to + list dozens of those store paths all over the place. + +3) The solution I prefer is to call stack with an appropriate + --extra-lib-dirs flag like so: + + $ stack --extra-lib-dirs=/nix/store/alsvwzkiw4b7ip38l4nlfjijdvg3fvzn-zlib-1.2.8/lib build + + Typically, you'll need --extra-include-dirs as well. It's possible + to add those flag to the project's "stack.yaml" or your user's + global "~/.stack/global/stack.yaml" file so that you don't have to + specify them manually every time. + + The same thing applies to `cabal configure`, of course, if you're + building with `cabal-install` instead of Stack. + + +# Other resources + +- The Youtube video [Nix Loves Haskell](https://www.youtube.com/watch?v=BsBhi_r-OeE) + provides an introduction into Haskell NG aimed at beginners. The slides are + available at http://cryp.to/nixos-meetup-3-slides.pdf and also -- in a form + ready for cut & paste -- at + https://github.com/NixOS/cabal2nix/blob/master/doc/nixos-meetup-3-slides.md. + +- Another Youtube video is [Escaping Cabal Hell with Nix](https://www.youtube.com/watch?v=mQd3s57n_2Y), + which discusses the subject of Haskell development with Nix but also provides + a basic introduction to Nix as well, i.e. it's suitable for viewers with + almost no prior Nix experience. + +- Oliver Charles wrote a very nice [Tutorial how to develop Haskell packages with Nix](http://wiki.ocharles.org.uk/Nix). + +- The *Journey into the Haskell NG infrastructure* series of postings + describe the new Haskell infrastructure in great detail: + + - [Part 1](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015591.html) + explains the differences between the old and the new code and gives + instructions how to migrate to the new setup. + + - [Part 2](http://lists.science.uu.nl/pipermail/nix-dev/2015-January/015608.html) + looks in-depth at how to tweak and configure your setup by means of + overrides. + + - [Part 3](http://lists.science.uu.nl/pipermail/nix-dev/2015-April/016912.html) + describes the infrastructure that keeps the Haskell package set in Nixpkgs + up-to-date. 
diff --git a/doc/language-support.xml b/doc/language-support.xml index 4fd70431950..48b9209b0ad 100644 --- a/doc/language-support.xml +++ b/doc/language-support.xml @@ -1,3 +1,4 @@ + @@ -13,7 +14,7 @@ in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter. -
Perl +
Perl Nixpkgs provides a function buildPerlPackage, a generic package builder function for any Perl package that has a @@ -151,7 +152,7 @@ ClassC3Componentised = buildPerlPackage rec { -
Generation from CPAN +
Generation from CPAN Nix expressions for Perl packages can be generated (almost) automatically from CPAN. This is done by the program @@ -191,7 +192,7 @@ you need it.
-
Python +
Python Currently supported interpreters are python26, python27, @@ -245,44 +246,44 @@ are provided with all modules included. Name of the folder in ${python}/lib/ for corresponding interpreter. - + interpreter Alias for ${python}/bin/${executable}. - + buildEnv Function to build python interpreter environments with extra packages bundled together. - See for usage and documentation. + See for usage and documentation. - + sitePackages Alias for lib/${libPrefix}/site-packages. - + executable Name of the interpreter executable, ie python3.4. - + -
<varname>buildPythonPackage</varname> function - +
<varname>buildPythonPackage</varname> function + The function is implemented in pkgs/development/python-modules/generic/default.nix. Example usage: - + twisted = buildPythonPackage { name = "twisted-8.1.0"; @@ -308,27 +309,27 @@ twisted = buildPythonPackage { python27Packages, python32Packages, python33Packages, python34Packages and pypyPackages. - + buildPythonPackage mainly does four things: - + In the configurePhase, it patches setup.py to always include setuptools before distutils for monkeypatching machinery to take place. - + - In the buildPhase, it calls + In the buildPhase, it calls ${python.interpreter} setup.py build ... - + - In the installPhase, it calls + In the installPhase, it calls ${python.interpreter} setup.py install ... - + In the postFixup phase, wrapPythonPrograms bash function is called to wrap all programs in $out/bin/* @@ -337,23 +338,30 @@ twisted = buildPythonPackage { - - By default doCheck = true is set and tests are run with + + By default doCheck = true is set and tests are run with ${python.interpreter} setup.py test command in checkPhase. - - propagatedBuildInputs packages are propagated to user environment. - + + + As in Perl, dependencies on other Python packages can be specified in the + buildInputs and + propagatedBuildInputs attributes. If something is + exclusively a build-time dependency, use + buildInputs; if it’s (also) a runtime dependency, + use propagatedBuildInputs. + + By default meta.platforms is set to the same value as the interpreter unless overriden otherwise. - + <varname>buildPythonPackage</varname> parameters (all parameters from <varname>mkDerivation</varname> function are still supported) - + namePrefix @@ -363,7 +371,7 @@ twisted = buildPythonPackage { if you're packaging an application or a command line tool. - + disabled @@ -373,21 +381,21 @@ twisted = buildPythonPackage { for examples. - + setupPyInstallFlags List of flags passed to setup.py install command. - + setupPyBuildFlags List of flags passed to setup.py build command. - + pythonPath @@ -396,21 +404,21 @@ twisted = buildPythonPackage { (contrary to propagatedBuildInputs). - + preShellHook Hook to execute commands before shellHook. - + postShellHook Hook to execute commands after shellHook. - + distutilsExtraCfg @@ -419,15 +427,29 @@ twisted = buildPythonPackage { configuration). - + + + makeWrapperArgs + + A list of strings. Arguments to be passed to + makeWrapper, which wraps generated binaries. By + default, the arguments to makeWrapper set + PATH and PYTHONPATH environment + variables before calling the binary. Additional arguments here can + allow a developer to set environment variables which will be + available when the binary is run. For example, + makeWrapperArgs = ["--set FOO BAR" "--set BAZ QUX"]. + + + - +
-
<function>python.buildEnv</function> function +
<function>python.buildEnv</function> function Create Python environments using low-level pkgs.buildEnv function. Example default.nix: - + {}; @@ -436,31 +458,52 @@ python.buildEnv.override { ignoreCollisions = true; }]]> - + Running nix-build will create /nix/store/cf1xhjwzmdki7fasgr4kz6di72ykicl5-python-2.7.8-env with wrapped binaries in bin/. - + + + You can also use env attribute to create local + environments with needed packages installed (somewhat comparable to + virtualenv). For example, with the following + shell.nix: + + + {}; + +(python3.buildEnv.override { + extraLibs = with python3Packages; + [ numpy + requests + ]; +}).env]]> + + + Running nix-shell will drop you into a shell where + python will have specified packages in its path. + + <function>python.buildEnv</function> arguments - + extraLibs List of packages installed inside the environment. - + postBuild Shell command executed after the build of environment. - + ignoreCollisions @@ -470,7 +513,7 @@ python.buildEnv.override {
-
Tools +
Tools Packages inside nixpkgs are written by hand. However, many tools exist in the community to help save time. No tool is preferred at the moment. @@ -497,20 +540,20 @@
-
Development +
Development To develop Python packages buildPythonPackage has additional logic inside shellPhase to run ${python.interpreter} setup.py develop for the package. - + shellPhase is executed only if setup.py exists. - + Given a default.nix: - + {}; @@ -522,18 +565,18 @@ buildPythonPackage { src = ./.; }]]> - + Running nix-shell with no arguments should give you the environment in which the package would be build with nix-build. - + Shortcut to setup environments with C headers/libraries and python packages: - + $ nix-shell -p pythonPackages.pyramid zlib libjpeg git - + There is a boolean value lib.inNixShell set to true if nix-shell is invoked. @@ -541,7 +584,7 @@ buildPythonPackage {
-
FAQ +
FAQ @@ -562,18 +605,18 @@ buildPythonPackage { Known bug in setuptools install_data does not respect --prefix. Example of such package using the feature is pkgs/tools/X11/xpra/default.nix. As workaround install it as an extra preInstall step: - + ${python.interpreter} setup.py install_data --install-dir=$out --root=$out sed -i '/ = data_files/d' setup.py - + Rationale of non-existent global site-packages There is no need to have global site-packages in Nix. Each package has isolated dependency tree and installing any python package will only populate $PATH - inside user environment. See to create self-contained + inside user environment. See to create self-contained interpreter with a set of packages. @@ -583,7 +626,7 @@ sed -i '/ = data_files/d' setup.py
-
Contributing guidelines +
Contributing guidelines The following rules should be respected: @@ -611,12 +654,12 @@ sed -i '/ = data_files/d' setup.py
-
Ruby +
Ruby There currently is support to bundle applications that are packaged as Ruby gems. The utility "bundix" allows you to write a Gemfile, let bundler create a Gemfile.lock, and then convert this into a nix expression that contains all Gem dependencies automatically. For example, to package sensu, we did: - + Gemfile source 'https://rubygems.org' gem 'sensu' $ bundler package --path /tmp/vendor/bundle -$ $(nix-build '&nixpkgs>' -A bundix)/bin/bundix +$ $(nix-build '' -A bundix)/bin/bundix $ cat > default.nix { lib, bundlerEnv, ruby }: @@ -652,7 +695,7 @@ and scalable.";
-
Go +
Go The function buildGoPackage builds standard Go packages. @@ -662,20 +705,19 @@ standard Go packages. net = buildGoPackage rec { name = "go.net-${rev}"; - goPackagePath = "code.google.com/p/go.net"; + goPackagePath = "golang.org/x/net"; subPackages = [ "ipv4" "ipv6" ]; - rev = "28ff664507e4"; - src = fetchhg { + rev = "e0403b4e005"; + src = fetchFromGitHub { inherit rev; - url = "https://${goPackagePath}"; - sha256 = "1lkz4c9pyz3yz2yz18hiycvlfhgy3jxp68bs7mv7bcfpaj729qav"; + owner = "golang"; + repo = "net"; + sha256 = "1g7cjzw4g4301a3yqpbk8n1d4s97sfby2aysl275x04g0zh8jxqp"; }; - renameImports = [ - "code.google.com/p/go.crypto golang.org/x/crypto" - "code.google.com/p/goprotobuf github.com/golang/protobuf" - ]; + goPackageAliases = [ "code.google.com/p/go.net" ]; propagatedBuildInputs = [ goPackages.text ]; buildFlags = "--tags release"; + disabled = isGo13; }; @@ -703,17 +745,18 @@ the following arguments are of special significance to the function: - + - renameImports is a list of import paths to be renamed before - building the package. The path to be renamed can be a regular expression. + goPackageAliases is a list of alternative import paths + that are valid for this library. + Packages that depend on this library will automatically rename + import paths that match any of the aliases to goPackagePath. In this example imports will be renamed from - code.google.com/p/go.crypto to - golang.org/x/crypto and from - code.google.com/p/goprotobuf to - github.com/golang/protobuf. + code.google.com/p/go.net to + golang.org/x/net in every package that depend on the + go.net library. @@ -732,6 +775,18 @@ the following arguments are of special significance to the function: + + + If disabled is true, + nix will refuse to build this package. + + + In this example the package will not be built for go 1.3. The isGo13 + is an utility function that returns true if go used to build the + package has version 1.3.x. + + + @@ -761,7 +816,7 @@ done
-
Java +
Java Ant-based Java packages are typically built from source as follows: @@ -842,7 +897,7 @@ Runtime) instead of the OpenJRE.
-
Lua +
Lua Lua packages are built by the buildLuaPackage function. This function is @@ -850,7 +905,7 @@ Runtime) instead of the OpenJRE. in pkgs/development/lua-modules/generic/default.nix and works similarly to buildPerlPackage. (See - for details.) + for details.) @@ -864,7 +919,7 @@ fileSystem = buildLuaPackage { src = fetchurl { url = "https://github.com/keplerproject/luafilesystem/archive/v1_6_2.tar.gz"; sha256 = "1n8qdwa20ypbrny99vhkmx8q04zd2jjycdb5196xdhgvqzk10abz"; - }; + }; meta = { homepage = "https://github.com/keplerproject/luafilesystem"; hydraPlatforms = stdenv.lib.platforms.linux; @@ -875,7 +930,7 @@ fileSystem = buildLuaPackage { - Though, more complicated package should be placed in a seperate file in + Though, more complicated package should be placed in a seperate file in pkgs/development/lua-modules. @@ -889,7 +944,7 @@ fileSystem = buildLuaPackage {
-
Coq +
Coq Coq libraries should be installed in $(out)/lib/coq/${coq.coq-version}/user-contrib/. @@ -926,6 +981,72 @@ stdenv.mkDerivation {
+
Qt + +The information in this section applies to Qt 5.5 and later. + +Qt is an application development toolkit for C++. Although it is +not a distinct programming language, there are special considerations +for packaging Qt-based programs and libraries. A small set of tools +and conventions has grown out of these considerations. + +
Libraries + +Packages that provide libraries should be listed in +qt5LibsFun so that the library is built with each +Qt version. A set of packages is provided for each version of Qt; for +example, qt5Libs always provides libraries built +with the latest version, qt55Libs provides +libraries built with Qt 5.5, and so on. To avoid version conflicts, no +top-level attributes are created for these packages. + +
+ +
Programs + +Application packages do not need to be built with every Qt +version. To ensure consistency between the package's dependencies, +call the package with qt5Libs.callPackage instead +of the usual callPackage. An older version may be +selected in case of incompatibility. For example, to build with Qt +5.5, call the package with +qt55Libs.callPackage. + +Several environment variables must be set at runtime for Qt +applications to function correctly, including: + + + QT_PLUGIN_PATH + QML_IMPORT_PATH + QML2_IMPORT_PATH + XDG_DATA_DIRS + + +To ensure that these are set correctly, the program must be wrapped by +invoking wrapQtProgram program +during installation (for example, during +fixupPhase). wrapQtProgram +accepts the same options as makeWrapper. + + +
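As a rough sketch of how these pieces fit together (the myapp package, its URL, and the availability of qtbase and of the wrapQtProgram hook in the build environment are assumptions, not part of this change):

# In the package set, pick the Qt 5.5 scope explicitly:
myapp = qt55Libs.callPackage ../applications/misc/myapp { };

# In the package expression itself:
{ stdenv, fetchurl, qtbase }:

stdenv.mkDerivation rec {
  name = "myapp-1.0";
  src = fetchurl {
    url = "http://example.org/${name}.tar.gz";
    sha256 = "123…";
  };
  buildInputs = [ qtbase ];
  # Wrap during fixupPhase so that QT_PLUGIN_PATH, QML_IMPORT_PATH,
  # QML2_IMPORT_PATH and XDG_DATA_DIRS are set for the installed binary.
  postFixup = ''
    wrapQtProgram $out/bin/myapp
  '';
}

Building the same expression against the latest Qt is then just a matter of calling it with qt5Libs.callPackage instead.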
+ +
KDE + +Many of the considerations above also apply to KDE packages, +especially the need to set the correct environment variables at +runtime. To ensure that this is done, invoke wrapKDEProgram +program during +installation. wrapKDEProgram also generates a +ksycoca database so that required data and services +can be found. Like its Qt counterpart, +wrapKDEProgram accepts the same options as +makeWrapper. + +
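Again only as an illustrative sketch (the binary name is hypothetical), the KDE counterpart differs mainly in the wrapper invocation:

postFixup = ''
  # Sets the usual wrapper variables and also generates a ksycoca
  # database; accepts the same options as makeWrapper.
  wrapKDEProgram $out/bin/mykdeapp
'';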
+ +
+ -
+
X.org @@ -219,5 +219,151 @@ you should modify
--> + + +
+
+ Eclipse
+
+ The Nix expressions related to the Eclipse platform and IDE are in
+ pkgs/applications/editors/eclipse.
+
+ Nixpkgs provides a number of packages that will install Eclipse in
+ its various forms. These range from the bare-bones Eclipse
+ Platform to the more fully featured Eclipse SDK or Scala-IDE
+ packages, and multiple versions are often available. It is possible
+ to list available Eclipse packages by issuing the command:
+
+$ nix-env -f '<nixpkgs>' -qaP -A eclipses --description
+
+ Once an Eclipse variant is installed it can be run using the
+ eclipse command, as expected. From within
+ Eclipse it is then possible to install plugins in the usual manner,
+ by either manually specifying an Eclipse update site or by
+ installing the Marketplace Client plugin and using it to discover
+ and install other plugins. This installation method provides an
+ Eclipse installation that closely resembles a manually installed
+ Eclipse.
+
+ If you prefer to install plugins in a more declarative manner, then
+ Nixpkgs also offers a number of Eclipse plugins that can be
+ installed in an Eclipse environment. This
+ type of environment is created using the function
+ eclipseWithPlugins found inside the
+ nixpkgs.eclipses attribute set. This function
+ takes as argument { eclipse, plugins ? [], jvmArgs ? [] }
+ where eclipse is one of the
+ Eclipse packages described above, plugins is a
+ list of plugin derivations, and jvmArgs is a
+ list of arguments given to the JVM running Eclipse. For
+ example, say you wish to install the latest Eclipse Platform with
+ the popular Eclipse Color Theme plugin and also allow Eclipse to
+ use more RAM. You could then add
+
+packageOverrides = pkgs: {
+ myEclipse = with pkgs.eclipses; eclipseWithPlugins {
+ eclipse = eclipse-platform;
+ jvmArgs = [ "-Xmx2048m" ];
+ plugins = [ plugins.color-theme ];
+ };
+}
+
+ to your Nixpkgs configuration
+ (~/.nixpkgs/config.nix) and install it by
+ running nix-env -f '<nixpkgs>' -iA
+ myEclipse and afterward run Eclipse as usual. It is
+ possible to find out which plugins are available for installation
+ using eclipseWithPlugins by running
+
+$ nix-env -f '<nixpkgs>' -qaP -A eclipses.plugins --description
+
+ If there is a need to install plugins that are not available in
+ Nixpkgs then it may be possible to define these plugins outside
+ Nixpkgs using the buildEclipseUpdateSite and
+ buildEclipsePlugin functions found in the
+ nixpkgs.eclipses.plugins attribute set. Use the
+ buildEclipseUpdateSite function to install a
+ plugin distributed as an Eclipse update site. This function takes
+ { name, src } as its argument, where
+ src indicates the Eclipse update site archive.
+ All Eclipse features and plugins within the downloaded update site
+ will be installed. When an update site archive is not available,
+ the buildEclipsePlugin function can be
+ used to install a plugin that consists of a pair of feature and
+ plugin JARs. This function takes an argument { name,
+ srcFeature, srcPlugin } where
+ srcFeature and srcPlugin are
+ the feature and plugin JARs, respectively. 
+ + + + Expanding the previous example with two plugins using the above + functions we have + +packageOverrides = pkgs: { + myEclipse = with pkgs.eclipses; eclipseWithPlugins { + eclipse = eclipse-platform; + jvmArgs = [ "-Xmx2048m" ]; + plugins = [ + plugins.color-theme + (plugins.buildEclipsePlugin { + name = "myplugin1-1.0"; + srcFeature = fetchurl { + url = "http://…/features/myplugin1.jar"; + sha256 = "123…"; + }; + srcPlugin = fetchurl { + url = "http://…/plugins/myplugin1.jar"; + sha256 = "123…"; + }; + }); + (plugins.buildEclipseUpdateSite { + name = "myplugin2-1.0"; + src = fetchurl { + stripRoot = false; + url = "http://…/myplugin2.zip"; + sha256 = "123…"; + }; + }); + ]; + }; +} + + + +
+ +
+ +Elm + + +The Nix expressions for Elm reside in +pkgs/development/compilers/elm. They are generated +automatically by update-elm.rb script. One should +specify versions of Elm packages inside the script, clear the +packages directory and run the script from inside it. +elm-reactor is special because it also has Elm package +dependencies. The process is not automated very much for now -- you should +get the elm-reactor source tree (e.g. with +nix-shell) and run elm2nix.rb inside +it. Place the resulting package.nix file into +packages/elm-reactor-elm.nix. + + +
diff --git a/doc/packageconfig.xml b/doc/packageconfig.xml index 44ce1974c6c..4e0fcc3b6a4 100644 --- a/doc/packageconfig.xml +++ b/doc/packageconfig.xml @@ -67,7 +67,8 @@ lib/licenses.nix of the nix package tree. -
Modify packages via <literal>packageOverrides</literal> +
Modify +packages via <literal>packageOverrides</literal> diff --git a/doc/quick-start.xml b/doc/quick-start.xml index c0f96f87152..5ed959abace 100644 --- a/doc/quick-start.xml +++ b/doc/quick-start.xml @@ -55,18 +55,18 @@ $ git add pkgs/development/libraries/libfoo/default.nix - GNU cpio: pkgs/tools/archivers/cpio/default.nix. - The simplest possible package. The generic builder in - stdenv does everything for you. It has - no dependencies beyond stdenv. + GNU Hello: pkgs/applications/misc/hello/default.nix. + Trivial package, which specifies some meta + attributes which is good practice. - GNU Hello: pkgs/applications/misc/hello/ex-2/default.nix. - Also trivial, but it specifies some meta - attributes which is good practice. + GNU cpio: pkgs/tools/archivers/cpio/default.nix. + Also a simple package. The generic builder in + stdenv does everything for you. It has + no dependencies beyond stdenv. diff --git a/doc/stdenv.xml b/doc/stdenv.xml index fa3a7ef056e..6bb1002a4c6 100644 --- a/doc/stdenv.xml +++ b/doc/stdenv.xml @@ -15,7 +15,8 @@ environment does everything automatically. If can easily customise or override the various build phases. -
Using <literal>stdenv</literal> +
Using +<literal>stdenv</literal> To build a package with the standard environment, you use the function stdenv.mkDerivation, instead of the @@ -58,7 +59,7 @@ build. To make this easier, the standard environment breaks the package build into a number of phases, all of which can be overridden or modified individually: unpacking the sources, applying patches, configuring, building, and installing. -(There are some others; see .) +(There are some others; see .) For instance, a package that doesn’t supply a makefile but instead has to be compiled “manually” could be handled like this: @@ -124,7 +125,8 @@ genericBuild
-
Tools provided by <literal>stdenv</literal> +
Tools provided by +<literal>stdenv</literal> The standard environment provides the following packages: @@ -225,7 +227,7 @@ genericBuild
-
Phases +
Phases The generic builder has a number of phases. Package builds are split into phases to make it easier to override @@ -243,7 +245,8 @@ is convenient to override a phase from the derivation, while the latter is convenient from a build script. -
Controlling phases +
Controlling +phases There are a number of variables that control what phases are executed and in what order: @@ -327,7 +330,7 @@ executed and in what order:
-
The unpack phase +
The unpack phase The unpack phase is responsible for unpacking the source code of the package. The default implementation of @@ -434,7 +437,7 @@ Additional file types can be supported by setting the
-
The patch phase +
The patch phase The patch phase applies the list of patches defined in the patches variable. @@ -477,7 +480,7 @@ Additional file types can be supported by setting the
-
The configure phase +
The configure phase The configure phase prepares the source tree for building. The default configurePhase runs @@ -513,8 +516,8 @@ script) if it exists. dontAddPrefix By default, the flag --prefix=$prefix is added to the configure - flags. If this is undesirable, set this variable to a non-empty - value. + flags. If this is undesirable, set this variable to + true. @@ -530,8 +533,7 @@ script) if it exists. By default, the flag --disable-dependency-tracking is added to the configure flags to speed up Automake-based builds. If this is - undesirable, set this variable to a non-empty - value. + undesirable, set this variable to true. @@ -544,7 +546,16 @@ script) if it exists. variables in the Libtool script to prevent Libtool from using libraries in /usr/lib and such.. If this is undesirable, set this - variable to a non-empty value. + variable to true. + + + + dontDisableStatic + By default, when the configure script has + , the option + is added to the configure flags. + If this is undesirable, set this variable to + true. @@ -565,7 +576,7 @@ script) if it exists.
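A minimal sketch of how these variables are typically used, assuming a hypothetical libexample package; note that, per the change above, they are now set to true rather than to an arbitrary non-empty string:

{ stdenv, fetchurl }:

stdenv.mkDerivation {
  name = "libexample-1.0";   # hypothetical package
  src = fetchurl {
    url = "http://example.org/libexample-1.0.tar.gz";
    sha256 = "123…";
  };
  # Illustrative choices: keep the package's static libraries and let its
  # configure script perform its own dependency tracking.
  dontDisableStatic = true;
  dontDisableDependencyTracking = true;
}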
-
The build phase +
The build phase The build phase is responsible for actually building the package (e.g. compiling it). The default buildPhase @@ -649,7 +660,7 @@ called, respectively.
-
The check phase +
The check phase The check phase checks whether the package was built correctly by running its test suite. The default @@ -709,7 +720,7 @@ doCheck = true;
-
The install phase +
The install phase The install phase is responsible for installing the package in the Nix store under out. The default @@ -764,7 +775,7 @@ installTargets = "install-bin install-doc";
-
The fixup phase +
The fixup phase The fixup phase performs some (Nix-specific) post-processing actions on the files installed under $out by the @@ -805,6 +816,12 @@ following: stripped. By default, they are. + + dontMoveSbin + If set, files in $out/sbin are not moved + to $out/bin. By default, they are. + + stripAllList List of directories to search for libraries and @@ -882,12 +899,41 @@ following: phase. + + separateDebugInfo + If set to true, the standard + environment will enable debug information in C/C++ builds. After + installation, the debug information will be separated from the + executables and stored in the output named + debug. (This output is enabled automatically; + you don’t need to set the outputs attribute + explicitly.) To be precise, the debug information is stored in + debug/lib/debug/.build-id/XX/YYYY…, + where XXYYYY… is the build + ID of the binary — a SHA-1 hash of the contents of + the binary. Debuggers like GDB use the build ID to look up the + separated debug information. + + For example, with GDB, you can add + + +set debug-file-directory ~/.nix-profile/lib/debug + + + to ~/.gdbinit. GDB will then be able to find + debug information installed via nix-env + -i. + + + +
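A short, hypothetical sketch combining the two new fixup variables documented above (dontMoveSbin and separateDebugInfo):

stdenv.mkDerivation {
  name = "example-daemon-1.0";   # hypothetical package
  src = ./.;
  # Keep the daemon under $out/sbin instead of moving it to $out/bin,
  # and split debug symbols into the automatically added "debug" output.
  dontMoveSbin = true;
  separateDebugInfo = true;
}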
-
The distribution phase +
The distribution +phase The distribution phase is intended to produce a source distribution of the package. The default @@ -1158,7 +1204,7 @@ echo @foo@ - Qt + Qt 4 Sets the QTDIR environment variable to Qt’s path. @@ -1191,7 +1237,7 @@ echo @foo@
-
Purity in Nixpkgs +
Purity in Nixpkgs [measures taken to prevent dependencies on packages outside the store, and what you can do to prevent them] diff --git a/doc/submitting-changes.xml b/doc/submitting-changes.xml new file mode 100644 index 00000000000..fe331d08250 --- /dev/null +++ b/doc/submitting-changes.xml @@ -0,0 +1,283 @@ + + +Submitting changes + +
+Making patches
+
+Read the Manual (How to write packages for Nix).
+
+Fork the repository on GitHub.
+
+Create a branch for your future fix.
+
+You can create the branch from the commit reported by your local nixos-version. That helps you avoid additional local compilation, because you will receive packages from the binary cache.
+
+For example, nixos-version returns 15.05.git.0998212 (Dingo), so you can do:
+
+$ git checkout 0998212
+$ git checkout -b 'fix/pkg-name-update'
+
+Please avoid working directly on the master branch.
+
+Make commits of logical units.
+
+If you removed packages, made major NixOS changes, etc., write about them in nixos/doc/manual/release-notes/rl-unstable.xml.
+
+Check for unnecessary whitespace with git diff --check before committing.
+
+Format the commit message in the following way:
+
+(pkg-name | service-name): (from -> to | init at version | refactor | etc)
+Additional information.
+
+Examples:
+
+nginx: init at 2.0.1
+
+firefox: 3.0 -> 3.1.1
+
+hydra service: add bazBaz option
+
+nginx service: refactor config generation
+
+Test your changes. If you work with
+
+nixpkgs:
+
+update pkg ->
+
+nix-env -i pkg-name -f <path to your local nixpkgs folder>
+
+add pkg ->
+
+Make sure it's in pkgs/top-level/all-packages.nix
+
+nix-env -i pkg-name -f <path to your local nixpkgs folder>
+
+If you don't want to install the pkg in your profile:
+
+nix-build -A pkg-attribute-name <path to your local nixpkgs folder>/default.nix and check the results in the folder result. It will appear in the same directory where you did nix-build.
+
+If you did nix-env -i pkg-name you can do nix-env -e pkg-name to uninstall it from your system.
+
+NixOS and its modules:
+
+You can add the new module to your NixOS configuration file (usually /etc/nixos/configuration.nix) and do sudo nixos-rebuild test -I nixpkgs=<path to your local nixpkgs folder> --fast.
+
+If you have commits like pkg-name: oh, forgot to insert whitespace, squash them. Use git rebase -i.
+
+Rebase your branch against the current master.
+
+ +
+Submitting changes
+
+Push your changes to your fork of nixpkgs.
+
+Create a pull request:
+
+Write the title in the format (pkg-name | service): improvement.
+
+If you update a package, write the versions as from -> to.
+
+Mention in a comment whether you have tested your patch. Do not rely too much on TravisCI.
+
+If you make an improvement, write about your motivation.
+
+Notify maintainers of the package. For example, add to the message: cc @jagajaga @domenkozar.
+
+ +
+Hotfixing pull requests
+
+Make the appropriate changes in your branch.
+
+Don't create additional commits; instead, do
+
+git rebase -i
+
+git push --force to your branch.
+
+ +
+Commit policy
+
+Commits must be sufficiently tested before being merged, both for the master and staging branches.
+
+Hydra builds for master and staging should not be used as a testing platform; they are a build farm for changes that have already been tested.
+
+Master should only see non-breaking commits that do not cause mass rebuilds.
+
+Staging should only see non-breaking mass-rebuild commits. That means it is not to be used for testing, and changes must have been well tested already. Read the policy here.
+
+If staging is already in a broken state, please refrain from adding extra new breakages. Stabilize it for a few days, merge into master, then resume development on staging. Keep an eye on the staging evaluations here.
+
+When changing the bootloader installation process, extra care must be taken. GRUB installations cannot be rolled back, hence changes may break people's installations forever. For any non-trivial change to the bootloader, please file a PR asking for review, especially from @edolstra.
+
+
+ diff --git a/lib/attrsets.nix b/lib/attrsets.nix index cb4091b916c..5aad76e75e4 100644 --- a/lib/attrsets.nix +++ b/lib/attrsets.nix @@ -6,7 +6,6 @@ with { inherit (import ./default.nix) fold; inherit (import ./strings.nix) concatStringsSep; inherit (import ./lists.nix) concatMap concatLists all deepSeqList; - inherit (import ./misc.nix) maybeAttr; }; rec { @@ -76,9 +75,29 @@ rec { => { foo = 1; } */ filterAttrs = pred: set: - listToAttrs (fold (n: ys: let v = set.${n}; in if pred n v then [(nameValuePair n v)] ++ ys else ys) [] (attrNames set)); + listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set)); + /* Filter an attribute set recursivelly by removing all attributes for + which the given predicate return false. + + Example: + filterAttrsRecursive (n: v: v != null) { foo = { bar = null; }; } + => { foo = {}; } + */ + filterAttrsRecursive = pred: set: + listToAttrs ( + concatMap (name: + let v = set.${name}; in + if pred name v then [ + (nameValuePair name ( + if isAttrs v then filterAttrsRecursive pred v + else v + )) + ] else [] + ) (attrNames set) + ); + /* foldAttrs: apply fold functions to values grouped by key. Eg accumulate values as list: foldAttrs (n: a: [n] ++ a) [] [{ a = 2; } { a = 3; }] => { a = [ 2 3 ]; } @@ -86,7 +105,7 @@ rec { foldAttrs = op: nul: list_of_attrs: fold (n: a: fold (name: o: - o // (listToAttrs [{inherit name; value = op n.${name} (maybeAttr name nul a); }]) + o // (listToAttrs [{inherit name; value = op n.${name} (a.${name} or nul); }]) ) a (attrNames n) ) {} list_of_attrs; @@ -222,6 +241,16 @@ rec { isDerivation = x: isAttrs x && x ? type && x.type == "derivation"; + /* Convert a store path to a fake derivation. */ + toDerivation = path: + let path' = builtins.storePath path; in + { type = "derivation"; + name = builtins.unsafeDiscardStringContext (builtins.substring 33 (-1) (baseNameOf path')); + outPath = path'; + outputs = [ "out" ]; + }; + + /* If the Boolean `cond' is true, return the attribute set `as', otherwise an empty attribute set. */ optionalAttrs = cond: as: if cond then as else {}; diff --git a/lib/customisation.nix b/lib/customisation.nix index 91a25055df2..585495469b2 100644 --- a/lib/customisation.nix +++ b/lib/customisation.nix @@ -1,6 +1,8 @@ let + lib = import ./default.nix; inherit (builtins) attrNames isFunction; + in rec { @@ -49,10 +51,6 @@ rec { else { })); - # usage: (you can use override multiple times) - # let d = makeOverridable stdenv.mkDerivation { name = ..; buildInputs; } - # noBuildInputs = d.override { buildInputs = []; } - # additionalBuildInputs = d.override ( args : args // { buildInputs = args.buildInputs ++ [ additional ]; } ) makeOverridable = f: origArgs: let ff = f origArgs; @@ -60,24 +58,16 @@ rec { in if builtins.isAttrs ff then (ff // { override = newArgs: makeOverridable f (overrideWith newArgs); - deepOverride = newArgs: - makeOverridable f (lib.overrideExisting (lib.mapAttrs (deepOverrider newArgs) origArgs) newArgs); overrideDerivation = fdrv: makeOverridable (args: overrideDerivation (f args) fdrv) origArgs; }) else if builtins.isFunction ff then { override = newArgs: makeOverridable f (overrideWith newArgs); __functor = self: ff; - deepOverride = throw "deepOverride not yet supported for functors"; overrideDerivation = throw "overrideDerivation not yet supported for functors"; } else ff; - deepOverrider = newArgs: name: x: if builtins.isAttrs x then ( - if x ? deepOverride then (x.deepOverride newArgs) else - if x ? 
override then (x.override newArgs) else - x) else x; - /* Call the package function in the file `fn' with the required arguments automatically. The function is called with the @@ -102,12 +92,28 @@ rec { */ callPackageWith = autoArgs: fn: args: let - f = if builtins.isFunction fn then fn else import fn; + f = if builtins.isFunction fn then fn else import fn; auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs; in makeOverridable f (auto // args); - /* Add attributes to each output of a derivation without changing the derivation itself */ + /* Like callPackage, but for a function that returns an attribute + set of derivations. The override function is added to the + individual attributes. */ + callPackagesWith = autoArgs: fn: args: + let + f = if builtins.isFunction fn then fn else import fn; + auto = builtins.intersectAttrs (builtins.functionArgs f) autoArgs; + finalArgs = auto // args; + pkgs = f finalArgs; + mkAttrOverridable = name: pkg: pkg // { + override = newArgs: mkAttrOverridable name (f (finalArgs // newArgs)).${name}; + }; + in lib.mapAttrs mkAttrOverridable pkgs; + + + /* Add attributes to each output of a derivation without changing + the derivation itself. */ addPassthru = drv: passthru: let outputs = drv.outputs or [ "out" ]; @@ -158,4 +164,23 @@ rec { drv' = (lib.head outputsList).value; in lib.deepSeq drv' drv'; + /* Make a set of packages with a common scope. All packages called + with the provided `callPackage' will be evaluated with the same + arguments. Any package in the set may depend on any other. The + `override' function allows subsequent modification of the package + set in a consistent way, i.e. all packages in the set will be + called with the overridden packages. The package sets may be + hierarchical: the packages in the set are called with the scope + provided by `newScope' and the set provides a `newScope' attribute + which can form the parent scope for later package sets. 
*/ + makeScope = newScope: f: + let self = f self // { + newScope = scope: newScope (self // scope); + callPackage = self.newScope {}; + override = g: makeScope newScope (self_: + let super = f self_; + in super // g super self_); + }; + in self; + } diff --git a/lib/default.nix b/lib/default.nix index 4b6027c437b..cd0d8161c8c 100644 --- a/lib/default.nix +++ b/lib/default.nix @@ -11,7 +11,7 @@ let types = import ./types.nix; meta = import ./meta.nix; debug = import ./debug.nix; - misc = import ./misc.nix; + misc = import ./deprecated.nix; maintainers = import ./maintainers.nix; platforms = import ./platforms.nix; systems = import ./systems.nix; diff --git a/lib/deprecated.nix b/lib/deprecated.nix new file mode 100644 index 00000000000..3646f9e032a --- /dev/null +++ b/lib/deprecated.nix @@ -0,0 +1,423 @@ +let lib = import ./default.nix; + inherit (builtins) isFunction head tail isList isAttrs isInt attrNames; + +in + +with import ./lists.nix; +with import ./attrsets.nix; +with import ./strings.nix; + +rec { + + # returns default if env var is not set + maybeEnv = name: default: + let value = builtins.getEnv name; in + if value == "" then default else value; + + defaultMergeArg = x : y: if builtins.isAttrs y then + y + else + (y x); + defaultMerge = x: y: x // (defaultMergeArg x y); + foldArgs = merger: f: init: x: + let arg=(merger init (defaultMergeArg init x)); + # now add the function with composed args already applied to the final attrs + base = (setAttrMerge "passthru" {} (f arg) + ( z : z // rec { + function = foldArgs merger f arg; + args = (lib.attrByPath ["passthru" "args"] {} z) // x; + } )); + withStdOverrides = base // { + override = base.passthru.function; + } ; + in + withStdOverrides; + + + # predecessors: proposed replacement for applyAndFun (which has a bug cause it merges twice) + # the naming "overridableDelayableArgs" tries to express that you can + # - override attr values which have been supplied earlier + # - use attr values before they have been supplied by accessing the fix point + # name "fixed" + # f: the (delayed overridden) arguments are applied to this + # + # initial: initial attrs arguments and settings. see defaultOverridableDelayableArgs + # + # returns: f applied to the arguments // special attributes attrs + # a) merge: merge applied args with new args. 
Wether an argument is overridden depends on the merge settings + # b) replace: this let's you replace and remove names no matter which merge function has been set + # + # examples: see test cases "res" below; + overridableDelayableArgs = + f : # the function applied to the arguments + initial : # you pass attrs, the functions below are passing a function taking the fix argument + let + takeFixed = if isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument + tidy = args : + let # apply all functions given in "applyPreTidy" in sequence + applyPreTidyFun = fold ( n : a : x : n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args); + in removeAttrs (applyPreTidyFun args) ( ["applyPreTidy"] ++ (maybeAttr "removeAttrs" [] args) ); # tidy up args before applying them + fun = n : x : + let newArgs = fixed : + let args = takeFixed fixed; + mergeFun = args.${n}; + in if isAttrs x then (mergeFun args x) + else assert isFunction x; + mergeFun args (x ( args // { inherit fixed; })); + in overridableDelayableArgs f newArgs; + in + (f (tidy (lib.fix takeFixed))) // { + merge = fun "mergeFun"; + replace = fun "keepFun"; + }; + defaultOverridableDelayableArgs = f : + let defaults = { + mergeFun = mergeAttrByFunc; # default merge function. merge strategie (concatenate lists, strings) is given by mergeAttrBy + keepFun = a : b : { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values + applyPreTidy = []; # list of functions applied to args before args are tidied up (usage case : prepareDerivationArgs) + mergeAttrBy = mergeAttrBy // { + applyPreTidy = a : b : a ++ b; + removeAttrs = a : b: a ++ b; + }; + removeAttrs = ["mergeFun" "keepFun" "mergeAttrBy" "removeAttrs" "fixed" ]; # before applying the arguments to the function make sure these names are gone + }; + in (overridableDelayableArgs f defaults).merge; + + + + # rec { # an example of how composedArgsAndFun can be used + # a = composedArgsAndFun (x : x) { a = ["2"]; meta = { d = "bar";}; }; + # # meta.d will be lost ! It's your task to preserve it (eg using a merge function) + # b = a.passthru.function { a = [ "3" ]; meta = { d2 = "bar2";}; }; + # # instead of passing/ overriding values you can use a merge function: + # c = b.passthru.function ( x: { a = x.a ++ ["4"]; }); # consider using (maybeAttr "a" [] x) + # } + # result: + # { + # a = { a = ["2"]; meta = { d = "bar"; }; passthru = { function = .. }; }; + # b = { a = ["3"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; }; + # c = { a = ["3" "4"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; }; + # # c2 is equal to c + # } + composedArgsAndFun = f: foldArgs defaultMerge f {}; + + + # shortcut for attrByPath ["name"] default attrs + maybeAttrNullable = maybeAttr; + + # shortcut for attrByPath ["name"] default attrs + maybeAttr = name: default: attrs: attrs.${name} or default; + + + # Return the second argument if the first one is true or the empty version + # of the second argument. + ifEnable = cond: val: + if cond then val + else if builtins.isList val then [] + else if builtins.isAttrs val then {} + # else if builtins.isString val then "" + else if val == true || val == false then false + else null; + + + # Return true only if there is an attribute and it is true. 
+ checkFlag = attrSet: name: + if name == "true" then true else + if name == "false" then false else + if (elem name (attrByPath ["flags"] [] attrSet)) then true else + attrByPath [name] false attrSet ; + + + # Input : attrSet, [ [name default] ... ], name + # Output : its value or default. + getValue = attrSet: argList: name: + ( attrByPath [name] (if checkFlag attrSet name then true else + if argList == [] then null else + let x = builtins.head argList; in + if (head x) == name then + (head (tail x)) + else (getValue attrSet + (tail argList) name)) attrSet ); + + + # Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ] + # Output : are reqs satisfied? It's asserted. + checkReqs = attrSet : argList : condList : + ( + fold lib.and true + (map (x: let name = (head x) ; in + + ((checkFlag attrSet name) -> + (fold lib.and true + (map (y: let val=(getValue attrSet argList y); in + (val!=null) && (val!=false)) + (tail x))))) condList)) ; + + + # This function has O(n^2) performance. + uniqList = {inputList, acc ? []} : + let go = xs : acc : + if xs == [] + then [] + else let x = head xs; + y = if elem x acc then [] else [x]; + in y ++ go (tail xs) (y ++ acc); + in go inputList acc; + + uniqListExt = {inputList, outputList ? [], + getter ? (x : x), compare ? (x: y: x==y)}: + if inputList == [] then outputList else + let x=head inputList; + isX = y: (compare (getter y) (getter x)); + newOutputList = outputList ++ + (if any isX outputList then [] else [x]); + in uniqListExt {outputList=newOutputList; + inputList = (tail inputList); + inherit getter compare; + }; + + + + condConcat = name: list: checker: + if list == [] then name else + if checker (head list) then + condConcat + (name + (head (tail list))) + (tail (tail list)) + checker + else condConcat + name (tail (tail list)) checker; + + lazyGenericClosure = {startSet, operator}: + let + work = list: doneKeys: result: + if list == [] then + result + else + let x = head list; key = x.key; in + if elem key doneKeys then + work (tail list) doneKeys result + else + work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result); + in + work startSet [] []; + + innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else + innerModifySumArgs f x (a // b); + modifySumArgs = f: x: innerModifySumArgs f x {}; + + + innerClosePropagation = acc : xs : + if xs == [] + then acc + else let y = head xs; + ys = tail xs; + in if ! isAttrs y + then innerClosePropagation acc ys + else let acc' = [y] ++ acc; + in innerClosePropagation + acc' + (uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y) + ++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y) + ++ ys; + acc = acc'; + } + ); + + closePropagation = list: (uniqList {inputList = (innerClosePropagation [] list);}); + + # calls a function (f attr value ) for each record item. 
returns a list + mapAttrsFlatten = f : r : map (attr: f attr r.${attr}) (attrNames r); + + # attribute set containing one attribute + nvs = name : value : listToAttrs [ (nameValuePair name value) ]; + # adds / replaces an attribute of an attribute set + setAttr = set : name : v : set // (nvs name v); + + # setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name) + # setAttrMerge "a" [] { a = [2];} (x : x ++ [3]) -> { a = [2 3]; } + # setAttrMerge "a" [] { } (x : x ++ [3]) -> { a = [ 3]; } + setAttrMerge = name : default : attrs : f : + setAttr attrs name (f (maybeAttr name default attrs)); + + # Using f = a : b = b the result is similar to // + # merge attributes with custom function handling the case that the attribute + # exists in both sets + mergeAttrsWithFunc = f : set1 : set2 : + fold (n: set : if set ? ${n} + then setAttr set n (f set.${n} set2.${n}) + else set ) + (set2 // set1) (attrNames set2); + + # merging two attribute set concatenating the values of same attribute names + # eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; } + mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a : b : (toList a) ++ (toList b) ); + + # merges attributes using //, if a name exisits in both attributes + # an error will be triggered unless its listed in mergeLists + # so you can mergeAttrsNoOverride { buildInputs = [a]; } { buildInputs = [a]; } {} to get + # { buildInputs = [a b]; } + # merging buildPhase does'nt really make sense. The cases will be rare where appending /prefixing will fit your needs? + # in these cases the first buildPhase will override the second one + # ! deprecated, use mergeAttrByFunc instead + mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"], + overrideSnd ? [ "buildPhase" ] + } : attrs1 : attrs2 : + fold (n: set : + setAttr set n ( if set ? ${n} + then # merge + if elem n mergeLists # attribute contains list, merge them by concatenating + then attrs2.${n} ++ attrs1.${n} + else if elem n overrideSnd + then attrs1.${n} + else throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined" + else attrs2.${n} # add attribute not existing in attr1 + )) attrs1 (attrNames attrs2); + + + # example usage: + # mergeAttrByFunc { + # inherit mergeAttrBy; # defined below + # buildInputs = [ a b ]; + # } { + # buildInputs = [ c d ]; + # }; + # will result in + # { mergeAttrsBy = [...]; buildInputs = [ a b c d ]; } + # is used by prepareDerivationArgs, defaultOverridableDelayableArgs and can be used when composing using + # foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix + mergeAttrByFunc = x : y : + let + mergeAttrBy2 = { mergeAttrBy=lib.mergeAttrs; } + // (maybeAttr "mergeAttrBy" {} x) + // (maybeAttr "mergeAttrBy" {} y); in + fold lib.mergeAttrs {} [ + x y + (mapAttrs ( a : v : # merge special names using given functions + if x ? ${a} + then if y ? ${a} + then v x.${a} y.${a} # both have attr, use merge func + else x.${a} # only x has attr + else y.${a} # only y has attr) + ) (removeAttrs mergeAttrBy2 + # don't merge attrs which are neither in x nor y + (filter (a: ! x ? ${a} && ! y ? 
${a}) + (attrNames mergeAttrBy2)) + ) + ) + ]; + mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; }; + mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"]; + + # merge attrs based on version key into mkDerivation args, see mergeAttrBy to learn about smart merge defaults + # + # This function is best explained by an example: + # + # {version ? "2.x"} : + # + # mkDerivation (mergeAttrsByVersion "package-name" version + # { # version specific settings + # "git" = { src = ..; preConfigre = "autogen.sh"; buildInputs = [automake autoconf libtool]; }; + # "2.x" = { src = ..; }; + # } + # { // shared settings + # buildInputs = [ common build inputs ]; + # meta = { .. } + # } + # ) + # + # Please note that e.g. Eelco Dolstra usually prefers having one file for + # each version. On the other hand there are valuable additional design goals + # - readability + # - do it once only + # - try to avoid duplication + # + # Marc Weber and Michael Raskin sometimes prefer keeping older + # versions around for testing and regression tests - as long as its cheap to + # do so. + # + # Very often it just happens that the "shared" code is the bigger part. + # Then using this function might be appropriate. + # + # Be aware that its easy to cause recompilations in all versions when using + # this function - also if derivations get too complex splitting into multiple + # files is the way to go. + # + # See misc.nix -> versionedDerivation + # discussion: nixpkgs: pull/310 + mergeAttrsByVersion = name: version: attrsByVersion: base: + mergeAttrsByFuncDefaultsClean [ { name = "${name}-${version}"; } base (maybeAttr version (throw "bad version ${version} for ${name}") attrsByVersion)]; + + # sane defaults (same name as attr name so that inherit can be used) + mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; } + listToAttrs (map (n : nameValuePair n lib.concat) + [ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ]) + // listToAttrs (map (n : nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ]) + // listToAttrs (map (n : nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ]) + ; + + # prepareDerivationArgs tries to make writing configurable derivations easier + # example: + # prepareDerivationArgs { + # mergeAttrBy = { + # myScript = x : y : x ++ "\n" ++ y; + # }; + # cfg = { + # readlineSupport = true; + # }; + # flags = { + # readline = { + # set = { + # configureFlags = [ "--with-compiler=${compiler}" ]; + # buildInputs = [ compiler ]; + # pass = { inherit compiler; READLINE=1; }; + # assertion = compiler.dllSupport; + # myScript = "foo"; + # }; + # unset = { configureFlags = ["--without-compiler"]; }; + # }; + # }; + # src = ... + # buildPhase = '' ... ''; + # name = ... + # myScript = "bar"; + # }; + # if you don't have need for unset you can omit the surrounding set = { .. } attr + # all attrs except flags cfg and mergeAttrBy will be merged with the + # additional data from flags depending on config settings + # It's used in composableDerivation in all-packages.nix. It's also used + # heavily in the new python and libs implementation + # + # should we check for misspelled cfg options? + # TODO use args.mergeFun here as well? 
+ prepareDerivationArgs = args: + let args2 = { cfg = {}; flags = {}; } // args; + flagName = name : "${name}Support"; + cfgWithDefaults = (listToAttrs (map (n : nameValuePair (flagName n) false) (attrNames args2.flags))) + // args2.cfg; + opts = attrValues (mapAttrs (a : v : + let v2 = if v ? set || v ? unset then v else { set = v; }; + n = if cfgWithDefaults.${flagName a} then "set" else "unset"; + attr = maybeAttr n {} v2; in + if (maybeAttr "assertion" true attr) + then attr + else throw "assertion of flag ${a} of derivation ${args.name} failed" + ) args2.flags ); + in removeAttrs + (mergeAttrsByFuncDefaults ([args] ++ opts ++ [{ passthru = cfgWithDefaults; }])) + ["flags" "cfg" "mergeAttrBy" ]; + + + nixType = x: + if isAttrs x then + if x ? outPath then "derivation" + else "aattrs" + else if isFunction x then "function" + else if isList x then "list" + else if x == true then "bool" + else if x == false then "bool" + else if x == null then "null" + else if isInt x then "int" + else "string"; + +} diff --git a/lib/licenses.nix b/lib/licenses.nix index 08376b7e7e0..107296089d0 100644 --- a/lib/licenses.nix +++ b/lib/licenses.nix @@ -85,6 +85,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec { fullName = "Creative Commons Zero v1.0 Universal"; }; + cc-by-sa-25 = spdx { + spdxId = "CC-BY-SA-2.5"; + fullName = "Creative Commons Attribution Share Alike 2.5"; + }; + cc-by-30 = spdx { spdxId = "CC-BY-3.0"; fullName = "Creative Commons Attribution 3.0"; @@ -150,6 +155,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec { fullName = "GNU Free Documentation License v1.2"; }; + fdl13 = spdx { + spdxId = "GFDL-1.3"; + fullName = "GNU Free Documentation License v1.2"; + }; + free = { fullName = "Unspecified free software license"; }; @@ -322,11 +332,21 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec { fullName = "University of Illinois/NCSA Open Source License"; }; + notion_lgpl = { + url = "https://raw.githubusercontent.com/raboof/notion/master/LICENSE"; + fullName = "Notion modified LGPL"; + }; + ofl = spdx { spdxId = "OFL-1.1"; fullName = "SIL Open Font License 1.1"; }; + openldap = spdx { + spdxId = "OLDAP-2.8"; + fullName = "Open LDAP Public License v2.8"; + }; + openssl = spdx { spdxId = "OpenSSL"; fullName = "OpenSSL License"; @@ -403,6 +423,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) rec { fullName = "The Unlicense"; }; + vim = spdx { + spdxId = "Vim"; + fullName = "Vim License"; + }; + vsl10 = spdx { spdxId = "VSL-1.0"; fullName = "Vovida Software License v1.0"; diff --git a/lib/lists.nix b/lib/lists.nix index fa8cbddfd94..3bcf366f0c2 100644 --- a/lib/lists.nix +++ b/lib/lists.nix @@ -4,7 +4,7 @@ with import ./trivial.nix; rec { - inherit (builtins) head tail length isList elemAt concatLists filter elem; + inherit (builtins) head tail length isList elemAt concatLists filter elem genList; # Create a list consisting of a single element. `singleton x' is @@ -38,16 +38,24 @@ rec { in foldl' (length list - 1); - # map with index: `imap (i: v: "${v}-${toString i}") ["a" "b"] == - # ["a-1" "b-2"]' - imap = f: list: - let - len = length list; - imap' = n: - if n == len - then [] - else [ (f (n + 1) (elemAt list n)) ] ++ imap' (n + 1); - in imap' 0; + # Strict version of foldl. + foldl' = builtins.foldl' or foldl; + + + # Map with index: `imap (i: v: "${v}-${toString i}") ["a" "b"] == + # ["a-1" "b-2"]'. FIXME: why does this start to count at 1? + imap = + if builtins ? 
genList then + f: list: genList (n: f (n + 1) (elemAt list n)) (length list) + else + f: list: + let + len = length list; + imap' = n: + if n == len + then [] + else [ (f (n + 1) (elemAt list n)) ] ++ imap' (n + 1); + in imap' 0; # Map and concatenate the result. @@ -59,7 +67,7 @@ rec { # == [1 2 3 4 5]' and `flatten 1 == [1]'. flatten = x: if isList x - then fold (x: y: (flatten x) ++ y) [] x + then foldl' (x: y: x ++ (flatten y)) [] x else [x]; @@ -86,17 +94,17 @@ rec { # Return true iff function `pred' returns true for at least element # of `list'. - any = pred: fold (x: y: if pred x then true else y) false; + any = builtins.any or (pred: fold (x: y: if pred x then true else y) false); # Return true iff function `pred' returns true for all elements of # `list'. - all = pred: fold (x: y: if pred x then y else false) true; + all = builtins.all or (pred: fold (x: y: if pred x then y else false) true); # Count how many times function `pred' returns true for the elements # of `list'. - count = pred: fold (x: c: if pred x then c + 1 else c) 0; + count = pred: foldl' (c: x: if pred x then c + 1 else c) 0; # Return a singleton list or an empty list, depending on a boolean @@ -116,10 +124,17 @@ rec { # Return a list of integers from `first' up to and including `last'. - range = first: last: - if last < first - then [] - else [first] ++ range (first + 1) last; + range = + if builtins ? genList then + first: last: + if first > last + then [] + else genList (n: first + n) (last - first + 1) + else + first: last: + if last < first + then [] + else [first] ++ range (first + 1) last; # Partition the elements of a list in two lists, `right' and @@ -132,30 +147,37 @@ rec { ) { right = []; wrong = []; }; - zipListsWith = f: fst: snd: - let - len1 = length fst; - len2 = length snd; - len = if len1 < len2 then len1 else len2; - zipListsWith' = n: - if n != len then - [ (f (elemAt fst n) (elemAt snd n)) ] - ++ zipListsWith' (n + 1) - else []; - in zipListsWith' 0; + zipListsWith = + if builtins ? genList then + f: fst: snd: genList (n: f (elemAt fst n) (elemAt snd n)) (min (length fst) (length snd)) + else + f: fst: snd: + let + len = min (length fst) (length snd); + zipListsWith' = n: + if n != len then + [ (f (elemAt fst n) (elemAt snd n)) ] + ++ zipListsWith' (n + 1) + else []; + in zipListsWith' 0; zipLists = zipListsWith (fst: snd: { inherit fst snd; }); - # Reverse the order of the elements of a list. FIXME: O(n^2)! - reverseList = fold (e: acc: acc ++ [ e ]) []; + # Reverse the order of the elements of a list. + reverseList = + if builtins ? genList then + xs: let l = length xs; in genList (n: elemAt xs (l - n - 1)) l + else + fold (e: acc: acc ++ [ e ]) []; # Sort a list based on a comparator function which compares two # elements and returns true if the first argument is strictly below # the second argument. The returned list is sorted in an increasing # order. The implementation does a quick-sort. - sort = strictLess: list: + sort = builtins.sort or ( + strictLess: list: let len = length list; first = head list; @@ -169,31 +191,50 @@ rec { pivot = pivot' 1 { left = []; right = []; }; in if len < 2 then list - else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right); + else (sort strictLess pivot.left) ++ [ first ] ++ (sort strictLess pivot.right)); # Return the first (at most) N elements of a list. 
- take = count: list: - let - len = length list; - take' = n: - if n == len || n == count - then [] - else - [ (elemAt list n) ] ++ take' (n + 1); - in take' 0; + take = + if builtins ? genList then + count: sublist 0 count + else + count: list: + let + len = length list; + take' = n: + if n == len || n == count + then [] + else + [ (elemAt list n) ] ++ take' (n + 1); + in take' 0; # Remove the first (at most) N elements of a list. - drop = count: list: - let - len = length list; - drop' = n: - if n == -1 || n < count - then [] - else - drop' (n - 1) ++ [ (elemAt list n) ]; - in drop' (len - 1); + drop = + if builtins ? genList then + count: list: sublist count (length list) list + else + count: list: + let + len = length list; + drop' = n: + if n == -1 || n < count + then [] + else + drop' (n - 1) ++ [ (elemAt list n) ]; + in drop' (len - 1); + + + # Return a list consisting of at most ‘count’ elements of ‘list’, + # starting at index ‘start’. + sublist = start: count: list: + let len = length list; in + genList + (n: elemAt list (n + start)) + (if start >= len then 0 + else if start + count > len then len - start + else count); # Return the last element of a list. @@ -205,25 +246,13 @@ rec { init = list: assert list != []; take (length list - 1) list; - # Zip two lists together. - zipTwoLists = xs: ys: - let - len1 = length xs; - len2 = length ys; - len = if len1 < len2 then len1 else len2; - zipTwoLists' = n: - if n != len then - [ { first = elemAt xs n; second = elemAt ys n; } ] - ++ zipTwoLists' (n + 1) - else []; - in zipTwoLists' 0; - - deepSeqList = xs: y: if any (x: deepSeq x false) xs then y else y; + crossLists = f: foldl (fs: args: concatMap (f: map f args) fs) [f]; - # Remove duplicate elements from the list + + # Remove duplicate elements from the list. O(n^2) complexity. unique = list: if list == [] then [] @@ -233,9 +262,12 @@ rec { xs = unique (drop 1 list); in [x] ++ remove x xs; - # Intersects list 'e' and another list + + # Intersects list 'e' and another list. O(nm) complexity. intersectLists = e: filter (x: elem x e); - # Subtracts list 'e' from another list + + # Subtracts list 'e' from another list. O(nm) complexity. subtractLists = e: filter (x: !(elem x e)); + } diff --git a/lib/maintainers.nix b/lib/maintainers.nix index f0e87507ceb..121fca95164 100644 --- a/lib/maintainers.nix +++ b/lib/maintainers.nix @@ -1,22 +1,31 @@ /* -*- coding: utf-8; -*- */ { - /* Add your name and email address here. Keep the list - alphabetically sorted. */ + /* Add your name and email address here. + Keep the list alphabetically sorted. + Prefer the same attrname as your github username, please, + so it's easy to ping a package @maintainer. + */ - _1126 = "Christian Lask "; + a1russell = "Adam Russell "; abaldeau = "Andreas Baldeau "; abbradar = "Nikolay Amiantov "; + adev = "Adrien Devresse "; aforemny = "Alexander Foremny "; + aflatter = "Alexander Flatter "; aherrmann = "Andreas Herrmann "; ak = "Alexander Kjeldaas "; + akaWolf = "Artjom Vejsel "; akc = "Anders Claesson "; algorith = "Dries Van Daele "; all = "Nix Committers "; + ambrop72 = "Ambroz Bizjak "; amiddelk = "Arie Middelkoop "; amorsillo = "Andrew Morsillo "; AndersonTorres = "Anderson Torres "; + anderspapitto = "Anders Papitto "; andres = "Andres Loeh "; + andrewrk = "Andrew Kelley "; antono = "Antono Vasiljev "; ardumont = "Antoine R. Dumont "; aristid = "Aristid Breitkreuz "; @@ -25,9 +34,12 @@ astsmtl = "Alexander Tsamutali "; aszlig = "aszlig "; auntie = "Jonathan Glines "; + avnik = "Alexander V. 
Nikolaev "; aycanirican = "Aycan iRiCAN "; + badi = "Badi' Abdul-Wahid "; balajisivaraman = "Balaji Sivaraman"; bbenoist = "Baptist BENOIST "; + bcarrell = "Brandon Carrell "; bcdarwin = "Ben Darwin "; bdimcheff = "Brandon Dimcheff "; bennofs = "Benno Fünfstück "; @@ -41,27 +53,36 @@ bodil = "Bodil Stokke "; boothead = "Ben Ford "; bosu = "Boris Sukholitko "; + bramd = "Bram Duvigneau "; bstrik = "Berno Strik "; + c0dehero = "CodeHero "; calrama = "Moritz Maxeiner "; campadrenalin = "Philip Horger "; cdepillabout = "Dennis Gosnell "; cfouche = "Chaddaï Fouché "; chaoflow = "Florian Friesdorf "; + chattered = "Phil Scott "; christopherpoole = "Christopher Mark Poole "; coconnor = "Corey O'Connor "; codyopel = "Cody Opel "; copumpkin = "Dan Peebles "; coroa = "Jonas Hörsch "; + couchemar = "Andrey Pavlov "; cstrahan = "Charles Strahan "; cwoac = "Oliver Matthews "; - DamienCassou = "Damien Cassou "; + DamienCassou = "Damien Cassou "; + davidak = "David Kleuker "; davidrusu = "David Rusu "; dbohdan = "Danyil Bohdan "; DerGuteMoritz = "Moritz Heidkamp "; + deepfire = "Kosyrev Serge <_deepfire@feelingofgreen.ru>"; + desiderius = "Didier J. Devroye "; devhell = "devhell <\"^\"@regexmail.net>"; dezgeg = "Tuomas Tynkkynen "; + dfoxfranke = "Daniel Fox Franke "; dmalikov = "Dmitry Malikov "; doublec = "Chris Double "; + ebzzry = "Rommel Martinez "; ederoyd46 = "Matthew Brown "; eduarrrd = "Eduard Bachmakov "; edwtjo = "Edward Tjörnhammar "; @@ -69,6 +90,9 @@ eikek = "Eike Kettner "; ellis = "Ellis Whitehead "; emery = "Emery Hemingway "; + epitrochoid = "Mabry Cervin "; + ericbmerritt = "Eric Merritt "; + erikryb = "Erik Rybakken "; ertes = "Ertugrul Söylemez "; exlevan = "Alexey Levan "; falsifian = "James Cook "; @@ -76,6 +100,8 @@ fluffynukeit = "Daniel Austin "; forkk = "Andrew Okin "; fpletz = "Franz Pletz "; + fridh = "Frederik Rietdijk "; + fro_ozen = "fro_ozen "; ftrvxmtrx = "Siarhei Zirukin "; funfunctor = "Edward O'Callaghan "; fuuzetsu = "Mateusz Kowalczyk "; @@ -84,23 +110,29 @@ garrison = "Jim Garrison "; gavin = "Gavin Rogers "; gebner = "Gabriel Ebner "; + gfxmonk = "Tim Cuthbertson "; giogadi = "Luis G. 
Torres "; globin = "Robin Gloster "; goibhniu = "Cillian de Róiste "; gridaphobe = "Eric Seidel "; guibert = "David Guibert "; + havvy = "Ryan Scheel "; hbunke = "Hendrik Bunke "; henrytill = "Henry Till "; + hiberno = "Christian Lask "; hinton = "Tom Hinton "; hrdinka = "Christoph Hrdinka "; iand675 = "Ian Duncan "; ianwookim = "Ian-Woo Kim "; iElectric = "Domen Kozar "; + ikervagyok = "Balázs Lengyel "; iyzsong = "Song Wenwu "; j-keck = "Jürgen Keck "; jagajaga = "Arseniy Seroka "; jb55 = "William Casarin "; jcumming = "Jack Cummings "; + jefdaj = "Jeffrey David Johnson "; + jfb = "James Felix Black "; jgeerds = "Jascha Geerds "; jirkamarsik = "Jirka Marsik "; joachifm = "Joachim Fasting "; @@ -109,33 +141,52 @@ joelteon = "Joel Taylor "; jpbernardy = "Jean-Philippe Bernardy "; jwiegley = "John Wiegley "; + jwilberding = "Jordan Wilberding "; jzellner = "Jeff Zellner "; + kamilchm = "Kamil Chmielewski "; + khumba = "Bryan Gardiner "; kkallio = "Karn Kallio "; koral = "Koral "; kovirobi = "Kovacsics Robert "; kragniz = "Louis Taylor "; ktosiek = "Tomasz Kontusz "; + lassulus = "Lassulus "; + layus = "Guillaume Maudoux "; + lebastr = "Alexander Lebedev "; + leonardoce = "Leonardo Cecchi "; lethalman = "Luca Bruno "; lhvwb = "Nathaniel Baxter "; + lihop = "Leroy Hopson "; linquize = "Linquize "; linus = "Linus Arver "; + lnl7 = "Daiderd Jordan "; lovek323 = "Jason O'Conal "; + lowfatcomputing = "Andreas Wagner "; + lsix = "Lancelot SIX "; ludo = "Ludovic Courtès "; madjar = "Georges Dubus "; magnetophon = "Bart Brouns "; + mahe = "Matthias Herrmann "; + makefu = "Felix Richter "; malyn = "Michael Alyn Miller "; manveru = "Michael Fellinger "; marcweber = "Marc Weber "; + maurer = "Matthew Maurer "; matejc = "Matej Cotman "; + mathnerd314 = "Mathnerd314 "; matthiasbeyer = "Matthias Beyer "; + mbakke = "Marius Bakke "; meditans = "Carlo Nucera "; meisternu = "Matt Miemiec "; michelk = "Michel Kuhlmann "; + mirdhyn = "Merlin Gaillard "; + mschristiansen = "Mikkel Christiansen "; modulistic = "Pablo Costa "; mornfall = "Petr Ročkai "; MP2E = "Cray Elliott "; msackman = "Matthew Sackman "; mtreskin = "Max Treskin "; + mudri = "James Wood "; muflax = "Stefan Dorn "; nathan-gs = "Nathan Bijnens "; nckx = "Tobias Geerinckx-Rice "; @@ -144,14 +195,18 @@ nslqqq = "Nikita Mikhailov "; obadz = "obadz "; ocharles = "Oliver Charles "; + odi = "Oliver Dunkl "; offline = "Jaka Hudoklin "; olcai = "Erik Timan "; orbitz = "Malcolm Matalka "; + osener = "Ozan Sener "; page = "Carles Pagès "; paholg = "Paho Lurie-Gregg "; pakhfn = "Fedor Pakhomov "; pashev = "Igor Pashev "; + pesterhazy = "Paulus Esterhazy "; phausmann = "Philipp Hausmann "; + philandstuff = "Philip Potter "; phreedom = "Evgeny Egorochkin "; pierron = "Nicolas B. Pierron "; piotr = "Piotr Pietraszkiewicz "; @@ -159,7 +214,9 @@ pkmx = "Chih-Mao Chen "; plcplc = "Philip Lykke Carlsen "; pmahoney = "Patrick Mahoney "; + pmiddend = "Philipp Middendorf "; prikhi = "Pavan Rikhi "; + psibi = "Sibi "; pSub = "Pascal Wittmann "; puffnfresh = "Brian McKenna "; qknight = "Joachim Schiele "; @@ -169,37 +226,47 @@ refnil = "Martin Lavoie "; relrod = "Ricky Elrod "; renzo = "Renzo Carbonara "; + rick68 = "Wei-Ming Yang "; rickynils = "Rickard Nilsson "; rob = "Rob Vermaas "; robberer = "Longrin Wischnewski "; + robbinch = "Robbin C. 
"; roconnor = "Russell O'Connor "; roelof = "Roelof Wobben "; romildo = "José Romildo Malaquias "; rszibele = "Richard Szibele "; rushmorem = "Rushmore Mushambi "; rycee = "Robert Helgesson "; + samuelrivas = "Samuel Rivas "; sander = "Sander van der Burg "; schmitthenner = "Fabian Schmitthenner "; schristo = "Scott Christopher "; sepi = "Raffael Mancini "; + sheganinans = "Aistis Raulinaitis "; shell = "Shell Turner "; shlevy = "Shea Levy "; simons = "Peter Simons "; + simonvandel = "Simon Vandel Sillesen "; sjagoe = "Simon Jagoe "; sjmackenzie = "Stewart Mackenzie "; skeidel = "Sven Keidel "; smironov = "Sergey Mironov "; + spacefrogg = "Michael Raitza "; sprock = "Roger Mason "; spwhitt = "Spencer Whitt "; + stephenmw = "Stephen Weinberg "; + szczyp = "Szczyp "; sztupi = "Attila Sztupak "; tailhook = "Paul Colomiets "; taktoa = "Remy Goldschmidt "; + telotortium = "Robert Irelan "; thammers = "Tobias Hammerschmidt "; the-kenny = "Moritz Ulrich "; theuni = "Christian Theune "; thoughtpolice = "Austin Seipp "; titanous = "Jonathan Rudenberg "; tomberek = "Thomas Bereknyei "; + travisbhartwell = "Travis B. Hartwell "; trino = "Hubert Mühlhans "; tstrobel = "Thomas Strobel "; ttuegel = "Thomas Tuegel "; @@ -218,6 +285,7 @@ winden = "Antonio Vargas Gonzalez "; wizeman = "Ricardo M. Correia "; wjlroe = "William Roe "; + womfoo = "Kranium Gikos Mendoza "; wkennington = "William A. Kennington III "; wmertens = "Wout Mertens "; wscott = "Wayne Scott "; diff --git a/lib/misc.nix b/lib/misc.nix deleted file mode 100644 index fd20ce25010..00000000000 --- a/lib/misc.nix +++ /dev/null @@ -1,426 +0,0 @@ -let lib = import ./default.nix; - inherit (builtins) isFunction head tail isList isAttrs isInt attrNames; - -in - -with import ./lists.nix; -with import ./attrsets.nix; -with import ./strings.nix; - -rec { - - # returns default if env var is not set - maybeEnv = name: default: - let value = builtins.getEnv name; in - if value == "" then default else value; - - defaultMergeArg = x : y: if builtins.isAttrs y then - y - else - (y x); - defaultMerge = x: y: x // (defaultMergeArg x y); - foldArgs = merger: f: init: x: - let arg=(merger init (defaultMergeArg init x)); - # now add the function with composed args already applied to the final attrs - base = (setAttrMerge "passthru" {} (f arg) - ( z : z // rec { - function = foldArgs merger f arg; - args = (lib.attrByPath ["passthru" "args"] {} z) // x; - } )); - withStdOverrides = base // { - override = base.passthru.function; - deepOverride = a : (base.passthru.function ((lib.mapAttrs (lib.deepOverrider a) base.passthru.args) // a)); - } ; - in - withStdOverrides; - - - # predecessors: proposed replacement for applyAndFun (which has a bug cause it merges twice) - # the naming "overridableDelayableArgs" tries to express that you can - # - override attr values which have been supplied earlier - # - use attr values before they have been supplied by accessing the fix point - # name "fixed" - # f: the (delayed overridden) arguments are applied to this - # - # initial: initial attrs arguments and settings. see defaultOverridableDelayableArgs - # - # returns: f applied to the arguments // special attributes attrs - # a) merge: merge applied args with new args. 
Wether an argument is overridden depends on the merge settings - # b) replace: this let's you replace and remove names no matter which merge function has been set - # - # examples: see test cases "res" below; - overridableDelayableArgs = - f : # the function applied to the arguments - initial : # you pass attrs, the functions below are passing a function taking the fix argument - let - takeFixed = if isFunction initial then initial else (fixed : initial); # transform initial to an expression always taking the fixed argument - tidy = args : - let # apply all functions given in "applyPreTidy" in sequence - applyPreTidyFun = fold ( n : a : x : n ( a x ) ) lib.id (maybeAttr "applyPreTidy" [] args); - in removeAttrs (applyPreTidyFun args) ( ["applyPreTidy"] ++ (maybeAttr "removeAttrs" [] args) ); # tidy up args before applying them - fun = n : x : - let newArgs = fixed : - let args = takeFixed fixed; - mergeFun = args.${n}; - in if isAttrs x then (mergeFun args x) - else assert isFunction x; - mergeFun args (x ( args // { inherit fixed; })); - in overridableDelayableArgs f newArgs; - in - (f (tidy (lib.fix takeFixed))) // { - merge = fun "mergeFun"; - replace = fun "keepFun"; - }; - defaultOverridableDelayableArgs = f : - let defaults = { - mergeFun = mergeAttrByFunc; # default merge function. merge strategie (concatenate lists, strings) is given by mergeAttrBy - keepFun = a : b : { inherit (a) removeAttrs mergeFun keepFun mergeAttrBy; } // b; # even when using replace preserve these values - applyPreTidy = []; # list of functions applied to args before args are tidied up (usage case : prepareDerivationArgs) - mergeAttrBy = mergeAttrBy // { - applyPreTidy = a : b : a ++ b; - removeAttrs = a : b: a ++ b; - }; - removeAttrs = ["mergeFun" "keepFun" "mergeAttrBy" "removeAttrs" "fixed" ]; # before applying the arguments to the function make sure these names are gone - }; - in (overridableDelayableArgs f defaults).merge; - - - - # rec { # an example of how composedArgsAndFun can be used - # a = composedArgsAndFun (x : x) { a = ["2"]; meta = { d = "bar";}; }; - # # meta.d will be lost ! It's your task to preserve it (eg using a merge function) - # b = a.passthru.function { a = [ "3" ]; meta = { d2 = "bar2";}; }; - # # instead of passing/ overriding values you can use a merge function: - # c = b.passthru.function ( x: { a = x.a ++ ["4"]; }); # consider using (maybeAttr "a" [] x) - # } - # result: - # { - # a = { a = ["2"]; meta = { d = "bar"; }; passthru = { function = .. }; }; - # b = { a = ["3"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; }; - # c = { a = ["3" "4"]; meta = { d2 = "bar2"; }; passthru = { function = .. }; }; - # # c2 is equal to c - # } - composedArgsAndFun = f: foldArgs defaultMerge f {}; - - - # shortcut for attrByPath ["name"] default attrs - maybeAttrNullable = maybeAttr; - - # shortcut for attrByPath ["name"] default attrs - maybeAttr = name: default: attrs: attrs.${name} or default; - - - # Return the second argument if the first one is true or the empty version - # of the second argument. - ifEnable = cond: val: - if cond then val - else if builtins.isList val then [] - else if builtins.isAttrs val then {} - # else if builtins.isString val then "" - else if val == true || val == false then false - else null; - - - # Return true only if there is an attribute and it is true. 
- checkFlag = attrSet: name: - if name == "true" then true else - if name == "false" then false else - if (elem name (attrByPath ["flags"] [] attrSet)) then true else - attrByPath [name] false attrSet ; - - - # Input : attrSet, [ [name default] ... ], name - # Output : its value or default. - getValue = attrSet: argList: name: - ( attrByPath [name] (if checkFlag attrSet name then true else - if argList == [] then null else - let x = builtins.head argList; in - if (head x) == name then - (head (tail x)) - else (getValue attrSet - (tail argList) name)) attrSet ); - - - # Input : attrSet, [[name default] ...], [ [flagname reqs..] ... ] - # Output : are reqs satisfied? It's asserted. - checkReqs = attrSet : argList : condList : - ( - fold lib.and true - (map (x: let name = (head x) ; in - - ((checkFlag attrSet name) -> - (fold lib.and true - (map (y: let val=(getValue attrSet argList y); in - (val!=null) && (val!=false)) - (tail x))))) condList)) ; - - - # This function has O(n^2) performance. - uniqList = {inputList, acc ? []} : - let go = xs : acc : - if xs == [] - then [] - else let x = head xs; - y = if elem x acc then [] else [x]; - in y ++ go (tail xs) (y ++ acc); - in go inputList acc; - - uniqListExt = {inputList, outputList ? [], - getter ? (x : x), compare ? (x: y: x==y)}: - if inputList == [] then outputList else - let x=head inputList; - isX = y: (compare (getter y) (getter x)); - newOutputList = outputList ++ - (if any isX outputList then [] else [x]); - in uniqListExt {outputList=newOutputList; - inputList = (tail inputList); - inherit getter compare; - }; - - - - condConcat = name: list: checker: - if list == [] then name else - if checker (head list) then - condConcat - (name + (head (tail list))) - (tail (tail list)) - checker - else condConcat - name (tail (tail list)) checker; - - lazyGenericClosure = {startSet, operator}: - let - work = list: doneKeys: result: - if list == [] then - result - else - let x = head list; key = x.key; in - if elem key doneKeys then - work (tail list) doneKeys result - else - work (tail list ++ operator x) ([key] ++ doneKeys) ([x] ++ result); - in - work startSet [] []; - - genericClosure = builtins.genericClosure or lazyGenericClosure; - - innerModifySumArgs = f: x: a: b: if b == null then (f a b) // x else - innerModifySumArgs f x (a // b); - modifySumArgs = f: x: innerModifySumArgs f x {}; - - - innerClosePropagation = acc : xs : - if xs == [] - then acc - else let y = head xs; - ys = tail xs; - in if ! isAttrs y - then innerClosePropagation acc ys - else let acc' = [y] ++ acc; - in innerClosePropagation - acc' - (uniqList { inputList = (maybeAttrNullable "propagatedBuildInputs" [] y) - ++ (maybeAttrNullable "propagatedNativeBuildInputs" [] y) - ++ ys; - acc = acc'; - } - ); - - closePropagation = list: (uniqList {inputList = (innerClosePropagation [] list);}); - - # calls a function (f attr value ) for each record item. 
returns a list - mapAttrsFlatten = f : r : map (attr: f attr r.${attr}) (attrNames r); - - # attribute set containing one attribute - nvs = name : value : listToAttrs [ (nameValuePair name value) ]; - # adds / replaces an attribute of an attribute set - setAttr = set : name : v : set // (nvs name v); - - # setAttrMerge (similar to mergeAttrsWithFunc but only merges the values of a particular name) - # setAttrMerge "a" [] { a = [2];} (x : x ++ [3]) -> { a = [2 3]; } - # setAttrMerge "a" [] { } (x : x ++ [3]) -> { a = [ 3]; } - setAttrMerge = name : default : attrs : f : - setAttr attrs name (f (maybeAttr name default attrs)); - - # Using f = a : b = b the result is similar to // - # merge attributes with custom function handling the case that the attribute - # exists in both sets - mergeAttrsWithFunc = f : set1 : set2 : - fold (n: set : if set ? ${n} - then setAttr set n (f set.${n} set2.${n}) - else set ) - (set2 // set1) (attrNames set2); - - # merging two attribute set concatenating the values of same attribute names - # eg { a = 7; } { a = [ 2 3 ]; } becomes { a = [ 7 2 3 ]; } - mergeAttrsConcatenateValues = mergeAttrsWithFunc ( a : b : (toList a) ++ (toList b) ); - - # merges attributes using //, if a name exisits in both attributes - # an error will be triggered unless its listed in mergeLists - # so you can mergeAttrsNoOverride { buildInputs = [a]; } { buildInputs = [a]; } {} to get - # { buildInputs = [a b]; } - # merging buildPhase does'nt really make sense. The cases will be rare where appending /prefixing will fit your needs? - # in these cases the first buildPhase will override the second one - # ! deprecated, use mergeAttrByFunc instead - mergeAttrsNoOverride = { mergeLists ? ["buildInputs" "propagatedBuildInputs"], - overrideSnd ? [ "buildPhase" ] - } : attrs1 : attrs2 : - fold (n: set : - setAttr set n ( if set ? ${n} - then # merge - if elem n mergeLists # attribute contains list, merge them by concatenating - then attrs2.${n} ++ attrs1.${n} - else if elem n overrideSnd - then attrs1.${n} - else throw "error mergeAttrsNoOverride, attribute ${n} given in both attributes - no merge func defined" - else attrs2.${n} # add attribute not existing in attr1 - )) attrs1 (attrNames attrs2); - - - # example usage: - # mergeAttrByFunc { - # inherit mergeAttrBy; # defined below - # buildInputs = [ a b ]; - # } { - # buildInputs = [ c d ]; - # }; - # will result in - # { mergeAttrsBy = [...]; buildInputs = [ a b c d ]; } - # is used by prepareDerivationArgs, defaultOverridableDelayableArgs and can be used when composing using - # foldArgs, composedArgsAndFun or applyAndFun. Example: composableDerivation in all-packages.nix - mergeAttrByFunc = x : y : - let - mergeAttrBy2 = { mergeAttrBy=lib.mergeAttrs; } - // (maybeAttr "mergeAttrBy" {} x) - // (maybeAttr "mergeAttrBy" {} y); in - fold lib.mergeAttrs {} [ - x y - (mapAttrs ( a : v : # merge special names using given functions - if x ? ${a} - then if y ? ${a} - then v x.${a} y.${a} # both have attr, use merge func - else x.${a} # only x has attr - else y.${a} # only y has attr) - ) (removeAttrs mergeAttrBy2 - # don't merge attrs which are neither in x nor y - (filter (a: ! x ? ${a} && ! y ? 
${a}) - (attrNames mergeAttrBy2)) - ) - ) - ]; - mergeAttrsByFuncDefaults = foldl mergeAttrByFunc { inherit mergeAttrBy; }; - mergeAttrsByFuncDefaultsClean = list: removeAttrs (mergeAttrsByFuncDefaults list) ["mergeAttrBy"]; - - # merge attrs based on version key into mkDerivation args, see mergeAttrBy to learn about smart merge defaults - # - # This function is best explained by an example: - # - # {version ? "2.x"} : - # - # mkDerivation (mergeAttrsByVersion "package-name" version - # { # version specific settings - # "git" = { src = ..; preConfigre = "autogen.sh"; buildInputs = [automake autoconf libtool]; }; - # "2.x" = { src = ..; }; - # } - # { // shared settings - # buildInputs = [ common build inputs ]; - # meta = { .. } - # } - # ) - # - # Please note that e.g. Eelco Dolstra usually prefers having one file for - # each version. On the other hand there are valuable additional design goals - # - readability - # - do it once only - # - try to avoid duplication - # - # Marc Weber and Michael Raskin sometimes prefer keeping older - # versions around for testing and regression tests - as long as its cheap to - # do so. - # - # Very often it just happens that the "shared" code is the bigger part. - # Then using this function might be appropriate. - # - # Be aware that its easy to cause recompilations in all versions when using - # this function - also if derivations get too complex splitting into multiple - # files is the way to go. - # - # See misc.nix -> versionedDerivation - # discussion: nixpkgs: pull/310 - mergeAttrsByVersion = name: version: attrsByVersion: base: - mergeAttrsByFuncDefaultsClean [ { name = "${name}-${version}"; } base (maybeAttr version (throw "bad version ${version} for ${name}") attrsByVersion)]; - - # sane defaults (same name as attr name so that inherit can be used) - mergeAttrBy = # { buildInputs = concatList; [...]; passthru = mergeAttr; [..]; } - listToAttrs (map (n : nameValuePair n lib.concat) - [ "nativeBuildInputs" "buildInputs" "propagatedBuildInputs" "configureFlags" "prePhases" "postAll" "patches" ]) - // listToAttrs (map (n : nameValuePair n lib.mergeAttrs) [ "passthru" "meta" "cfg" "flags" ]) - // listToAttrs (map (n : nameValuePair n (a: b: "${a}\n${b}") ) [ "preConfigure" "postInstall" ]) - ; - - # prepareDerivationArgs tries to make writing configurable derivations easier - # example: - # prepareDerivationArgs { - # mergeAttrBy = { - # myScript = x : y : x ++ "\n" ++ y; - # }; - # cfg = { - # readlineSupport = true; - # }; - # flags = { - # readline = { - # set = { - # configureFlags = [ "--with-compiler=${compiler}" ]; - # buildInputs = [ compiler ]; - # pass = { inherit compiler; READLINE=1; }; - # assertion = compiler.dllSupport; - # myScript = "foo"; - # }; - # unset = { configureFlags = ["--without-compiler"]; }; - # }; - # }; - # src = ... - # buildPhase = '' ... ''; - # name = ... - # myScript = "bar"; - # }; - # if you don't have need for unset you can omit the surrounding set = { .. } attr - # all attrs except flags cfg and mergeAttrBy will be merged with the - # additional data from flags depending on config settings - # It's used in composableDerivation in all-packages.nix. It's also used - # heavily in the new python and libs implementation - # - # should we check for misspelled cfg options? - # TODO use args.mergeFun here as well? 
- prepareDerivationArgs = args: - let args2 = { cfg = {}; flags = {}; } // args; - flagName = name : "${name}Support"; - cfgWithDefaults = (listToAttrs (map (n : nameValuePair (flagName n) false) (attrNames args2.flags))) - // args2.cfg; - opts = attrValues (mapAttrs (a : v : - let v2 = if v ? set || v ? unset then v else { set = v; }; - n = if cfgWithDefaults.${flagName a} then "set" else "unset"; - attr = maybeAttr n {} v2; in - if (maybeAttr "assertion" true attr) - then attr - else throw "assertion of flag ${a} of derivation ${args.name} failed" - ) args2.flags ); - in removeAttrs - (mergeAttrsByFuncDefaults ([args] ++ opts ++ [{ passthru = cfgWithDefaults; }])) - ["flags" "cfg" "mergeAttrBy" ]; - - - nixType = x: - if isAttrs x then - if x ? outPath then "derivation" - else "aattrs" - else if isFunction x then "function" - else if isList x then "list" - else if x == true then "bool" - else if x == false then "bool" - else if x == null then "null" - else if isInt x then "int" - else "string"; - -} diff --git a/lib/modules.nix b/lib/modules.nix index dcede0c46c6..3e4d0547ecc 100644 --- a/lib/modules.nix +++ b/lib/modules.nix @@ -17,6 +17,10 @@ rec { evalModules) and the less declarative the module set is. */ evalModules = { modules , prefix ? [] + , # This should only be used for special arguments that need to be evaluated + # when resolving module structure (like in imports). For everything else, + # there's _module.args. + specialArgs ? {} , # This would be remove in the future, Prefer _module.args option instead. args ? {} , # This would be remove in the future, Prefer _module.check option instead. @@ -39,7 +43,7 @@ rec { }; _module.check = mkOption { - type = types.uniq types.bool; + type = types.bool; internal = true; default = check; description = "Whether to check whether all option definitions have matching declarations."; @@ -51,7 +55,7 @@ rec { }; }; - closed = closeModules (modules ++ [ internalModule ]) { inherit config options; lib = import ./.; }; + closed = closeModules (modules ++ [ internalModule ]) ({ inherit config options; lib = import ./.; } // specialArgs); # Note: the list of modules is reversed to maintain backward # compatibility with the old module system. Not sure if this is @@ -72,8 +76,8 @@ rec { else yieldConfig (prefix ++ [n]) v) set) ["_definedNames"]; in if options._module.check.value && set ? _definedNames then - fold (m: res: - fold (name: res: + foldl' (res: m: + foldl' (res: name: if set ? ${name} then res else throw "The option `${showOption (prefix ++ [name])}' defined in `${m.file}' does not exist.") res m.names) res set._definedNames @@ -87,9 +91,11 @@ rec { let toClosureList = file: parentKey: imap (n: x: if isAttrs x || isFunction x then - unifyModuleSyntax file "${parentKey}:anon-${toString n}" (unpackSubmodule applyIfFunction x args) + let key = "${parentKey}:anon-${toString n}"; in + unifyModuleSyntax file key (unpackSubmodule (applyIfFunction key) x args) else - unifyModuleSyntax (toString x) (toString x) (applyIfFunction (import x) args)); + let file = toString x; key = toString x; in + unifyModuleSyntax file key (applyIfFunction key (import x) args)); in builtins.genericClosure { startSet = toClosureList unknownModule "" modules; @@ -118,7 +124,7 @@ rec { config = removeAttrs m ["key" "_file" "require" "imports"]; }; - applyIfFunction = f: arg@{ config, options, lib }: if isFunction f then + applyIfFunction = key: f: args@{ config, options, lib, ... 
}: if isFunction f then let # Module arguments are resolved in a strict manner when attribute set # deconstruction is used. As the arguments are now defined with the @@ -133,11 +139,18 @@ rec { # not their values. The values are forwarding the result of the # evaluation of the option. requiredArgs = builtins.attrNames (builtins.functionArgs f); + context = name: ''while evaluating the module argument `${name}' in "${key}":''; extraArgs = builtins.listToAttrs (map (name: { inherit name; - value = config._module.args.${name}; + value = addErrorContext (context name) + (args.${name} or config._module.args.${name}); }) requiredArgs); - in f (extraArgs // arg) + + # Note: we append in the opposite order such that we can add an error + # context on the explicited arguments of "args" too. This update + # operator is used to make the "args@{ ... }: with args.lib;" notation + # works. + in f (args // extraArgs) else f; @@ -169,18 +182,18 @@ rec { let loc = prefix ++ [name]; # Get all submodules that declare ‘name’. - decls = concatLists (map (m: + decls = concatMap (m: if m.options ? ${name} then [ { inherit (m) file; options = m.options.${name}; } ] else [] - ) options); + ) options; # Get all submodules that define ‘name’. - defns = concatLists (map (m: + defns = concatMap (m: if m.config ? ${name} then map (config: { inherit (m) file; inherit config; }) (pushDownProperties m.config.${name}) else [] - ) configs); + ) configs; nrOptions = count (m: isOption m.options) decls; # Extract the definitions for this loc defns' = map (m: { inherit (m) file; value = m.config.${name}; }) @@ -212,7 +225,7 @@ rec { 'opts' is a list of modules. Each module has an options attribute which correspond to the definition of 'loc' in 'opt.file'. */ mergeOptionDecls = loc: opts: - fold (opt: res: + foldl' (res: opt: if opt.options ? default && res ? default || opt.options ? example && res ? example || opt.options ? description && res ? description || @@ -238,7 +251,7 @@ rec { else if opt.options ? options then map (coerceOption opt.file) options' ++ res.options else res.options; in opt.options // res // - { declarations = [opt.file] ++ res.declarations; + { declarations = res.declarations ++ [opt.file]; options = submodules; } ) { inherit loc; declarations = []; options = []; } opts; @@ -248,58 +261,67 @@ rec { evalOptionValue = loc: opt: defs: let # Add in the default value for this option, if any. - defs' = (optional (opt ? default) - { file = head opt.declarations; value = mkOptionDefault opt.default; }) ++ defs; + defs' = + (optional (opt ? default) + { file = head opt.declarations; value = mkOptionDefault opt.default; }) ++ defs; - # Handle properties, check types, and merge everything together - inherit (mergeDefinitions loc opt.type defs') isDefined defsFinal mergedValue; - files = map (def: def.file) defsFinal; - merged = - if isDefined then mergedValue - else throw "The option `${showOption loc}' is used but not defined."; + # Handle properties, check types, and merge everything together. + res = + if opt.readOnly or false && length defs' > 1 then + throw "The option `${showOption loc}' is read-only, but it's set multiple times." + else + mergeDefinitions loc opt.type defs'; + + # Check whether the option is defined, and apply the ‘apply’ + # function to the merged value. This allows options to yield a + # value computed from the definitions. + value = + if !res.isDefined then + throw "The option `${showOption loc}' is used but not defined." + else if opt ? 
apply then + opt.apply res.mergedValue + else + res.mergedValue; - # Finally, apply the ‘apply’ function to the merged - # value. This allows options to yield a value computed - # from the definitions. - value = (opt.apply or id) merged; in opt // { value = addErrorContext "while evaluating the option `${showOption loc}':" value; - definitions = map (def: def.value) defsFinal; - inherit isDefined files; + definitions = map (def: def.value) res.defsFinal; + files = map (def: def.file) res.defsFinal; + inherit (res) isDefined; }; - # Merge definitions of a value of a given type - mergeDefinitions = loc: type: defs: rec { - defsFinal = - let - # Process mkMerge and mkIf properties - processIfAndMerge = defs: concatMap (m: - map (value: { inherit (m) file; inherit value; }) (dischargeProperties m.value) - ) defs; + # Merge definitions of a value of a given type. + mergeDefinitions = loc: type: defs: rec { + defsFinal = + let + # Process mkMerge and mkIf properties. + defs' = concatMap (m: + map (value: { inherit (m) file; inherit value; }) (dischargeProperties m.value) + ) defs; - # Process mkOverride properties - processOverride = defs: filterOverrides defs; + # Process mkOverride properties. + defs'' = filterOverrides defs'; - # Sort mkOrder properties - processOrder = defs: - # Avoid sorting if we don't have to. - if any (def: def.value._type or "" == "order") defs - then sortProperties defs - else defs; - in - processOrder (processOverride (processIfAndMerge defs)); + # Sort mkOrder properties. + defs''' = + # Avoid sorting if we don't have to. + if any (def: def.value._type or "" == "order") defs'' + then sortProperties defs'' + else defs''; + in defs'''; - # Type-check the remaining definitions, and merge them - mergedValue = fold (def: res: - if type.check def.value then res - else throw "The option value `${showOption loc}' in `${def.file}' is not a ${type.name}.") - (type.merge loc defsFinal) defsFinal; + # Type-check the remaining definitions, and merge them. + mergedValue = foldl' (res: def: + if type.check def.value then res + else throw "The option value `${showOption loc}' in `${def.file}' is not a ${type.name}.") + (type.merge loc defsFinal) defsFinal; - isDefined = defsFinal != []; - optionalValue = - if isDefined then { value = mergedValue; } - else {}; - }; + isDefined = defsFinal != []; + + optionalValue = + if isDefined then { value = mergedValue; } + else {}; + }; /* Given a config set, expand mkMerge properties, and push down the other properties into the children. The result is a list of @@ -370,8 +392,7 @@ rec { let defaultPrio = 100; getPrio = def: if def.value._type or "" == "override" then def.value.priority else defaultPrio; - min = x: y: if x < y then x else y; - highestPrio = fold (def: prio: min (getPrio def) prio) 9999 defs; + highestPrio = foldl' (prio: def: min (getPrio def) prio) 9999 defs; strip = def: if def.value._type or "" == "override" then def // { value = def.value.content; } else def; in concatMap (def: if getPrio def == highestPrio then [(strip def)] else []) defs; diff --git a/lib/options.nix b/lib/options.nix index eed43daaecc..444ec37e6ea 100644 --- a/lib/options.nix +++ b/lib/options.nix @@ -4,7 +4,6 @@ let lib = import ./default.nix; in with import ./trivial.nix; with import ./lists.nix; -with import ./misc.nix; with import ./attrsets.nix; with import ./strings.nix; @@ -20,6 +19,7 @@ rec { , apply ? null # Function that converts the option value to something else. , internal ? null # Whether the option is for NixOS developers only. , visible ? 
null # Whether the option shows up in the manual. + , readOnly ? null # Whether the option can be set only once , options ? null # Obsolete, used by types.optionSet. } @ attrs: attrs // { _type = "option"; }; @@ -53,32 +53,27 @@ rec { if length list == 1 then head list else if all isFunction list then x: mergeDefaultOption loc (map (f: f x) list) else if all isList list then concatLists list - else if all isAttrs list then fold lib.mergeAttrs {} list - else if all isBool list then fold lib.or false list + else if all isAttrs list then foldl' lib.mergeAttrs {} list + else if all isBool list then foldl' lib.or false list else if all isString list then lib.concatStrings list else if all isInt list && all (x: x == head list) list then head list else throw "Cannot merge definitions of `${showOption loc}' given in ${showFiles (getFiles defs)}."; - /* Obsolete, will remove soon. Specify an option type or apply - function instead. */ - mergeTypedOption = typeName: predicate: merge: loc: list: - let list' = map (x: x.value) list; in - if all predicate list then merge list' - else throw "Expected a ${typeName}."; - - mergeEnableOption = mergeTypedOption "boolean" - (x: true == x || false == x) (fold lib.or false); - - mergeListOption = mergeTypedOption "list" isList concatLists; - - mergeStringOption = mergeTypedOption "string" isString lib.concatStrings; - mergeOneOption = loc: defs: if defs == [] then abort "This case should never happen." else if length defs != 1 then throw "The unique option `${showOption loc}' is defined multiple times, in ${showFiles (getFiles defs)}." else (head defs).value; + /* "Merge" option definitions by checking that they all have the same value. */ + mergeEqualOption = loc: defs: + if defs == [] then abort "This case should never happen." + else foldl' (val: def: + if def.value != val then + throw "The option `${showOption loc}' has conflicting definitions, in ${showFiles (getFiles defs)}." + else + val) (head defs).value defs; + getValues = map (x: x.value); getFiles = map (x: x.file); @@ -88,7 +83,7 @@ rec { optionAttrSetToDocList = optionAttrSetToDocList' []; optionAttrSetToDocList' = prefix: options: - fold (opt: rest: + concatMap (opt: let docOption = rec { name = showOption opt.loc; @@ -96,6 +91,7 @@ rec { declarations = filter (x: x != unknownModule) opt.declarations; internal = opt.internal or false; visible = opt.visible or true; + readOnly = opt.readOnly or false; type = opt.type.name or null; } // (if opt ? example then { example = scrubOptionValue opt.example; } else {}) @@ -106,8 +102,7 @@ rec { let ss = opt.type.getSubOptions opt.loc; in if ss != {} then optionAttrSetToDocList' opt.loc ss else []; in - # FIXME: expensive, O(n^2) - [ docOption ] ++ subOptions ++ rest) [] (collect isOption options); + [ docOption ] ++ subOptions) (collect isOption options); /* This function recursively removes all derivation attributes from diff --git a/lib/strings.nix b/lib/strings.nix index ee5a59bdaf8..372c8833c32 100644 --- a/lib/strings.nix +++ b/lib/strings.nix @@ -8,11 +8,15 @@ in rec { - inherit (builtins) stringLength substring head tail isString; + inherit (builtins) stringLength substring head tail isString replaceStrings; # Concatenate a list of strings. - concatStrings = lib.fold (x: y: x + y) ""; + concatStrings = + if builtins ? concatStringsSep then + builtins.concatStringsSep "" + else + lib.foldl' (x: y: x + y) ""; # Map a function over a list and concatenate the resulting strings. 
@@ -25,14 +29,13 @@ rec { intersperse = separator: list: if list == [] || length list == 1 then list - else [(head list) separator] - ++ (intersperse separator (tail list)); + else tail (lib.concatMap (x: [separator x]) list); # Concatenate a list of strings with a separator between each element, e.g. # concatStringsSep " " ["foo" "bar" "xyzzy"] == "foo bar xyzzy" - concatStringsSep = separator: list: - concatStrings (intersperse separator list); + concatStringsSep = builtins.concatStringsSep or (separator: list: + concatStrings (intersperse separator list)); concatMapStringsSep = sep: f: list: concatStringsSep sep (map f list); concatImapStringsSep = sep: f: list: concatStringsSep sep (lib.imap f list); @@ -63,13 +66,13 @@ rec { # Determine whether a string has given prefix/suffix. hasPrefix = pref: str: - eqStrings (substring 0 (stringLength pref) str) pref; + substring 0 (stringLength pref) str == pref; hasSuffix = suff: str: let lenStr = stringLength str; lenSuff = stringLength suff; in lenStr >= lenSuff && - eqStrings (substring (lenStr - lenSuff) lenStr str) suff; + substring (lenStr - lenSuff) lenStr str == suff; # Convert a string to a list of characters (i.e. singleton strings). @@ -78,63 +81,57 @@ rec { # will likely be horribly inefficient; Nix is not a general purpose # programming language. Complex string manipulations should, if # appropriate, be done in a derivation. - stringToCharacters = s: let l = stringLength s; in - if l == 0 - then [] - else map (p: substring p 1 s) (lib.range 0 (l - 1)); + stringToCharacters = s: + map (p: substring p 1 s) (lib.range 0 (stringLength s - 1)); - # Manipulate a string charcater by character and replace them by strings - # before concatenating the results. + # Manipulate a string charactter by character and replace them by + # strings before concatenating the results. stringAsChars = f: s: concatStrings ( map f (stringToCharacters s) ); - # same as vim escape function. - # Each character contained in list is prefixed by "\" - escape = list : string : - stringAsChars (c: if lib.elem c list then "\\${c}" else c) string; + # Escape occurrence of the elements of ‘list’ in ‘string’ by + # prefixing it with a backslash. For example, ‘escape ["(" ")"] + # "(foo)"’ returns the string ‘\(foo\)’. + escape = list: replaceChars list (map (c: "\\${c}") list); - # still ugly slow. But more correct now - # [] for zsh + # Escape all characters that have special meaning in the Bourne shell. escapeShellArg = lib.escape (stringToCharacters "\\ ';$`()|<>\t*[]"); - # replace characters by their substitutes. This function is equivalent to - # the `tr' command except that one character can be replace by multiple - # ones. e.g., - # replaceChars ["<" ">"] ["<" ">"] "" returns "<foo>". - replaceChars = del: new: s: + # Obsolete - use replaceStrings instead. + replaceChars = builtins.replaceStrings or ( + del: new: s: let + substList = lib.zipLists del new; subst = c: - (lib.fold - (sub: res: if sub.fst == c then sub else res) - {fst = c; snd = c;} (lib.zipLists del new) - ).snd; + let found = lib.findFirst (sub: sub.fst == c) null substList; in + if found == null then + c + else + found.snd; in - stringAsChars subst s; + stringAsChars subst s); - # Case conversion utilities + # Case conversion utilities. 
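The string-helper changes above keep the existing interfaces of escape, replaceChars, hasPrefix and hasSuffix while delegating to builtins where available. A minimal sketch of the expected behaviour, assuming the library is loaded from <nixpkgs/lib> on NIX_PATH (the attribute names in the result set are illustrative, not part of the patch):

```
let
  lib = import <nixpkgs/lib>;
in {
  # escape prefixes every listed character with a backslash
  escaped  = lib.escape [ "(" ")" ] "(foo)";          # "\\(foo\\)"
  # replaceChars now falls back to builtins.replaceStrings when it exists
  replaced = lib.replaceChars [ "o" ] [ "0" ] "foo";  # "f00"
  # hasPrefix compares substrings directly instead of going through eqStrings
  prefixed = lib.hasPrefix "nix" "nixpkgs";           # true
}
```

Evaluating this with nix-instantiate --eval --strict should give the commented values with both the old and the new implementation, which is the point of the refactoring.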
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz"; upperChars = stringToCharacters "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; toLower = replaceChars upperChars lowerChars; toUpper = replaceChars lowerChars upperChars; - # Appends string context from another string + + # Appends string context from another string. addContextFrom = a: b: substring 0 0 a + b; - # Compares strings not requiring context equality - # Obviously, a workaround but works on all Nix versions - eqStrings = a: b: addContextFrom b a == addContextFrom a b; - - # Cut a string with a separator and produces a list of strings which were - # separated by this separator. e.g., - # `splitString "." "foo.bar.baz"' returns ["foo" "bar" "baz"]. + # Cut a string with a separator and produces a list of strings which + # were separated by this separator; e.g., `splitString "." + # "foo.bar.baz"' returns ["foo" "bar" "baz"]. splitString = _sep: _s: let sep = addContextFrom _s _sep; @@ -177,7 +174,7 @@ rec { sufLen = stringLength suf; sLen = stringLength s; in - if sufLen <= sLen && eqStrings suf (substring (sLen - sufLen) sufLen s) then + if sufLen <= sLen && suf == substring (sLen - sufLen) sufLen s then substring 0 (sLen - sufLen) s else s; @@ -196,21 +193,22 @@ rec { # Extract name with version from URL. Ask for separator which is - # supposed to start extension - nameFromURL = url: sep: let - components = splitString "/" url; - filename = lib.last components; - name = builtins.head (splitString sep filename); - in - assert ! eqStrings name filename; - name; + # supposed to start extension. + nameFromURL = url: sep: + let + components = splitString "/" url; + filename = lib.last components; + name = builtins.head (splitString sep filename); + in assert name != filename; name; # Create an --{enable,disable}- string that can be passed to # standard GNU Autoconf scripts. enableFeature = enable: feat: "--${if enable then "enable" else "disable"}-${feat}"; - # Create a fixed width string with additional prefix to match required width + + # Create a fixed width string with additional prefix to match + # required width. fixedWidthString = width: filler: str: let strw = lib.stringLength str; @@ -219,6 +217,12 @@ rec { assert strw <= width; if strw == width then str else filler + fixedWidthString reqWidth filler str; - # Format a number adding leading zeroes up to fixed width + + # Format a number adding leading zeroes up to fixed width. fixedWidthNumber = width: n: fixedWidthString width "0" (toString n); + + + # Check whether a value is a store path. + isStorePath = x: builtins.substring 0 1 (toString x) == "/" && dirOf (builtins.toPath x) == builtins.storeDir; + } diff --git a/lib/tests/modules.sh b/lib/tests/modules.sh index 66c6f560fbe..65de8e378c7 100755 --- a/lib/tests/modules.sh +++ b/lib/tests/modules.sh @@ -12,7 +12,7 @@ evalConfig() { local attr=$1 shift; local script="import ./default.nix { modules = [ $@ ];}" - nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only + nix-instantiate --timeout 1 -E "$script" -A "$attr" --eval-only --show-trace } reportFailure() { @@ -100,7 +100,15 @@ checkConfigOutput 'true' "$@" ./define-enable.nix ./define-loaOfSub-foo-if-enabl checkConfigOutput 'true' "$@" ./define-enable.nix ./define-loaOfSub-foo-enable-if.nix # Check _module.args. 
-checkConfigOutput "true" config.enable ./declare-enable.nix ./custom-arg-define-enable.nix +set -- config.enable ./declare-enable.nix ./define-enable-with-custom-arg.nix +checkConfigError 'while evaluating the module argument .*custom.* in .*define-enable-with-custom-arg.nix.*:' "$@" +checkConfigOutput "true" "$@" ./define-_module-args-custom.nix + +# Check that using _module.args on imports cause infinite recursions, with +# the proper error context. +set -- "$@" ./define-_module-args-custom.nix ./import-custom-arg.nix +checkConfigError 'while evaluating the module argument .*custom.* in .*import-custom-arg.nix.*:' "$@" +checkConfigError 'infinite recursion encountered' "$@" # Check _module.check. set -- config.enable ./declare-enable.nix ./define-enable.nix ./define-loaOfSub-foo.nix diff --git a/lib/tests/modules/custom-arg-define-enable.nix b/lib/tests/modules/custom-arg-define-enable.nix deleted file mode 100644 index f04d30dd9b9..00000000000 --- a/lib/tests/modules/custom-arg-define-enable.nix +++ /dev/null @@ -1,8 +0,0 @@ -{ lib, custom, ... }: - -{ - config = { - _module.args.custom = true; - enable = custom; - }; -} diff --git a/lib/tests/modules/define-_module-args-custom.nix b/lib/tests/modules/define-_module-args-custom.nix new file mode 100644 index 00000000000..e565fd215a5 --- /dev/null +++ b/lib/tests/modules/define-_module-args-custom.nix @@ -0,0 +1,7 @@ +{ lib, ... }: + +{ + config = { + _module.args.custom = true; + }; +} diff --git a/lib/tests/modules/define-enable-with-custom-arg.nix b/lib/tests/modules/define-enable-with-custom-arg.nix new file mode 100644 index 00000000000..7da74671d14 --- /dev/null +++ b/lib/tests/modules/define-enable-with-custom-arg.nix @@ -0,0 +1,7 @@ +{ lib, custom, ... }: + +{ + config = { + enable = custom; + }; +} diff --git a/lib/tests/modules/import-custom-arg.nix b/lib/tests/modules/import-custom-arg.nix new file mode 100644 index 00000000000..3e687b661c1 --- /dev/null +++ b/lib/tests/modules/import-custom-arg.nix @@ -0,0 +1,6 @@ +{ lib, custom, ... }: + +{ + imports = [] + ++ lib.optional custom ./define-enable-force.nix; +} diff --git a/lib/trivial.nix b/lib/trivial.nix index 8addde1b86c..9fd5a7e1c57 100644 --- a/lib/trivial.nix +++ b/lib/trivial.nix @@ -22,7 +22,7 @@ rec { inherit (builtins) pathExists readFile isBool isFunction isInt add sub lessThan - seq deepSeq; + seq deepSeq genericClosure; # Return the Nixpkgs version number. nixpkgsVersion = diff --git a/lib/types.nix b/lib/types.nix index f22c7661634..7276f9af9fe 100644 --- a/lib/types.nix +++ b/lib/types.nix @@ -6,7 +6,7 @@ with import ./attrsets.nix; with import ./options.nix; with import ./trivial.nix; with import ./strings.nix; -with {inherit (import ./modules.nix) mergeDefinitions; }; +with {inherit (import ./modules.nix) mergeDefinitions filterOverrides; }; rec { @@ -54,7 +54,7 @@ rec { bool = mkOptionType { name = "boolean"; check = isBool; - merge = loc: fold (x: y: x.value || y) false; + merge = mergeEqualOption; }; int = mkOptionType { @@ -88,20 +88,22 @@ rec { attrs = mkOptionType { name = "attribute set"; check = isAttrs; - merge = loc: fold (def: mergeAttrs def.value) {}; + merge = loc: foldl' (res: def: mergeAttrs res def.value) {}; }; # derivation is a reserved keyword. 
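Since the boolean type above now merges with mergeEqualOption, two modules that disagree on a bool option raise a "conflicting definitions" error instead of silently OR-ing the values. A hedged sketch of that behaviour (the enableFoo option is invented for illustration and assumes <nixpkgs/lib> is available):

```
let
  lib = import <nixpkgs/lib>;
  eval = lib.evalModules {
    modules = [
      { options.enableFoo = lib.mkOption { type = lib.types.bool; default = false; }; }
      { enableFoo = true; }
      { enableFoo = false; }  # conflicts with the definition above
    ];
  };
in eval.config.enableFoo
```

With the previous `fold (x: y: x.value || y) false` merge this evaluated to true; now it fails with the conflicting-definitions error, and removing either definition makes it evaluate again.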
package = mkOptionType { name = "derivation"; - check = isDerivation; - merge = mergeOneOption; + check = x: isDerivation x || isStorePath x; + merge = loc: defs: + let res = mergeOneOption loc defs; + in if isDerivation res then res else toDerivation res; }; path = mkOptionType { name = "path"; # Hacky: there is no ‘isPath’ primop. - check = x: builtins.unsafeDiscardStringContext (builtins.substring 0 1 (toString x)) == "/"; + check = x: builtins.substring 0 1 (toString x) == "/"; merge = mergeOneOption; }; @@ -164,6 +166,23 @@ rec { substSubModules = m: loaOf (elemType.substSubModules m); }; + # List or element of ... + loeOf = elemType: mkOptionType { + name = "element or list of ${elemType.name}s"; + check = x: isList x || elemType.check x; + merge = loc: defs: + let + defs' = filterOverrides defs; + res = (head defs').value; + in + if isList res then concatLists (getValues defs') + else if lessThan 1 (length defs') then + throw "The option `${showOption loc}' is defined multiple times, in ${showFiles (getFiles defs)}." + else if !isString res then + throw "The option `${showOption loc}' does not have a string value, in ${showFiles (getFiles defs)}." + else res; + }; + uniq = elemType: mkOptionType { inherit (elemType) name check; merge = mergeOneOption; diff --git a/maintainers/scripts/gnome-latest.sh b/maintainers/scripts/gnome-latest.sh deleted file mode 100755 index 9290b6deaff..00000000000 --- a/maintainers/scripts/gnome-latest.sh +++ /dev/null @@ -1,95 +0,0 @@ -#!/bin/sh - -GNOME_FTP="ftp.gnome.org/pub/GNOME/sources" - -project=$1 - -if [ "$project" == "--help" ]; then - echo "Usage: $0 project [major.minor]" - exit 0 -fi - -baseVersion=$2 - -if [ -z "$project" ]; then - echo "No project specified, exiting" - exit 1 -fi - -# curl -l ftp://... doesn't work from my office in HSE, and I don't want to have -# any conversations with sysadmin. Somehow lftp works. -if [ "$FTP_CLIENT" = "lftp" ]; then - ls_ftp() { - lftp -c "open $1; cls" - } -else - ls_ftp() { - curl -l "$1"/ - } -fi - -if [ -z "$baseVersion" ]; then - echo "Looking for available versions..." >&2 - available_baseversions=( `ls_ftp ftp://${GNOME_FTP}/${project} | grep '[0-9]\.[0-9]' | sort -t. -k1,1n -k 2,2n` ) - echo -e "The following versions are available:\n ${available_baseversions[@]}" >&2 - echo -en "Choose one of them: " >&2 - read baseVersion -fi - -FTPDIR="${GNOME_FTP}/${project}/${baseVersion}" - -#version=`curl -l ${FTPDIR}/ 2>/dev/null | grep LATEST-IS | sed -e s/LATEST-IS-//` -# gnome's LATEST-IS is broken. Do not trust it. - -files=$(ls_ftp "${FTPDIR}") -declare -A versions - -for f in $files; do - case $f in - (LATEST-IS-*|*.news|*.changes|*.sha256sum|*.diff*): - ;; - ($project-*.*.9*.tar.*): - tmp=${f#$project-} - tmp=${tmp%.tar*} - echo "Ignored unstable version ${tmp}" >&2 - ;; - ($project-*.tar.*): - tmp=${f#$project-} - tmp=${tmp%.tar*} - versions[${tmp}]=1 - ;; - (*): - echo "UNKNOWN FILE $f" - ;; - esac -done -echo "Found versions ${!versions[@]}" >&2 -version=`echo ${!versions[@]} | sed -e 's/ /\n/g' | sort -t. -k1,1n -k 2,2n -k 3,3n | tail -n1` -echo "Latest version is: ${version}" >&2 - -name=${project}-${version} -echo "Fetching .sha256 file" >&2 -curl -O http://${FTPDIR}/${name}.sha256sum - -extensions=( "xz" "bz2" "gz" ) -echo "Choosing archive extension (known are ${extensions[@]})..." 
>&2 -for ext in ${extensions[@]}; do - if grep "\\.tar\\.${ext}$" ${name}.sha256sum >& /dev/null; then - ext_pref=$ext - sha256=$(grep "\\.tar\\.${ext}$" ${name}.sha256sum | cut -f1 -d\ ) - break - fi -done -sha256=`nix-hash --to-base32 --type sha256 $sha256` -echo "Chosen ${ext_pref}, hash is ${sha256}" >&2 - -cat <&2 diff --git a/maintainers/scripts/gnome.sh b/maintainers/scripts/gnome.sh new file mode 100755 index 00000000000..9398331d0d6 --- /dev/null +++ b/maintainers/scripts/gnome.sh @@ -0,0 +1,194 @@ +#!/usr/bin/env bash + +set -o pipefail + +GNOME_FTP="ftp.gnome.org/pub/GNOME/sources" + +# projects that don't follow the GNOME major versioning, or that we don't want to +# programmatically update +NO_GNOME_MAJOR="gtkhtml gdm" + +usage() { + echo "Usage: $0 gnome_dir || [major.minor]" >&2 + echo "gnome_dir is for example pkgs/desktops/gnome-3/3.18" >&2 + exit 0 +} + +if [ "$#" -lt 2 ]; then + usage +fi + +GNOME_TOP="$1" +shift + +action="$1" + +# curl -l ftp://... doesn't work from my office in HSE, and I don't want to have +# any conversations with sysadmin. Somehow lftp works. +if [ "$FTP_CLIENT" = "lftp" ]; then + ls_ftp() { + lftp -c "open $1; cls" + } +else + ls_ftp() { + curl -s -l "$1"/ + } +fi + +find_project() { + exec find "$GNOME_TOP" -mindepth 2 -maxdepth 2 -type d $@ +} + +show_project() { + local project="$1" + local majorVersion="$2" + local version="" + + if [ -z "$majorVersion" ]; then + echo "Looking for available versions..." >&2 + local available_baseversions=( `ls_ftp ftp://${GNOME_FTP}/${project} | grep '[0-9]\.[0-9]' | sort -t. -k1,1n -k 2,2n` ) + if [ "$?" -ne "0" ]; then + echo "Project $project not found" >&2 + return 1 + fi + + echo -e "The following versions are available:\n ${available_baseversions[@]}" >&2 + echo -en "Choose one of them: " >&2 + read majorVersion + fi + + if echo "$majorVersion" | grep -q "[0-9]\+\.[0-9]\+\.[0-9]\+"; then + # not a major version + version="$majorVersion" + majorVersion=$(echo "$majorVersion" | cut -d '.' -f 1,2) + fi + + local FTPDIR="${GNOME_FTP}/${project}/${majorVersion}" + + #version=`curl -l ${FTPDIR}/ 2>/dev/null | grep LATEST-IS | sed -e s/LATEST-IS-//` + # gnome's LATEST-IS is broken. Do not trust it. + + if [ -z "$version" ]; then + local files=$(ls_ftp "${FTPDIR}") + declare -A versions + + for f in $files; do + case $f in + (LATEST-IS-*|*.news|*.changes|*.sha256sum|*.diff*): + ;; + ($project-*.*.9*.tar.*): + tmp=${f#$project-} + tmp=${tmp%.tar*} + echo "Ignored unstable version ${tmp}" >&2 + ;; + ($project-*.tar.*): + tmp=${f#$project-} + tmp=${tmp%.tar*} + versions[${tmp}]=1 + ;; + (*): + echo "UNKNOWN FILE $f" >&2 + ;; + esac + done + echo "Found versions ${!versions[@]}" >&2 + version=`echo ${!versions[@]} | sed -e 's/ /\n/g' | sort -t. -k1,1n -k 2,2n -k 3,3n | tail -n1` + if [ -z "$version" ]; then + echo "No version available for major $majorVersion" >&2 + return 1 + fi + + echo "Latest version is: ${version}" >&2 + fi + + local name=${project}-${version} + echo "Fetching .sha256 file" >&2 + local sha256out=$(curl -s -f http://${FTPDIR}/${name}.sha256sum) + + if [ "$?" -ne "0" ]; then + echo "Version not found" >&2 + return 1 + fi + + extensions=( "xz" "bz2" "gz" ) + echo "Choosing archive extension (known are ${extensions[@]})..." 
>&2 + for ext in ${extensions[@]}; do + if echo -e "$sha256out" | grep -q "\\.tar\\.${ext}$"; then + ext_pref=$ext + sha256=$(echo -e "$sha256out" | grep "\\.tar\\.${ext}$" | cut -f1 -d\ ) + break + fi + done + echo "Chosen ${ext_pref}, hash is ${sha256}" >&2 + + echo "# Autogenerated by maintainers/scripts/gnome.sh update + +fetchurl: { + name = \"${project}-${version}\"; + + src = fetchurl { + url = mirror://gnome/sources/${project}/${majorVersion}/${project}-${version}.tar.${ext_pref}; + sha256 = \"${sha256}\"; + }; +}" + + return 0 +} + +update_project() { + local project="$1" + local majorVersion="$2" + + # find project in nixpkgs tree + projectPath=$(find_project -name "$project" -print) + if [ -z "$projectPath" ]; then + echo "Project $project not found under $GNOME_TOP" + exit 1 + fi + + src=$(show_project "$project" "$majorVersion") + + if [ "$?" -eq "0" ]; then + echo "Updating $projectPath/src.nix" >&2 + echo -e "$src" > "$projectPath/src.nix" + fi + + return 0 +} + +if [ "$action" == "update-all" ]; then + majorVersion="$2" + if [ -z "$majorVersion" ]; then + echo "No major version specified" >&2 + usage + fi + + # find projects + projects=$(find_project -exec basename '{}' \;) + for project in $projects; do + if echo "$NO_GNOME_MAJOR"|grep -q $project; then + echo "Skipping $project" + else + echo "= Updating $project to $majorVersion" >&2 + update_project $project $majorVersion + echo >&2 + fi + done +else + project="$2" + majorVersion="$3" + + if [ -z "$project" ]; then + echo "No project specified, exiting" >&2 + usage + fi + + if [ "$action" == "show" ]; then + show_project $project $majorVersion + elif [ "$action" == "update" ]; then + update_project $project $majorVersion + else + echo "Unknown action $action" >&2 + usage + fi +fi diff --git a/maintainers/scripts/hydra_eval_check b/maintainers/scripts/hydra_eval_check index e16a40455a3..c8e03424f32 100755 --- a/maintainers/scripts/hydra_eval_check +++ b/maintainers/scripts/hydra_eval_check @@ -6,6 +6,7 @@ hydra_eval_jobs \ --argstr system i686-linux \ --argstr system x86_64-darwin \ --argstr system i686-cygwin \ + --argstr system x86_64-cygwin \ --argstr system i686-freebsd \ --arg officialRelease false \ --arg nixpkgs "{ outPath = builtins.storePath ./. ; rev = 1234; }" \ diff --git a/maintainers/scripts/travis-nox-review-pr.sh b/maintainers/scripts/travis-nox-review-pr.sh index 3c3f9151d83..54549955747 100755 --- a/maintainers/scripts/travis-nox-review-pr.sh +++ b/maintainers/scripts/travis-nox-review-pr.sh @@ -31,7 +31,15 @@ elif [[ $1 == build ]]; then echo "=== Not a pull request" else echo "=== Checking PR" - nox-review pr ${TRAVIS_PULL_REQUEST} + + if ! nox-review pr ${TRAVIS_PULL_REQUEST}; then + if sudo dmesg | egrep 'Out of memory|Killed process' > /tmp/oom-log; then + echo "=== The build failed due to running out of memory:" + cat /tmp/oom-log + echo "=== Please disregard the result of this Travis build." + fi + exit 1 + fi fi # echo "=== Checking tarball creation" # nix-build pkgs/top-level/release.nix -A tarball diff --git a/nixos/doc/manual/configuration/ad-hoc-packages.xml b/nixos/doc/manual/configuration/ad-hoc-packages.xml index e237e20c4ff..a147291c4f3 100644 --- a/nixos/doc/manual/configuration/ad-hoc-packages.xml +++ b/nixos/doc/manual/configuration/ad-hoc-packages.xml @@ -11,7 +11,7 @@ uninstall packages from the command line. 
For instance, to install Mozilla Thunderbird: -$ nix-env -iA nixos.pkgs.thunderbird +$ nix-env -iA nixos.thunderbird If you invoke this as root, the package is installed in the Nix profile /nix/var/nix/profiles/default and visible diff --git a/nixos/doc/manual/configuration/declarative-packages.xml b/nixos/doc/manual/configuration/declarative-packages.xml index 6de38b452e2..dc2fa715097 100644 --- a/nixos/doc/manual/configuration/declarative-packages.xml +++ b/nixos/doc/manual/configuration/declarative-packages.xml @@ -23,13 +23,13 @@ Nixpkgs will be built or downloaded as part of the system when you run You can get a list of the available packages as follows: $ nix-env -qaP '*' --description -nixos.pkgs.firefox firefox-23.0 Mozilla Firefox - the browser, reloaded +nixos.firefox firefox-23.0 Mozilla Firefox - the browser, reloaded ... The first column in the output is the attribute name, such as -nixos.pkgs.thunderbird. (The +nixos.thunderbird. (The nixos prefix allows distinguishing between different channels that you might have.) diff --git a/nixos/doc/manual/configuration/x-windows.xml b/nixos/doc/manual/configuration/x-windows.xml index 95e66f0c70c..7f43acab2c3 100644 --- a/nixos/doc/manual/configuration/x-windows.xml +++ b/nixos/doc/manual/configuration/x-windows.xml @@ -61,6 +61,12 @@ by default because it’s not free software. You can enable it as follows: services.xserver.videoDrivers = [ "nvidia" ]; +Or if you have an older card, you may have to use one of the legacy drivers: + +services.xserver.videoDrivers = [ "nvidiaLegacy340" ]; +services.xserver.videoDrivers = [ "nvidiaLegacy304" ]; +services.xserver.videoDrivers = [ "nvidiaLegacy173" ]; + You may need to reboot after enabling this driver to prevent a clash with other kernel modules. diff --git a/nixos/doc/manual/default.nix b/nixos/doc/manual/default.nix index fdfeb5ca07c..87964e27bb9 100644 --- a/nixos/doc/manual/default.nix +++ b/nixos/doc/manual/default.nix @@ -31,10 +31,8 @@ let else fn; - # Convert the list of options into an XML file. The builtin - # unsafeDiscardStringContext is used to prevent the realisation of - # the store paths which are used in options definitions. - optionsXML = builtins.toFile "options.xml" (builtins.unsafeDiscardStringContext (builtins.toXML optionsList')); + # Convert the list of options into an XML file. + optionsXML = builtins.toFile "options.xml" (builtins.toXML optionsList'); optionsDocBook = runCommand "options-db.xml" {} '' optionsXML=${optionsXML} @@ -61,6 +59,16 @@ let echo "${version}" > version ''; + toc = builtins.toFile "toc.xml" + '' + + + + + + + ''; + in rec { # The NixOS options in JSON format. 
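The nix-env queries above have a declarative counterpart in configuration.nix. A short sketch using environment.systemPackages, the option this manual section is about (the chosen packages are only an example):

```
{ pkgs, ... }:

{
  # Attribute names match what `nix-env -qaP` prints, minus the channel prefix.
  environment.systemPackages = [
    pkgs.thunderbird
    pkgs.firefox
  ];
}
```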
@@ -113,9 +121,10 @@ in rec { --param chunk.section.depth 0 \ --param chunk.first.sections 1 \ --param use.id.as.filename 1 \ - --stringparam generate.toc "book toc chapter toc appendix toc" \ + --stringparam generate.toc "book toc appendix toc" \ + --stringparam chunk.toc ${toc} \ --nonet --xinclude --output $dst/ \ - ${docbook5_xsl}/xml/xsl/docbook/xhtml/chunkfast.xsl ./manual.xml + ${docbook5_xsl}/xml/xsl/docbook/xhtml/chunktoc.xsl ./manual.xml mkdir -p $dst/images/callouts cp ${docbook5_xsl}/xml/xsl/docbook/images/callouts/*.gif $dst/images/callouts/ @@ -128,6 +137,8 @@ in rec { ''; # */ meta.description = "The NixOS manual in HTML format"; + + allowedReferences = ["out"]; }; manualPDF = stdenv.mkDerivation { @@ -135,12 +146,9 @@ in rec { inherit sources; - buildInputs = [ libxml2 libxslt dblatex tetex ]; + buildInputs = [ libxml2 libxslt dblatex dblatex.tex ]; buildCommand = '' - # TeX needs a writable font cache. - export VARTEXFONTS=$TMPDIR/texfonts - ${copySources} dst=$out/share/doc/nixos @@ -151,7 +159,7 @@ in rec { mkdir -p $out/nix-support echo "doc-pdf manual $dst/manual.pdf" >> $out/nix-support/hydra-build-products - ''; # */ + ''; }; # Generate the NixOS manpages. @@ -179,6 +187,8 @@ in rec { ${docbook5_xsl}/xml/xsl/docbook/manpages/docbook.xsl \ ./man-pages.xml ''; + + allowedReferences = ["out"]; }; } diff --git a/nixos/doc/manual/development/option-declarations.xml b/nixos/doc/manual/development/option-declarations.xml index 6d93dc5c009..ea5d1241876 100644 --- a/nixos/doc/manual/development/option-declarations.xml +++ b/nixos/doc/manual/development/option-declarations.xml @@ -106,6 +106,15 @@ options = { + + types.package + + A derivation (such as pkgs.hello) or a + store path (such as + /nix/store/1ifi1cfbfs5iajmvwgrbmrnrw3a147h9-hello-2.10). + + + types.listOf t @@ -138,4 +147,4 @@ You can also create new types using the function mkOptionType. See lib/types.nix in Nixpkgs for details. -
\ No newline at end of file +
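The new types.package described above accepts either a derivation or a plain store path, coercing the latter with toDerivation in its merge function. A minimal, hypothetical module using it (the services.myService path and the hello default are invented for illustration):

```
{ lib, pkgs, ... }:

{
  options.services.myService.package = lib.mkOption {
    type = lib.types.package;
    default = pkgs.hello;
    description = "Package that provides the myService binaries.";
  };
}
```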
diff --git a/nixos/doc/manual/development/sources.xml b/nixos/doc/manual/development/sources.xml index 3ac07da19f1..879a31e32c5 100644 --- a/nixos/doc/manual/development/sources.xml +++ b/nixos/doc/manual/development/sources.xml @@ -24,6 +24,9 @@ $ mkdir -p /my/sources $ cd /my/sources $ nix-env -i git $ git clone git://github.com/NixOS/nixpkgs.git +$ cd nixpkgs +$ git remote add channels git://github.com/NixOS/nixpkgs-channels.git +$ git remote update channels This will check out the latest NixOS sources to @@ -31,7 +34,12 @@ This will check out the latest NixOS sources to and the Nixpkgs sources to /my/sources/nixpkgs. (The NixOS source tree lives in a subdirectory of the Nixpkgs -repository.) +repository.) The remote channels refers to a +read-only repository that tracks the Nixpkgs/NixOS channels (see for more information about channels). Thus, +the Git branch channels/nixos-14.12 will contain +the latest built and tested version available in the +nixos-14.12 channel. It’s often inconvenient to develop directly on the master branch, since if somebody has just committed (say) a change to GCC, @@ -40,28 +48,32 @@ rebuild everything from source. So you may want to create a local branch based on your current NixOS version: -$ /my/sources/nixpkgs/maintainers/scripts/update-channel-branches.sh -Fetching channels from https://nixos.org/channels: - * [new branch] cbe467e -> channels/remotes/nixos-unstable -Fetching channels from nixos-version: - * [new branch] 9ff4738 -> channels/current-system -Fetching channels from ~/.nix-defexpr: - * [new branch] 0d4acad -> channels/root/nixos -$ git checkout -b local channels/current-system +$ nixos-version +14.04.273.ea1952b (Baboon) + +$ git checkout -b local ea1952b -Or, to base your local branch on the latest version available in the +Or, to base your local branch on the latest version available in a NixOS channel: -$ /my/sources/nixpkgs/maintainers/scripts/update-channel-branches.sh -$ git checkout -b local channels/remotes/nixos-unstable +$ git remote update channels +$ git checkout -b local channels/nixos-14.12 -You can then use git rebase to sync your local -branch with the upstream branch, and use git -cherry-pick to copy commits from your local branch to the -upstream branch. +(Replace nixos-14.12 with the name of the channel +you want to use.) You can use git merge or +git rebase to keep your local branch in sync with +the channel, e.g. + + +$ git remote update channels +$ git merge channels/nixos-14.12 + + +You can use git cherry-pick to copy commits from +your local branch to the upstream branch. 
If you want to rebuild your system using your (modified) sources, you need to tell nixos-rebuild about them diff --git a/nixos/doc/manual/development/writing-modules.xml b/nixos/doc/manual/development/writing-modules.xml index 9cf29e5dc57..a699e74e5f6 100644 --- a/nixos/doc/manual/development/writing-modules.xml +++ b/nixos/doc/manual/development/writing-modules.xml @@ -158,7 +158,7 @@ let locatedb = "/var/cache/locatedb"; in script = '' mkdir -m 0755 -p $(dirname ${locatedb}) - exec updatedb --localuser=nobody --output=${locatedb} --prunepaths='/tmp /var/tmp /media /run' + exec updatedb --localuser=nobody --output=${locatedb} --prunepaths='/tmp /var/tmp /run' ''; }; @@ -172,4 +172,4 @@ let locatedb = "/var/cache/locatedb"; in - \ No newline at end of file + diff --git a/nixos/doc/manual/development/writing-nixos-tests.xml b/nixos/doc/manual/development/writing-nixos-tests.xml index bbb655eed2a..b9da712b86f 100644 --- a/nixos/doc/manual/development/writing-nixos-tests.xml +++ b/nixos/doc/manual/development/writing-nixos-tests.xml @@ -154,6 +154,15 @@ startAll; log. + + getScreenText + Return a textual representation of what is currently + visible on the machine's screen using optical character + recognition. + This requires passing to the test + attribute set. + + sendMonitorCommand Send a command to the QEMU monitor. This is rarely @@ -237,6 +246,15 @@ startAll; connections. + + waitForText + Wait until the supplied regular expressions matches + the textual contents of the screen by using optical character recognition + (see getScreenText). + This requires passing to the test + attribute set. + + waitForWindow Wait until an X11 window has appeared whose name diff --git a/nixos/doc/manual/installation/installing-uefi.xml b/nixos/doc/manual/installation/installing-uefi.xml index dbd5606c4a5..90d18695447 100644 --- a/nixos/doc/manual/installation/installing-uefi.xml +++ b/nixos/doc/manual/installation/installing-uefi.xml @@ -41,10 +41,6 @@ changes: and as well. - - To see console messages during early boot, add "fbcon" - to your . - diff --git a/nixos/doc/manual/installation/installing-usb.xml b/nixos/doc/manual/installation/installing-usb.xml index 97e3d2eaa1c..5def6e8753f 100644 --- a/nixos/doc/manual/installation/installing-usb.xml +++ b/nixos/doc/manual/installation/installing-usb.xml @@ -6,8 +6,8 @@ Booting from a USB Drive -For systems without CD drive, the NixOS livecd can be booted from -a usb stick. For non-UEFI installations, +For systems without CD drive, the NixOS live CD can be booted from +a USB stick. For non-UEFI installations, unetbootin will work. For UEFI installations, you should mount the ISO, copy its contents verbatim to your drive, then either: diff --git a/nixos/doc/manual/installation/installing.xml b/nixos/doc/manual/installation/installing.xml index c21759bc926..e40c15e8316 100644 --- a/nixos/doc/manual/installation/installing.xml +++ b/nixos/doc/manual/installation/installing.xml @@ -120,7 +120,11 @@ $ nixos-generate-config --root /mnt $ nano /mnt/etc/nixos/configuration.nix - The vim text editor is also available. + If you’re using the graphical ISO image, other editors may be + available (such as vim). If you have network + access, you can also install other editors — for instance, you can + install Emacs by running nix-env -i + emacs. You must set the option to specify on which disk @@ -189,11 +193,13 @@ $ reboot - You should now be able to boot into the installed NixOS. The GRUB boot menu shows a list - of available configurations (initially just one). 
Every time - you change the NixOS configuration (seeChanging - Configuration ), a new item appears in the menu. This allows you to - easily roll back to another configuration if something goes wrong. + You should now be able to boot into the installed NixOS. The + GRUB boot menu shows a list of available + configurations (initially just one). Every time you + change the NixOS configuration (see Changing Configuration ), a + new item is added to the menu. This allows you to easily roll back + to a previous configuration if something goes wrong. You should log in and change the root password with passwd. diff --git a/nixos/doc/manual/installation/upgrading.xml b/nixos/doc/manual/installation/upgrading.xml index 5a9d1f24f7c..c4812cc637c 100644 --- a/nixos/doc/manual/installation/upgrading.xml +++ b/nixos/doc/manual/installation/upgrading.xml @@ -107,4 +107,30 @@ newer Nix version, which may involve an upgrade of Nix’s database schema. This cannot be undone easily, so in that case you will not be able to go back to your original channel. + +
Automatic Upgrades + +You can keep a NixOS system up-to-date automatically by adding +the following to configuration.nix: + + +system.autoUpgrade.enable = true; + + +This enables a periodically executed systemd service named +nixos-upgrade.service. It runs +nixos-rebuild switch --upgrade to upgrade NixOS to +the latest version in the current channel. (To see when the service +runs, see systemctl list-timers.) You can also +specify a channel explicitly, e.g. + + +system.autoUpgrade.channel = https://nixos.org/channels/nixos-15.09; + + + + +
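A configuration.nix fragment corresponding to the options described above; the channel URL is simply the example already used in this section, and quoting it as a string is an assumption about the option's type:

```
{ config, pkgs, ... }:

{
  # Runs `nixos-rebuild switch --upgrade` periodically via nixos-upgrade.service.
  system.autoUpgrade.enable  = true;
  # Optionally pin the channel that the upgrade pulls from.
  system.autoUpgrade.channel = "https://nixos.org/channels/nixos-15.09";
}
```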
+ + diff --git a/nixos/doc/manual/manual.xml b/nixos/doc/manual/manual.xml index f6aedfd6b66..736d1d4eff7 100644 --- a/nixos/doc/manual/manual.xml +++ b/nixos/doc/manual/manual.xml @@ -2,7 +2,7 @@ xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xi="http://www.w3.org/2001/XInclude" version="5.0" - xml:id="NixOSManual"> + xml:id="book-nixos-manual"> NixOS Manual @@ -33,11 +33,12 @@ - Configuration Options + + diff --git a/nixos/doc/manual/options-to-docbook.xsl b/nixos/doc/manual/options-to-docbook.xsl index af9eb0e48fb..cd30ae36ae5 100644 --- a/nixos/doc/manual/options-to-docbook.xsl +++ b/nixos/doc/manual/options-to-docbook.xsl @@ -25,61 +25,65 @@ - + - + - - - + + + - - - Type: - - - - + + + Type: + + + + + (read only) + + + - - - Default: - - - - + + + Default: + + + + - - - Example: - - - - - - - - - - - + + + Example: + + + + + + + + + + + - - - Declared by: - - - + + + Declared by: + + + - - - Defined by: - - - + + + Defined by: + + + - +
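The stylesheet changes above render the readOnly flag that lib/options.nix and lib/modules.nix gain in this commit; an option marked read-only may only be set once. A sketch of such a declaration (the option path and default are invented):

```
{ lib, ... }:

{
  options.system.releaseCodeName = lib.mkOption {
    type = lib.types.str;
    default = "Dingo";
    # Any further definition now fails with:
    #   "The option `...' is read-only, but it's set multiple times."
    readOnly = true;
    description = "Code name of the release; other modules may read but not redefine it.";
  };
}
```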
diff --git a/nixos/doc/manual/release-notes/release-notes.xml b/nixos/doc/manual/release-notes/release-notes.xml index a988a5b0199..6ed99315a7a 100644 --- a/nixos/doc/manual/release-notes/release-notes.xml +++ b/nixos/doc/manual/release-notes/release-notes.xml @@ -1,19 +1,18 @@ - + Release Notes - This section lists the release notes for each stable version of NixOS and current unstable revision. - + - + diff --git a/nixos/doc/manual/release-notes/rl-1310.xml b/nixos/doc/manual/release-notes/rl-1310.xml index 234fb5a643f..583912d7073 100644 --- a/nixos/doc/manual/release-notes/rl-1310.xml +++ b/nixos/doc/manual/release-notes/rl-1310.xml @@ -1,11 +1,11 @@ - +
Release 13.10 (“Aardvark”, 2013/10/31) This is the first stable release branch of NixOS. - \ No newline at end of file +
diff --git a/nixos/doc/manual/release-notes/rl-1404.xml b/nixos/doc/manual/release-notes/rl-1404.xml index 74af1ed1274..36f67ed88b0 100644 --- a/nixos/doc/manual/release-notes/rl-1404.xml +++ b/nixos/doc/manual/release-notes/rl-1404.xml @@ -1,8 +1,8 @@ - +
Release 14.04 (“Baboon”, 2014/04/30) @@ -157,4 +157,4 @@ networking.firewall.enable = false; - \ No newline at end of file +
diff --git a/nixos/doc/manual/release-notes/rl-1412.xml b/nixos/doc/manual/release-notes/rl-1412.xml index 36ae48af759..42b51cd4a8e 100644 --- a/nixos/doc/manual/release-notes/rl-1412.xml +++ b/nixos/doc/manual/release-notes/rl-1412.xml @@ -1,8 +1,8 @@ - +
Release 14.12 (“Caterpillar”, 2014/12/30) @@ -174,4 +174,4 @@ now. - +
diff --git a/nixos/doc/manual/release-notes/rl-1509.xml b/nixos/doc/manual/release-notes/rl-1509.xml new file mode 100644 index 00000000000..a68baa0d807 --- /dev/null +++ b/nixos/doc/manual/release-notes/rl-1509.xml @@ -0,0 +1,491 @@ +
+ +Release 15.09 (“Dingo”, 2015/09/30) + +In addition to numerous new and upgraded packages, this release +has the following highlights: + + + + + The Haskell + packages infrastructure has been re-designed from the ground up + ("Haskell NG"). NixOS now distributes the latest version + of every single package registered on Hackage -- well in + excess of 8,000 Haskell packages. Detailed instructions on how to + use that infrastructure can be found in the User's + Guide to the Haskell Infrastructure. Users migrating from an + earlier release may find helpful information below, in the list of + backwards-incompatible changes. Furthermore, we distribute 51(!) + additional Haskell package sets that provide every single LTS Haskell release + since version 0.0 as well as the most recent Stackage Nightly + snapshot. The announcement "Full + Stackage Support in Nixpkgs" gives additional + details. + + + + Nix has been updated to version 1.10, which among other + improvements enables cryptographic signatures on binary caches for + improved security. + + + + You can now keep your NixOS system up to date automatically + by setting + + +system.autoUpgrade.enable = true; + + + This will cause the system to periodically check for updates in + your current channel and run nixos-rebuild. + + + + This release is based on Glibc 2.21, GCC 4.9 and Linux + 3.18. + + + + GNOME has been upgraded to 3.16. + + + + + Xfce has been upgraded to 4.12. + + + + + KDE 5 has been upgraded to KDE Frameworks 5.10, + Plasma 5.3.2 and Applications 15.04.3. + KDE 4 has been updated to kdelibs-4.14.10. + + + + + E19 has been upgraded to 0.16.8.15. + + + + + + +The following new services were added since the last release: + + + services/mail/exim.nix + services/misc/apache-kafka.nix + services/misc/canto-daemon.nix + services/misc/confd.nix + services/misc/devmon.nix + services/misc/gitit.nix + services/misc/ihaskell.nix + services/misc/mbpfan.nix + services/misc/mediatomb.nix + services/misc/mwlib.nix + services/misc/parsoid.nix + services/misc/plex.nix + services/misc/ripple-rest.nix + services/misc/ripple-data-api.nix + services/misc/subsonic.nix + services/misc/sundtek.nix + services/monitoring/cadvisor.nix + services/monitoring/das_watchdog.nix + services/monitoring/grafana.nix + services/monitoring/riemann-tools.nix + services/monitoring/teamviewer.nix + services/network-filesystems/u9fs.nix + services/networking/aiccu.nix + services/networking/asterisk.nix + services/networking/bird.nix + services/networking/charybdis.nix + services/networking/docker-registry-server.nix + services/networking/fan.nix + services/networking/firefox/sync-server.nix + services/networking/gateone.nix + services/networking/heyefi.nix + services/networking/i2p.nix + services/networking/lambdabot.nix + services/networking/mstpd.nix + services/networking/nix-serve.nix + services/networking/nylon.nix + services/networking/racoon.nix + services/networking/skydns.nix + services/networking/shout.nix + services/networking/softether.nix + services/networking/sslh.nix + services/networking/tinc.nix + services/networking/tlsdated.nix + services/networking/tox-bootstrapd.nix + services/networking/tvheadend.nix + services/networking/zerotierone.nix + services/scheduling/marathon.nix + services/security/fprintd.nix + services/security/hologram.nix + services/security/munge.nix + services/system/cloud-init.nix + services/web-servers/shellinabox.nix + services/web-servers/uwsgi.nix + services/x11/unclutter.nix + services/x11/display-managers/sddm.nix + 
system/boot/coredump.nix + system/boot/loader/loader.nix + system/boot/loader/generic-extlinux-compatible + system/boot/networkd.nix + system/boot/resolved.nix + system/boot/timesyncd.nix + tasks/filesystems/exfat.nix + tasks/filesystems/ntfs.nix + tasks/filesystems/vboxsf.nix + virtualisation/virtualbox-host.nix + virtualisation/vmware-guest.nix + virtualisation/xen-dom0.nix + + + + +When upgrading from a previous release, please be aware of the +following incompatible changes: + + + +sshd no longer supports DSA and ECDSA +host keys by default. If you have existing systems with such host keys +and want to continue to use them, please set + + +system.stateVersion = "14.12"; + + +The new option ensures that +certain configuration changes that could break existing systems (such +as the sshd host key setting) will maintain +compatibility with the specified NixOS release. NixOps sets the state +version of existing deployments automatically. + +cron is no longer enabled by +default, unless you have a non-empty +. To force +cron to be enabled, set +. + +Nix now requires binary caches to be cryptographically +signed. If you have unsigned binary caches that you want to continue +to use, you should set . + +Steam now doesn't need root rights to work. Instead of using +*-steam-chrootenv, you should now just run steam. +steamChrootEnv package was renamed to steam, +and old steam package -- to steamOriginal. + + +CMPlayer has been renamed to bomi upstream. Package +cmplayer was accordingly renamed to +bomi + +Atom Shell has been renamed to Electron upstream. Package atom-shell +was accordingly renamed to electron + + +Elm is not released on Hackage anymore. You should now use elmPackages.elm +which contains the latest Elm platform. + + + The CUPS printing service has been updated to version + 2.0.2. Furthermore its systemd service has been + renamed to cups.service. + + Local printers are no longer shared or advertised by + default. This behavior can be changed by enabling + or + respectively. + + + + + The VirtualBox host and guest options have been named more + consistently. They can now found in + instead of + and + instead of + . + + + + Also, there now is support for the vboxsf file + system using the configuration + attribute. An example of how this can be used in a configuration: + + +fileSystems."/shiny" = { + device = "myshinysharedfolder"; + fsType = "vboxsf"; +}; + + + + + + + + "nix-env -qa" no longer discovers + Haskell packages by name. The only packages visible in the global + scope are ghc, cabal-install, + and stack, but all other packages are hidden. The + reason for this inconvenience is the sheer size of the Haskell + package set. Name-based lookups are expensive, and most + nix-env -qa operations would become much slower + if we'd add the entire Hackage database into the top level attribute + set. Instead, the list of Haskell packages can be displayed by + running: + + +nix-env -f "<nixpkgs>" -qaP -A haskellPackages + + + Executable programs written in Haskell can be installed with: + + +nix-env -f "<nixpkgs>" -iA haskellPackages.pandoc + + + Installing Haskell libraries this way, however, is no + longer supported. See the next item for more details. + + + + + + Previous versions of NixOS came with a feature called + ghc-wrapper, a small script that allowed GHC to + transparently pick up on libraries installed in the user's profile. This + feature has been deprecated; ghc-wrapper was removed + from the distribution. 
The proper way to register Haskell libraries with + the compiler now is the haskellPackages.ghcWithPackages + function. The User's + Guide to the Haskell Infrastructure provides more information about + this subject. + + + + + + All Haskell builds that have been generated with version 1.x of + the cabal2nix utility are now invalid and need + to be re-generated with a current version of + cabal2nix to function. The most recent version + of this tool can be installed by running + nix-env -i cabal2nix. + + + + + + The haskellPackages set in Nixpkgs used to have a + function attribute called extension that users + could override in their ~/.nixpkgs/config.nix + files to configure additional attributes, etc. That function still + exists, but it's now called overrides. + + + + + + The OpenBLAS library has been updated to version + 0.2.14. Support for the + x86_64-darwin platform was added. Dynamic + architecture detection was enabled; OpenBLAS now selects + microarchitecture-optimized routines at runtime, so optimal + performance is achieved without the need to rebuild OpenBLAS + locally. OpenBLAS has replaced ATLAS in most packages which use an + optimized BLAS or LAPACK implementation. + + + + + + The phpfpm is now using the default PHP version + (pkgs.php) instead of PHP 5.4 (pkgs.php54). + + + + + + The locate service no longer indexes the Nix store + by default, preventing packages with potentially numerous versions from + cluttering the output. Indexing the store can be activated by setting + . + + + + + + The Nix expression search path (NIX_PATH) no longer + contains /etc/nixos/nixpkgs by default. You + can override NIX_PATH by setting + . + + + + + + Python 2.6 has been marked as broken (as it no longer recieves + security updates from upstream). + + + + + + Any use of module arguments such as pkgs to access + library functions, or to define imports attributes + will now lead to an infinite loop at the time of the evaluation. + + + + In case of an infinite loop, use the --show-trace + command line argument and read the line just above the error message. + + +$ nixos-rebuild build --show-trace +… +while evaluating the module argument `pkgs' in "/etc/nixos/my-module.nix": +infinite recursion encountered + + + + + + Any use of pkgs.lib, should be replaced by + lib, after adding it as argument of the module. The + following module + + +{ config, pkgs, ... }: + +with pkgs.lib; + +{ + options = { + foo = mkOption { … }; + }; + config = mkIf config.foo { … }; +} + + + should be modified to look like: + + +{ config, pkgs, lib, ... }: + +with lib; + +{ + options = { + foo = mkOption { option declaration }; + }; + config = mkIf config.foo { option definition }; +} + + + + + When pkgs is used to download other projects to + import their modules, and only in such cases, it should be replaced by + (import <nixpkgs> {}). The following module + + +{ config, pkgs, ... }: + +let + myProject = pkgs.fetchurl { + src = url; + sha256 = hash; + }; +in + +{ + imports = [ "${myProject}/module.nix" ]; +} + + + should be modified to look like: + + +{ config, pkgs, ... }: + +let + myProject = (import <nixpkgs> {}).fetchurl { + src = url; + sha256 = hash; + }; +in + +{ + imports = [ "${myProject}/module.nix" ]; +} + + + + + + + + + +Other notable improvements: + + + + The nixos and nixpkgs channels were unified, + so one can use nix-env -iA nixos.bash + instead of nix-env -iA nixos.pkgs.bash. + See the commit for details. 
+ + + + + Users running an SSH server who worry about the quality of their + /etc/ssh/moduli file with respect to the + vulnerabilities + discovered in the Diffie-Hellman key exchange can now + replace OpenSSH's default version with one they generated + themselves using the new + option. + + + + + A newly packaged TeX Live 2015 is provided in pkgs.texlive, + split into 6500 nix packages. For basic user documentation see + the source. + Beware of an issue when installing a too large package set. + + The plan is to deprecate and maybe delete the original TeX packages + until the next release. + + + + on all Python interpreters + is now available for nix-shell interoperability. + + + + + +
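The Haskell notes above point to haskellPackages.ghcWithPackages as the replacement for the removed ghc-wrapper. A minimal sketch of how that function is typically used in a NixOS configuration follows; the library names (mtl, lens) are placeholders, and the User's Guide to the Haskell Infrastructure referenced above remains the authoritative source:

{ pkgs, ... }:

{
  # A GHC that already has the chosen libraries registered, replacing the
  # old ghc-wrapper approach of picking libraries up from the user profile.
  environment.systemPackages = [
    (pkgs.haskellPackages.ghcWithPackages (hs: with hs; [ mtl lens ]))
  ];
}

Because the libraries are baked into the environment produced by ghcWithPackages, GHC finds them without any profile-scanning wrapper.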
diff --git a/nixos/doc/manual/release-notes/rl-unstable.xml b/nixos/doc/manual/release-notes/rl-unstable.xml index cdbd074e782..2745fb2cbe4 100644 --- a/nixos/doc/manual/release-notes/rl-unstable.xml +++ b/nixos/doc/manual/release-notes/rl-unstable.xml @@ -1,55 +1,45 @@ - +
-Unstable revision - -In addition to numerous new and upgraded packages, this release has the following highlights: - - - - -Following new services were added since the last release: - - - +Unstable When upgrading from a previous release, please be aware of the -following incompatible changes: +following incompatible changes: - -Steam now doesn't need root rights to work. Instead of using -*-steam-chrootenv, you should now just run steam. -steamChrootEnv package was renamed to steam, -and old steam package -- to steamOriginal. - - -CMPlayer has been renamed to bomi upstream. Package cmplayer -was accordingly renamed to bomi - - - - - The default NIX_PATH for NixOS now includes - /nix/var/nix/profiles/per-user/root/channels, so it's - easy to add custom channels. + + wmiiSnap has been replaced with + wmii_hg, but + services.xserver.windowManager.wmii.enable has + been updated accordingly, so this only affects you if you have + explicitly installed wmiiSnap. - - Moreover, whenever a nixos-rebuild <action> - --upgrade is issued, every channel that includes a file - called .update-on-nixos-rebuild will be upgraded - alongside of the nixos channel. - - + + + + wmiimenu is removed, as it has been + removed by the developers upstream. Use wimenu + from the wmii-hg package. + + + + Gitit is no longer automatically added to the module list in + NixOS, and as such there will not be any manual entries for it. You + will need to add an import statement to your NixOS configuration + in order to use it, e.g. + +{ + imports = [ <nixpkgs/nixos/modules/services/misc/gitit.nix> + ]; +} +]]> + + will include the Gitit service configuration options. + - - +
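Building on the import shown in the Gitit note above, a hedged sketch of a complete minimal configuration follows. The import path matches the services/misc/gitit.nix entry in the 15.09 list of new modules; the services.gitit.enable flag is an assumption based on the usual NixOS enable-option convention, so check the module's actual options before relying on it:

{
  # Pull in the Gitit module, which is no longer part of the default module list.
  imports = [ <nixpkgs/nixos/modules/services/misc/gitit.nix> ];

  # Assumed option name; most NixOS service modules expose an `enable` flag.
  services.gitit.enable = true;
}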
diff --git a/nixos/lib/build-vms.nix b/nixos/lib/build-vms.nix index d0c9e7c6d23..a97bae57d9b 100644 --- a/nixos/lib/build-vms.nix +++ b/nixos/lib/build-vms.nix @@ -1,6 +1,6 @@ { system, minimal ? false }: -let pkgs = import ./nixpkgs.nix { config = {}; inherit system; }; in +let pkgs = import ../.. { config = {}; inherit system; }; in with pkgs.lib; with import ../lib/qemu-flags.nix; @@ -41,22 +41,22 @@ rec { machines = attrNames nodes; - machinesNumbered = zipTwoLists machines (range 1 254); + machinesNumbered = zipLists machines (range 1 254); - nodes_ = flip map machinesNumbered (m: nameValuePair m.first + nodes_ = flip map machinesNumbered (m: nameValuePair m.fst [ ( { config, pkgs, nodes, ... }: let - interfacesNumbered = zipTwoLists config.virtualisation.vlans (range 1 255); - interfaces = flip map interfacesNumbered ({ first, second }: - nameValuePair "eth${toString second}" { ip4 = - [ { address = "192.168.${toString first}.${toString m.second}"; + interfacesNumbered = zipLists config.virtualisation.vlans (range 1 255); + interfaces = flip map interfacesNumbered ({ fst, snd }: + nameValuePair "eth${toString snd}" { ip4 = + [ { address = "192.168.${toString fst}.${toString m.snd}"; prefixLength = 24; } ]; }); in { key = "ip-address"; config = - { networking.hostName = m.first; + { networking.hostName = m.fst; networking.interfaces = listToAttrs interfaces; @@ -76,11 +76,11 @@ rec { virtualisation.qemu.options = flip map interfacesNumbered - ({ first, second }: qemuNICFlags second first m.second); + ({ fst, snd }: qemuNICFlags snd fst m.snd); }; } ) - (getAttr m.first nodes) + (getAttr m.fst nodes) ] ); in listToAttrs nodes_; diff --git a/nixos/lib/channel-expr.nix b/nixos/lib/channel-expr.nix deleted file mode 100644 index 453bdd506b8..00000000000 --- a/nixos/lib/channel-expr.nix +++ /dev/null @@ -1,6 +0,0 @@ -{ system ? builtins.currentSystem }: - -{ pkgs = - (import nixpkgs/default.nix { inherit system; }) - // { recurseForDerivations = true; }; -} diff --git a/nixos/lib/eval-config.nix b/nixos/lib/eval-config.nix index adacbd0863e..a87b285c5b7 100644 --- a/nixos/lib/eval-config.nix +++ b/nixos/lib/eval-config.nix @@ -17,6 +17,8 @@ baseModules ? import ../modules/module-list.nix , # !!! See comment about args in lib/modules.nix extraArgs ? {} +, # !!! See comment about args in lib/modules.nix + specialArgs ? {} , modules , # !!! See comment about check in lib/modules.nix check ? true @@ -47,15 +49,11 @@ in rec { inherit prefix check; modules = modules ++ extraModules ++ baseModules ++ [ pkgsModule ]; args = extraArgs; + specialArgs = { modulesPath = ../modules; } // specialArgs; }) config options; # These are the extra arguments passed to every module. In # particular, Nixpkgs is passed through the "pkgs" argument. - # FIXME: we enable config.allowUnfree to make packages like - # nvidia-x11 available. This isn't a problem because if the user has - # ‘nixpkgs.config.allowUnfree = false’, then evaluation will fail on - # the 64-bit package anyway. However, it would be cleaner to respect - # nixpkgs.config here. 
extraArgs = extraArgs_ // { inherit modules baseModules; }; diff --git a/nixos/lib/make-channel.nix b/nixos/lib/make-channel.nix new file mode 100644 index 00000000000..fd805f7f943 --- /dev/null +++ b/nixos/lib/make-channel.nix @@ -0,0 +1,27 @@ +{ pkgs, nixpkgs, version, versionSuffix }: + +pkgs.releaseTools.makeSourceTarball { + name = "nixos-channel"; + + src = nixpkgs; + + officialRelease = false; # FIXME: fix this in makeSourceTarball + inherit version versionSuffix; + + buildInputs = [ pkgs.nix ]; + + distPhase = '' + rm -rf .git + echo -n $VERSION_SUFFIX > .version-suffix + echo -n ${nixpkgs.rev or nixpkgs.shortRev} > .git-revision + releaseName=nixos-$VERSION$VERSION_SUFFIX + mkdir -p $out/tarballs + cp -prd . ../$releaseName + chmod -R u+w ../$releaseName + ln -s . ../$releaseName/nixpkgs # hack to make ‘’ work + NIX_STATE_DIR=$TMPDIR nix-env -f ../$releaseName/default.nix -qaP --meta --xml \* > /dev/null + cd .. + chmod -R u+w $releaseName + tar cfJ $out/tarballs/$releaseName.tar.xz $releaseName + ''; +} diff --git a/nixos/lib/make-disk-image.nix b/nixos/lib/make-disk-image.nix new file mode 100644 index 00000000000..79c5199cbec --- /dev/null +++ b/nixos/lib/make-disk-image.nix @@ -0,0 +1,115 @@ +{ pkgs +, lib + +, # The NixOS configuration to be installed onto the disk image. + config + +, # The size of the disk, in megabytes. + diskSize + +, # Whether the disk should be partitioned (with a single partition + # containing the root filesystem) or contain the root filesystem + # directly. + partitioned ? true + +, # The root file system type. + fsType ? "ext4" + +, # The initial NixOS configuration file to be copied to + # /etc/nixos/configuration.nix. + configFile ? null + +, # Shell code executed after the VM has finished. + postVM ? "" + +}: + +with lib; + +pkgs.vmTools.runInLinuxVM ( + pkgs.runCommand "nixos-disk-image" + { preVM = + '' + mkdir $out + diskImage=$out/nixos.img + ${pkgs.vmTools.qemu}/bin/qemu-img create -f raw $diskImage "${toString diskSize}M" + mv closure xchg/ + ''; + buildInputs = [ pkgs.utillinux pkgs.perl pkgs.e2fsprogs pkgs.parted ]; + exportReferencesGraph = + [ "closure" config.system.build.toplevel ]; + inherit postVM; + } + '' + ${if partitioned then '' + # Create a single / partition. + parted /dev/vda mklabel msdos + parted /dev/vda -- mkpart primary ext2 1M -1s + . /sys/class/block/vda1/uevent + mknod /dev/vda1 b $MAJOR $MINOR + rootDisk=/dev/vda1 + '' else '' + rootDisk=/dev/vda + ''} + + # Create an empty filesystem and mount it. + mkfs.${fsType} -L nixos $rootDisk + ${optionalString (fsType == "ext4") '' + tune2fs -c 0 -i 0 $rootDisk + ''} + mkdir /mnt + mount $rootDisk /mnt + + # The initrd expects these directories to exist. + mkdir /mnt/dev /mnt/proc /mnt/sys + + mount -o bind /proc /mnt/proc + mount -o bind /dev /mnt/dev + mount -o bind /sys /mnt/sys + + # Copy all paths in the closure to the filesystem. + storePaths=$(perl ${pkgs.pathsFromGraph} /tmp/xchg/closure) + + mkdir -p /mnt/nix/store + echo "copying everything (will take a while)..." + set -f + cp -prd $storePaths /mnt/nix/store/ + + # Register the paths in the Nix database. + printRegistration=1 perl ${pkgs.pathsFromGraph} /tmp/xchg/closure | \ + chroot /mnt ${config.nix.package}/bin/nix-store --load-db --option build-users-group "" + + # Add missing size/hash fields to the database. FIXME: + # exportReferencesGraph should provide these directly. 
+ chroot /mnt ${config.nix.package}/bin/nix-store --verify --check-contents + + # Create the system profile to allow nixos-rebuild to work. + chroot /mnt ${config.nix.package}/bin/nix-env --option build-users-group "" \ + -p /nix/var/nix/profiles/system --set ${config.system.build.toplevel} + + # `nixos-rebuild' requires an /etc/NIXOS. + mkdir -p /mnt/etc + touch /mnt/etc/NIXOS + + # `switch-to-configuration' requires a /bin/sh + mkdir -p /mnt/bin + ln -s ${config.system.build.binsh}/bin/sh /mnt/bin/sh + + # Install a configuration.nix. + mkdir -p /mnt/etc/nixos + ${optionalString (configFile != null) '' + cp ${configFile} /mnt/etc/nixos/configuration.nix + ''} + + # Generate the GRUB menu. + ln -s vda /dev/xvda + ln -s vda /dev/sda + chroot /mnt ${config.system.build.toplevel}/bin/switch-to-configuration boot + + umount /mnt/proc /mnt/dev /mnt/sys + umount /mnt + + # Do an fsck to make sure resize2fs works. + fsck.${fsType} -f -y $rootDisk + '' +) diff --git a/nixos/lib/make-ext4-fs.nix b/nixos/lib/make-ext4-fs.nix new file mode 100644 index 00000000000..23839ea487d --- /dev/null +++ b/nixos/lib/make-ext4-fs.nix @@ -0,0 +1,88 @@ +# Builds an ext4 image containing a populated /nix/store with the closure +# of store paths passed in the storePaths parameter. The generated image +# is sized to only fit its contents, with the expectation that a script +# resizes the filesystem at boot time. +{ pkgs +, storePaths +, volumeLabel +}: + +pkgs.stdenv.mkDerivation { + name = "ext4-fs.img"; + + buildInputs = with pkgs; [e2fsprogs libfaketime perl]; + + # For obtaining the closure of `storePaths'. + exportReferencesGraph = + map (x: [("closure-" + baseNameOf x) x]) storePaths; + + buildCommand = + '' + # Add the closures of the top-level store objects. + storePaths=$(perl ${pkgs.pathsFromGraph} closure-*) + + # Also include a manifest of the closures in a format suitable + # for nix-store --load-db. + printRegistration=1 perl ${pkgs.pathsFromGraph} closure-* > nix-path-registration + + # Make a crude approximation of the size of the target image. + # If the script starts failing, increase the fudge factors here. + numInodes=$(find $storePaths | wc -l) + numDataBlocks=$(du -c -B 4096 --apparent-size $storePaths | awk '$2 == "total" { print int($1 * 1.03) }') + bytes=$((2 * 4096 * $numInodes + 4096 * $numDataBlocks)) + echo "Creating an EXT4 image of $bytes bytes (numInodes=$numInodes, numDataBlocks=$numDataBlocks)" + + truncate -s $bytes $out + faketime "1970-01-01 00:00:00" mkfs.ext4 -L ${volumeLabel} -U 44444444-4444-4444-8888-888888888888 $out + + # Populate the image contents by piping a bunch of commands to the `debugfs` tool from e2fsprogs. + # For example, to copy /nix/store/abcd...efg-coreutils-8.23/bin/sleep: + # cd /nix/store/abcd...efg-coreutils-8.23/bin + # write /nix/store/abcd...efg-coreutils-8.23/bin/sleep sleep + # sif sleep mode 040555 + # sif sleep gid 30000 + # In particular, debugfs doesn't handle absolute target paths; you have to 'cd' in the virtual + # filesystem first. Likewise the intermediate directories must already exist (using `find` + # handles that for us). And when setting the file's permissions, the inode type flags (__S_IFDIR, + # __S_IFREG) need to be set as well. + ( + echo write nix-path-registration nix-path-registration + echo mkdir nix + echo cd /nix + echo mkdir store + + # XXX: This explodes in exciting ways if anything in /nix/store has a space in it. 
+ find $storePaths -printf '%y %f %h %m\n'| while read -r type file dir perms; do + # echo "TYPE=$type DIR=$dir FILE=$file PERMS=$perms" >&2 + + echo "cd $dir" + case $type in + d) + echo "mkdir $file" + echo sif $file mode $((040000 | 0$perms)) # magic constant is __S_IFDIR + ;; + f) + echo "write $dir/$file $file" + echo sif $file mode $((0100000 | 0$perms)) # magic constant is __S_IFREG + ;; + l) + echo "symlink $file $(readlink "$dir/$file")" + ;; + *) + echo "Unknown entry: $type $dir $file $perms" >&2 + exit 1 + ;; + esac + + echo sif $file gid 30000 # chgrp to nixbld + done + ) | faketime "1970-01-01 00:00:00" debugfs -w $out -f /dev/stdin > errorlog 2>&1 + + # The debugfs tool doesn't terminate on error nor exit with a non-zero status. Check manually. + if egrep -q 'Could not allocate|File not found' errorlog; then + cat errorlog + echo "--- Failed to create EXT4 image of $bytes bytes (numInodes=$numInodes, numDataBlocks=$numDataBlocks) ---" + return 1 + fi + ''; +} diff --git a/nixos/lib/nixpkgs.nix b/nixos/lib/nixpkgs.nix deleted file mode 100644 index 10096f58c79..00000000000 --- a/nixos/lib/nixpkgs.nix +++ /dev/null @@ -1,8 +0,0 @@ -/* Terrible backward compatibility hack to get the path to Nixpkgs - from here. Usually, that's the relative path ‘../..’. However, - when using the NixOS channel, resolves to a symlink to - nixpkgs/nixos, so ‘../..’ doesn't resolve to the top-level Nixpkgs - directory but one above it. So check for that situation. */ -if builtins.pathExists ../../.version then import ../.. -else if builtins.pathExists ../../nixpkgs then import ../../nixpkgs -else abort "Can't find Nixpkgs, please set ‘NIX_PATH=nixpkgs=/path/to/nixpkgs’." diff --git a/nixos/lib/test-driver/Machine.pm b/nixos/lib/test-driver/Machine.pm index e0791692d3e..41088ed75f7 100644 --- a/nixos/lib/test-driver/Machine.pm +++ b/nixos/lib/test-driver/Machine.pm @@ -9,6 +9,7 @@ use FileHandle; use Cwd; use File::Basename; use File::Path qw(make_path); +use File::Slurp; my $showGraphics = defined $ENV{'DISPLAY'}; @@ -20,7 +21,7 @@ sub new { my ($class, $args) = @_; my $startCommand = $args->{startCommand}; - + my $name = $args->{name}; if (!$name) { $startCommand =~ /run-(.*)-vm$/ if defined $startCommand; @@ -33,7 +34,7 @@ sub new { "qemu-kvm -m 384 " . "-net nic,model=virtio \$QEMU_OPTS "; my $iface = $args->{hdaInterface} || "virtio"; - $startCommand .= "-drive file=" . Cwd::abs_path($args->{hda}) . ",if=$iface,boot=on,werror=report " + $startCommand .= "-drive file=" . Cwd::abs_path($args->{hda}) . ",if=$iface,werror=report " if defined $args->{hda}; $startCommand .= "-cdrom $args->{cdrom} " if defined $args->{cdrom}; @@ -42,8 +43,6 @@ sub new { $startCommand .= "-bios $args->{bios} " if defined $args->{bios}; $startCommand .= $args->{qemuFlags} || ""; - } else { - $startCommand = Cwd::abs_path $startCommand; } my $tmpDir = $ENV{'TMPDIR'} || "/tmp"; @@ -170,7 +169,7 @@ sub start { eval { local $SIG{CHLD} = sub { die "QEMU died prematurely\n"; }; - + # Wait until QEMU connects to the monitor. 
accept($self->{monitor}, $monitorS) or die; @@ -181,11 +180,11 @@ sub start { $self->{socket}->autoflush(1); }; die "$@" if $@; - + $self->waitForMonitorPrompt; $self->log("QEMU running (pid $pid)"); - + $self->{pid} = $pid; $self->{booted} = 1; } @@ -240,7 +239,7 @@ sub connect { alarm 300; readline $self->{socket} or die "the VM quit before connecting\n"; alarm 0; - + $self->log("connected to guest root shell"); $self->{connected} = 1; @@ -269,7 +268,7 @@ sub isUp { sub execute_ { my ($self, $command) = @_; - + $self->connect; print { $self->{socket} } ("( $command ); echo '|!=EOF' \$?\n"); @@ -452,7 +451,7 @@ sub shutdown { sub crash { my ($self) = @_; return unless $self->{booted}; - + $self->log("forced crash"); $self->sendMonitorCommand("quit"); @@ -493,6 +492,44 @@ sub screenshot { } +# Take a screenshot and return the result as text using optical character +# recognition. +sub getScreenText { + my ($self) = @_; + + system("command -v tesseract &> /dev/null") == 0 + or die "getScreenText used but enableOCR is false"; + + my $text; + $self->nest("performing optical character recognition", sub { + my $tmpbase = Cwd::abs_path(".")."/ocr"; + my $tmpin = $tmpbase."in.ppm"; + my $tmpout = "$tmpbase.ppm"; + + $self->sendMonitorCommand("screendump $tmpin"); + system("ppmtopgm $tmpin | pamscale 4 -filter=lanczos > $tmpout") == 0 + or die "cannot scale screenshot"; + unlink $tmpin; + system("tesseract $tmpout $tmpbase") == 0 or die "OCR failed"; + unlink $tmpout; + $text = read_file("$tmpbase.txt"); + unlink "$tmpbase.txt"; + }); + return $text; +} + + +# Wait until a specific regexp matches the textual contents of the screen. +sub waitForText { + my ($self, $regexp) = @_; + $self->nest("waiting for $regexp to appear on the screen", sub { + retry sub { + return 1 if $self->getScreenText =~ /$regexp/; + } + }); +} + + # Wait until it is possible to connect to the X server. Note that # testing the existence of /tmp/.X11-unix/X0 is insufficient. sub waitForX { diff --git a/nixos/lib/testing.nix b/nixos/lib/testing.nix index c14f15a1ad5..aa1cec1ea02 100644 --- a/nixos/lib/testing.nix +++ b/nixos/lib/testing.nix @@ -15,6 +15,8 @@ rec { unpackPhase = "true"; + preferLocalBuild = true; + installPhase = '' mkdir -p $out/bin @@ -28,7 +30,7 @@ rec { wrapProgram $out/bin/nixos-test-driver \ --prefix PATH : "${qemu_kvm}/bin:${vde2}/bin:${netpbm}/bin:${coreutils}/bin" \ - --prefix PERL5LIB : "${lib.makePerlPath [ perlPackages.TermReadLineGnu perlPackages.XMLWriter perlPackages.IOTty ]}:$out/lib/perl5/site_perl" + --prefix PERL5LIB : "${with perlPackages; lib.makePerlPath [ TermReadLineGnu XMLWriter IOTty FileSlurp ]}:$out/lib/perl5/site_perl" ''; }; @@ -68,7 +70,12 @@ rec { makeTest = - { testScript, makeCoverageReport ? false, name ? "unnamed", ... } @ t: + { testScript + , makeCoverageReport ? false + , enableOCR ? false + , name ? "unnamed" + , ... + } @ t: let testDriverName = "nixos-test-driver-${name}"; @@ -86,6 +93,8 @@ rec { vms = map (m: m.config.system.build.vm) (lib.attrValues nodes); + ocrProg = tesseract.override { enableLanguages = [ "eng" ]; }; + # Generate onvenience wrappers for running the test driver # interactively with the specified network, and for starting the # VMs from the command line. 
@@ -102,23 +111,29 @@ rec { vms="$(for i in ${toString vms}; do echo $i/bin/run-*-vm; done)" wrapProgram $out/bin/nixos-test-driver \ --add-flags "$vms" \ + ${lib.optionalString enableOCR "--prefix PATH : '${ocrProg}/bin'"} \ --run "testScript=\"\$(cat $out/test-script)\"" \ --set testScript '"$testScript"' \ --set VLANS '"${toString vlans}"' ln -s ${testDriver}/bin/nixos-test-driver $out/bin/nixos-run-vms wrapProgram $out/bin/nixos-run-vms \ --add-flags "$vms" \ + ${lib.optionalString enableOCR "--prefix PATH : '${ocrProg}/bin'"} \ --set tests '"startAll; joinAll;"' \ --set VLANS '"${toString vlans}"' \ ${lib.optionalString (builtins.length vms == 1) "--set USE_SERIAL 1"} ''; # " - test = runTests driver; + passMeta = drv: drv // lib.optionalAttrs (t ? meta) { + meta = (drv.meta or {}) // t.meta; + }; - report = releaseTools.gcovReport { coverageRuns = [ test ]; }; - - in (if makeCoverageReport then report else test) // { inherit nodes driver test; }; + test = passMeta (runTests driver); + report = passMeta (releaseTools.gcovReport { coverageRuns = [ test ]; }); + in (if makeCoverageReport then report else test) // { + inherit nodes driver test; + }; runInMachine = { drv diff --git a/nixos/maintainers/option-usages.nix b/nixos/maintainers/option-usages.nix index 7413b9e18ce..854ecf7eac5 100644 --- a/nixos/maintainers/option-usages.nix +++ b/nixos/maintainers/option-usages.nix @@ -1,59 +1,125 @@ { configuration ? import ../lib/from-env.nix "NIXOS_CONFIG" -# []: display all options -# [