diff --git a/lib/modules.nix b/lib/modules.nix index 8db17c60579..e66d6a6926c 100644 --- a/lib/modules.nix +++ b/lib/modules.nix @@ -231,12 +231,20 @@ rec { correspond to the definition of 'loc' in 'opt.file'. */ mergeOptionDecls = loc: opts: foldl' (res: opt: - if opt.options ? default && res ? default || - opt.options ? example && res ? example || - opt.options ? description && res ? description || - opt.options ? apply && res ? apply || - # Accept to merge options which have identical types. - opt.options ? type && res ? type && opt.options.type.name != res.type.name + let t = res.type; + t' = opt.options.type; + mergedType = t.typeMerge t'.functor; + typesMergeable = mergedType != null; + typeSet = if (bothHave "type") && typesMergeable + then { type = mergedType; } + else {}; + bothHave = k: opt.options ? ${k} && res ? ${k}; + in + if bothHave "default" || + bothHave "example" || + bothHave "description" || + bothHave "apply" || + (bothHave "type" && (! typesMergeable)) then throw "The option `${showOption loc}' in `${opt.file}' is already declared in ${showFiles res.declarations}." else @@ -258,7 +266,7 @@ rec { in opt.options // res // { declarations = res.declarations ++ [opt.file]; options = submodules; - } + } // typeSet ) { inherit loc; declarations = []; options = []; } opts; /* Merge all the definitions of an option to produce the final @@ -422,12 +430,14 @@ rec { options = opt.options or (throw "Option `${showOption loc'}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}."); f = tp: + let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet"); + in if tp.name == "option set" || tp.name == "submodule" then throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}." - else if tp.name == "attribute set of option sets" then types.attrsOf (types.submodule options) - else if tp.name == "list or attribute set of option sets" then types.loaOf (types.submodule options) - else if tp.name == "list of option sets" then types.listOf (types.submodule options) - else if tp.name == "null or option set" then types.nullOr (types.submodule options) + else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options) + else if optionSetIn "loaOf" then types.loaOf (types.submodule options) + else if optionSetIn "listOf" then types.listOf (types.submodule options) + else if optionSetIn "nullOr" then types.nullOr (types.submodule options) else tp; in if opt.type.getSubModules or null == null diff --git a/lib/options.nix b/lib/options.nix index 444ec37e6ea..2092b65bbc3 100644 --- a/lib/options.nix +++ b/lib/options.nix @@ -92,7 +92,7 @@ rec { internal = opt.internal or false; visible = opt.visible or true; readOnly = opt.readOnly or false; - type = opt.type.name or null; + type = opt.type.description or null; } // (if opt ? example then { example = scrubOptionValue opt.example; } else {}) // (if opt ? 
default then { default = scrubOptionValue opt.default; } else {}) diff --git a/lib/types.nix b/lib/types.nix index 991fa0e5c29..26523f59f25 100644 --- a/lib/types.nix +++ b/lib/types.nix @@ -17,10 +17,43 @@ rec { }; + # Default type merging function + # takes two type functors and returns the merged type + defaultTypeMerge = f: f': + let wrapped = f.wrapped.typeMerge f'.wrapped.functor; + payload = f.binOp f.payload f'.payload; + in + # cannot merge different types + if f.name != f'.name + then null + # simple types + else if (f.wrapped == null && f'.wrapped == null) + && (f.payload == null && f'.payload == null) + then f.type + # composed types + else if (f.wrapped != null && f'.wrapped != null) && (wrapped != null) + then f.type wrapped + # value types + else if (f.payload != null && f'.payload != null) && (payload != null) + then f.type payload + else null; + + # Default type functor + defaultFunctor = name: { + inherit name; + type = types."${name}" or null; + wrapped = null; + payload = null; + binOp = a: b: null; + }; + isOptionType = isType "option-type"; mkOptionType = - { # Human-readable representation of the type. + { # Human-readable representation of the type, should be equivalent to + # the type function name. name + , # Description of the type, defined recursively by embedding the wrapped type if any. + description ? null , # Function applied to each definition that should return true if # its type-correct, false otherwise. check ? (x: true) @@ -36,12 +69,26 @@ rec { getSubOptions ? prefix: {} , # List of modules if any, or null if none. getSubModules ? null - , # Function for building the same option type with a different list of + , # Function for building the same option type with a different list of # modules. substSubModules ? m: null + , # Function that merges type declarations. + # internal, takes a functor as argument and returns the merged type. + # returning null means the type is not mergeable + typeMerge ? defaultTypeMerge functor + , # The type functor. + # internal, representation of the type as an attribute set. + # name: name of the type + # type: type function. + # wrapped: the type wrapped in case of compound types. + # payload: values of the type, two payloads of the same type must be + # combinable with the binOp binary operation. + # binOp: binary operation that merges two payloads of the same type. + functor ? defaultFunctor name }: { _type = "option-type"; - inherit name check merge getSubOptions getSubModules substSubModules; + inherit name check merge getSubOptions getSubModules substSubModules typeMerge functor; + description = if description == null then name else description; }; @@ -52,29 +99,39 @@ rec { }; bool = mkOptionType { - name = "boolean"; + name = "bool"; + description = "boolean"; check = isBool; merge = mergeEqualOption; }; - int = mkOptionType { - name = "integer"; + int = mkOptionType rec { + name = "int"; + description = "integer"; check = isInt; merge = mergeOneOption; }; str = mkOptionType { - name = "string"; + name = "str"; + description = "string"; check = isString; merge = mergeOneOption; }; # Merge multiple definitions by concatenating them (with the given # separator between the values).
- separatedString = sep: mkOptionType { - name = "string"; + separatedString = sep: mkOptionType rec { + name = "separatedString"; + description = "string"; check = isString; merge = loc: defs: concatStringsSep sep (getValues defs); + functor = (defaultFunctor name) // { + payload = sep; + binOp = sepLhs: sepRhs: + if sepLhs == sepRhs then sepLhs + else null; + }; }; lines = separatedString "\n"; @@ -86,7 +143,8 @@ rec { string = separatedString ""; attrs = mkOptionType { - name = "attribute set"; + name = "attrs"; + description = "attribute set"; check = isAttrs; merge = loc: foldl' (res: def: mergeAttrs res def.value) {}; }; @@ -114,8 +172,9 @@ rec { # drop this in the future: list = builtins.trace "`types.list' is deprecated; use `types.listOf' instead" types.listOf; - listOf = elemType: mkOptionType { - name = "list of ${elemType.name}s"; + listOf = elemType: mkOptionType rec { + name = "listOf"; + description = "list of ${elemType.description}s"; check = isList; merge = loc: defs: map (x: x.value) (filter (x: x ? value) (concatLists (imap (n: def: @@ -132,10 +191,12 @@ rec { getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]); getSubModules = elemType.getSubModules; substSubModules = m: listOf (elemType.substSubModules m); + functor = (defaultFunctor name) // { wrapped = elemType; }; }; - attrsOf = elemType: mkOptionType { - name = "attribute set of ${elemType.name}s"; + attrsOf = elemType: mkOptionType rec { + name = "attrsOf"; + description = "attribute set of ${elemType.description}s"; check = isAttrs; merge = loc: defs: mapAttrs (n: v: v.value) (filterAttrs (n: v: v ? value) (zipAttrsWith (name: defs: @@ -147,6 +208,7 @@ rec { getSubOptions = prefix: elemType.getSubOptions (prefix ++ [""]); getSubModules = elemType.getSubModules; substSubModules = m: attrsOf (elemType.substSubModules m); + functor = (defaultFunctor name) // { wrapped = elemType; }; }; # List or attribute set of ... @@ -165,18 +227,21 @@ rec { def; listOnly = listOf elemType; attrOnly = attrsOf elemType; - in mkOptionType { - name = "list or attribute set of ${elemType.name}s"; + in mkOptionType rec { + name = "loaOf"; + description = "list or attribute set of ${elemType.description}s"; check = x: isList x || isAttrs x; merge = loc: defs: attrOnly.merge loc (imap convertIfList defs); getSubOptions = prefix: elemType.getSubOptions (prefix ++ [""]); getSubModules = elemType.getSubModules; substSubModules = m: loaOf (elemType.substSubModules m); + functor = (defaultFunctor name) // { wrapped = elemType; }; }; # List or element of ... - loeOf = elemType: mkOptionType { - name = "element or list of ${elemType.name}s"; + loeOf = elemType: mkOptionType rec { + name = "loeOf"; + description = "element or list of ${elemType.description}s"; check = x: isList x || elemType.check x; merge = loc: defs: let @@ -189,18 +254,22 @@ rec { else if !isString res then throw "The option `${showOption loc}' does not have a string value, in ${showFiles (getFiles defs)}." 
else res; + functor = (defaultFunctor name) // { wrapped = elemType; }; }; - uniq = elemType: mkOptionType { - inherit (elemType) name check; + uniq = elemType: mkOptionType rec { + name = "uniq"; + inherit (elemType) description check; merge = mergeOneOption; getSubOptions = elemType.getSubOptions; getSubModules = elemType.getSubModules; substSubModules = m: uniq (elemType.substSubModules m); + functor = (defaultFunctor name) // { wrapped = elemType; }; }; - nullOr = elemType: mkOptionType { - name = "null or ${elemType.name}"; + nullOr = elemType: mkOptionType rec { + name = "nullOr"; + description = "null or ${elemType.description}"; check = x: x == null || elemType.check x; merge = loc: defs: let nrNulls = count (def: def.value == null) defs; in @@ -211,6 +280,7 @@ rec { getSubOptions = elemType.getSubOptions; getSubModules = elemType.getSubModules; substSubModules = m: nullOr (elemType.substSubModules m); + functor = (defaultFunctor name) // { wrapped = elemType; }; }; submodule = opts: @@ -236,6 +306,12 @@ rec { args = { name = ""; }; }).options; getSubModules = opts'; substSubModules = m: submodule m; + functor = (defaultFunctor name) // { + # Merging of submodules is done as part of mergeOptionDecls, as we have to annotate + # each submodule with its location. + payload = []; + binOp = lhs: rhs: []; + }; }; enum = values: @@ -245,23 +321,35 @@ rec { else if builtins.isInt v then builtins.toString v else ''<${builtins.typeOf v}>''; in - mkOptionType { - name = "one of ${concatMapStringsSep ", " show values}"; + mkOptionType rec { + name = "enum"; + description = "one of ${concatMapStringsSep ", " show values}"; check = flip elem values; merge = mergeOneOption; + functor = (defaultFunctor name) // { payload = values; binOp = a: b: unique (a ++ b); }; }; - either = t1: t2: mkOptionType { - name = "${t1.name} or ${t2.name}"; + either = t1: t2: mkOptionType rec { + name = "either"; + description = "${t1.description} or ${t2.description}"; check = x: t1.check x || t2.check x; merge = mergeOneOption; + typeMerge = f': + let mt1 = t1.typeMerge (elemAt f'.wrapped 0).functor; + mt2 = t2.typeMerge (elemAt f'.wrapped 1).functor; + in + if (name == f'.name) && (mt1 != null) && (mt2 != null) + then functor.type mt1 mt2 + else null; + functor = (defaultFunctor name) // { wrapped = [ t1 t2 ]; }; }; # Obsolete alternative to configOf. It takes its option # declarations from the ‘options’ attribute of containing option # declaration. optionSet = mkOptionType { - name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "option set"; + name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "optionSet"; + description = "option set"; }; # Augment the given type with an additional type check function. diff --git a/nixos/doc/manual/configuration/customizing-packages.xml b/nixos/doc/manual/configuration/customizing-packages.xml index 6ee7a95dc6f..8aa01fb57a0 100644 --- a/nixos/doc/manual/configuration/customizing-packages.xml +++ b/nixos/doc/manual/configuration/customizing-packages.xml @@ -42,29 +42,30 @@ construction, so without them, elements.) Even greater customisation is possible using the function -overrideDerivation. While the +overrideAttrs. While the override mechanism above overrides the arguments of -a package function, overrideDerivation allows -changing the result of the function. This -permits changing any aspect of the package, such as the source code. 
+a package function, overrideAttrs allows +changing the attributes passed to mkDerivation. +This permits changing any aspect of the package, such as the source code. For instance, if you want to override the source code of Emacs, you can say: -environment.systemPackages = - [ (pkgs.lib.overrideDerivation pkgs.emacs (attrs: { - name = "emacs-25.0-pre"; - src = /path/to/my/emacs/tree; - })) - ]; +environment.systemPackages = [ + (pkgs.emacs.overrideAttrs (oldAttrs: { + name = "emacs-25.0-pre"; + src = /path/to/my/emacs/tree; + })) +]; -Here, overrideDerivation takes the Nix derivation +Here, overrideAttrs takes the Nix derivation specified by pkgs.emacs and produces a new derivation in which the original’s name and src attribute have been replaced by the given -values. The original attributes are accessible via -attrs. +values by re-calling stdenv.mkDerivation. +The original attributes are accessible via the function argument, +which is conventionally named oldAttrs. The overrides shown above are not global. They do not affect the original package; other packages in Nixpkgs continue to depend on diff --git a/nixos/doc/manual/development/option-declarations.xml b/nixos/doc/manual/development/option-declarations.xml index 7be5e9d51d4..ce432a7fa6c 100644 --- a/nixos/doc/manual/development/option-declarations.xml +++ b/nixos/doc/manual/development/option-declarations.xml @@ -65,4 +65,92 @@ options = { +
Extensible Option + Types + + Extensible option types is a feature that allows extending certain type + declarations through multiple module files. + This feature only works with a restricted set of types, namely + enum and submodules and any composed + forms of them. + + Extensible option types can be used for enum options + that affect multiple modules, or as an alternative to related + enable options. + + As an example, we will take the case of display managers. There is a + central display manager module for generic display manager options and a + module file per display manager backend (slim, kdm, gdm ...). + + + There are two approaches to this module structure: + + + Managing the display managers independently by adding an + enable option to every display manager backend module. (NixOS) + + Managing the display managers in the central module by + adding an option to select which display manager backend to use. + + + + + Both approaches have problems. + + Making backends independent can quickly become hard to manage. For + display managers, there can be only one enabled at a time, but the type + system cannot enforce this restriction as there is no relation between + each backend enable option. As a result, this restriction + has to be enforced explicitly by adding assertions in each display manager + backend module. + + On the other hand, managing the display manager backends in the + central module requires changing the central module option every time + a new backend is added or removed. + + By using extensible option types, it is possible to create a placeholder + option in the central module (), and to extend it in each backend module (, ). + + As a result, displayManager.enable option values can + be added without changing the main service module file, and the type system + automatically enforces that only a single display manager can be + enabled. + +Extensible type + placeholder in the service module + +services.xserver.displayManager.enable = mkOption { + description = "Display manager to use"; + type = with types; nullOr (enum [ ]); +}; + +Extending + <literal>services.xserver.displayManager.enable</literal> in the + <literal>slim</literal> module + +services.xserver.displayManager.enable = mkOption { + type = with types; nullOr (enum [ "slim" ]); +}; + +Extending + <literal>services.xserver.displayManager.enable</literal> in the + <literal>kdm</literal> module + +services.xserver.displayManager.enable = mkOption { + type = with types; nullOr (enum [ "kdm" ]); +}; + +The placeholder declaration is a standard mkOption + declaration, but it is important that extensible option declarations only use + the type argument. + +Extensible option types work with any of the composed variants of + enum such as + with types; nullOr (enum [ "foo" "bar" ]) + or with types; listOf (enum [ "foo" "bar" ]). + +
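As a rough standalone illustration of the merging behaviour described above (not part of this patch), the following sketch evaluates a placeholder declaration together with two extending declarations using lib.evalModules. It assumes a nixpkgs checkout containing this change is available on NIX_PATH; the option path simply mirrors the documentation examples.

let
  lib = import <nixpkgs/lib>;

  # Placeholder declaration: the only declaration carrying a default and a
  # description, with an empty enum that the backends extend.
  placeholder = {
    options.services.xserver.displayManager.enable = lib.mkOption {
      description = "Display manager to use";
      default = null;
      type = with lib.types; nullOr (enum [ ]);
    };
  };

  # Extending declarations: they use only the type argument.
  slim = {
    options.services.xserver.displayManager.enable = lib.mkOption {
      type = with lib.types; nullOr (enum [ "slim" ]);
    };
  };
  kdm = {
    options.services.xserver.displayManager.enable = lib.mkOption {
      type = with lib.types; nullOr (enum [ "kdm" ]);
    };
  };

  eval = lib.evalModules {
    modules = [ placeholder slim kdm { services.xserver.displayManager.enable = "kdm"; } ];
  };
in
  # The merged type is nullOr (enum [ "slim" "kdm" ]); any other value is rejected.
  eval.config.services.xserver.displayManager.enable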
diff --git a/nixos/doc/manual/development/option-types.xml b/nixos/doc/manual/development/option-types.xml index 8871b02cebf..8e6ac53ad48 100644 --- a/nixos/doc/manual/development/option-types.xml +++ b/nixos/doc/manual/development/option-types.xml @@ -62,23 +62,45 @@ A string. Multiple definitions are concatenated with a collon ":". - - types.separatedString - sep - A string with a custom separator - sep, e.g. types.separatedString - "|". - +
Value Types + + Value types are types that take a value parameter. The value types + in the library are enum, separatedString and submodule. + + + + types.enum l + One element of the list l, e.g. + types.enum [ "left" "right" ]. Multiple definitions + cannot be merged. + + + types.separatedString + sep + A string with a custom separator + sep, e.g. types.separatedString + "|". + + + types.submodule o + A set of sub options o. + o can be an attribute set or a function + returning an attribute set. Submodules are used in composed types to + create modular options. Submodules are detailed in . + + +
+
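The payload mechanics behind separatedString can be observed directly; here is a minimal sketch (not part of this patch) that relies on the internal typeMerge and functor attributes introduced in lib/types.nix above, assuming a patched nixpkgs on NIX_PATH:

let
  lib = import <nixpkgs/lib>;
  colon  = lib.types.separatedString ":";
  colon' = lib.types.separatedString ":";
  comma  = lib.types.separatedString ",";
in {
  # Equal payloads (same separator): binOp keeps ":" and the merge succeeds.
  merged = (colon.typeMerge colon'.functor).description;   # "string"
  # Different payloads: binOp yields null, so the types cannot be merged.
  clash  = colon.typeMerge comma.functor;                   # null
}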
Composed Types - Composed types allow to create complex types by taking another type(s) - or value(s) as parameter(s). - It is possible to compose types multiple times, e.g. with types; - nullOr (enum [ "left" "right" ]). + Composed types are types that take a type as parameter. listOf + int and either int str are examples of + composed types. @@ -99,12 +121,6 @@ type. Multiple definitions are merged according to the value. - - types.loeOf t - A list or an element of t type. - Multiple definitions are merged according to the - values. - types.nullOr t null or type @@ -117,12 +133,6 @@ merged. It is used to ensure option definitions are declared only once. - - types.enum l - One element of the list l, e.g. - types.enum [ "left" "right" ]. Multiple definitions - cannot be merged - types.either t1 t2 @@ -131,14 +141,6 @@ str. Multiple definitions cannot be merged. - - types.submodule o - A set of sub options o. - o can be an attribute set or a function - returning an attribute set. Submodules are used in composed types to - create modular options. Submodule are detailed in . -
@@ -197,7 +199,6 @@ options.mod = mkOption { type = with types; listOf (submodule modOptions); }; -
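Several modules later in this patch replace the removed types.loeOf with a composed type plus an apply function; the following module sketch (with an illustrative option name, not part of the patch) shows that pattern in isolation:

{ lib, ... }:

{
  # "example.variables" is only an illustrative option name.
  options.example.variables = lib.mkOption {
    default = { };
    # Accept either a plain string or a list of strings per attribute...
    type = with lib.types; attrsOf (either str (listOf str));
    # ...and normalise lists to a colon-separated string, as
    # shells-environment.nix and system-environment.nix do further down.
    apply = lib.mapAttrs (n: v: if lib.isList v then lib.concatStringsSep ":" v else v);
  };
}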
Composed with <literal>listOf</literal> When composed with listOf, submodule allows multiple @@ -323,9 +324,13 @@ code before creating a new type. name - A string representation of the type function name, name - usually changes accordingly parameters passed to - types. + A string representation of the type function + name. + + + definition + Description of the type used in documentation. Give + information of the type and any of its arguments. check @@ -388,6 +393,53 @@ code before creating a new type. type parameter, this function should be defined as m: composedType (elemType.substSubModules m). + + typeMerge + A function to merge multiple type declarations. Takes the + type to merge functor as parameter. A + null return value means that type cannot be + merged. + + + f + The type to merge + functor. + + + Note: There is a generic defaultTypeMerge that + work with most of value and composed types. + + + + functor + An attribute set representing the type. It is used for type + operations and has the following keys: + + + type + The type function. + + + wrapped + Holds the type parameter for composed types. + + + + payload + Holds the value parameter for value types. + The types that have a payload are the + enum, separatedString and + submodule types. + + + binOp + A binary operation that can merge the payloads of two + same types. Defined as a function that take two payloads as + parameters and return the payloads merged. + + + +
diff --git a/nixos/doc/manual/release-notes/rl-1703.xml b/nixos/doc/manual/release-notes/rl-1703.xml index efff8b895a1..743f3dce230 100644 --- a/nixos/doc/manual/release-notes/rl-1703.xml +++ b/nixos/doc/manual/release-notes/rl-1703.xml @@ -75,7 +75,10 @@ following incompatible changes: - + The module type system has a new extensible option types feature that + allows extending certain types, such as enum, through multiple + declarations of the same option across multiple modules. + diff --git a/nixos/modules/config/debug-info.nix b/nixos/modules/config/debug-info.nix index 671a59f52f6..49991d22a93 100644 --- a/nixos/modules/config/debug-info.nix +++ b/nixos/modules/config/debug-info.nix @@ -17,12 +17,10 @@ with lib; where tools such as gdb can find them. If you need debug symbols for a package that doesn't provide them by default, you can enable them as follows: - nixpkgs.config.packageOverrides = pkgs: { - hello = pkgs.lib.overrideDerivation pkgs.hello (attrs: { - outputs = attrs.outputs or ["out"] ++ ["debug"]; - buildInputs = attrs.buildInputs ++ [<nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh>]; + hello = pkgs.hello.overrideAttrs (oldAttrs: { + separateDebugInfo = true; }); }; diff --git a/nixos/modules/config/shells-environment.nix b/nixos/modules/config/shells-environment.nix index f458bc39ada..8147fed39d0 100644 --- a/nixos/modules/config/shells-environment.nix +++ b/nixos/modules/config/shells-environment.nix @@ -41,7 +41,7 @@ in strings. The latter is concatenated, interspersed with colon characters. ''; - type = types.attrsOf (types.loeOf types.str); + type = with types; attrsOf (either str (listOf str)); apply = mapAttrs (n: v: if isList v then concatStringsSep ":" v else v); }; diff --git a/nixos/modules/config/system-environment.nix b/nixos/modules/config/system-environment.nix index 3362400326d..6011e354ece 100644 --- a/nixos/modules/config/system-environment.nix +++ b/nixos/modules/config/system-environment.nix @@ -23,7 +23,7 @@ in strings. The latter is concatenated, interspersed with colon characters. ''; - type = types.attrsOf (types.loeOf types.str); + type = with types; attrsOf (either str (listOf str)); apply = mapAttrs (n: v: if isList v then concatStringsSep ":" v else v); }; diff --git a/nixos/modules/installer/tools/nixos-option.sh b/nixos/modules/installer/tools/nixos-option.sh index 17c17d05e28..27eacda48a8 100644 --- a/nixos/modules/installer/tools/nixos-option.sh +++ b/nixos/modules/installer/tools/nixos-option.sh @@ -256,7 +256,7 @@ if isOption opt then // optionalAttrs (opt ? default) { inherit (opt) default; } // optionalAttrs (opt ? example) { inherit (opt) example; } // optionalAttrs (opt ? description) { inherit (opt) description; } - // optionalAttrs (opt ? type) { typename = opt.type.name; } + // optionalAttrs (opt ? type) { typename = opt.type.description; } // optionalAttrs (opt ? options) { inherit (opt) options; } // { # to disambiguate the xml output. diff --git a/nixos/modules/misc/ids.nix b/nixos/modules/misc/ids.nix index 8c0f0c2624b..c75c22472bd 100644 --- a/nixos/modules/misc/ids.nix +++ b/nixos/modules/misc/ids.nix @@ -277,6 +277,7 @@ gitlab-runner = 257; postgrey = 258; hound = 259; + leaps = 260; # When adding a uid, make sure it doesn't match an existing gid. And don't use uids above 399! @@ -524,6 +525,7 @@ gitlab-runner = 257; postgrey = 258; hound = 259; + leaps = 260; # When adding a gid, make sure it doesn't match an existing # uid.
Users and groups with the same name should have equal diff --git a/nixos/modules/module-list.nix b/nixos/modules/module-list.nix index 08d73970408..356cb5a92ed 100644 --- a/nixos/modules/module-list.nix +++ b/nixos/modules/module-list.nix @@ -251,6 +251,7 @@ ./services/misc/gitolite.nix ./services/misc/gpsd.nix ./services/misc/ihaskell.nix + ./services/misc/leaps.nix ./services/misc/mantisbt.nix ./services/misc/mathics.nix ./services/misc/matrix-synapse.nix diff --git a/nixos/modules/services/editors/emacs.xml b/nixos/modules/services/editors/emacs.xml index bcaa8b8df3d..e03f6046de8 100644 --- a/nixos/modules/services/editors/emacs.xml +++ b/nixos/modules/services/editors/emacs.xml @@ -356,14 +356,14 @@ https://nixos.org/nixpkgs/manual/#sec-modify-via-packageOverrides {} }: let - myEmacs = pkgs.lib.overrideDerivation (pkgs.emacs.override { + myEmacs = (pkgs.emacs.override { # Use gtk3 instead of the default gtk2 withGTK3 = true; withGTK2 = false; - }) (attrs: { + }).overrideAttrs (attrs: { # I don't want emacs.desktop file because I only use # emacsclient. - postInstall = attrs.postInstall + '' + postInstall = (attrs.postInstall or "") + '' rm $out/share/applications/emacs.desktop ''; }); diff --git a/nixos/modules/services/games/ghost-one.nix b/nixos/modules/services/games/ghost-one.nix index 5762148df2b..71ff6bb2f3f 100644 --- a/nixos/modules/services/games/ghost-one.nix +++ b/nixos/modules/services/games/ghost-one.nix @@ -21,8 +21,7 @@ in language = mkOption { default = "English"; - type = types.addCheck types.str - (lang: elem lang [ "English" "Spanish" "Russian" "Serbian" "Turkish" ]); + type = types.enum [ "English" "Spanish" "Russian" "Serbian" "Turkish" ]; description = "The language of bot messages: English, Spanish, Russian, Serbian or Turkish."; }; diff --git a/nixos/modules/services/logging/logcheck.nix b/nixos/modules/services/logging/logcheck.nix index a8a214b2155..27ed5374f56 100644 --- a/nixos/modules/services/logging/logcheck.nix +++ b/nixos/modules/services/logging/logcheck.nix @@ -55,9 +55,9 @@ let levelOption = mkOption { default = "server"; - type = types.str; + type = types.enum [ "workstation" "server" "paranoid" ]; description = '' - Set the logcheck level. Either "workstation", "server", or "paranoid". + Set the logcheck level. ''; }; diff --git a/nixos/modules/services/misc/leaps.nix b/nixos/modules/services/misc/leaps.nix new file mode 100644 index 00000000000..b92cf27f58d --- /dev/null +++ b/nixos/modules/services/misc/leaps.nix @@ -0,0 +1,62 @@ +{ config, pkgs, lib, ... } @ args: + +with lib; + +let + cfg = config.services.leaps; + stateDir = "/var/lib/leaps/"; +in +{ + options = { + services.leaps = { + enable = mkEnableOption "leaps"; + port = mkOption { + type = types.int; + default = 8080; + description = "A port where leaps listens for incoming http requests"; + }; + address = mkOption { + default = ""; + type = types.str; + example = "127.0.0.1"; + description = "Hostname or IP-address to listen to. 
By default it will listen on all interfaces."; + }; + path = mkOption { + default = "/"; + type = types.path; + description = "Subdirectory used for reverse proxy setups"; + }; + }; + }; + + config = mkIf cfg.enable { + users = { + users.leaps = { + uid = config.ids.uids.leaps; + description = "Leaps server user"; + group = "leaps"; + home = stateDir; + createHome = true; + }; + + groups.leaps = { + gid = config.ids.gids.leaps; + }; + }; + + systemd.services.leaps = { + description = "leaps service"; + wantedBy = [ "multi-user.target" ]; + after = [ "network.target" ]; + + serviceConfig = { + User = "leaps"; + Group = "leaps"; + Restart = "on-failure"; + WorkingDirectory = stateDir; + PrivateTmp = true; + ExecStart = "${pkgs.leaps.bin}/bin/leaps -path ${toString cfg.path} -address ${cfg.address}:${toString cfg.port}"; + }; + }; + }; +} diff --git a/nixos/modules/services/misc/taskserver/default.nix b/nixos/modules/services/misc/taskserver/default.nix index 233c47684b7..ca82a733f6f 100644 --- a/nixos/modules/services/misc/taskserver/default.nix +++ b/nixos/modules/services/misc/taskserver/default.nix @@ -292,7 +292,7 @@ in { }; allowedClientIDs = mkOption { - type = with types; loeOf (either (enum ["all" "none"]) str); + type = with types; either str (listOf str); default = []; example = [ "[Tt]ask [2-9]+" ]; description = '' @@ -306,7 +306,7 @@ in { }; disallowedClientIDs = mkOption { - type = with types; loeOf (either (enum ["all" "none"]) str); + type = with types; either str (listOf str); default = []; example = [ "[Tt]ask [2-9]+" ]; description = '' diff --git a/nixos/modules/services/networking/bitlbee.nix b/nixos/modules/services/networking/bitlbee.nix index 5e6847097a9..e72ea20ccce 100644 --- a/nixos/modules/services/networking/bitlbee.nix +++ b/nixos/modules/services/networking/bitlbee.nix @@ -7,11 +7,6 @@ let cfg = config.services.bitlbee; bitlbeeUid = config.ids.uids.bitlbee; - authModeCheck = v: - v == "Open" || - v == "Closed" || - v == "Registered"; - bitlbeeConfig = pkgs.writeText "bitlbee.conf" '' [settings] @@ -67,7 +62,7 @@ in authMode = mkOption { default = "Open"; - type = types.addCheck types.str authModeCheck; + type = types.enum [ "Open" "Closed" "Registered" ]; description = '' The following authentication modes are available: Open -- Accept connections from anyone, use NickServ for user authentication. diff --git a/nixos/modules/services/networking/dnscrypt-proxy.nix b/nixos/modules/services/networking/dnscrypt-proxy.nix index 5a24db8ccba..82bf178f4cb 100644 --- a/nixos/modules/services/networking/dnscrypt-proxy.nix +++ b/nixos/modules/services/networking/dnscrypt-proxy.nix @@ -5,15 +5,25 @@ let apparmorEnabled = config.security.apparmor.enable; dnscrypt-proxy = pkgs.dnscrypt-proxy; cfg = config.services.dnscrypt-proxy; + stateDirectory = "/var/lib/dnscrypt-proxy"; localAddress = "${cfg.localAddress}:${toString cfg.localPort}"; - daemonArgs = - [ "--local-address=${localAddress}" - (optionalString cfg.tcpOnly "--tcp-only") - (optionalString cfg.ephemeralKeys "-E") - ] - ++ resolverArgs; + # The minisign public key used to sign the upstream resolver list. + # This is somewhat more flexible than preloading the key as an + # embedded string. 
+ upstreamResolverListPubKey = pkgs.fetchurl { + url = https://raw.githubusercontent.com/jedisct1/dnscrypt-proxy/master/minisign.pub; + sha256 = "18lnp8qr6ghfc2sd46nn1rhcpr324fqlvgsp4zaigw396cd7vnnh"; + }; + + # Internal flag indicating whether the upstream resolver list is used + useUpstreamResolverList = cfg.resolverList == null && cfg.customResolver == null; + + resolverList = + if (cfg.resolverList != null) + then cfg.resolverList + else "${stateDirectory}/dnscrypt-resolvers.csv"; resolverArgs = if (cfg.customResolver != null) then @@ -22,9 +32,16 @@ let "--provider-key=${cfg.customResolver.key}" ] else - [ "--resolvers-list=${cfg.resolverList}" - "--resolver-name=${toString cfg.resolverName}" + [ "--resolvers-list=${resolverList}" + "--resolver-name=${cfg.resolverName}" ]; + + # The final command line arguments passed to the daemon + daemonArgs = + [ "--local-address=${localAddress}" ] + ++ optional cfg.tcpOnly "--tcp-only" + ++ optional cfg.ephemeralKeys "-E" + ++ resolverArgs; in { @@ -66,24 +83,20 @@ in default = "dnscrypt.eu-nl"; type = types.nullOr types.str; description = '' - The name of the upstream DNSCrypt resolver to use, taken from the - list named in the resolverList option. - The default resolver is located in Holland, supports DNS security - extensions, and claims to not keep logs. + The name of the upstream DNSCrypt resolver to use, taken from + ${resolverList}. The default resolver is + located in Holland, supports DNS security extensions, and + claims to not keep logs. ''; }; resolverList = mkOption { + default = null; + type = types.nullOr types.path; description = '' - The list of upstream DNSCrypt resolvers. By default, we use the most - recent list published by upstream. + List of DNSCrypt resolvers. The default is to use the list of + public resolvers provided by upstream. 
''; - example = literalExample "${pkgs.dnscrypt-proxy}/share/dnscrypt-proxy/dnscrypt-resolvers.csv"; - default = pkgs.fetchurl { - url = https://raw.githubusercontent.com/jedisct1/dnscrypt-proxy/master/dnscrypt-resolvers.csv; - sha256 = "1i9wzw4zl052h5nyp28bwl8d66cgj0awvjhw5wgwz0warkjl1g8g"; - }; - defaultText = "pkgs.fetchurl { url = ...; sha256 = ...; }"; }; customResolver = mkOption { @@ -150,7 +163,7 @@ in } ]; - security.apparmor.profiles = mkIf apparmorEnabled (singleton (pkgs.writeText "apparmor-dnscrypt-proxy" '' + security.apparmor.profiles = optional apparmorEnabled (pkgs.writeText "apparmor-dnscrypt-proxy" '' ${dnscrypt-proxy}/bin/dnscrypt-proxy { /dev/null rw, /dev/urandom r, @@ -177,9 +190,9 @@ in ${getLib pkgs.lz4}/lib/liblz4.so.* mr, ${getLib pkgs.attr}/lib/libattr.so.* mr, - ${cfg.resolverList} r, + ${resolverList} r, } - '')); + ''); users.users.dnscrypt-proxy = { description = "dnscrypt-proxy daemon user"; @@ -188,11 +201,61 @@ in }; users.groups.dnscrypt-proxy = {}; + systemd.services.init-dnscrypt-proxy-statedir = optionalAttrs useUpstreamResolverList { + description = "Initialize dnscrypt-proxy state directory"; + script = '' + mkdir -pv ${stateDirectory} + chown -c dnscrypt-proxy:dnscrypt-proxy ${stateDirectory} + cp --preserve=timestamps -uv \ + ${pkgs.dnscrypt-proxy}/share/dnscrypt-proxy/dnscrypt-resolvers.csv \ + ${stateDirectory} + ''; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + }; + + systemd.services.update-dnscrypt-resolvers = optionalAttrs useUpstreamResolverList { + description = "Update list of DNSCrypt resolvers"; + + requires = [ "init-dnscrypt-proxy-statedir.service" ]; + after = [ "init-dnscrypt-proxy-statedir.service" ]; + + path = with pkgs; [ curl minisign ]; + script = '' + cd ${stateDirectory} + curl -fSsL -o dnscrypt-resolvers.csv.tmp \ + https://download.dnscrypt.org/dnscrypt-proxy/dnscrypt-resolvers.csv + curl -fSsL -o dnscrypt-resolvers.csv.minisig.tmp \ + https://download.dnscrypt.org/dnscrypt-proxy/dnscrypt-resolvers.csv.minisig + mv dnscrypt-resolvers.csv.minisig{.tmp,} + minisign -q -V -p ${upstreamResolverListPubKey} \ + -m dnscrypt-resolvers.csv.tmp -x dnscrypt-resolvers.csv.minisig + mv dnscrypt-resolvers.csv{.tmp,} + ''; + + serviceConfig = { + PrivateTmp = true; + PrivateDevices = true; + ProtectHome = true; + ProtectSystem = true; + }; + }; + + systemd.timers.update-dnscrypt-resolvers = optionalAttrs useUpstreamResolverList { + timerConfig = { + OnBootSec = "5min"; + OnUnitActiveSec = "6h"; + }; + wantedBy = [ "timers.target" ]; + }; + systemd.sockets.dnscrypt-proxy = { description = "dnscrypt-proxy listening socket"; socketConfig = { - ListenStream = "${localAddress}"; - ListenDatagram = "${localAddress}"; + ListenStream = localAddress; + ListenDatagram = localAddress; }; wantedBy = [ "sockets.target" ]; }; @@ -200,8 +263,13 @@ in systemd.services.dnscrypt-proxy = { description = "dnscrypt-proxy daemon"; - after = [ "network.target" ] ++ optional apparmorEnabled "apparmor.service"; - requires = [ "dnscrypt-proxy.socket "] ++ optional apparmorEnabled "apparmor.service"; + after = [ "network.target" ] + ++ optional apparmorEnabled "apparmor.service" + ++ optional useUpstreamResolverList "init-dnscrypt-proxy-statedir.service"; + + requires = [ "dnscrypt-proxy.socket "] + ++ optional apparmorEnabled "apparmor.service" + ++ optional useUpstreamResolverList "init-dnscrypt-proxy-statedir.service"; serviceConfig = { Type = "simple"; diff --git a/nixos/modules/services/networking/i2pd.nix 
b/nixos/modules/services/networking/i2pd.nix index 926857a0ff4..578376764eb 100644 --- a/nixos/modules/services/networking/i2pd.nix +++ b/nixos/modules/services/networking/i2pd.nix @@ -10,7 +10,7 @@ let extip = "EXTIP=\$(${pkgs.curl.bin}/bin/curl -sf \"http://jsonip.com\" | ${pkgs.gawk}/bin/awk -F'\"' '{print $4}')"; - toYesNo = b: if b then "yes" else "no"; + toYesNo = b: if b then "true" else "false"; mkEndpointOpt = name: addr: port: { enable = mkEnableOption name; @@ -31,6 +31,17 @@ let }; }; + mkKeyedEndpointOpt = name: addr: port: keyFile: + (mkEndpointOpt name addr port) // { + keys = mkOption { + type = types.str; + default = ""; + description = '' + File to persist ${lib.toUpper name} keys. + ''; + }; + }; + commonTunOpts = let i2cpOpts = { length = mkOption { @@ -63,19 +74,49 @@ let }; } // mkEndpointOpt name "127.0.0.1" 0; - i2pdConf = pkgs.writeText "i2pd.conf" '' - ipv6 = ${toYesNo cfg.enableIPv6} - notransit = ${toYesNo cfg.notransit} - floodfill = ${toYesNo cfg.floodfill} - ${if isNull cfg.port then "" else "port = ${toString cfg.port}"} - ${flip concatMapStrings - (collect (proto: proto ? port && proto ? address && proto ? name) cfg.proto) - (proto: let portStr = toString proto.port; in '' - [${proto.name}] - address = ${proto.address} - port = ${toString proto.port} - enabled = ${toYesNo proto.enable} - '') + i2pdConf = pkgs.writeText "i2pd.conf" + '' + ipv4 = ${toYesNo cfg.enableIPv4} + ipv6 = ${toYesNo cfg.enableIPv6} + notransit = ${toYesNo cfg.notransit} + floodfill = ${toYesNo cfg.floodfill} + netid = ${toString cfg.netid} + ${if isNull cfg.bandwidth then "" else "bandwidth = ${toString cfg.bandwidth}" } + ${if isNull cfg.port then "" else "port = ${toString cfg.port}"} + + [limits] + transittunnels = ${toString cfg.limits.transittunnels} + + [upnp] + enabled = ${toYesNo cfg.upnp.enable} + name = ${cfg.upnp.name} + + [precomputation] + elgamal = ${toYesNo cfg.precomputation.elgamal} + + [reseed] + verify = ${toYesNo cfg.reseed.verify} + file = ${cfg.reseed.file} + urls = ${builtins.concatStringsSep "," cfg.reseed.urls} + + [addressbook] + defaulturl = ${cfg.addressbook.defaulturl} + subscriptions = ${builtins.concatStringsSep "," cfg.addressbook.subscriptions} + ${flip concatMapStrings + (collect (proto: proto ? port && proto ? address && proto ? name) cfg.proto) + (proto: let portStr = toString proto.port; in + '' + [${proto.name}] + enabled = ${toYesNo proto.enable} + address = ${proto.address} + port = ${toString proto.port} + ${if proto ? keys then "keys = ${proto.keys}" else ""} + ${if proto ? auth then "auth = ${toYesNo proto.auth}" else ""} + ${if proto ? user then "user = ${proto.user}" else ""} + ${if proto ? pass then "pass = ${proto.pass}" else ""} + ${if proto ? outproxy then "outproxy = ${proto.outproxy}" else ""} + ${if proto ? outproxyPort then "outproxyport = ${toString proto.outproxyPort}" else ""} + '') } ''; @@ -114,7 +155,7 @@ let i2pdSh = pkgs.writeScriptBin "i2pd" '' #!/bin/sh ${if isNull cfg.extIp then extip else ""} - ${pkgs.i2pd}/bin/i2pd --log=1 \ + ${pkgs.i2pd}/bin/i2pd \ --host=${if isNull cfg.extIp then "$EXTIP" else cfg.extIp} \ --conf=${i2pdConf} \ --tunconf=${i2pdTunnelConf} @@ -135,6 +176,8 @@ in default = false; description = '' Enables I2Pd as a running service upon activation. + Please read http://i2pd.readthedocs.io/en/latest/ for further + configuration help. ''; }; @@ -162,6 +205,22 @@ in ''; }; + netid = mkOption { + type = types.int; + default = 2; + description = '' + I2P overlay netid. 
+ ''; + }; + + bandwidth = mkOption { + type = with types; nullOr int; + default = null; + description = '' + Set a router bandwidth limit integer in kbps or letters: L (32), O (256), P (2048), X (>9000) + ''; + }; + port = mkOption { type = with types; nullOr int; default = null; @@ -170,6 +229,14 @@ in ''; }; + enableIPv4 = mkOption { + type = types.bool; + default = true; + description = '' + Enables IPv4 connectivity. Enabled by default. + ''; + }; + enableIPv6 = mkOption { type = types.bool; default = false; @@ -178,16 +245,141 @@ in ''; }; - proto.http = mkEndpointOpt "http" "127.0.0.1" 7070; + upnp = { + enable = mkOption { + type = types.bool; + default = false; + description = '' + Enables UPnP. + ''; + }; + + name = mkOption { + type = types.str; + default = "I2Pd"; + description = '' + Name i2pd appears in UPnP forwardings list. + ''; + }; + }; + + precomputation.elgamal = mkOption { + type = types.bool; + default = false; + description = '' + Use ElGamal precomputated tables. + ''; + }; + + reseed = { + verify = mkOption { + type = types.bool; + default = false; + description = '' + Request SU3 signature verification + ''; + }; + + file = mkOption { + type = types.str; + default = ""; + description = '' + Full path to SU3 file to reseed from + ''; + }; + + urls = mkOption { + type = with types; listOf str; + default = [ + "https://reseed.i2p-project.de/" + "https://i2p.mooo.com/netDb/" + "https://netdb.i2p2.no/" + "https://us.reseed.i2p2.no:444/" + "https://uk.reseed.i2p2.no:444/" + "https://i2p.manas.ca:8443/" + ]; + description = '' + Reseed URLs + ''; + }; + }; + + addressbook = { + defaulturl = mkOption { + type = types.str; + default = "http://joajgazyztfssty4w2on5oaqksz6tqoxbduy553y34mf4byv6gpq.b32.i2p/export/alive-hosts.txt"; + description = '' + AddressBook subscription URL for initial setup + ''; + }; + subscriptions = mkOption { + type = with types; listOf str; + default = [ + "http://inr.i2p/export/alive-hosts.txt" + "http://i2p-projekt.i2p/hosts.txt" + "http://stats.i2p/cgi-bin/newhosts.txt" + ]; + description = '' + AddressBook subscription URLs + ''; + }; + }; + + limits.transittunnels = mkOption { + type = types.int; + default = 2500; + description = '' + Maximum number of active transit sessions + ''; + }; + + proto.http = (mkEndpointOpt "http" "127.0.0.1" 7070) // { + auth = mkOption { + type = types.bool; + default = false; + description = '' + Enable authentication for webconsole. + ''; + }; + user = mkOption { + type = types.str; + default = "i2pd"; + description = '' + Username for webconsole access + ''; + }; + pass = mkOption { + type = types.str; + default = "i2pd"; + description = '' + Password for webconsole access. 
+ ''; + }; + }; + + proto.httpProxy = mkKeyedEndpointOpt "httpproxy" "127.0.0.1" 4446 ""; + proto.socksProxy = (mkKeyedEndpointOpt "socksproxy" "127.0.0.1" 4447 "") + // { + outproxy = mkOption { + type = types.str; + default = "127.0.0.1"; + description = "Upstream outproxy bind address."; + }; + outproxyPort = mkOption { + type = types.int; + default = 4444; + description = "Upstream outproxy bind port."; + }; + }; + proto.sam = mkEndpointOpt "sam" "127.0.0.1" 7656; proto.bob = mkEndpointOpt "bob" "127.0.0.1" 2827; + proto.i2cp = mkEndpointOpt "i2cp" "127.0.0.1" 7654; proto.i2pControl = mkEndpointOpt "i2pcontrol" "127.0.0.1" 7650; - proto.httpProxy = mkEndpointOpt "httpproxy" "127.0.0.1" 4446; - proto.socksProxy = mkEndpointOpt "socksproxy" "127.0.0.1" 4447; outTunnels = mkOption { default = {}; - type = with types; loaOf (submodule ( + type = with types; loaOf (submodule ( { name, config, ... }: { options = commonTunOpts name; config = { diff --git a/nixos/modules/services/networking/tinc.nix b/nixos/modules/services/networking/tinc.nix index b26d30597b1..f8e68fda7fc 100644 --- a/nixos/modules/services/networking/tinc.nix +++ b/nixos/modules/services/networking/tinc.nix @@ -68,7 +68,7 @@ in interfaceType = mkOption { default = "tun"; - type = types.addCheck types.str (n: n == "tun" || n == "tap"); + type = types.enum [ "tun" "tap" ]; description = '' The type of virtual interface used for the network connection ''; diff --git a/nixos/modules/services/web-servers/fcgiwrap.nix b/nixos/modules/services/web-servers/fcgiwrap.nix index 2c5e433003c..a64a187255a 100644 --- a/nixos/modules/services/web-servers/fcgiwrap.nix +++ b/nixos/modules/services/web-servers/fcgiwrap.nix @@ -21,7 +21,7 @@ in { }; socketType = mkOption { - type = types.addCheck types.str (t: t == "unix" || t == "tcp" || t == "tcp6"); + type = types.enum [ "unix" "tcp" "tcp6" ]; default = "unix"; description = "Socket type: 'unix', 'tcp' or 'tcp6'."; }; diff --git a/nixos/modules/system/boot/loader/grub/grub.nix b/nixos/modules/system/boot/loader/grub/grub.nix index c3be7407d59..17c842ddc53 100644 --- a/nixos/modules/system/boot/loader/grub/grub.nix +++ b/nixos/modules/system/boot/loader/grub/grub.nix @@ -324,8 +324,7 @@ in fsIdentifier = mkOption { default = "uuid"; - type = types.addCheck types.str - (type: type == "uuid" || type == "label" || type == "provided"); + type = types.enum [ "uuid" "label" "provided" ]; description = '' Determines how GRUB will identify devices when generating the configuration file. A value of uuid / label signifies that grub diff --git a/nixos/modules/tasks/network-interfaces.nix b/nixos/modules/tasks/network-interfaces.nix index aae4dc5fdad..1faa8abd5f7 100644 --- a/nixos/modules/tasks/network-interfaces.nix +++ b/nixos/modules/tasks/network-interfaces.nix @@ -245,7 +245,7 @@ let virtualType = mkOption { default = null; - type = types.nullOr (types.addCheck types.str (v: v == "tun" || v == "tap")); + type = with types; nullOr (enum [ "tun" "tap" ]); description = '' The explicit type of interface to create. Accepts tun or tap strings. Also accepts null to implicitly detect the type of device. 
diff --git a/nixos/release.nix b/nixos/release.nix index fbd3efd16ff..639ee45b38d 100644 --- a/nixos/release.nix +++ b/nixos/release.nix @@ -275,6 +275,7 @@ in rec { tests.networkingProxy = callTest tests/networking-proxy.nix {}; tests.nfs3 = callTest tests/nfs.nix { version = 3; }; tests.nfs4 = callTest tests/nfs.nix { version = 4; }; + tests.leaps = callTest tests/leaps.nix { }; tests.nsd = callTest tests/nsd.nix {}; tests.openssh = callTest tests/openssh.nix {}; #tests.panamax = hydraJob (import tests/panamax.nix { system = "x86_64-linux"; }); diff --git a/nixos/tests/dnscrypt-proxy.nix b/nixos/tests/dnscrypt-proxy.nix index b686e9582a7..26409949ec6 100644 --- a/nixos/tests/dnscrypt-proxy.nix +++ b/nixos/tests/dnscrypt-proxy.nix @@ -22,8 +22,6 @@ import ./make-test.nix ({ pkgs, ... }: { }; testScript = '' - $client->start; - $client->waitForUnit("sockets.target"); $client->waitForUnit("dnsmasq"); # The daemon is socket activated; sending a single ping should activate it. diff --git a/nixos/tests/leaps.nix b/nixos/tests/leaps.nix new file mode 100644 index 00000000000..3c390e1a169 --- /dev/null +++ b/nixos/tests/leaps.nix @@ -0,0 +1,29 @@ +import ./make-test.nix ({ pkgs, ... }: + +{ + name = "leaps"; + meta = with pkgs.stdenv.lib.maintainers; { + maintainers = [ qknight ]; + }; + + nodes = + { + client = { }; + + server = + { services.leaps = { + enable = true; + port = 6666; + path = "/leaps/"; + }; + networking.firewall.enable = false; + }; + }; + + testScript = + '' + startAll; + $server->waitForOpenPort(6666); + $client->succeed("curl http://server:6666/leaps/ | grep -i 'leaps'"); + ''; +}) diff --git a/pkgs/applications/audio/audacity/default.nix b/pkgs/applications/audio/audacity/default.nix index b31cecffbd1..cc96f6dbbb3 100644 --- a/pkgs/applications/audio/audacity/default.nix +++ b/pkgs/applications/audio/audacity/default.nix @@ -1,5 +1,5 @@ { stdenv, fetchurl, wxGTK30, pkgconfig, gettext, gtk2, glib, zlib, perl, intltool, - libogg, libvorbis, libmad, alsaLib, libsndfile, soxr, flac, lame, fetchpatch, + libogg, libvorbis, libmad, libjack2, lv2, lilv, serd, sord, sratom, suil, alsaLib, libsndfile, soxr, flac, lame, fetchpatch, expat, libid3tag, ffmpeg, soundtouch /*, portaudio - given up fighting their portaudio.patch */ }: @@ -29,7 +29,7 @@ stdenv.mkDerivation rec { buildInputs = [ pkgconfig gettext wxGTK30 expat alsaLib - libsndfile soxr libid3tag gtk2 + libsndfile soxr libid3tag libjack2 lv2 lilv serd sord sratom suil gtk2 ffmpeg libmad lame libvorbis flac soundtouch ]; #ToDo: detach sbsms diff --git a/pkgs/applications/audio/ladspa-plugins/default.nix b/pkgs/applications/audio/ladspa-plugins/default.nix index b563c850d52..1b68caccf4e 100644 --- a/pkgs/applications/audio/ladspa-plugins/default.nix +++ b/pkgs/applications/audio/ladspa-plugins/default.nix @@ -1,22 +1,29 @@ -{ stdenv, fetchurl, fftw, ladspaH, pkgconfig }: +{ stdenv, fetchurl, autoreconfHook, automake, fftw, ladspaH, libxml2, pkgconfig +, perlPackages }: + +stdenv.mkDerivation rec { + name = "swh-plugins-${version}"; + version = "0.4.17"; -stdenv.mkDerivation { - name = "swh-plugins-0.4.15"; src = fetchurl { - url = http://plugin.org.uk/releases/0.4.15/swh-plugins-0.4.15.tar.gz; - sha256 = "0h462s4mmqg4iw7zdsihnrmz2vjg0fd49qxw2a284bnryjjfhpnh"; + url = "https://github.com/swh/ladspa/archive/v${version}.tar.gz"; + sha256 = "1rqwh8xrw6hnp69dg4gy336bfbfpmbx4fjrk0nb8ypjcxkz91c6i"; }; - - buildInputs = [fftw ladspaH pkgconfig]; - postInstall = - '' - mkdir -p $out/share/ladspa/ - ln -sv $out/lib/ladspa 
$out/share/ladspa/lib - ''; + buildInputs = [ autoreconfHook fftw ladspaH libxml2 pkgconfig perlPackages.perl perlPackages.XMLParser ]; - meta = { + patchPhase = '' + patchShebangs . + patchShebangs ./metadata/ + cp ${automake}/share/automake-*/mkinstalldirs . + ''; + + meta = with stdenv.lib; { + homepage = http://plugin.org.uk/; description = "LADSPA format audio plugins"; + license = licenses.gpl2; + maintainers = [ maintainers.magnetophon ]; + platforms = platforms.linux; }; } diff --git a/pkgs/applications/audio/ladspa-plugins/git.nix b/pkgs/applications/audio/ladspa-plugins/git.nix deleted file mode 100644 index ef34eb91600..00000000000 --- a/pkgs/applications/audio/ladspa-plugins/git.nix +++ /dev/null @@ -1,28 +0,0 @@ -{ stdenv, fetchgit, autoreconfHook, automake, fftw, ladspaH, libxml2, pkgconfig -, perl, perlPackages }: - -stdenv.mkDerivation { - name = "swh-plugins-git-2015-03-04"; - - src = fetchgit { - url = https://github.com/swh/ladspa.git; - rev = "4b8437e8037cace3d5bf8ce6d1d1da0182aba686"; - sha256 = "1rmqm4780dhp0pj2scl3k7m8hpp1x6w6ln4wwg954zb9570rqaxx"; - }; - - buildInputs = [ autoreconfHook fftw ladspaH libxml2 pkgconfig perl perlPackages.XMLParser ]; - - patchPhase = '' - patchShebangs . - patchShebangs ./metadata/ - cp ${automake}/share/automake-*/mkinstalldirs . - ''; - - meta = with stdenv.lib; { - homepage = http://plugin.org.uk/; - description = "LADSPA format audio plugins"; - license = licenses.gpl2; - maintainers = [ maintainers.magnetophon ]; - platforms = platforms.linux; - }; -} diff --git a/pkgs/applications/audio/snd/default.nix b/pkgs/applications/audio/snd/default.nix index 8abf7cea6eb..da76d7f16db 100644 --- a/pkgs/applications/audio/snd/default.nix +++ b/pkgs/applications/audio/snd/default.nix @@ -4,11 +4,11 @@ }: stdenv.mkDerivation rec { - name = "snd-15.9"; + name = "snd-16.9"; src = fetchurl { url = "mirror://sourceforge/snd/${name}.tar.gz"; - sha256 = "0hs9ailgaphgyi3smnrpwksvdww85aa7szqgi6l6d2jwfx9g4bhd"; + sha256 = "1rw9wrj1f0g413ya32s9mwhvv3c6iasjza22irzf6xlv49b9s5dp"; }; nativeBuildInputs = [ pkgconfig ]; diff --git a/pkgs/applications/editors/emacs-modes/stratego/builder.sh b/pkgs/applications/editors/emacs-modes/stratego/builder.sh deleted file mode 100644 index 7d734ec0888..00000000000 --- a/pkgs/applications/editors/emacs-modes/stratego/builder.sh +++ /dev/null @@ -1,4 +0,0 @@ -source $stdenv/setup - -mkdir -p $out/share/emacs/site-lisp -cp $src $out/share/emacs/site-lisp/stratego.el diff --git a/pkgs/applications/editors/emacs-modes/stratego/default.nix b/pkgs/applications/editors/emacs-modes/stratego/default.nix deleted file mode 100644 index bb4078d2d2d..00000000000 --- a/pkgs/applications/editors/emacs-modes/stratego/default.nix +++ /dev/null @@ -1,10 +0,0 @@ -{stdenv, fetchsvn}: -stdenv.mkDerivation { - name = "stratego-mode"; - builder = ./builder.sh; - src = fetchsvn { - url = https://svn.strategoxt.org/repos/StrategoXT/stratego-editors/trunk/emacs/stratego.el; - rev = 12678; - sha256 = "4ab4ec587550233f29ca08b82fa0a9f7e5b33fc178348037e3ab1816bd60f538"; - }; -} diff --git a/pkgs/applications/networking/esniper/default.nix b/pkgs/applications/networking/esniper/default.nix index 751f6f9855b..bf6da8c207a 100644 --- a/pkgs/applications/networking/esniper/default.nix +++ b/pkgs/applications/networking/esniper/default.nix @@ -1,11 +1,11 @@ { stdenv, fetchurl, openssl, curl, coreutils, gawk, bash, which }: stdenv.mkDerivation { - name = "esniper-2.31.0"; + name = "esniper-2.32.0"; src = fetchurl { - url = 
"mirror://sourceforge/esniper/esniper-2-31-0.tgz"; - sha256 = "0xn6gdyr0c18khwcsi2brp49wkancrsrxxca7hvbawhbf263glih"; + url = "mirror://sourceforge/esniper/esniper-2-32-0.tgz"; + sha256 = "04lka4d0mnrwc369yzvq28n8qi1qbm8810ykx6d0a4kaghiybqsy"; }; buildInputs = [ openssl curl ]; diff --git a/pkgs/applications/networking/instant-messengers/discord/default.nix b/pkgs/applications/networking/instant-messengers/discord/default.nix index 3a08cd26292..2ca16eb4458 100644 --- a/pkgs/applications/networking/instant-messengers/discord/default.nix +++ b/pkgs/applications/networking/instant-messengers/discord/default.nix @@ -4,7 +4,7 @@ , libXext, libXfixes, libXi, libXrandr, libXrender, libXtst, nspr, nss, pango , systemd, libXScrnSaver }: -let version = "0.0.9"; in +let version = "0.0.10"; in stdenv.mkDerivation { @@ -12,7 +12,7 @@ stdenv.mkDerivation { src = fetchurl { url = "https://cdn-canary.discordapp.com/apps/linux/${version}/discord-canary-${version}.tar.gz"; - sha256 = "72f692cea62b836220f40d81d110846f9cde9a0fba7a8d47226d89e0980255b9"; + sha256 = "1wkbbnbqbwgixdbm69dlirhgjnn8llqyzil01nqwpknh1qwd06pr"; }; libPath = stdenv.lib.makeLibraryPath [ diff --git a/pkgs/applications/networking/siproxd/cheaders.patch b/pkgs/applications/networking/siproxd/cheaders.patch index 53c4813cc33..69a3e328737 100644 --- a/pkgs/applications/networking/siproxd/cheaders.patch +++ b/pkgs/applications/networking/siproxd/cheaders.patch @@ -3,11 +3,12 @@ index 1904ab3..cb3624d 100644 --- a/src/dejitter.c +++ b/src/dejitter.c @@ -22,6 +22,8 @@ - + #include - + +#include +#include + #include #include #include #include diff --git a/pkgs/applications/networking/siproxd/default.nix b/pkgs/applications/networking/siproxd/default.nix index 69ebab78f94..14ed2587e63 100644 --- a/pkgs/applications/networking/siproxd/default.nix +++ b/pkgs/applications/networking/siproxd/default.nix @@ -1,11 +1,11 @@ { stdenv, fetchurl, libosip }: stdenv.mkDerivation rec { - name = "siproxd-0.8.1"; - + name = "siproxd-0.8.2"; + src = fetchurl { url = "mirror://sourceforge/siproxd/${name}.tar.gz"; - sha256 = "1bcxl0h5nc28m8lcdhpbl5yc93w98xm53mfzrf04knsvmx7z0bfz"; + sha256 = "1l6cyxxhra825jiiw9npa7jrbfgbyfpk4966cqkrw66cn28y8v2j"; }; patches = [ ./cheaders.patch ]; diff --git a/pkgs/applications/science/biology/pal2nal/default.nix b/pkgs/applications/science/biology/pal2nal/default.nix deleted file mode 100644 index 956f8b07e98..00000000000 --- a/pkgs/applications/science/biology/pal2nal/default.nix +++ /dev/null @@ -1,31 +0,0 @@ -{stdenv, fetchurl, perl, paml}: - -stdenv.mkDerivation { - name = "pal2nal-12"; - src = fetchurl { - url = http://coot.embl.de/pal2nal/distribution/pal2nal.v12.tar.gz; - sha256 = "1qj9sq5skpa7vyccl9gxc5ls85jwiq8j6mr8wvacz4yhyg0afy04"; - }; - - installPhase = '' - mkdir -p $out/bin - - cp -v pal2nal.pl $out/bin - - mkdir -p $out/doc - - cp -v README $out/doc - ''; - - meta = { - description = "Program for aligning nucleotide sequences based on an aminoacid alignment"; - longDescription = '' - PAL2NAL is a program that converts a multiple sequence alignment of proteins and the corresponding DNA (or mRNA) sequences into a codon alignment. The program automatically assigns the corresponding codon sequence even if the input DNA sequence has mismatches with the input protein sequence, or contains UTRs, polyA tails. It can also deal with frame shifts in the input alignment, which is suitable for the analysis of pseudogenes. 
The resulting codon alignment can further be subjected to the calculation of synonymous (KS) and non-synonymous (KA) substitution rates. - -If the input is a pair of sequences, PAL2NAL automatically calculates KS and KA by the codeml program in PAML. -''; - license = "non-commercial"; - homepage = http://coot.embl.de/pal2nal/; - pkgMaintainer = "Pjotr Prins"; - }; -} diff --git a/pkgs/applications/science/biology/paml/default.nix b/pkgs/applications/science/biology/paml/default.nix index cec0aa7e5eb..589c2809a93 100644 --- a/pkgs/applications/science/biology/paml/default.nix +++ b/pkgs/applications/science/biology/paml/default.nix @@ -1,11 +1,11 @@ {stdenv, fetchurl}: stdenv.mkDerivation rec { - version = "4.2a"; + version = "4.9c"; name = "paml-${version}"; src = fetchurl { - url = "http://abacus.gene.ucl.ac.uk/software/paml${version}.tar.gz"; - sha256 = "0yywyrjgxrpavp50n00l01pl90b7pykgb2k53yrlykz9dnf583pb"; + url = "http://abacus.gene.ucl.ac.uk/software/paml${version}.tgz"; + sha256 = "18a1l47223l7jyjavm8a8la84q9k9kbxwmj7kz4z3pdx70qrl04j"; }; preBuild = '' @@ -13,7 +13,7 @@ stdenv.mkDerivation rec { ''; installPhase = '' mkdir -pv $out/bin - cp -v codeml $out/bin + cp -v codeml $out/bin cp -v baseml $out/bin cp -v basemlg $out/bin cp -v chi2 $out/bin @@ -28,6 +28,6 @@ stdenv.mkDerivation rec { description = "Phylogenetic Analysis by Maximum Likelihood (PAML)"; longDescription = ''PAML is a package of programs for phylogenetic analyses of DNA or protein sequences using maximum likelihood. It is maintained and distributed for academic use free of charge by Ziheng Yang. ANSI C source codes are distributed for UNIX/Linux/Mac OSX, and executables are provided for MS Windows. PAML is not good for tree making. It may be used to estimate parameters and test hypotheses to study the evolutionary process, when you have reconstructed trees using other programs such as PAUP*, PHYLIP, MOLPHY, PhyML, RaxML, etc.''; license = "non-commercial"; - homepage = http://abacus.gene.ucl.ac.uk/software/paml.html; + homepage = http://abacus.gene.ucl.ac.uk/software/paml.html; }; } diff --git a/pkgs/applications/virtualization/remotebox/default.nix b/pkgs/applications/virtualization/remotebox/default.nix index 37086f52730..63389153a0a 100644 --- a/pkgs/applications/virtualization/remotebox/default.nix +++ b/pkgs/applications/virtualization/remotebox/default.nix @@ -2,11 +2,11 @@ stdenv.mkDerivation rec { name = "remotebox-${version}"; - version = "2.0"; + version = "2.1"; src = fetchurl { url = "http://remotebox.knobgoblin.org.uk/downloads/RemoteBox-${version}.tar.bz2"; - sha256 = "0c73i53wdjd2m2sdgq3r3xp30irxh5z5rak2rk79yb686s6bv759"; + sha256 = "0pyi433pwbpyh58p08q8acav7mk90gchgjghvl9f8wqafx7bp404"; }; buildInputs = with perlPackages; [ perl Glib Gtk2 Pango SOAPLite ]; diff --git a/pkgs/data/fonts/unifont/default.nix b/pkgs/data/fonts/unifont/default.nix index b4078720d0a..682c42afa3e 100644 --- a/pkgs/data/fonts/unifont/default.nix +++ b/pkgs/data/fonts/unifont/default.nix @@ -2,16 +2,16 @@ stdenv.mkDerivation rec { name = "unifont-${version}"; - version = "9.0.03"; + version = "9.0.04"; ttf = fetchurl { - url = "http://fossies.org/linux/unifont/font/precompiled/${name}.ttf"; - sha256 = "00j97r658xl33zgi66glgbx2s7j9q52cj4iq7z1rrf3p38xzgbff"; + url = "mirror://gnu/unifont/${name}/${name}.ttf"; + sha256 = "052waajjdry67jjl7vy984padyzdrkhf5gylgbnvj90q6d52j02z"; }; pcf = fetchurl { - url = "http://fossies.org/linux/unifont/font/precompiled/${name}.pcf.gz"; - sha256 = 
"1w3gaz8afc3q7svgm4hmgjhvi9pxkmgsib8sscgi52c7ff0mhq9f"; + url = "mirror://gnu/unifont/${name}/${name}.pcf.gz"; + sha256 = "0736qmlzsf4xlipj4vzihafkigc3xjisxnwcqhl9dzkhxfjq9612"; }; buildInputs = [ mkfontscale mkfontdir ]; diff --git a/pkgs/desktops/kde-5/plasma/kmenuedit.nix b/pkgs/desktops/kde-5/plasma/kmenuedit.nix index 3adb77a0051..f10bf6bb3cd 100644 --- a/pkgs/desktops/kde-5/plasma/kmenuedit.nix +++ b/pkgs/desktops/kde-5/plasma/kmenuedit.nix @@ -1,11 +1,14 @@ -{ plasmaPackage, ecm, kdoctools, ki18n, kxmlgui -, kdbusaddons, kiconthemes, kio, sonnet, kdelibs4support +{ + plasmaPackage, + ecm, kdoctools, + kdbusaddons, kdelibs4support, khotkeys, ki18n, kiconthemes, kio, kxmlgui, + sonnet }: plasmaPackage { name = "kmenuedit"; nativeBuildInputs = [ ecm kdoctools ]; propagatedBuildInputs = [ - kdelibs4support ki18n kio sonnet kxmlgui kdbusaddons kiconthemes + kdbusaddons kdelibs4support khotkeys ki18n kiconthemes kio kxmlgui sonnet ]; } diff --git a/pkgs/desktops/kde-5/plasma/ksysguard.nix b/pkgs/desktops/kde-5/plasma/ksysguard.nix index dcde867dffa..f7e5cced708 100644 --- a/pkgs/desktops/kde-5/plasma/ksysguard.nix +++ b/pkgs/desktops/kde-5/plasma/ksysguard.nix @@ -1,11 +1,15 @@ -{ plasmaPackage, ecm, kdoctools, kconfig -, kcoreaddons, kdelibs4support, ki18n, kitemviews, knewstuff -, kiconthemes, libksysguard, qtwebkit +{ + plasmaPackage, + ecm, kdoctools, + lm_sensors, + kconfig, kcoreaddons, kdelibs4support, ki18n, kiconthemes, kitemviews, + knewstuff, libksysguard, qtwebkit }: plasmaPackage { name = "ksysguard"; nativeBuildInputs = [ ecm kdoctools ]; + buildInputs = [ lm_sensors ]; propagatedBuildInputs = [ kconfig kcoreaddons kitemviews knewstuff kiconthemes libksysguard kdelibs4support ki18n qtwebkit diff --git a/pkgs/desktops/kde-5/plasma/libksysguard/default.nix b/pkgs/desktops/kde-5/plasma/libksysguard/default.nix index b6ca3dfa26d..2d81d061f4c 100644 --- a/pkgs/desktops/kde-5/plasma/libksysguard/default.nix +++ b/pkgs/desktops/kde-5/plasma/libksysguard/default.nix @@ -1,7 +1,9 @@ -{ fetchpatch, plasmaPackage, ecm, kauth, kcompletion -, kconfigwidgets, kcoreaddons, kservice, kwidgetsaddons -, kwindowsystem, plasma-framework, qtscript, qtx11extras -, kconfig, ki18n, kiconthemes +{ + plasmaPackage, + ecm, + kauth, kcompletion, kconfig, kconfigwidgets, kcoreaddons, ki18n, kiconthemes, + kservice, kwidgetsaddons, kwindowsystem, plasma-framework, qtscript, qtwebkit, + qtx11extras }: plasmaPackage { @@ -9,11 +11,10 @@ plasmaPackage { patches = [ ./0001-qdiriterator-follow-symlinks.patch ]; - nativeBuildInputs = [ - ecm - ]; + nativeBuildInputs = [ ecm ]; propagatedBuildInputs = [ - kauth kconfig ki18n kiconthemes kwindowsystem plasma-framework qtx11extras - kcompletion kconfigwidgets kcoreaddons kservice kwidgetsaddons qtscript + kauth kconfig ki18n kiconthemes kwindowsystem kcompletion kconfigwidgets + kcoreaddons kservice kwidgetsaddons plasma-framework qtscript qtx11extras + qtwebkit ]; } diff --git a/pkgs/desktops/kde-5/plasma/oxygen.nix b/pkgs/desktops/kde-5/plasma/oxygen.nix index 3c54055a633..44a7575526f 100644 --- a/pkgs/desktops/kde-5/plasma/oxygen.nix +++ b/pkgs/desktops/kde-5/plasma/oxygen.nix @@ -1,16 +1,16 @@ -{ plasmaPackage, ecm, ki18n, kcmutils, kconfig -, kdecoration, kguiaddons, kwidgetsaddons, kservice, kcompletion -, frameworkintegration, kwindowsystem, makeQtWrapper, qtx11extras +{ + plasmaPackage, + ecm, makeQtWrapper, + frameworkintegration, kcmutils, kcompletion, kconfig, kdecoration, kguiaddons, + ki18n, kwidgetsaddons, kservice, kwayland, kwindowsystem, 
qtx11extras }: plasmaPackage { name = "oxygen"; - nativeBuildInputs = [ - ecm makeQtWrapper - ]; + nativeBuildInputs = [ ecm makeQtWrapper ]; propagatedBuildInputs = [ - kcmutils kconfig kdecoration kguiaddons kwidgetsaddons kservice kcompletion - frameworkintegration ki18n kwindowsystem qtx11extras + frameworkintegration kcmutils kcompletion kconfig kdecoration kguiaddons + ki18n kservice kwayland kwidgetsaddons kwindowsystem qtx11extras ]; postInstall = '' wrapQtProgram "$out/bin/oxygen-demo5" diff --git a/pkgs/desktops/kde-5/plasma/plasma-desktop/default.nix b/pkgs/desktops/kde-5/plasma/plasma-desktop/default.nix index e2b1acd198f..7e8823e2db9 100644 --- a/pkgs/desktops/kde-5/plasma/plasma-desktop/default.nix +++ b/pkgs/desktops/kde-5/plasma/plasma-desktop/default.nix @@ -1,20 +1,20 @@ -{ plasmaPackage, substituteAll, ecm, kdoctools -, attica, baloo, boost, fontconfig, kactivities, kactivities-stats -, kauth, kcmutils, kdbusaddons, kdeclarative, kded, kdelibs4support, kemoticons -, kglobalaccel, ki18n, kitemmodels, knewstuff, knotifications -, knotifyconfig, kpeople, krunner, kwallet, kwin, phonon -, plasma-framework, plasma-workspace, qtdeclarative, qtx11extras -, qtsvg, libXcursor, libXft, libxkbfile, xf86inputevdev -, xf86inputsynaptics, xinput, xkeyboard_config, xorgserver -, libcanberra_kde, libpulseaudio, utillinux -, qtquickcontrols, ksysguard +{ + plasmaPackage, substituteAll, + ecm, kdoctools, + attica, baloo, boost, fontconfig, ibus, kactivities, kactivities-stats, kauth, + kcmutils, kdbusaddons, kdeclarative, kded, kdelibs4support, kemoticons, + kglobalaccel, ki18n, kitemmodels, knewstuff, knotifications, knotifyconfig, + kpeople, krunner, ksysguard, kwallet, kwin, libXcursor, libXft, + libcanberra_kde, libpulseaudio, libxkbfile, phonon, plasma-framework, + plasma-workspace, qtdeclarative, qtquickcontrols, qtsvg, qtx11extras, xf86inputevdev, + xf86inputsynaptics, xinput, xkeyboard_config, xorgserver, utillinux }: plasmaPackage rec { name = "plasma-desktop"; nativeBuildInputs = [ ecm kdoctools ]; buildInputs = [ - attica boost fontconfig kcmutils kdbusaddons kded kitemmodels knewstuff + attica boost fontconfig ibus kcmutils kdbusaddons kded kitemmodels knewstuff knotifications knotifyconfig kwallet libcanberra_kde libXcursor libpulseaudio libXft libxkbfile phonon qtsvg xf86inputevdev xf86inputsynaptics xkeyboard_config xinput baloo kactivities diff --git a/pkgs/desktops/xfce/panel-plugins/xfce4-whiskermenu-plugin.nix b/pkgs/desktops/xfce/panel-plugins/xfce4-whiskermenu-plugin.nix index e4c44865583..a9c4da810c1 100644 --- a/pkgs/desktops/xfce/panel-plugins/xfce4-whiskermenu-plugin.nix +++ b/pkgs/desktops/xfce/panel-plugins/xfce4-whiskermenu-plugin.nix @@ -4,7 +4,7 @@ with stdenv.lib; stdenv.mkDerivation rec { p_name = "xfce4-whiskermenu-plugin"; - version = "1.5.3"; + version = "1.6.1"; name = "${p_name}-${version}"; @@ -12,7 +12,7 @@ stdenv.mkDerivation rec { owner = "gottcode"; repo = "xfce4-whiskermenu-plugin"; rev = "v${version}"; - sha256 = "07gmf9x3pw6xajklj0idahbnv0psnkhiqhb88bmkp344jirsx6ba"; + sha256 = "19hldrrgy7qmrncv5rfsclybycjp9rjfnslhm996h62d2p675qpc"; }; nativeBuildInputs = [ cmake pkgconfig intltool ]; diff --git a/pkgs/development/compilers/abc/builder-binjar.sh b/pkgs/development/compilers/abc/builder-binjar.sh deleted file mode 100644 index a954c49aa3f..00000000000 --- a/pkgs/development/compilers/abc/builder-binjar.sh +++ /dev/null @@ -1,4 +0,0 @@ -source $stdenv/setup - -mkdir -p $out/jars -cp $src $out/jars/$jarname.jar diff --git 
a/pkgs/development/compilers/abc/builder.sh b/pkgs/development/compilers/abc/builder.sh deleted file mode 100644 index ba594023842..00000000000 --- a/pkgs/development/compilers/abc/builder.sh +++ /dev/null @@ -1,40 +0,0 @@ -source $stdenv/setup - -tar zxvf $src - -cd abc-* - -for p in $patches; do - echo "applying patch $p" - patch -p1 < $p -done - -cat > ant.settings < $out/bin/abc < build-tmp.xml -mv build-tmp.xml build.xml - -cat > ant.settings < ant.settings < +Date: Sat, 5 Nov 2016 14:27:04 +0300 +Subject: [PATCH] getSessionAddress: take first bus address from + semicolon-separated variable + +--- + lib/DBus/Address.hs | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/lib/DBus/Address.hs b/lib/DBus/Address.hs +index 72ac99d..596b18c 100644 +--- a/lib/DBus/Address.hs ++++ b/lib/DBus/Address.hs +@@ -18,6 +18,7 @@ module DBus.Address where + import qualified Control.Exception + import Data.Char (digitToInt, ord, chr) + import Data.List (intercalate) ++import Data.Maybe (listToMaybe) + import qualified Data.Map + import Data.Map (Map) + import qualified System.Environment +@@ -152,7 +153,7 @@ getSystemAddress = do + getSessionAddress :: IO (Maybe Address) + getSessionAddress = do + env <- getenv "DBUS_SESSION_BUS_ADDRESS" +- return (env >>= parseAddress) ++ return $ maybe Nothing listToMaybe (env >>= parseAddresses) + + -- | Returns the address in the environment variable + -- @DBUS_STARTER_ADDRESS@, which must be set. +-- +2.10.1 + diff --git a/pkgs/development/interpreters/octave/default.nix b/pkgs/development/interpreters/octave/default.nix index 3389620cdd9..df443cdc3e1 100644 --- a/pkgs/development/interpreters/octave/default.nix +++ b/pkgs/development/interpreters/octave/default.nix @@ -25,9 +25,9 @@ stdenv.mkDerivation rec { sha256 = "11y2w6jgngj4rxiy136mkcs02l52rxk60kapyfc4rgrxz5hli3ym"; }; - buildInputs = [ gfortran readline ncurses perl flex texinfo qhull libX11 - graphicsmagick pcre pkgconfig mesa fltk zlib curl openblas libsndfile - fftw fftwSinglePrec qrupdate arpack libwebp ] + buildInputs = [ gfortran readline ncurses perl flex texinfo qhull + graphicsmagick pcre pkgconfig fltk zlib curl openblas libsndfile fftw + fftwSinglePrec qrupdate arpack libwebp ] ++ (stdenv.lib.optional (qt != null) qt) ++ (stdenv.lib.optional (qscintilla != null) qscintilla) ++ (stdenv.lib.optional (ghostscript != null) ghostscript) @@ -38,7 +38,7 @@ stdenv.mkDerivation rec { ++ (stdenv.lib.optional (jdk != null) jdk) ++ (stdenv.lib.optional (gnuplot != null) gnuplot) ++ (stdenv.lib.optional (python != null) python) - ++ (stdenv.lib.optionals (!stdenv.isDarwin) [mesa libX11]) + ++ (stdenv.lib.optionals (!stdenv.isDarwin) [ mesa libX11 ]) ; doCheck = !stdenv.isDarwin; diff --git a/pkgs/development/libraries/clanlib/default.nix b/pkgs/development/libraries/clanlib/default.nix deleted file mode 100644 index d4d46dd696c..00000000000 --- a/pkgs/development/libraries/clanlib/default.nix +++ /dev/null @@ -1,19 +0,0 @@ -{ -stdenv, fetchurl, zlib, -libpng, libjpeg, libvorbis, libogg, -libX11, xf86vidmodeproto, libXxf86vm, libXmu, mesa -}: - -stdenv.mkDerivation { - name = "clanlib-0.8.0"; - src = fetchurl { - url = http://www.clanlib.org/download/releases-0.8/ClanLib-0.8.0.tgz; - sha256 = "1rjr601h3hisrhvpkrj00wirx5hyfbppv9rla400wx7a42xvvyfy"; - }; - - buildInputs = [zlib libpng libjpeg - libvorbis libogg libX11 - xf86vidmodeproto libXmu - mesa libXxf86vm - ]; -} diff --git a/pkgs/development/libraries/dxflib/default.nix b/pkgs/development/libraries/dxflib/default.nix index 
f9c58857731..832b013123d 100644 --- a/pkgs/development/libraries/dxflib/default.nix +++ b/pkgs/development/libraries/dxflib/default.nix @@ -1,7 +1,7 @@ -{stdenv, fetchurl}: +{stdenv, fetchurl}: stdenv.mkDerivation rec { - version = "2.5.0.0-1"; + version = "3.12.2"; name = "dxflib-${version}"; src = fetchurl { url = "http://www.qcad.org/archives/dxflib/${name}.src.tar.gz"; diff --git a/pkgs/development/libraries/fltk/default.nix b/pkgs/development/libraries/fltk/default.nix index 99cb8aae323..6f906d52502 100644 --- a/pkgs/development/libraries/fltk/default.nix +++ b/pkgs/development/libraries/fltk/default.nix @@ -21,6 +21,8 @@ composableDerivation.composableDerivation {} { --replace 'class Fl_XFont_On_Demand' 'class FL_EXPORT Fl_XFont_On_Demand' ''; + patches = stdenv.lib.optionals stdenv.isDarwin [ ./nsosv.patch ]; + nativeBuildInputs = [ pkgconfig ]; propagatedBuildInputs = [ inputproto ] ++ (if stdenv.isDarwin diff --git a/pkgs/development/libraries/fltk/nsosv.patch b/pkgs/development/libraries/fltk/nsosv.patch new file mode 100644 index 00000000000..9e55b011b57 --- /dev/null +++ b/pkgs/development/libraries/fltk/nsosv.patch @@ -0,0 +1,20 @@ +diff --git a/src/Fl_cocoa.mm b/src/Fl_cocoa.mm +index 6f5b8b1..2c7763d 100644 +--- a/src/Fl_cocoa.mm ++++ b/src/Fl_cocoa.mm +@@ -4074,15 +4074,6 @@ Window fl_xid(const Fl_Window* w) + static int calc_mac_os_version() { + int M, m, b = 0; + NSAutoreleasePool *localPool = [[NSAutoreleasePool alloc] init]; +-#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_10 +- if ([NSProcessInfo instancesRespondToSelector:@selector(operatingSystemVersion)]) { +- NSOperatingSystemVersion version = [[NSProcessInfo processInfo] operatingSystemVersion]; +- M = version.majorVersion; +- m = version.minorVersion; +- b = version.patchVersion; +- } +- else +-#endif + { + NSDictionary * sv = [NSDictionary dictionaryWithContentsOfFile:@"/System/Library/CoreServices/SystemVersion.plist"]; + const char *s = [[sv objectForKey:@"ProductVersion"] UTF8String]; diff --git a/pkgs/development/libraries/gsoap/default.nix b/pkgs/development/libraries/gsoap/default.nix index d1140319611..bf1d29dae0e 100644 --- a/pkgs/development/libraries/gsoap/default.nix +++ b/pkgs/development/libraries/gsoap/default.nix @@ -2,11 +2,11 @@ stdenv.mkDerivation rec { name = "gsoap-${version}"; - version = "2.8.16"; + version = "2.8.37"; src = fetchurl { - url = "mirror://sourceforge/project/gsoap2/gSOAP/gsoap_${version}.zip"; - sha256 = "00lhhysa9f9ychkvn1ij0ngr54l1dl9ww801yrliwq5c05gql7a6"; + url = "mirror://sourceforge/project/gsoap2/gsoap-2.8/gsoap_${version}.zip"; + sha256 = "1nvf5hgwff1agqdzbn3qc5569jzm14qkwqws0673z6hv2l3lijx3"; }; buildInputs = [ unzip m4 bison flex openssl zlib ]; diff --git a/pkgs/development/libraries/iksemel/default.nix b/pkgs/development/libraries/iksemel/default.nix deleted file mode 100644 index 7e8061ee8bb..00000000000 --- a/pkgs/development/libraries/iksemel/default.nix +++ /dev/null @@ -1,24 +0,0 @@ -{ stdenv, fetchurl, pkgconfig, gnutls, zlib }: - -stdenv.mkDerivation rec { - name = "iksemel-${version}"; - version = "1.4"; - - src = fetchurl { - url = "https://iksemel.googlecode.com/files/${name}.tar.gz"; - sha1 = "722910b99ce794fd3f6f0e5f33fa804732cf46db"; - }; - - preConfigure = '' - sed -i -e '/if.*gnutls_check_version/,/return 1;/c return 0;' configure - export LIBGNUTLS_CONFIG="${pkgconfig}/bin/pkg-config gnutls" - ''; - - buildInputs = [ pkgconfig gnutls zlib ]; - - meta = { - homepage = "https://code.google.com/p/iksemel/"; - license = 
stdenv.lib.licenses.lgpl21Plus; - description = "Fast and portable XML parser and Jabber protocol library"; - }; -} diff --git a/pkgs/development/libraries/jasper/default.nix b/pkgs/development/libraries/jasper/default.nix index e2061df88f0..895d72dd7a0 100644 --- a/pkgs/development/libraries/jasper/default.nix +++ b/pkgs/development/libraries/jasper/default.nix @@ -1,27 +1,13 @@ { stdenv, fetchurl, fetchpatch, libjpeg, autoreconfHook }: stdenv.mkDerivation rec { - name = "jasper-1.900.2"; + name = "jasper-1.900.21"; src = fetchurl { url = "http://www.ece.uvic.ca/~mdadams/jasper/software/${name}.tar.gz"; - sha256 = "0bkibjhq3js2ldxa2f9pss84lcx4f5d3v0qis3ifi11ciy7a6c9a"; + sha256 = "1cypmlzq5vmbacsn8n3ls9p7g64scv3fzx88qf8c270dz10s5j79"; }; - patches = [ - ./jasper-CVE-2014-8137-variant2.diff - ./jasper-CVE-2014-8137-noabort.diff - - (fetchpatch { # CVE-2016-2089 - url = "https://github.com/mdadams/jasper/commit/aa6d9c2bbae9155f8e1466295373a68fa97291c3.patch"; - sha256 = "1pxnm86zmbq6brfwsm5wx3iv7s92n4xilc52lzp61q266jmlggrf"; - }) - (fetchpatch { # CVE-2015-5203 - url = "https://github.com/mdadams/jasper/commit/e73bb58f99fec0bf9c5d8866e010fcf736a53b9a.patch"; - sha256 = "1r6hxbnhpnb7q6p2kbdxc1cpph3ic851x2hy477yv5c3qmrbx9bk"; - }) - ]; - # newer reconf to recognize a multiout flag nativeBuildInputs = [ autoreconfHook ]; propagatedBuildInputs = [ libjpeg ]; diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-noabort.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-noabort.diff deleted file mode 100644 index 47b57d5c809..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-noabort.diff +++ /dev/null @@ -1,16 +0,0 @@ -From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967284&action=diff - ---- jasper-1.900.1.orig/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:30:54.193209780 +0100 -+++ jasper-1.900.1/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:36:46.313217814 +0100 -@@ -291,7 +291,10 @@ jas_image_t *jp2_decode(jas_stream_t *in - case JP2_COLR_ICC: - iccprof = jas_iccprof_createfrombuf(dec->colr->data.colr.iccp, - dec->colr->data.colr.iccplen); -- assert(iccprof); -+ if (!iccprof) { -+ jas_eprintf("error: failed to parse ICC profile\n"); -+ goto error; -+ } - jas_iccprof_gethdr(iccprof, &icchdr); - jas_eprintf("ICC Profile CS %08x\n", icchdr.colorspc); - jas_image_setclrspc(dec->image, fromiccpcs(icchdr.colorspc)); diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-variant2.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-variant2.diff deleted file mode 100644 index 243300dd70e..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-8137-variant2.diff +++ /dev/null @@ -1,45 +0,0 @@ -From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967283&action=diff - ---- jasper-1.900.1.orig/src/libjasper/base/jas_icc.c 2014-12-11 14:06:44.000000000 +0100 -+++ jasper-1.900.1/src/libjasper/base/jas_icc.c 2014-12-11 15:16:37.971272386 +0100 -@@ -1009,7 +1009,6 @@ static int jas_icccurv_input(jas_iccattr - return 0; - - error: -- jas_icccurv_destroy(attrval); - return -1; - } - -@@ -1127,7 +1126,6 @@ static int jas_icctxtdesc_input(jas_icca - #endif - return 0; - error: -- jas_icctxtdesc_destroy(attrval); - return -1; - } - -@@ -1206,8 +1204,6 @@ static int jas_icctxt_input(jas_iccattrv - goto error; - return 0; - error: -- if (txt->string) -- jas_free(txt->string); - return -1; - } - -@@ -1328,7 +1324,6 @@ static int jas_icclut8_input(jas_iccattr - goto error; - return 0; - error: -- jas_icclut8_destroy(attrval); 
- return -1; - } - -@@ -1497,7 +1492,6 @@ static int jas_icclut16_input(jas_iccatt - goto error; - return 0; - error: -- jas_icclut16_destroy(attrval); - return -1; - } - diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-8138.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-8138.diff deleted file mode 100644 index cbf0899d807..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-8138.diff +++ /dev/null @@ -1,16 +0,0 @@ -From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967280&action=diff - ---- jasper-1.900.1.orig/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:06:44.000000000 +0100 -+++ jasper-1.900.1/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:06:26.000000000 +0100 -@@ -386,6 +386,11 @@ jas_image_t *jp2_decode(jas_stream_t *in - /* Determine the type of each component. */ - if (dec->cdef) { - for (i = 0; i < dec->numchans; ++i) { -+ /* Is the channel number reasonable? */ -+ if (dec->cdef->data.cdef.ents[i].channo >= dec->numchans) { -+ jas_eprintf("error: invalid channel number in CDEF box\n"); -+ goto error; -+ } - jas_image_setcmpttype(dec->image, - dec->chantocmptlut[dec->cdef->data.cdef.ents[i].channo], - jp2_getct(jas_image_clrspc(dec->image), diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-8157.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-8157.diff deleted file mode 100644 index ebfc1b2d0f2..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-8157.diff +++ /dev/null @@ -1,12 +0,0 @@ -diff -up jasper-1.900.1/src/libjasper/jpc/jpc_dec.c.CVE-2014-8157 jasper-1.900.1/src/libjasper/jpc/jpc_dec.c ---- jasper-1.900.1/src/libjasper/jpc/jpc_dec.c.CVE-2014-8157 2015-01-19 16:59:36.000000000 +0100 -+++ jasper-1.900.1/src/libjasper/jpc/jpc_dec.c 2015-01-19 17:07:41.609863268 +0100 -@@ -489,7 +489,7 @@ static int jpc_dec_process_sot(jpc_dec_t - dec->curtileendoff = 0; - } - -- if (JAS_CAST(int, sot->tileno) > dec->numtiles) { -+ if (JAS_CAST(int, sot->tileno) >= dec->numtiles) { - jas_eprintf("invalid tile number in SOT marker segment\n"); - return -1; - } diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-8158.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-8158.diff deleted file mode 100644 index ce9e4b497f3..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-8158.diff +++ /dev/null @@ -1,329 +0,0 @@ -diff -up jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c.CVE-2014-8158 jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c ---- jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c.CVE-2014-8158 2015-01-19 17:25:28.730195502 +0100 -+++ jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c 2015-01-19 17:27:20.214663127 +0100 -@@ -306,11 +306,7 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in - { - - int bufsize = JPC_CEILDIVPOW2(numcols, 1); --#if !defined(HAVE_VLA) - jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE]; --#else -- jpc_fix_t splitbuf[bufsize]; --#endif - jpc_fix_t *buf = splitbuf; - register jpc_fix_t *srcptr; - register jpc_fix_t *dstptr; -@@ -318,7 +314,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in - register int m; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Get a buffer. */ - if (bufsize > QMFB_SPLITBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -326,7 +321,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in - abort(); - } - } --#endif - - if (numcols >= 2) { - hstartcol = (numcols + 1 - parity) >> 1; -@@ -360,12 +354,10 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in - } - } - --#if !defined(HAVE_VLA) - /* If the split buffer was allocated on the heap, free this memory. 
*/ - if (buf != splitbuf) { - jas_free(buf); - } --#endif - - } - -@@ -374,11 +366,7 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE]; --#else -- jpc_fix_t splitbuf[bufsize]; --#endif - jpc_fix_t *buf = splitbuf; - register jpc_fix_t *srcptr; - register jpc_fix_t *dstptr; -@@ -386,7 +374,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in - register int m; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Get a buffer. */ - if (bufsize > QMFB_SPLITBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -394,7 +381,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in - abort(); - } - } --#endif - - if (numrows >= 2) { - hstartcol = (numrows + 1 - parity) >> 1; -@@ -428,12 +414,10 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in - } - } - --#if !defined(HAVE_VLA) - /* If the split buffer was allocated on the heap, free this memory. */ - if (buf != splitbuf) { - jas_free(buf); - } --#endif - - } - -@@ -442,11 +426,7 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a, - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE]; --#else -- jpc_fix_t splitbuf[bufsize * JPC_QMFB_COLGRPSIZE]; --#endif - jpc_fix_t *buf = splitbuf; - jpc_fix_t *srcptr; - jpc_fix_t *dstptr; -@@ -457,7 +437,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a, - int m; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Get a buffer. */ - if (bufsize > QMFB_SPLITBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -465,7 +444,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a, - abort(); - } - } --#endif - - if (numrows >= 2) { - hstartcol = (numrows + 1 - parity) >> 1; -@@ -517,12 +495,10 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a, - } - } - --#if !defined(HAVE_VLA) - /* If the split buffer was allocated on the heap, free this memory. */ - if (buf != splitbuf) { - jas_free(buf); - } --#endif - - } - -@@ -531,11 +507,7 @@ void jpc_qmfb_split_colres(jpc_fix_t *a, - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE]; --#else -- jpc_fix_t splitbuf[bufsize * numcols]; --#endif - jpc_fix_t *buf = splitbuf; - jpc_fix_t *srcptr; - jpc_fix_t *dstptr; -@@ -546,7 +518,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a, - int m; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Get a buffer. */ - if (bufsize > QMFB_SPLITBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -554,7 +525,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a, - abort(); - } - } --#endif - - if (numrows >= 2) { - hstartcol = (numrows + 1 - parity) >> 1; -@@ -606,12 +576,10 @@ void jpc_qmfb_split_colres(jpc_fix_t *a, - } - } - --#if !defined(HAVE_VLA) - /* If the split buffer was allocated on the heap, free this memory. */ - if (buf != splitbuf) { - jas_free(buf); - } --#endif - - } - -@@ -619,18 +587,13 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int - { - - int bufsize = JPC_CEILDIVPOW2(numcols, 1); --#if !defined(HAVE_VLA) - jpc_fix_t joinbuf[QMFB_JOINBUFSIZE]; --#else -- jpc_fix_t joinbuf[bufsize]; --#endif - jpc_fix_t *buf = joinbuf; - register jpc_fix_t *srcptr; - register jpc_fix_t *dstptr; - register int n; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Allocate memory for the join buffer from the heap. 
*/ - if (bufsize > QMFB_JOINBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -638,7 +601,6 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int - abort(); - } - } --#endif - - hstartcol = (numcols + 1 - parity) >> 1; - -@@ -670,12 +632,10 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int - ++srcptr; - } - --#if !defined(HAVE_VLA) - /* If the join buffer was allocated on the heap, free this memory. */ - if (buf != joinbuf) { - jas_free(buf); - } --#endif - - } - -@@ -684,18 +644,13 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t joinbuf[QMFB_JOINBUFSIZE]; --#else -- jpc_fix_t joinbuf[bufsize]; --#endif - jpc_fix_t *buf = joinbuf; - register jpc_fix_t *srcptr; - register jpc_fix_t *dstptr; - register int n; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Allocate memory for the join buffer from the heap. */ - if (bufsize > QMFB_JOINBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) { -@@ -703,7 +658,6 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int - abort(); - } - } --#endif - - hstartcol = (numrows + 1 - parity) >> 1; - -@@ -735,12 +689,10 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int - ++srcptr; - } - --#if !defined(HAVE_VLA) - /* If the join buffer was allocated on the heap, free this memory. */ - if (buf != joinbuf) { - jas_free(buf); - } --#endif - - } - -@@ -749,11 +701,7 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a, - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE]; --#else -- jpc_fix_t joinbuf[bufsize * JPC_QMFB_COLGRPSIZE]; --#endif - jpc_fix_t *buf = joinbuf; - jpc_fix_t *srcptr; - jpc_fix_t *dstptr; -@@ -763,7 +711,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a, - register int i; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Allocate memory for the join buffer from the heap. */ - if (bufsize > QMFB_JOINBUFSIZE) { - if (!(buf = jas_alloc2(bufsize, JPC_QMFB_COLGRPSIZE * sizeof(jpc_fix_t)))) { -@@ -771,7 +718,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a, - abort(); - } - } --#endif - - hstartcol = (numrows + 1 - parity) >> 1; - -@@ -821,12 +767,10 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a, - srcptr += JPC_QMFB_COLGRPSIZE; - } - --#if !defined(HAVE_VLA) - /* If the join buffer was allocated on the heap, free this memory. */ - if (buf != joinbuf) { - jas_free(buf); - } --#endif - - } - -@@ -835,11 +779,7 @@ void jpc_qmfb_join_colres(jpc_fix_t *a, - { - - int bufsize = JPC_CEILDIVPOW2(numrows, 1); --#if !defined(HAVE_VLA) - jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE]; --#else -- jpc_fix_t joinbuf[bufsize * numcols]; --#endif - jpc_fix_t *buf = joinbuf; - jpc_fix_t *srcptr; - jpc_fix_t *dstptr; -@@ -849,7 +789,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a, - register int i; - int hstartcol; - --#if !defined(HAVE_VLA) - /* Allocate memory for the join buffer from the heap. */ - if (bufsize > QMFB_JOINBUFSIZE) { - if (!(buf = jas_alloc3(bufsize, numcols, sizeof(jpc_fix_t)))) { -@@ -857,7 +796,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a, - abort(); - } - } --#endif - - hstartcol = (numrows + 1 - parity) >> 1; - -@@ -907,12 +845,10 @@ void jpc_qmfb_join_colres(jpc_fix_t *a, - srcptr += numcols; - } - --#if !defined(HAVE_VLA) - /* If the join buffer was allocated on the heap, free this memory. 
*/ - if (buf != joinbuf) { - jas_free(buf); - } --#endif - - } - diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2014-9029.diff b/pkgs/development/libraries/jasper/jasper-CVE-2014-9029.diff deleted file mode 100644 index 01db7f03cdf..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2014-9029.diff +++ /dev/null @@ -1,31 +0,0 @@ -From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=961994&action=diff - ---- jasper-1.900.1.orig/src/libjasper/jpc/jpc_dec.c 2014-11-27 12:45:44.000000000 +0100 -+++ jasper-1.900.1/src/libjasper/jpc/jpc_dec.c 2014-11-27 12:44:58.000000000 +0100 -@@ -1281,7 +1281,7 @@ static int jpc_dec_process_coc(jpc_dec_t - jpc_coc_t *coc = &ms->parms.coc; - jpc_dec_tile_t *tile; - -- if (JAS_CAST(int, coc->compno) > dec->numcomps) { -+ if (JAS_CAST(int, coc->compno) >= dec->numcomps) { - jas_eprintf("invalid component number in COC marker segment\n"); - return -1; - } -@@ -1307,7 +1307,7 @@ static int jpc_dec_process_rgn(jpc_dec_t - jpc_rgn_t *rgn = &ms->parms.rgn; - jpc_dec_tile_t *tile; - -- if (JAS_CAST(int, rgn->compno) > dec->numcomps) { -+ if (JAS_CAST(int, rgn->compno) >= dec->numcomps) { - jas_eprintf("invalid component number in RGN marker segment\n"); - return -1; - } -@@ -1356,7 +1356,7 @@ static int jpc_dec_process_qcc(jpc_dec_t - jpc_qcc_t *qcc = &ms->parms.qcc; - jpc_dec_tile_t *tile; - -- if (JAS_CAST(int, qcc->compno) > dec->numcomps) { -+ if (JAS_CAST(int, qcc->compno) >= dec->numcomps) { - jas_eprintf("invalid component number in QCC marker segment\n"); - return -1; - } diff --git a/pkgs/development/libraries/jasper/jasper-CVE-2016-1867.diff b/pkgs/development/libraries/jasper/jasper-CVE-2016-1867.diff deleted file mode 100644 index b2dce8d8e70..00000000000 --- a/pkgs/development/libraries/jasper/jasper-CVE-2016-1867.diff +++ /dev/null @@ -1,11 +0,0 @@ ---- jasper-1.900.1/src/libjasper/jpc/jpc_t2cod.c 2007-01-19 22:43:07.000000000 +0100 -+++ jasper-1.900.1/src/libjasper/jpc/jpc_t2cod.c 2016-01-14 14:22:24.569056412 +0100 -@@ -429,7 +429,7 @@ - } - - for (pi->compno = pchg->compnostart, pi->picomp = -- &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend); ++pi->compno, -+ &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend) && pi->compno < pi->numcomps; ++pi->compno, - ++pi->picomp) { - pirlvl = pi->picomp->pirlvls; - pi->xstep = pi->picomp->hsamp * (1 << (pirlvl->prcwidthexpn + \ No newline at end of file diff --git a/pkgs/development/libraries/java/jetty-gwt/default.nix b/pkgs/development/libraries/java/jetty-gwt/default.nix deleted file mode 100644 index 03433048270..00000000000 --- a/pkgs/development/libraries/java/jetty-gwt/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -{stdenv, fetchurl}: - -stdenv.mkDerivation { - name = "jetty-gwt-6.1.14"; - src = fetchurl { - url = http://repository.codehaus.org/org/mortbay/jetty/jetty-gwt/6.1.14/jetty-gwt-6.1.14.jar; - sha256 = "17x8ss75rx9xjn93rq861mdn9d6gw87rbrf24blawa6ahhb56ppf"; - }; - buildCommand = '' - mkdir -p $out/share/java - cp $src $out/share/java/$name.jar - ''; -} diff --git a/pkgs/development/libraries/java/jetty-util/default.nix b/pkgs/development/libraries/java/jetty-util/default.nix deleted file mode 100644 index 349339aad44..00000000000 --- a/pkgs/development/libraries/java/jetty-util/default.nix +++ /dev/null @@ -1,13 +0,0 @@ -{stdenv, fetchurl}: - -stdenv.mkDerivation { - name = "jetty-util-6.1.16"; - src = fetchurl { - url = http://repository.codehaus.org/org/mortbay/jetty/jetty-util/6.1.16/jetty-util-6.1.16.jar; - sha256 = 
"1ld94lb5dk7y6sjg1rq8zdk97wiy56ik5vbgy7yjj4f6rz5pxbyq"; - }; - buildCommand = '' - mkdir -p $out/share/java - cp $src $out/share/java/$name.jar - ''; -} diff --git a/pkgs/development/libraries/java/saxon/default.nix b/pkgs/development/libraries/java/saxon/default.nix index 464776569cb..1677376230b 100644 --- a/pkgs/development/libraries/java/saxon/default.nix +++ b/pkgs/development/libraries/java/saxon/default.nix @@ -1,4 +1,4 @@ -{stdenv, fetchurl, unzip}: +{ stdenv, fetchurl, unzip }: stdenv.mkDerivation { name = "saxon-6.5.3"; @@ -8,8 +8,13 @@ stdenv.mkDerivation { md5 = "7b8c7c187473c04d2abdb40d8ddab5c6"; }; - inherit unzip; - buildInputs = [unzip]; + nativeBuildInputs = [ unzip ]; + + # still leaving in root as well, in case someone is relying on that + preFixup = '' + mkdir -p "$out/share/java" + cp -s "$out"/*.jar "$out/share/java/" + ''; meta = { platforms = stdenv.lib.platforms.unix; diff --git a/pkgs/development/libraries/java/saxon/unzip-builder.sh b/pkgs/development/libraries/java/saxon/unzip-builder.sh index 48b3d4509d0..8ac35568f0b 100755 --- a/pkgs/development/libraries/java/saxon/unzip-builder.sh +++ b/pkgs/development/libraries/java/saxon/unzip-builder.sh @@ -1,3 +1,6 @@ source $stdenv/setup unzip $src -d $out + +fixupPhase + diff --git a/pkgs/development/libraries/kde-frameworks/kimageformats.nix b/pkgs/development/libraries/kde-frameworks/kimageformats.nix index f05da98f553..631cac4217c 100644 --- a/pkgs/development/libraries/kde-frameworks/kimageformats.nix +++ b/pkgs/development/libraries/kde-frameworks/kimageformats.nix @@ -1,11 +1,14 @@ -{ kdeFramework, lib -, ecm -, ilmbase +{ + kdeFramework, lib, + ecm, + ilmbase, karchive }: kdeFramework { name = "kimageformats"; meta = { maintainers = [ lib.maintainers.ttuegel ]; }; nativeBuildInputs = [ ecm ]; + buildInputs = [ ilmbase ]; + propagatedBuildInputs = [ karchive ]; NIX_CFLAGS_COMPILE = "-I${ilmbase.dev}/include/OpenEXR"; } diff --git a/pkgs/development/libraries/libgeotiff/default.nix b/pkgs/development/libraries/libgeotiff/default.nix index 01dd6b0d49e..496306c254d 100644 --- a/pkgs/development/libraries/libgeotiff/default.nix +++ b/pkgs/development/libraries/libgeotiff/default.nix @@ -1,14 +1,19 @@ -{ stdenv, fetchurl, libtiff }: +{ stdenv, fetchurl, libtiff, libjpeg, proj, zlib}: -stdenv.mkDerivation { - name = "libgeotiff-1.2.5"; +stdenv.mkDerivation rec { + version = "1.4.2"; + name = "libgeotiff-${version}"; src = fetchurl { - url = http://download.osgeo.org/geotiff/libgeotiff/libgeotiff-1.2.5.tar.gz; - sha256 = "0z2yx77pm0zs81hc0b4lwzdd5s0rxcbylnscgq80b649src1fyzj"; + url = "http://download.osgeo.org/geotiff/libgeotiff/${name}.tar.gz"; + sha256 = "0vjy3bwfhljjx66p9w999i4mdhsf7vjshx29yc3pn5livf5091xd"; }; - buildInputs = [ libtiff ]; + configureFlags = [ + "--with-jpeg=${libjpeg.dev}" + "--with-zlib=${zlib.dev}" + ]; + buildInputs = [ libtiff proj ]; hardeningDisable = [ "format" ]; diff --git a/pkgs/development/libraries/liblastfmSF/default.nix b/pkgs/development/libraries/liblastfmSF/default.nix index 99f94bb8225..efe93ec02df 100644 --- a/pkgs/development/libraries/liblastfmSF/default.nix +++ b/pkgs/development/libraries/liblastfmSF/default.nix @@ -1,15 +1,15 @@ { stdenv, fetchurl, pkgconfig, curl, openssl }: stdenv.mkDerivation rec { - name = "liblastfm-SF-0.3.2"; + name = "liblastfm-SF-0.5"; buildInputs = [ pkgconfig ]; propagatedBuildInputs = [ curl openssl ]; src = fetchurl { - url = "mirror://sourceforge/liblastfm/liblastfm-0.3.2.tar.gz"; - sha256 = 
"1hk62giysi96h6cyjyph69nlv1v4vw45w3sx7i2m89i9aysd6qp7"; + url = "mirror://sourceforge/liblastfm/libclastfm-0.5.tar.gz"; + sha256 = "0hpfflvfx6r4vvsbvdc564gkby8kr07p8ma7hgpxiy2pnlbpian9"; }; meta = { diff --git a/pkgs/development/libraries/minmay/default.nix b/pkgs/development/libraries/minmay/default.nix deleted file mode 100644 index 4518b4dd760..00000000000 --- a/pkgs/development/libraries/minmay/default.nix +++ /dev/null @@ -1,19 +0,0 @@ -{ stdenv, fetchurl, cmake, openssl }: - -stdenv.mkDerivation rec { - name = "minmay-${version}"; - version = "1.0.0"; - - src = fetchurl { - url = "https://github.com/mazhe/minmay/archive/1.0.0.tar.gz"; - sha256 = "1amycxvhbd0lv6j5zsvxiwrx29jvndcy856j3b3bisys24h95zw2"; - }; - - buildInputs = [ cmake openssl ]; - - meta = { - homepage = "https://github.com/mazhe/minmay"; - license = stdenv.lib.licenses.lgpl21Plus; - description = "An XMPP library (forked from the iksemel project)"; - }; -} diff --git a/pkgs/development/libraries/nvidia-texture-tools/default.nix b/pkgs/development/libraries/nvidia-texture-tools/default.nix index f35d363e575..a010ae9bd1a 100644 --- a/pkgs/development/libraries/nvidia-texture-tools/default.nix +++ b/pkgs/development/libraries/nvidia-texture-tools/default.nix @@ -1,43 +1,41 @@ -{ stdenv, fetchsvn, cmake, libpng, ilmbase, libtiff, zlib, libjpeg -, mesa, libX11 -}: +{ stdenv, fetchFromGitHub, cmake }: stdenv.mkDerivation rec { - # No support yet for cg, cuda, glew, glut, openexr. + name = "nvidia-texture-tools-${version}"; + version = "2.1.0"; - name = "nvidia-texture-tools-1388"; - - src = fetchsvn { - url = "http://nvidia-texture-tools.googlecode.com/svn/trunk"; - rev = "1388"; - sha256 = "0pwxqx5l16nqidzm6mwd3rd4gbbknkz6q8cxnvf7sggjpbcvm2d6"; + src = fetchFromGitHub { + owner = "castano"; + repo = "nvidia-texture-tools"; + rev = version; + sha256 = "0p8ja0k323nkgm07z0qlslg6743vimy9rf3wad2968az0vwzjjyx"; }; - buildInputs = [ cmake libpng ilmbase libtiff zlib libjpeg mesa libX11 ]; + nativeBuildInputs = [ cmake ]; - hardeningDisable = [ "format" ]; - - patchPhase = '' - # Fix build due to missing dependnecies. - echo 'target_link_libraries(bc7 nvmath)' >> src/nvtt/bc7/CMakeLists.txt - echo 'target_link_libraries(bc6h nvmath)' >> src/nvtt/bc6h/CMakeLists.txt + outputs = [ "out" "dev" "lib" ]; + postPatch = '' # Make a recently added pure virtual function just virtual, # to keep compatibility. sed -i 's/virtual void endImage() = 0;/virtual void endImage() {}/' src/nvtt/nvtt.h - - # Fix building shared libraries. - sed -i 's/SET(NVIMAGE_SHARED TRUE)/SET(NVIMAGE_SHARED TRUE)\nSET(NVTHREAD_SHARED TRUE)/' CMakeLists.txt ''; cmakeFlags = [ "-DNVTT_SHARED=TRUE" ]; - meta = { + postInstall = '' + moveToOutput include "$dev" + moveToOutput lib "$lib" + ''; + + enableParallelBuilding = true; + + meta = with stdenv.lib; { description = "A set of cuda-enabled texture tools and compressors"; - homepage = "http://developer.nvidia.com/object/texture_tools.html"; - license = stdenv.lib.licenses.mit; - platforms = stdenv.lib.platforms.linux; + homepage = "https://github.com/castano/nvidia-texture-tools"; + license = licenses.mit; + platforms = platforms.linux; }; } diff --git a/pkgs/development/libraries/openjpeg/2.0.nix b/pkgs/development/libraries/openjpeg/2.0.nix deleted file mode 100644 index dd30b18e97c..00000000000 --- a/pkgs/development/libraries/openjpeg/2.0.nix +++ /dev/null @@ -1,8 +0,0 @@ -{ callPackage, ... 
} @ args: - -callPackage ./generic.nix (args // rec { - version = "2.0.1"; - branch = "2"; - revision = "version.2.0.1"; - sha256 = "03d0r8x66cxri9i20nr9gm1jnkp85yyd8mkrbmawv5nvybd0r7wv"; -}) diff --git a/pkgs/development/libraries/qjson/default.nix b/pkgs/development/libraries/qjson/default.nix index e69ae5f98f7..1d4da00bf4c 100644 --- a/pkgs/development/libraries/qjson/default.nix +++ b/pkgs/development/libraries/qjson/default.nix @@ -1,11 +1,14 @@ -{ stdenv, fetchurl, cmake, qt4 }: +{ stdenv, fetchFromGitHub, cmake, qt4 }: stdenv.mkDerivation rec { - name = "qjson-0.8.1"; + version = "0.8.1"; + name = "qjson-${version}"; - src = fetchurl { - url = "mirror://sourceforge/qjson/${name}.tar.bz2"; - sha256 = "1n8lr2ph08yhcgimf4q1pnkd4z15v895bsf3m68ljz14aswvakfd"; + src = fetchFromGitHub { + owner = "flavio"; + repo = "qjson"; + rev = "${version}"; + sha256 = "1rb3ydrhyd4bczqzfv0kqpi2mx4hlpq1k8jvnwpcmvyaypqpqg59"; }; buildInputs = [ cmake qt4 ]; diff --git a/pkgs/development/libraries/qt-5/5.6/default.nix b/pkgs/development/libraries/qt-5/5.6/default.nix index 2112b29c729..0e40a7ac96d 100644 --- a/pkgs/development/libraries/qt-5/5.6/default.nix +++ b/pkgs/development/libraries/qt-5/5.6/default.nix @@ -94,7 +94,7 @@ let qttranslations = callPackage ./qttranslations.nix {}; qtwayland = callPackage ./qtwayland.nix {}; qtwebchannel = callPackage ./qtwebchannel.nix {}; - qtwebengine = callPackage ./qtwebengine.nix {}; + qtwebengine = callPackage ./qtwebengine {}; qtwebkit = callPackage ./qtwebkit {}; qtwebsockets = callPackage ./qtwebsockets.nix {}; /* qtwinextras = not packaged */ diff --git a/pkgs/development/libraries/qt-5/5.6/qtwebengine/chromium-clang-update-py.patch b/pkgs/development/libraries/qt-5/5.6/qtwebengine/chromium-clang-update-py.patch new file mode 100644 index 00000000000..65a604d2534 --- /dev/null +++ b/pkgs/development/libraries/qt-5/5.6/qtwebengine/chromium-clang-update-py.patch @@ -0,0 +1,874 @@ +--- a/src/3rdparty/chromium/tools/clang/scripts/update.py 2016-05-26 04:58:54.000000000 -0800 ++++ b/src/3rdparty/chromium/tools/clang/scripts/update.py 2016-11-04 08:35:34.956154012 -0800 +@@ -3,12 +3,12 @@ + # Use of this source code is governed by a BSD-style license that can be + # found in the LICENSE file. + +-"""Windows can't run .sh files, so this is a Python implementation of +-update.sh. This script should replace update.sh on all platforms eventually.""" ++"""This script is used to download prebuilt clang binaries. ++ ++It is also used by package.py to build the prebuilt clang binaries.""" + + import argparse +-import contextlib +-import cStringIO ++import distutils.spawn + import glob + import os + import pipes +@@ -18,6 +18,7 @@ + import stat + import sys + import tarfile ++import tempfile + import time + import urllib2 + import zipfile +@@ -25,19 +26,16 @@ + # Do NOT CHANGE this if you don't know what you're doing -- see + # https://code.google.com/p/chromium/wiki/UpdatingClang + # Reverting problematic clang rolls is safe, though. +-# Note: this revision is only used for Windows. Other platforms use update.sh. +-# TODO(thakis): Use the same revision on Windows and non-Windows. +-# TODO(thakis): Remove update.sh, use update.py everywhere. +-LLVM_WIN_REVISION = '239674' ++CLANG_REVISION = '239674' + + use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ + if use_head_revision: +- LLVM_WIN_REVISION = 'HEAD' ++ CLANG_REVISION = 'HEAD' + + # This is incremented when pushing a new build of Clang at the same revision. 
+ CLANG_SUB_REVISION=1 + +-PACKAGE_VERSION = "%s-%s" % (LLVM_WIN_REVISION, CLANG_SUB_REVISION) ++PACKAGE_VERSION = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION) + + # Path constants. (All of these should be absolute paths.) + THIS_DIR = os.path.abspath(os.path.dirname(__file__)) +@@ -50,17 +48,26 @@ + CHROME_TOOLS_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'chrometools') + LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build', + 'Release+Asserts') +-COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, '32bit-compiler-rt') ++COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, 'compiler-rt') + CLANG_DIR = os.path.join(LLVM_DIR, 'tools', 'clang') + LLD_DIR = os.path.join(LLVM_DIR, 'tools', 'lld') +-COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt') ++# compiler-rt is built as part of the regular LLVM build on Windows to get ++# the 64-bit runtime, and out-of-tree elsewhere. ++# TODO(thakis): Try to unify this. ++if sys.platform == 'win32': ++ COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt') ++else: ++ COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'compiler-rt') + LIBCXX_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxx') + LIBCXXABI_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxxabi') + LLVM_BUILD_TOOLS_DIR = os.path.abspath( + os.path.join(LLVM_DIR, '..', 'llvm-build-tools')) +-STAMP_FILE = os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision') ++STAMP_FILE = os.path.normpath( ++ os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision')) + BINUTILS_DIR = os.path.join(THIRD_PARTY_DIR, 'binutils') +-VERSION = '3.7.0' ++VERSION = '3.8.0' ++ANDROID_NDK_DIR = os.path.join( ++ CHROMIUM_DIR, 'third_party', 'android_tools', 'ndk') + + # URL for pre-built binaries. + CDS_URL = 'https://commondatastorage.googleapis.com/chromium-browser-clang' +@@ -74,40 +81,75 @@ + """Download url into output_file.""" + CHUNK_SIZE = 4096 + TOTAL_DOTS = 10 +- sys.stdout.write('Downloading %s ' % url) +- sys.stdout.flush() +- response = urllib2.urlopen(url) +- total_size = int(response.info().getheader('Content-Length').strip()) +- bytes_done = 0 +- dots_printed = 0 ++ num_retries = 3 ++ retry_wait_s = 5 # Doubled at each retry. ++ + while True: +- chunk = response.read(CHUNK_SIZE) +- if not chunk: +- break +- output_file.write(chunk) +- bytes_done += len(chunk) +- num_dots = TOTAL_DOTS * bytes_done / total_size +- sys.stdout.write('.' * (num_dots - dots_printed)) +- sys.stdout.flush() +- dots_printed = num_dots +- print ' Done.' ++ try: ++ sys.stdout.write('Downloading %s ' % url) ++ sys.stdout.flush() ++ response = urllib2.urlopen(url) ++ total_size = int(response.info().getheader('Content-Length').strip()) ++ bytes_done = 0 ++ dots_printed = 0 ++ while True: ++ chunk = response.read(CHUNK_SIZE) ++ if not chunk: ++ break ++ output_file.write(chunk) ++ bytes_done += len(chunk) ++ num_dots = TOTAL_DOTS * bytes_done / total_size ++ sys.stdout.write('.' * (num_dots - dots_printed)) ++ sys.stdout.flush() ++ dots_printed = num_dots ++ if bytes_done != total_size: ++ raise urllib2.URLError("only got %d of %d bytes" % ++ (bytes_done, total_size)) ++ print ' Done.' ++ return ++ except urllib2.URLError as e: ++ sys.stdout.write('\n') ++ print e ++ if num_retries == 0 or isinstance(e, urllib2.HTTPError) and e.code == 404: ++ raise e ++ num_retries -= 1 ++ print 'Retrying in %d s ...' 
% retry_wait_s ++ time.sleep(retry_wait_s) ++ retry_wait_s *= 2 ++ ++ ++def EnsureDirExists(path): ++ if not os.path.exists(path): ++ print "Creating directory %s" % path ++ os.makedirs(path) ++ ++ ++def DownloadAndUnpack(url, output_dir): ++ with tempfile.TemporaryFile() as f: ++ DownloadUrl(url, f) ++ f.seek(0) ++ EnsureDirExists(output_dir) ++ if url.endswith('.zip'): ++ zipfile.ZipFile(f).extractall(path=output_dir) ++ else: ++ tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir) + + + def ReadStampFile(): + """Return the contents of the stamp file, or '' if it doesn't exist.""" + try: + with open(STAMP_FILE, 'r') as f: +- return f.read() ++ return f.read().rstrip() + except IOError: + return '' + + + def WriteStampFile(s): + """Write s to the stamp file.""" +- if not os.path.exists(os.path.dirname(STAMP_FILE)): +- os.makedirs(os.path.dirname(STAMP_FILE)) ++ EnsureDirExists(os.path.dirname(STAMP_FILE)) + with open(STAMP_FILE, 'w') as f: + f.write(s) ++ f.write('\n') + + + def GetSvnRevision(svn_repo): +@@ -129,6 +171,13 @@ + shutil.rmtree(dir, onerror=ChmodAndRetry) + + ++def RmCmakeCache(dir): ++ """Delete CMakeCache.txt files under dir recursively.""" ++ for dirpath, _, files in os.walk(dir): ++ if 'CMakeCache.txt' in files: ++ os.remove(os.path.join(dirpath, 'CMakeCache.txt')) ++ ++ + def RunCommand(command, msvc_arch=None, env=None, fail_hard=True): + """Run command and return success (True) or failure; or if fail_hard is + True, exit on failure. If msvc_arch is set, runs the command in a +@@ -170,8 +219,8 @@ + def CopyDirectoryContents(src, dst, filename_filter=None): + """Copy the files from directory src to dst + with an optional filename filter.""" +- if not os.path.exists(dst): +- os.makedirs(dst) ++ dst = os.path.realpath(dst) # realpath() in case dst ends in /.. ++ EnsureDirExists(dst) + for root, _, files in os.walk(src): + for f in files: + if filename_filter and not re.match(filename_filter, f): +@@ -181,9 +230,9 @@ + + def Checkout(name, url, dir): + """Checkout the SVN module at url into dir. 
Use name for the log message.""" +- print "Checking out %s r%s into '%s'" % (name, LLVM_WIN_REVISION, dir) ++ print "Checking out %s r%s into '%s'" % (name, CLANG_REVISION, dir) + +- command = ['svn', 'checkout', '--force', url + '@' + LLVM_WIN_REVISION, dir] ++ command = ['svn', 'checkout', '--force', url + '@' + CLANG_REVISION, dir] + if RunCommand(command, fail_hard=False): + return + +@@ -195,120 +244,9 @@ + RunCommand(command) + + +-def RevertPreviouslyPatchedFiles(): +- print 'Reverting previously patched files' +- files = [ +- '%(clang)s/test/Index/crash-recovery-modules.m', +- '%(clang)s/unittests/libclang/LibclangTest.cpp', +- '%(compiler_rt)s/lib/asan/asan_rtl.cc', +- '%(compiler_rt)s/test/asan/TestCases/Linux/new_array_cookie_test.cc', +- '%(llvm)s/test/DebugInfo/gmlt.ll', +- '%(llvm)s/lib/CodeGen/SpillPlacement.cpp', +- '%(llvm)s/lib/CodeGen/SpillPlacement.h', +- '%(llvm)s/lib/Transforms/Instrumentation/MemorySanitizer.cpp', +- '%(clang)s/test/Driver/env.c', +- '%(clang)s/lib/Frontend/InitPreprocessor.cpp', +- '%(clang)s/test/Frontend/exceptions.c', +- '%(clang)s/test/Preprocessor/predefined-exceptions.m', +- '%(llvm)s/test/Bindings/Go/go.test', +- '%(clang)s/lib/Parse/ParseExpr.cpp', +- '%(clang)s/lib/Parse/ParseTemplate.cpp', +- '%(clang)s/lib/Sema/SemaDeclCXX.cpp', +- '%(clang)s/lib/Sema/SemaExprCXX.cpp', +- '%(clang)s/test/SemaCXX/default2.cpp', +- '%(clang)s/test/SemaCXX/typo-correction-delayed.cpp', +- '%(compiler_rt)s/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc', +- '%(compiler_rt)s/test/tsan/signal_segv_handler.cc', +- '%(compiler_rt)s/lib/sanitizer_common/sanitizer_coverage_libcdep.cc', +- '%(compiler_rt)s/cmake/config-ix.cmake', +- '%(compiler_rt)s/CMakeLists.txt', +- '%(compiler_rt)s/lib/ubsan/ubsan_platform.h', +- ] +- for f in files: +- f = f % { +- 'clang': CLANG_DIR, +- 'compiler_rt': COMPILER_RT_DIR, +- 'llvm': LLVM_DIR, +- } +- if os.path.exists(f): +- os.remove(f) # For unversioned files. +- RunCommand(['svn', 'revert', f]) +- +- +-def ApplyLocalPatches(): +- # There's no patch program on Windows by default. We don't need patches on +- # Windows yet, and maybe this not working on Windows will motivate us to +- # remove patches over time. +- assert sys.platform != 'win32' +- +- # Apply patch for tests failing with --disable-pthreads (llvm.org/PR11974) +- clang_patches = [ r"""\ +---- test/Index/crash-recovery-modules.m (revision 202554) +-+++ test/Index/crash-recovery-modules.m (working copy) +-@@ -12,6 +12,8 @@ +- +- // REQUIRES: crash-recovery +- // REQUIRES: shell +-+// XFAIL: * +-+// (PR11974) +- +- @import Crash; +-""", r"""\ +---- unittests/libclang/LibclangTest.cpp (revision 215949) +-+++ unittests/libclang/LibclangTest.cpp (working copy) +-@@ -431,7 +431,7 @@ +- EXPECT_EQ(0U, clang_getNumDiagnostics(ClangTU)); +- } +- +--TEST_F(LibclangReparseTest, ReparseWithModule) { +-+TEST_F(LibclangReparseTest, DISABLED_ReparseWithModule) { +- const char *HeaderTop = "#ifndef H\n#define H\nstruct Foo { int bar;"; +- const char *HeaderBottom = "\n};\n#endif\n"; +- const char *MFile = "#include \"HeaderFile.h\"\nint main() {" +-""" +- ] +- +- # This Go bindings test doesn't work after bootstrap on Linux, PR21552. 
+- llvm_patches = [ r"""\ +---- test/Bindings/Go/go.test (revision 223109) +-+++ test/Bindings/Go/go.test (working copy) +-@@ -1,3 +1,3 @@ +--; RUN: llvm-go test llvm.org/llvm/bindings/go/llvm +-+; RUN: true +- +- ; REQUIRES: shell +-""" +- ] +- +- # The UBSan run-time, which is now bundled with the ASan run-time, doesn't +- # work on Mac OS X 10.8 (PR23539). +- compiler_rt_patches = [ r"""\ +---- CMakeLists.txt (revision 241602) +-+++ CMakeLists.txt (working copy) +-@@ -305,6 +305,7 @@ +- list(APPEND SANITIZER_COMMON_SUPPORTED_OS iossim) +- endif() +- endif() +-+ set(SANITIZER_MIN_OSX_VERSION "10.7") +- if(SANITIZER_MIN_OSX_VERSION VERSION_LESS "10.7") +- message(FATAL_ERROR "Too old OS X version: ${SANITIZER_MIN_OSX_VERSION}") +- endif() +-""" +- ] +- +- for path, patches in [(LLVM_DIR, llvm_patches), +- (CLANG_DIR, clang_patches), +- (COMPILER_RT_DIR, compiler_rt_patches)]: +- print 'Applying patches in', path +- for patch in patches: +- print patch +- p = subprocess.Popen( ['patch', '-p0', '-d', path], stdin=subprocess.PIPE) +- (stdout, stderr) = p.communicate(input=patch) +- if p.returncode != 0: +- raise RuntimeError('stdout %s, stderr %s' % (stdout, stderr)) +- +- + def DeleteChromeToolsShim(): ++ OLD_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'zzz-chrometools') ++ shutil.rmtree(OLD_SHIM_DIR, ignore_errors=True) + shutil.rmtree(CHROME_TOOLS_SHIM_DIR, ignore_errors=True) + + +@@ -337,6 +275,25 @@ + f.write('endif (CHROMIUM_TOOLS_SRC)\n') + + ++def MaybeDownloadHostGcc(args): ++ """Downloads gcc 4.8.2 if needed and makes sure args.gcc_toolchain is set.""" ++ if not sys.platform.startswith('linux') or args.gcc_toolchain: ++ return ++ ++ if subprocess.check_output(['gcc', '-dumpversion']).rstrip() < '4.7.0': ++ # We need a newer gcc version. ++ gcc_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gcc482precise') ++ if not os.path.exists(gcc_dir): ++ print 'Downloading pre-built GCC 4.8.2...' ++ DownloadAndUnpack( ++ CDS_URL + '/tools/gcc482precise.tgz', LLVM_BUILD_TOOLS_DIR) ++ args.gcc_toolchain = gcc_dir ++ else: ++ # Always set gcc_toolchain; llvm-symbolizer needs the bundled libstdc++. ++ args.gcc_toolchain = \ ++ os.path.dirname(os.path.dirname(distutils.spawn.find_executable('gcc'))) ++ ++ + def AddCMakeToPath(): + """Download CMake and add it to PATH.""" + if sys.platform == 'win32': +@@ -345,20 +302,10 @@ + 'cmake-3.2.2-win32-x86', 'bin') + else: + suffix = 'Darwin' if sys.platform == 'darwin' else 'Linux' +- zip_name = 'cmake310_%s.tgz' % suffix +- cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'cmake310', 'bin') ++ zip_name = 'cmake322_%s.tgz' % suffix ++ cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'cmake322', 'bin') + if not os.path.exists(cmake_dir): +- if not os.path.exists(LLVM_BUILD_TOOLS_DIR): +- os.makedirs(LLVM_BUILD_TOOLS_DIR) +- # The cmake archive is smaller than 20 MB, small enough to keep in memory: +- with contextlib.closing(cStringIO.StringIO()) as f: +- DownloadUrl(CDS_URL + '/tools/' + zip_name, f) +- f.seek(0) +- if zip_name.endswith('.zip'): +- zipfile.ZipFile(f).extractall(path=LLVM_BUILD_TOOLS_DIR) +- else: +- tarfile.open(mode='r:gz', fileobj=f).extractall(path= +- LLVM_BUILD_TOOLS_DIR) ++ DownloadAndUnpack(CDS_URL + '/tools/' + zip_name, LLVM_BUILD_TOOLS_DIR) + os.environ['PATH'] = cmake_dir + os.pathsep + os.environ.get('PATH', '') + + vs_version = None +@@ -383,37 +330,61 @@ + + def UpdateClang(args): + print 'Updating Clang to %s...' % PACKAGE_VERSION +- if ReadStampFile() == PACKAGE_VERSION: +- print 'Already up to date.' 
+- return 0 ++ ++ need_gold_plugin = 'LLVM_DOWNLOAD_GOLD_PLUGIN' in os.environ or ( ++ sys.platform.startswith('linux') and ++ 'buildtype=Official' in os.environ.get('GYP_DEFINES', '') and ++ 'branding=Chrome' in os.environ.get('GYP_DEFINES', '')) ++ ++ if ReadStampFile() == PACKAGE_VERSION and not args.force_local_build: ++ print 'Clang is already up to date.' ++ if not need_gold_plugin or os.path.exists( ++ os.path.join(LLVM_BUILD_DIR, "lib/LLVMgold.so")): ++ return 0 + + # Reset the stamp file in case the build is unsuccessful. + WriteStampFile('') + + if not args.force_local_build: + cds_file = "clang-%s.tgz" % PACKAGE_VERSION +- cds_full_url = CDS_URL + '/Win/' + cds_file ++ if sys.platform == 'win32': ++ cds_full_url = CDS_URL + '/Win/' + cds_file ++ elif sys.platform == 'darwin': ++ cds_full_url = CDS_URL + '/Mac/' + cds_file ++ else: ++ assert sys.platform.startswith('linux') ++ cds_full_url = CDS_URL + '/Linux_x64/' + cds_file + +- # Check if there's a prebuilt binary and if so just fetch that. That's +- # faster, and goma relies on having matching binary hashes on client and +- # server too. +- print 'Trying to download prebuilt clang' +- +- # clang packages are smaller than 50 MB, small enough to keep in memory. +- with contextlib.closing(cStringIO.StringIO()) as f: +- try: +- DownloadUrl(cds_full_url, f) +- f.seek(0) +- tarfile.open(mode='r:gz', fileobj=f).extractall(path=LLVM_BUILD_DIR) +- print 'clang %s unpacked' % PACKAGE_VERSION +- WriteStampFile(PACKAGE_VERSION) +- return 0 +- except urllib2.HTTPError: +- print 'Did not find prebuilt clang %s, building locally' % cds_file ++ print 'Downloading prebuilt clang' ++ if os.path.exists(LLVM_BUILD_DIR): ++ RmTree(LLVM_BUILD_DIR) ++ try: ++ DownloadAndUnpack(cds_full_url, LLVM_BUILD_DIR) ++ print 'clang %s unpacked' % PACKAGE_VERSION ++ # Download the gold plugin if requested to by an environment variable. ++ # This is used by the CFI ClusterFuzz bot, and it's required for official ++ # builds on linux. ++ if need_gold_plugin: ++ RunCommand(['python', CHROMIUM_DIR+'/build/download_gold_plugin.py']) ++ WriteStampFile(PACKAGE_VERSION) ++ return 0 ++ except urllib2.URLError: ++ print 'Failed to download prebuilt clang %s' % cds_file ++ print 'Use --force-local-build if you want to build locally.' ++ print 'Exiting.' ++ return 1 ++ ++ if args.with_android and not os.path.exists(ANDROID_NDK_DIR): ++ print 'Android NDK not found at ' + ANDROID_NDK_DIR ++ print 'The Android NDK is needed to build a Clang whose -fsanitize=address' ++ print 'works on Android. See ' ++ print 'http://code.google.com/p/chromium/wiki/AndroidBuildInstructions' ++ print 'for how to install the NDK, or pass --without-android.' ++ return 1 + ++ MaybeDownloadHostGcc(args) + AddCMakeToPath() + +- RevertPreviouslyPatchedFiles() + DeleteChromeToolsShim() + + Checkout('LLVM', LLVM_REPO_URL + '/llvm/trunk', LLVM_DIR) +@@ -429,10 +400,24 @@ + # into it too (since OS X 10.6 doesn't have libc++abi.dylib either). + Checkout('libcxxabi', LLVM_REPO_URL + '/libcxxabi/trunk', LIBCXXABI_DIR) + +- if args.with_patches and sys.platform != 'win32': +- ApplyLocalPatches() +- + cc, cxx = None, None ++ libstdcpp = None ++ if args.gcc_toolchain: # This option is only used on Linux. ++ # Use the specified gcc installation for building. ++ cc = os.path.join(args.gcc_toolchain, 'bin', 'gcc') ++ cxx = os.path.join(args.gcc_toolchain, 'bin', 'g++') ++ ++ if not os.access(cc, os.X_OK): ++ print 'Invalid --gcc-toolchain: "%s"' % args.gcc_toolchain ++ print '"%s" does not appear to be valid.' 
% cc ++ return 1 ++ ++ # Set LD_LIBRARY_PATH to make auxiliary targets (tablegen, bootstrap ++ # compiler, etc.) find the .so. ++ libstdcpp = subprocess.check_output( ++ [cxx, '-print-file-name=libstdc++.so.6']).rstrip() ++ os.environ['LD_LIBRARY_PATH'] = os.path.dirname(libstdcpp) ++ + cflags = cxxflags = ldflags = [] + + # LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is +@@ -462,8 +447,7 @@ + + if args.bootstrap: + print 'Building bootstrap compiler' +- if not os.path.exists(LLVM_BOOTSTRAP_DIR): +- os.makedirs(LLVM_BOOTSTRAP_DIR) ++ EnsureDirExists(LLVM_BOOTSTRAP_DIR) + os.chdir(LLVM_BOOTSTRAP_DIR) + bootstrap_args = base_cmake_args + [ + '-DLLVM_TARGETS_TO_BUILD=host', +@@ -473,11 +457,16 @@ + ] + if cc is not None: bootstrap_args.append('-DCMAKE_C_COMPILER=' + cc) + if cxx is not None: bootstrap_args.append('-DCMAKE_CXX_COMPILER=' + cxx) ++ RmCmakeCache('.') + RunCommand(['cmake'] + bootstrap_args + [LLVM_DIR], msvc_arch='x64') + RunCommand(['ninja'], msvc_arch='x64') + if args.run_tests: + RunCommand(['ninja', 'check-all'], msvc_arch='x64') + RunCommand(['ninja', 'install'], msvc_arch='x64') ++ if args.gcc_toolchain: ++ # Copy that gcc's stdlibc++.so.6 to the build dir, so the bootstrap ++ # compiler can start. ++ CopyFile(libstdcpp, os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'lib')) + + if sys.platform == 'win32': + cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang-cl.exe') +@@ -489,6 +478,12 @@ + else: + cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang') + cxx = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang++') ++ ++ if args.gcc_toolchain: ++ # Tell the bootstrap compiler to use a specific gcc prefix to search ++ # for standard library headers and shared object files. ++ cflags = ['--gcc-toolchain=' + args.gcc_toolchain] ++ cxxflags = ['--gcc-toolchain=' + args.gcc_toolchain] + print 'Building final compiler' + + if sys.platform == 'darwin': +@@ -543,7 +538,7 @@ + binutils_incdir = os.path.join(BINUTILS_DIR, 'Linux_x64/Release/include') + + # If building at head, define a macro that plugins can use for #ifdefing +- # out code that builds at head, but not at LLVM_WIN_REVISION or vice versa. ++ # out code that builds at head, but not at CLANG_REVISION or vice versa. 
+ if use_head_revision: + cflags += ['-DLLVM_FORCE_HEAD_REVISION'] + cxxflags += ['-DLLVM_FORCE_HEAD_REVISION'] +@@ -555,8 +550,15 @@ + deployment_env = os.environ.copy() + deployment_env['MACOSX_DEPLOYMENT_TARGET'] = deployment_target + +- cmake_args = base_cmake_args + [ ++ cmake_args = [] ++ # TODO(thakis): Unconditionally append this to base_cmake_args instead once ++ # compiler-rt can build with clang-cl on Windows (http://llvm.org/PR23698) ++ cc_args = base_cmake_args if sys.platform != 'win32' else cmake_args ++ if cc is not None: cc_args.append('-DCMAKE_C_COMPILER=' + cc) ++ if cxx is not None: cc_args.append('-DCMAKE_CXX_COMPILER=' + cxx) ++ cmake_args += base_cmake_args + [ + '-DLLVM_BINUTILS_INCDIR=' + binutils_incdir, ++ '-DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly', + '-DCMAKE_C_FLAGS=' + ' '.join(cflags), + '-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags), + '-DCMAKE_EXE_LINKER_FLAGS=' + ' '.join(ldflags), +@@ -565,35 +567,44 @@ + '-DCMAKE_INSTALL_PREFIX=' + LLVM_BUILD_DIR, + '-DCHROMIUM_TOOLS_SRC=%s' % os.path.join(CHROMIUM_DIR, 'tools', 'clang'), + '-DCHROMIUM_TOOLS=%s' % ';'.join(args.tools)] +- # TODO(thakis): Unconditionally append this to base_cmake_args instead once +- # compiler-rt can build with clang-cl on Windows (http://llvm.org/PR23698) +- cc_args = base_cmake_args if sys.platform != 'win32' else cmake_args +- if cc is not None: cc_args.append('-DCMAKE_C_COMPILER=' + cc) +- if cxx is not None: cc_args.append('-DCMAKE_CXX_COMPILER=' + cxx) + +- if not os.path.exists(LLVM_BUILD_DIR): +- os.makedirs(LLVM_BUILD_DIR) ++ EnsureDirExists(LLVM_BUILD_DIR) + os.chdir(LLVM_BUILD_DIR) ++ RmCmakeCache('.') + RunCommand(['cmake'] + cmake_args + [LLVM_DIR], + msvc_arch='x64', env=deployment_env) +- RunCommand(['ninja'], msvc_arch='x64') ++ ++ if args.gcc_toolchain: ++ # Copy in the right stdlibc++.so.6 so clang can start. ++ if not os.path.exists(os.path.join(LLVM_BUILD_DIR, 'lib')): ++ os.mkdir(os.path.join(LLVM_BUILD_DIR, 'lib')) ++ libstdcpp = subprocess.check_output( ++ [cxx] + cxxflags + ['-print-file-name=libstdc++.so.6']).rstrip() ++ CopyFile(libstdcpp, os.path.join(LLVM_BUILD_DIR, 'lib')) ++ ++ # TODO(thakis): Remove "-d explain" once http://crbug.com/569337 is fixed. ++ RunCommand(['ninja', '-d', 'explain'], msvc_arch='x64') + + if args.tools: + # If any Chromium tools were built, install those now. + RunCommand(['ninja', 'cr-install'], msvc_arch='x64') + + if sys.platform == 'darwin': +- CopyFile(os.path.join(LLVM_BUILD_DIR, 'libc++.1.dylib'), ++ CopyFile(os.path.join(libcxxbuild, 'libc++.1.dylib'), + os.path.join(LLVM_BUILD_DIR, 'bin')) + # See http://crbug.com/256342 + RunCommand(['strip', '-x', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')]) + elif sys.platform.startswith('linux'): + RunCommand(['strip', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')]) + +- # Do an x86 build of compiler-rt to get the 32-bit ASan run-time. ++ # Do an out-of-tree build of compiler-rt. ++ # On Windows, this is used to get the 32-bit ASan run-time. + # TODO(hans): Remove once the regular build above produces this. +- if not os.path.exists(COMPILER_RT_BUILD_DIR): +- os.makedirs(COMPILER_RT_BUILD_DIR) ++ # On Mac and Linux, this is used to get the regular 64-bit run-time. ++ # Do a clobbered build due to cmake changes. ++ if os.path.isdir(COMPILER_RT_BUILD_DIR): ++ RmTree(COMPILER_RT_BUILD_DIR) ++ os.makedirs(COMPILER_RT_BUILD_DIR) + os.chdir(COMPILER_RT_BUILD_DIR) + # TODO(thakis): Add this once compiler-rt can build with clang-cl (see + # above). 
+@@ -606,11 +617,17 @@ + '-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags)] + if sys.platform != 'win32': + compiler_rt_args += ['-DLLVM_CONFIG_PATH=' + +- os.path.join(LLVM_BUILD_DIR, 'bin', 'llvm-config')] +- RunCommand(['cmake'] + compiler_rt_args + [LLVM_DIR], +- msvc_arch='x86', env=deployment_env) ++ os.path.join(LLVM_BUILD_DIR, 'bin', 'llvm-config'), ++ '-DSANITIZER_MIN_OSX_VERSION="10.7"'] ++ # compiler-rt is part of the llvm checkout on Windows but a stand-alone ++ # directory elsewhere, see the TODO above COMPILER_RT_DIR. ++ RmCmakeCache('.') ++ RunCommand(['cmake'] + compiler_rt_args + ++ [LLVM_DIR if sys.platform == 'win32' else COMPILER_RT_DIR], ++ msvc_arch='x86', env=deployment_env) + RunCommand(['ninja', 'compiler-rt'], msvc_arch='x86') + ++ # Copy select output to the main tree. + # TODO(hans): Make this (and the .gypi and .isolate files) version number + # independent. + if sys.platform == 'win32': +@@ -620,17 +637,35 @@ + else: + assert sys.platform.startswith('linux') + platform = 'linux' +- asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang', +- VERSION, 'lib', platform) ++ asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', platform) ++ if sys.platform == 'win32': ++ # TODO(thakis): This too is due to compiler-rt being part of the checkout ++ # on Windows, see TODO above COMPILER_RT_DIR. ++ asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang', ++ VERSION, 'lib', platform) + asan_rt_lib_dst_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', + VERSION, 'lib', platform) +- CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir, +- r'^.*-i386\.lib$') +- CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir, +- r'^.*-i386\.dll$') ++ # Blacklists: ++ CopyDirectoryContents(os.path.join(asan_rt_lib_src_dir, '..', '..'), ++ os.path.join(asan_rt_lib_dst_dir, '..', '..'), ++ r'^.*blacklist\.txt$') ++ # Headers: ++ if sys.platform != 'win32': ++ CopyDirectoryContents( ++ os.path.join(COMPILER_RT_BUILD_DIR, 'include/sanitizer'), ++ os.path.join(LLVM_BUILD_DIR, 'lib/clang', VERSION, 'include/sanitizer')) ++ # Static and dynamic libraries: ++ CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir) ++ if sys.platform == 'darwin': ++ for dylib in glob.glob(os.path.join(asan_rt_lib_dst_dir, '*.dylib')): ++ # Fix LC_ID_DYLIB for the ASan dynamic libraries to be relative to ++ # @executable_path. ++ # TODO(glider): this is transitional. We'll need to fix the dylib ++ # name either in our build system, or in Clang. See also ++ # http://crbug.com/344836. 
++ subprocess.call(['install_name_tool', '-id', ++ '@executable_path/' + os.path.basename(dylib), dylib]) + +- CopyFile(os.path.join(asan_rt_lib_src_dir, '..', '..', 'asan_blacklist.txt'), +- os.path.join(asan_rt_lib_dst_dir, '..', '..')) + + if sys.platform == 'win32': + # Make an extra copy of the sanitizer headers, to be put on the include path +@@ -640,22 +675,67 @@ + aux_sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang', + VERSION, 'include_sanitizer', + 'sanitizer') +- if not os.path.exists(aux_sanitizer_include_dir): +- os.makedirs(aux_sanitizer_include_dir) ++ EnsureDirExists(aux_sanitizer_include_dir) + for _, _, files in os.walk(sanitizer_include_dir): + for f in files: + CopyFile(os.path.join(sanitizer_include_dir, f), + aux_sanitizer_include_dir) + ++ if args.with_android: ++ make_toolchain = os.path.join( ++ ANDROID_NDK_DIR, 'build', 'tools', 'make-standalone-toolchain.sh') ++ for target_arch in ['aarch64', 'arm', 'i686']: ++ # Make standalone Android toolchain for target_arch. ++ toolchain_dir = os.path.join( ++ LLVM_BUILD_DIR, 'android-toolchain-' + target_arch) ++ RunCommand([ ++ make_toolchain, ++ '--platform=android-' + ('21' if target_arch == 'aarch64' else '19'), ++ '--install-dir="%s"' % toolchain_dir, ++ '--system=linux-x86_64', ++ '--stl=stlport', ++ '--toolchain=' + { ++ 'aarch64': 'aarch64-linux-android-4.9', ++ 'arm': 'arm-linux-androideabi-4.9', ++ 'i686': 'x86-4.9', ++ }[target_arch]]) ++ # Android NDK r9d copies a broken unwind.h into the toolchain, see ++ # http://crbug.com/357890 ++ for f in glob.glob(os.path.join(toolchain_dir, 'include/c++/*/unwind.h')): ++ os.remove(f) ++ ++ # Build ASan runtime for Android in a separate build tree. ++ build_dir = os.path.join(LLVM_BUILD_DIR, 'android-' + target_arch) ++ if not os.path.exists(build_dir): ++ os.mkdir(os.path.join(build_dir)) ++ os.chdir(build_dir) ++ cflags = ['--target=%s-linux-androideabi' % target_arch, ++ '--sysroot=%s/sysroot' % toolchain_dir, ++ '-B%s' % toolchain_dir] ++ android_args = base_cmake_args + [ ++ '-DCMAKE_C_COMPILER=' + os.path.join(LLVM_BUILD_DIR, 'bin/clang'), ++ '-DCMAKE_CXX_COMPILER=' + os.path.join(LLVM_BUILD_DIR, 'bin/clang++'), ++ '-DLLVM_CONFIG_PATH=' + os.path.join(LLVM_BUILD_DIR, 'bin/llvm-config'), ++ '-DCMAKE_C_FLAGS=' + ' '.join(cflags), ++ '-DCMAKE_CXX_FLAGS=' + ' '.join(cflags), ++ '-DANDROID=1'] ++ RmCmakeCache('.') ++ RunCommand(['cmake'] + android_args + [COMPILER_RT_DIR]) ++ RunCommand(['ninja', 'libclang_rt.asan-%s-android.so' % target_arch]) ++ ++ # And copy it into the main build tree. ++ runtime = 'libclang_rt.asan-%s-android.so' % target_arch ++ for root, _, files in os.walk(build_dir): ++ if runtime in files: ++ shutil.copy(os.path.join(root, runtime), asan_rt_lib_dst_dir) ++ + # Run tests. + if args.run_tests or use_head_revision: + os.chdir(LLVM_BUILD_DIR) +- RunCommand(GetVSVersion().SetupScript('x64') + +- ['&&', 'ninja', 'cr-check-all']) ++ RunCommand(['ninja', 'cr-check-all'], msvc_arch='x64') + if args.run_tests: + os.chdir(LLVM_BUILD_DIR) +- RunCommand(GetVSVersion().SetupScript('x64') + +- ['&&', 'ninja', 'check-all']) ++ RunCommand(['ninja', 'check-all'], msvc_arch='x64') + + WriteStampFile(PACKAGE_VERSION) + print 'Clang update was successful.' +@@ -663,31 +743,6 @@ + + + def main(): +- if not sys.platform in ['win32', 'cygwin']: +- # For non-Windows, fall back to update.sh. +- # TODO(hans): Make update.py replace update.sh completely. +- +- # This script is called by gclient. 
gclient opens its hooks subprocesses +- # with (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does +- # custom output processing that breaks printing '\r' characters for +- # single-line updating status messages as printed by curl and wget. +- # Work around this by setting stderr of the update.sh process to stdin (!): +- # gclient doesn't redirect stdin, and while stdin itself is read-only, a +- # dup()ed sys.stdin is writable, try +- # fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi') +- # TODO: Fix gclient instead, http://crbug.com/95350 +- if '--no-stdin-hack' in sys.argv: +- sys.argv.remove('--no-stdin-hack') +- stderr = None +- else: +- try: +- stderr = os.fdopen(os.dup(sys.stdin.fileno())) +- except: +- stderr = sys.stderr +- return subprocess.call( +- [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:], +- stderr=stderr) +- + parser = argparse.ArgumentParser(description='Build Clang.') + parser.add_argument('--bootstrap', action='store_true', + help='first build clang with CC, then with itself.') +@@ -695,26 +750,24 @@ + help="run only if the script thinks clang is needed") + parser.add_argument('--force-local-build', action='store_true', + help="don't try to download prebuild binaries") ++ parser.add_argument('--gcc-toolchain', help='set the version for which gcc ' ++ 'version be used for building; --gcc-toolchain=/opt/foo ' ++ 'picks /opt/foo/bin/gcc') + parser.add_argument('--print-revision', action='store_true', + help='print current clang revision and exit.') ++ parser.add_argument('--print-clang-version', action='store_true', ++ help='print current clang version (e.g. x.y.z) and exit.') + parser.add_argument('--run-tests', action='store_true', + help='run tests after building; only for local builds') + parser.add_argument('--tools', nargs='*', + help='select which chrome tools to build', + default=['plugins', 'blink_gc_plugin']) +- parser.add_argument('--without-patches', action='store_false', +- help="don't apply patches (default)", dest='with_patches', +- default=True) +- +- # For now, these flags are only used for the non-Windows flow, but argparser +- # gets mad if it sees a flag it doesn't recognize. +- parser.add_argument('--no-stdin-hack', action='store_true') +- ++ parser.add_argument('--without-android', action='store_false', ++ help='don\'t build Android ASan runtime (linux only)', ++ dest='with_android', ++ default=sys.platform.startswith('linux')) + args = parser.parse_args() + +- if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')): +- print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).' +- return 0 + if args.if_needed: + is_clang_required = False + # clang is always used on Mac and Linux. +@@ -730,8 +783,16 @@ + is_clang_required = True + if not is_clang_required: + return 0 ++ if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')): ++ print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).' ++ return 0 ++ ++ if use_head_revision: ++ # TODO(hans): Remove after the next roll. ++ global VERSION ++ VERSION = '3.9.0' + +- global LLVM_WIN_REVISION, PACKAGE_VERSION ++ global CLANG_REVISION, PACKAGE_VERSION + if args.print_revision: + if use_head_revision: + print GetSvnRevision(LLVM_DIR) +@@ -739,6 +800,10 @@ + print PACKAGE_VERSION + return 0 + ++ if args.print_clang_version: ++ sys.stdout.write(VERSION) ++ return 0 ++ + # Don't buffer stdout, so that print statements are immediately flushed. 
+ # Do this only after --print-revision has been handled, else we'll get + # an error message when this script is run from gn for some reason. +@@ -747,12 +812,13 @@ + if use_head_revision: + # Use a real revision number rather than HEAD to make sure that the stamp + # file logic works. +- LLVM_WIN_REVISION = GetSvnRevision(LLVM_REPO_URL) +- PACKAGE_VERSION = LLVM_WIN_REVISION + '-0' ++ CLANG_REVISION = GetSvnRevision(LLVM_REPO_URL) ++ PACKAGE_VERSION = CLANG_REVISION + '-0' + + args.force_local_build = True +- # Skip local patches when using HEAD: they probably don't apply anymore. +- args.with_patches = False ++ if 'OS=android' not in os.environ.get('GYP_DEFINES', ''): ++ # Only build the Android ASan rt on ToT bots when targetting Android. ++ args.with_android = False + + return UpdateClang(args) + diff --git a/pkgs/development/libraries/qt-5/5.6/qtwebengine.nix b/pkgs/development/libraries/qt-5/5.6/qtwebengine/default.nix similarity index 96% rename from pkgs/development/libraries/qt-5/5.6/qtwebengine.nix rename to pkgs/development/libraries/qt-5/5.6/qtwebengine/default.nix index 2a437e62eca..dba3611683e 100644 --- a/pkgs/development/libraries/qt-5/5.6/qtwebengine.nix +++ b/pkgs/development/libraries/qt-5/5.6/qtwebengine/default.nix @@ -53,6 +53,9 @@ qtSubmodule { libcap pciutils ]; + patches = [ + ./chromium-clang-update-py.patch + ]; postInstall = '' cat > $out/libexec/qt.conf <> "$bin" << EOF - #!/bin/sh - exec ${jre}/bin/java -jar $out/lib/selenium-server-${version}/selenium-server.jar "\$@" - EOF - chmod +x "$bin" - ''; - */ - - # this snapshot version starts a firefox from a script file. It only issues a warning about it - # you still have to pass -DfirefoxDefaultPath=/home/marc/.nix-profile/bin/firefox or such.. - name = "selenium-remote-control-${version}-dist"; - # this dist file has been created using mvn package -Dmaven.test.skip=true based on svn rev 2450 - src = fetchurl { - url = "http://mawercer.de/~nix/selenium-server-1.0-SNAPSHOT-standalone.jar"; - sha256 = "1lqr72a3lmmww1psl19pzp91c9q1dm0314b7y7mz1gnfpwc49y38"; - }; - phases = "buildPhase"; - buildPhase = '' - mkdir -p $out/{bin,lib} - cp $src $out/lib/ - bin="$out/bin/selenium-remote-control" - cat >> "$bin" << EOF - #!/bin/sh - exec ${jre}/bin/java -jar "$out/lib/$(basename $src)" "\$@" - EOF - chmod +x "$bin" - ''; -} diff --git a/pkgs/games/0ad/data.nix b/pkgs/games/0ad/data.nix index 98603251e59..6b4dface0e1 100644 --- a/pkgs/games/0ad/data.nix +++ b/pkgs/games/0ad/data.nix @@ -20,5 +20,6 @@ stdenv.mkDerivation rec { homepage = "http://wildfiregames.com/0ad/"; license = licenses.cc-by-sa-30; platforms = platforms.linux; + hydraPlatforms = []; }; } diff --git a/pkgs/games/steam/chrootenv.nix b/pkgs/games/steam/chrootenv.nix index 8c86371ecab..c53418b6523 100644 --- a/pkgs/games/steam/chrootenv.nix +++ b/pkgs/games/steam/chrootenv.nix @@ -52,6 +52,7 @@ in buildFHSUserEnv rec { gst_all_1.gst-plugins-ugly libdrm mono + xorg.xkeyboardconfig (steamPackages.steam-runtime-wrapped.override { inherit nativeOnly runtimeOnly newStdcpp; diff --git a/pkgs/games/tintin/default.nix b/pkgs/games/tintin/default.nix index c2bc9d37b4f..deb283c57c4 100644 --- a/pkgs/games/tintin/default.nix +++ b/pkgs/games/tintin/default.nix @@ -1,11 +1,11 @@ { stdenv, fetchurl, zlib, pcre }: stdenv.mkDerivation rec { - name = "tintin-2.00.9"; + name = "tintin-2.01.1"; src = fetchurl { url = "mirror://sourceforge/tintin/${name}.tar.gz"; - sha256 = "0x8jakxx7hh7b0z6vjcxyrda0afbz2s2yy7mvrbxjffyc2dyxzna"; + sha256 = 
"195wrfcys8yy953gdrl1gxryhjnx9lg1vqgxm3dyzm8bi18aa2yc"; }; buildInputs = [ zlib pcre ]; diff --git a/pkgs/os-specific/linux/cryopid/default.nix b/pkgs/os-specific/linux/cryopid/default.nix deleted file mode 100644 index 0cb64bcc975..00000000000 --- a/pkgs/os-specific/linux/cryopid/default.nix +++ /dev/null @@ -1,39 +0,0 @@ -{stdenv, fetchurl, zlibStatic}: - -let - - pname = "cryopid"; - version = "20090908"; - revision = "7da69201d50e"; - -in - -stdenv.mkDerivation rec { - name = "${pname}-${version}"; - - src = fetchurl { - url = "https://sharesource.org/hg/cryopid/archive/${revision}.tar.bz2"; - sha256 = "908a4b1cb26322ee25afe13ff59e0d86f669538cb4583766b15ca79fda6c69ca"; - }; - - buildInputs = [ zlibStatic ]; - - preBuild = "cd src"; - - installPhase = "mkdir -p $out/bin; cp cryopid $out/bin"; - - meta = { - description = "A process freezer for Linux"; - longDescription = '' - CryoPID allows you to capture the state of a running process in Linux - and save it to a file. This file can then be used to resume the process - later on, either after a reboot or even on another machines. - ''; - homepage = http://sharesource.org/project/cryopid; - license = '' - Modified BSD license (without advertising clause). CryoPID ships with - and links against the dietlibc library, which is distributed under the - GNU General Public Licence, version 2. - ''; - }; -} diff --git a/pkgs/os-specific/linux/light/default.nix b/pkgs/os-specific/linux/light/default.nix index 5ca9f69f879..1c44c0d78a8 100644 --- a/pkgs/os-specific/linux/light/default.nix +++ b/pkgs/os-specific/linux/light/default.nix @@ -1,15 +1,18 @@ -{ stdenv, fetchurl }: +{ stdenv, fetchurl, help2man }: stdenv.mkDerivation rec { - version = "0.9"; + version = "1.0"; name = "light-${version}"; src = fetchurl { url = "https://github.com/haikarainen/light/archive/v${version}.tar.gz"; - sha256 = "1dnzkkg307izvw76gvzsl2vpxd2a1grxg5h82ix505rb9nnmn0d6"; + sha256 = "974608ee42ffe85cfd23184306d56d86ec4e6f4b0518bafcb7b3330998b1af64"; }; + buildInputs = [ help2man ]; installPhase = "mkdir -p $out/bin; cp light $out/bin/"; + preFixup = "make man; mkdir -p $out/man/man1; mv light.1.gz $out/man/man1"; + meta = { description = "GNU/Linux application to control backlights"; homepage = https://haikarainen.github.io/light/; diff --git a/pkgs/os-specific/linux/mcelog/default.nix b/pkgs/os-specific/linux/mcelog/default.nix index f9969fa79fd..590d2b4a854 100644 --- a/pkgs/os-specific/linux/mcelog/default.nix +++ b/pkgs/os-specific/linux/mcelog/default.nix @@ -2,10 +2,10 @@ stdenv.mkDerivation rec { name = "mcelog-${version}"; - version = "138"; + version = "142"; src = fetchFromGitHub { - sha256 = "039ycn5m3gx4n0kppxl35wcrkyva6lv64qhlqhh7034qkbqbhqiy"; + sha256 = "1cqx7w75d570vxqi2gk9bkqqclakkhp4kjanv5j3nhqwg3p38zyv"; rev = "v${version}"; repo = "mcelog"; owner = "andikleen"; diff --git a/pkgs/os-specific/linux/ttysnoop/default.nix b/pkgs/os-specific/linux/ttysnoop/default.nix deleted file mode 100644 index 670c9608344..00000000000 --- a/pkgs/os-specific/linux/ttysnoop/default.nix +++ /dev/null @@ -1,38 +0,0 @@ -{stdenv, fetchurl}: -let - s = # Generated upstream information - rec { - baseName="ttysnoop"; - version="0.12d.k26"; - name="${baseName}-${version}"; - hash="0jb2zchaiqmmickj0la7wjw3sf9vy65qfhhs11yrzx4mmwkp0395"; - url="http://sysd.org/stas/files/active/0/ttysnoop-0.12d.k26.tar.gz"; - sha256="0jb2zchaiqmmickj0la7wjw3sf9vy65qfhhs11yrzx4mmwkp0395"; - }; - buildInputs = [ - ]; -in -stdenv.mkDerivation { - inherit (s) name version; - inherit buildInputs; - src 
= fetchurl { - inherit (s) url sha256; - }; - preBuild = '' - sed -e "s@/sbin@$out/sbin@g" -i Makefile - sed -e "s@/usr/man@$out/share/man@g" -i Makefile - mkdir -p "$out/share/man/man8" - mkdir -p "$out/sbin" - ''; - postInstall = '' - mkdir -p "$out/etc" - cp snooptab.dist "$out/etc/snooptab" - ''; - meta = { - inherit (s) version; - description = "A tool to clone input and output of another tty/pty to the current one"; - license = stdenv.lib.licenses.gpl2 ; - maintainers = [stdenv.lib.maintainers.raskin]; - platforms = stdenv.lib.platforms.linux; - }; -} diff --git a/pkgs/os-specific/linux/ttysnoop/default.upstream b/pkgs/os-specific/linux/ttysnoop/default.upstream deleted file mode 100644 index 905a639c31e..00000000000 --- a/pkgs/os-specific/linux/ttysnoop/default.upstream +++ /dev/null @@ -1,3 +0,0 @@ -url http://sysd.org/stas/node/35 -ensure_choice -version '.*-([0-9a-z.]+)[.]tar[.].*' '\1' diff --git a/pkgs/servers/felix/remoteshell.nix b/pkgs/servers/felix/remoteshell.nix index 6e8089d32e9..3ac3c98718f 100644 --- a/pkgs/servers/felix/remoteshell.nix +++ b/pkgs/servers/felix/remoteshell.nix @@ -1,14 +1,15 @@ {stdenv, fetchurl}: stdenv.mkDerivation rec { - name = "apache-felix-remoteshell-bundle-1.0.4"; + version = "1.1.2"; + name = "apache-felix-remoteshell-bundle-${version}"; src = fetchurl { - url = http://apache.proserve.nl/felix/org.apache.felix.shell.remote-1.0.4.jar; - sha256 = "1bgahzs9nnnvfr0yyh9s0r6h1zp2ls6533377rp8r1qk2a4s1gzb"; + url = "http://apache.proserve.nl/felix/org.apache.felix.shell.remote-${version}.jar"; + sha256 = "147zw5ppn98wfl3pr32isyb267xm3gwsvdfdvjr33m9g2v1z69aq"; }; - buildCommand = + buildCommand = '' mkdir -p $out/bundle - cp ${src} $out/bundle/org.apache.felix.shell.remote-1.0.4.jar + cp ${src} $out/bundle/org.apache.felix.shell.remote-${version}.jar ''; } diff --git a/pkgs/tools/compression/zstd/default.nix b/pkgs/tools/compression/zstd/default.nix index e981a108914..d966175f50d 100644 --- a/pkgs/tools/compression/zstd/default.nix +++ b/pkgs/tools/compression/zstd/default.nix @@ -3,10 +3,10 @@ stdenv.mkDerivation rec { name = "zstd-${version}"; - version = "1.0.0"; + version = "1.1.1"; src = fetchFromGitHub { - sha256 = "0h8r8vlk8v28cxxgdp7h7dcygbpn8g95wffsvhzybxhfvkrlw6f2"; + sha256 = "18snd1jiz0j6r1yk4vkgqmil2gbzwxgmcv2chvpnc5i93pp18hri"; rev = "v${version}"; repo = "zstd"; owner = "facebook"; diff --git a/pkgs/tools/filesystems/fuse-zip/default.nix b/pkgs/tools/filesystems/fuse-zip/default.nix deleted file mode 100644 index a5ac74fe47a..00000000000 --- a/pkgs/tools/filesystems/fuse-zip/default.nix +++ /dev/null @@ -1,23 +0,0 @@ -{ stdenv, fetchurl, pkgconfig, fuse, libzip, zlib }: - -stdenv.mkDerivation rec { - name = "fuse-zip-0.2.13"; - - src = fetchurl { - url = "http://fuse-zip.googlecode.com/files/${name}.tar.gz"; - sha1 = "9cfa00e38a59d4e06fd47bfaca75ad5e299ecc6b"; - }; - - patches = [ ./libzip.patch ]; # problems with new libzip; from Gentoo - - buildInputs = [ pkgconfig fuse libzip zlib ]; - - makeFlags = "INSTALLPREFIX=$(out)"; - - meta = { - homepage = http://code.google.com/p/fuse-zip/; - description = "A FUSE-based filesystem that allows read and write access to ZIP files"; - platforms = stdenv.lib.platforms.linux; - license = stdenv.lib.licenses.gpl3Plus; - }; -} diff --git a/pkgs/tools/filesystems/fuse-zip/libzip.patch b/pkgs/tools/filesystems/fuse-zip/libzip.patch deleted file mode 100644 index f2348e5f1ba..00000000000 --- a/pkgs/tools/filesystems/fuse-zip/libzip.patch +++ /dev/null @@ -1,24 +0,0 @@ -diff -ru 
fuse-zip-0.2.13/lib/bigBuffer.cpp fuse-zip-0.2.13.new//lib/bigBuffer.cpp ---- fuse-zip-0.2.13/lib/bigBuffer.cpp 2010-12-06 12:34:32.000000000 -0500 -+++ fuse-zip-0.2.13.new//lib/bigBuffer.cpp 2011-09-28 21:40:01.294946957 -0400 -@@ -236,7 +236,7 @@ - len = offset; - } - --ssize_t BigBuffer::zipUserFunctionCallback(void *state, void *data, size_t len, enum zip_source_cmd cmd) { -+zip_int64_t BigBuffer::zipUserFunctionCallback(void *state, void *data, zip_uint64_t len, enum zip_source_cmd cmd) { - CallBackStruct *b = (CallBackStruct*)state; - switch (cmd) { - case ZIP_SOURCE_OPEN: { -diff -ru fuse-zip-0.2.13/lib/bigBuffer.h fuse-zip-0.2.13.new//lib/bigBuffer.h ---- fuse-zip-0.2.13/lib/bigBuffer.h 2010-12-06 12:34:32.000000000 -0500 -+++ fuse-zip-0.2.13.new//lib/bigBuffer.h 2011-09-28 21:40:23.203719133 -0400 -@@ -52,7 +52,7 @@ - * never called because read() always successfull. - * See zip_source_function(3) for details. - */ -- static ssize_t zipUserFunctionCallback(void *state, void *data, size_t len, enum zip_source_cmd cmd); -+ static zip_int64_t zipUserFunctionCallback(void *state, void *data, zip_uint64_t len, enum zip_source_cmd cmd); - - /** - * Return number of chunks needed to keep 'offset' bytes. diff --git a/pkgs/tools/misc/cowsay/default.nix b/pkgs/tools/misc/cowsay/default.nix index a9bdf1b2b92..0a7b079445d 100644 --- a/pkgs/tools/misc/cowsay/default.nix +++ b/pkgs/tools/misc/cowsay/default.nix @@ -1,11 +1,12 @@ -{ stdenv, fetchurl, perl }: +{ stdenv, fetchgit, perl }: stdenv.mkDerivation { - name = "cowsay-3.03"; + name = "cowsay-3.03+dfsg1-16"; - src = fetchurl { - url = http://www.nog.net/~tony/warez/cowsay-3.03.tar.gz; - sha256 = "1s3c0g5vmsadicc4lrlkmkm8znm4y6wnxd8kyv9xgm676hban1il"; + src = fetchgit { + url = https://anonscm.debian.org/git/collab-maint/cowsay.git; + rev = "acb946c166fa3b9526b9c471ef1330f9f89f9c8b"; + sha256 = "1ji66nrdcc8sh79hwils3nbaj897s352r5wp7kzjwiym8bm2azk6"; }; buildInputs = [ perl ]; diff --git a/pkgs/tools/misc/g500-control/default.nix b/pkgs/tools/misc/g500-control/default.nix deleted file mode 100644 index 9d42c7d68d5..00000000000 --- a/pkgs/tools/misc/g500-control/default.nix +++ /dev/null @@ -1,34 +0,0 @@ -{ stdenv, fetchurl }: - -stdenv.mkDerivation { - name = "g500-control-0.0.1"; - - src = fetchurl { - url = "http://g500-control.googlecode.com/files/g500_control_0.0.1.tar.gz"; - sha256 = "1xlg9lpxnk3228k81y1i6jjh4df1p4jh64g54w969g6a6v6dazvb"; - }; - - unpackPhase = '' - mkdir -p g500-control - tar -C g500-control/ -xf $src - ''; - - buildPhase = '' - cd g500-control - gcc -o g500-control *.c - ''; - - installPhase = '' - mkdir -p $out/bin/ - cp g500-control $out/bin/ - ''; - - meta = { - homepage = http://code.google.com/p/g500-control/; - description = "Configure Logitech G500's internal profile under Linux"; - license = stdenv.lib.licenses.gpl2; - platforms = stdenv.lib.platforms.linux; - maintainers = with stdenv.lib.maintainers; [ the-kenny ]; - }; -} - diff --git a/pkgs/tools/misc/graylog/default.nix b/pkgs/tools/misc/graylog/default.nix index 829c524113b..ae26bad5bc8 100644 --- a/pkgs/tools/misc/graylog/default.nix +++ b/pkgs/tools/misc/graylog/default.nix @@ -1,12 +1,12 @@ { stdenv, fetchurl }: stdenv.mkDerivation rec { - version = "2.1.1"; + version = "2.1.2"; name = "graylog-${version}"; src = fetchurl { url = "https://packages.graylog2.org/releases/graylog/graylog-${version}.tgz"; - sha256 = "0p7vx6b4k6lzxi0v9x44wbrvplw93288lpixpwckc0xx0r7js07z"; + sha256 = "0jwm1l3s00rh22gqvkg730h8xm4h1y1dr60m4s5xbz8qzdkk8rax"; }; dontBuild = 
true;
diff --git a/pkgs/tools/misc/mcrl/default.nix b/pkgs/tools/misc/mcrl/default.nix
deleted file mode 100644
index bf5043e8b63..00000000000
--- a/pkgs/tools/misc/mcrl/default.nix
+++ /dev/null
@@ -1,14 +0,0 @@
-{stdenv, fetchurl, coreutils}:
-
-stdenv.mkDerivation {
- name = "mcrl-2.18.4";
- src = fetchurl {
- url = http://homepages.cwi.nl/~mcrl/mcrl-2.18.4.tar.gz ;
- sha256 = "0gld7x3cv3y0vwjr1snz24xzr818sj1l2dfn8qhirfyhc7dnnqfw";
- };
-
- RMPROG = "${coreutils}/bin/rm -f";
-}
-
-
-
diff --git a/pkgs/tools/misc/youtube-dl/default.nix b/pkgs/tools/misc/youtube-dl/default.nix
index 8f1ac7546bb..21828f2c278 100644
--- a/pkgs/tools/misc/youtube-dl/default.nix
+++ b/pkgs/tools/misc/youtube-dl/default.nix
@@ -14,11 +14,11 @@ with stdenv.lib;
 buildPythonApplication rec {
 name = "youtube-dl-${version}";
- version = "2016.11.02";
+ version = "2016.11.04";
 src = fetchurl {
 url = "https://yt-dl.org/downloads/${version}/${name}.tar.gz";
- sha256 = "97777924c3df763d3f2259c9a7f227a01e787ccd452be198191a4a848a7632d7";
+ sha256 = "9622b29b81587278a00e39e4206e7c52555d240cbbb44242f237660169e8d531";
 };
 buildInputs = [ makeWrapper zip ] ++ optional generateManPage pandoc;
diff --git a/pkgs/tools/networking/i2pd/default.nix b/pkgs/tools/networking/i2pd/default.nix
index f12b2900e67..56af632e616 100644
--- a/pkgs/tools/networking/i2pd/default.nix
+++ b/pkgs/tools/networking/i2pd/default.nix
@@ -4,13 +4,13 @@ stdenv.mkDerivation rec {
 name = pname + "-" + version;
 pname = "i2pd";
- version = "2.9.0";
+ version = "2.10.0";
 src = fetchFromGitHub {
 owner = "PurpleI2P";
 repo = pname;
 rev = version;
- sha256 = "1xwcq7lklma0daamp9z76l9mgr3glpvicjgsr645rjhdv8a0mqwp";
+ sha256 = "0lw0vcibp3v5xz855h4x2rs3ff7yx86znzjfnfri348wg413js5c";
 };
 buildInputs = [ boost zlib openssl ];
diff --git a/pkgs/tools/text/xml/jing-trang/default.nix b/pkgs/tools/text/xml/jing-trang/default.nix
new file mode 100644
index 00000000000..36ff976a6c1
--- /dev/null
+++ b/pkgs/tools/text/xml/jing-trang/default.nix
@@ -0,0 +1,41 @@
+{ stdenv, fetchFromGitHub, jre, jdk, ant, saxon }:
+
+stdenv.mkDerivation rec {
+ name = "jing-trang-${version}";
+ version = "20150603";
+
+ src = fetchFromGitHub {
+ owner = "relaxng";
+ repo = "jing-trang";
+ rev = "54b9b1f4e67cd79c7987750d8c9dcfc014af98c3"; # needed to compile with jdk8
+ sha256 = "0wa569xjb7ihhcaazz32y2b0dv092lisjz77isz1gfb1wvf53di5";
+ };
+
+ buildInputs = [ jdk ant saxon ];
+
+ preBuild = "ant";
+
+ installPhase = ''
+ mkdir -p "$out"/{share/java,bin}
+ cp ./build/*.jar "$out/share/java/"
+
+ for tool in jing trang; do
+ cat > "$out/bin/$tool" < "$out/bin/jing" <> $out/bin/trang <