Merge branch 'master' into staging
Commit f77801d88d
@@ -231,12 +231,20 @@ rec {
      correspond to the definition of 'loc' in 'opt.file'. */
   mergeOptionDecls = loc: opts:
     foldl' (res: opt:
-      if opt.options ? default && res ? default ||
-         opt.options ? example && res ? example ||
-         opt.options ? description && res ? description ||
-         opt.options ? apply && res ? apply ||
-         # Accept to merge options which have identical types.
-         opt.options ? type && res ? type && opt.options.type.name != res.type.name
+      let t  = res.type;
+          t' = opt.options.type;
+          mergedType = t.typeMerge t'.functor;
+          typesMergeable = mergedType != null;
+          typeSet = if (bothHave "type") && typesMergeable
+                    then { type = mergedType; }
+                    else {};
+          bothHave = k: opt.options ? ${k} && res ? ${k};
+      in
+      if bothHave "default" ||
+         bothHave "example" ||
+         bothHave "description" ||
+         bothHave "apply" ||
+         (bothHave "type" && (! typesMergeable))
       then
         throw "The option `${showOption loc}' in `${opt.file}' is already declared in ${showFiles res.declarations}."
       else
@@ -258,7 +266,7 @@ rec {
      in opt.options // res //
        { declarations = res.declarations ++ [opt.file];
          options = submodules;
-       }
+       } // typeSet
    ) { inherit loc; declarations = []; options = []; } opts;

  /* Merge all the definitions of an option to produce the final
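The practical effect of the two hunks above: two modules may now declare the same option, as long as their types are mergeable. A minimal sketch (option name is illustrative; it assumes this branch's library is reachable as <nixpkgs/lib>):

    let
      lib  = import <nixpkgs/lib>;
      eval = lib.evalModules {
        modules = [
          # Two declarations of the same option; mergeOptionDecls now merges their
          # enum types via typeMerge instead of throwing "already declared".
          { options.backend = lib.mkOption { type = lib.types.enum [ "slim" ]; }; }
          { options.backend = lib.mkOption { type = lib.types.enum [ "kdm" ];  }; }
          # A definition that is only valid against the merged type.
          { backend = "kdm"; }
        ];
      };
    in
      eval.config.backend   # evaluates to "kdm"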
@@ -422,12 +430,14 @@ rec {
      options = opt.options or
        (throw "Option `${showOption loc'}' has type optionSet but has no option attribute, in ${showFiles opt.declarations}.");
      f = tp:
+       let optionSetIn = type: (tp.name == type) && (tp.functor.wrapped.name == "optionSet");
+       in
        if tp.name == "option set" || tp.name == "submodule" then
          throw "The option ${showOption loc} uses submodules without a wrapping type, in ${showFiles opt.declarations}."
-       else if tp.name == "attribute set of option sets" then types.attrsOf (types.submodule options)
-       else if tp.name == "list or attribute set of option sets" then types.loaOf (types.submodule options)
-       else if tp.name == "list of option sets" then types.listOf (types.submodule options)
-       else if tp.name == "null or option set" then types.nullOr (types.submodule options)
+       else if optionSetIn "attrsOf" then types.attrsOf (types.submodule options)
+       else if optionSetIn "loaOf"  then types.loaOf  (types.submodule options)
+       else if optionSetIn "listOf" then types.listOf (types.submodule options)
+       else if optionSetIn "nullOr" then types.nullOr (types.submodule options)
        else tp;
    in
      if opt.type.getSubModules or null == null
@@ -92,7 +92,7 @@ rec {
      internal = opt.internal or false;
      visible = opt.visible or true;
      readOnly = opt.readOnly or false;
-     type = opt.type.name or null;
+     type = opt.type.description or null;
    }
    // (if opt ? example then { example = scrubOptionValue opt.example; } else {})
    // (if opt ? default then { default = scrubOptionValue opt.default; } else {})
142  lib/types.nix
@@ -17,10 +17,43 @@ rec {
  };

+ # Default type merging function
+ # takes two type functors and returns the merged type
+ defaultTypeMerge = f: f':
+   let wrapped = f.wrapped.typeMerge f'.wrapped.functor;
+       payload = f.binOp f.payload f'.payload;
+   in
+   # cannot merge different types
+   if f.name != f'.name
+      then null
+   # simple types
+   else if    (f.wrapped == null && f'.wrapped == null)
+           && (f.payload == null && f'.payload == null)
+      then f.type
+   # composed types
+   else if (f.wrapped != null && f'.wrapped != null) && (wrapped != null)
+      then f.type wrapped
+   # value types
+   else if (f.payload != null && f'.payload != null) && (payload != null)
+      then f.type payload
+   else null;
+
+ # Default type functor
+ defaultFunctor = name: {
+   inherit name;
+   type    = types."${name}" or null;
+   wrapped = null;
+   payload = null;
+   binOp   = a: b: null;
+ };
+
  isOptionType = isType "option-type";
  mkOptionType =
-   { # Human-readable representation of the type.
+   { # Human-readable representation of the type, should be equivalent to
+     # the type function name.
      name
+   , # Description of the type, defined recursively by embedding the wrapped type if any.
+     description ? null
    , # Function applied to each definition that should return true if
      # its type-correct, false otherwise.
      check ? (x: true)
@@ -36,12 +69,26 @@ rec {
      getSubOptions ? prefix: {}
    , # List of modules if any, or null if none.
      getSubModules ? null
    , # Function for building the same option type with a different list of
      # modules.
      substSubModules ? m: null
+   , # Function that merges type declarations.
+     # internal, takes a functor as argument and returns the merged type.
+     # returning null means the type is not mergeable
+     typeMerge ? defaultTypeMerge functor
+   , # The type functor.
+     # internal, representation of the type as an attribute set.
+     #   name: name of the type
+     #   type: type function.
+     #   wrapped: the type wrapped in case of compound types.
+     #   payload: values of the type, two payloads of the same type must be
+     #     combinable with the binOp binary operation.
+     #   binOp: binary operation that merges two payloads of the same type.
+     functor ? defaultFunctor name
    }:
    { _type = "option-type";
-     inherit name check merge getSubOptions getSubModules substSubModules;
+     inherit name check merge getSubOptions getSubModules substSubModules typeMerge functor;
+     description = if description == null then name else description;
    };


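A minimal sketch of what the default typeMerge does for a value type, here separatedString, whose functor carries the separator as its payload (it assumes this patch is applied and <nixpkgs/lib> points at this library):

    let
      lib = import <nixpkgs/lib>;
      a = lib.types.separatedString "\n";
      b = lib.types.separatedString "\n";
      c = lib.types.separatedString ",";
    in {
      # Same payload: binOp keeps the separator, so the merge succeeds.
      mergeable   = (a.typeMerge b.functor) != null;   # true
      # Different payloads: binOp returns null, so the types cannot be merged.
      unmergeable = (a.typeMerge c.functor) == null;   # true
    }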
@@ -52,29 +99,39 @@ rec {
  };

  bool = mkOptionType {
-   name = "boolean";
+   name = "bool";
+   description = "boolean";
    check = isBool;
    merge = mergeEqualOption;
  };

- int = mkOptionType {
-   name = "integer";
+ int = mkOptionType rec {
+   name = "int";
+   description = "integer";
    check = isInt;
    merge = mergeOneOption;
  };

  str = mkOptionType {
-   name = "string";
+   name = "str";
+   description = "string";
    check = isString;
    merge = mergeOneOption;
  };

  # Merge multiple definitions by concatenating them (with the given
  # separator between the values).
- separatedString = sep: mkOptionType {
-   name = "string";
+ separatedString = sep: mkOptionType rec {
+   name = "separatedString";
+   description = "string";
    check = isString;
    merge = loc: defs: concatStringsSep sep (getValues defs);
+   functor = (defaultFunctor name) // {
+     payload = sep;
+     binOp = sepLhs: sepRhs:
+       if sepLhs == sepRhs then sepLhs
+       else null;
+   };
  };

  lines = separatedString "\n";
@@ -86,7 +143,8 @@ rec {
  string = separatedString "";

  attrs = mkOptionType {
-   name = "attribute set";
+   name = "attrs";
+   description = "attribute set";
    check = isAttrs;
    merge = loc: foldl' (res: def: mergeAttrs res def.value) {};
  };
@@ -114,8 +172,9 @@ rec {
  # drop this in the future:
  list = builtins.trace "`types.list' is deprecated; use `types.listOf' instead" types.listOf;

- listOf = elemType: mkOptionType {
-   name = "list of ${elemType.name}s";
+ listOf = elemType: mkOptionType rec {
+   name = "listOf";
+   description = "list of ${elemType.description}s";
    check = isList;
    merge = loc: defs:
      map (x: x.value) (filter (x: x ? value) (concatLists (imap (n: def:
@@ -132,10 +191,12 @@ rec {
    getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["*"]);
    getSubModules = elemType.getSubModules;
    substSubModules = m: listOf (elemType.substSubModules m);
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

- attrsOf = elemType: mkOptionType {
-   name = "attribute set of ${elemType.name}s";
+ attrsOf = elemType: mkOptionType rec {
+   name = "attrsOf";
+   description = "attribute set of ${elemType.description}s";
    check = isAttrs;
    merge = loc: defs:
      mapAttrs (n: v: v.value) (filterAttrs (n: v: v ? value) (zipAttrsWith (name: defs:
@@ -147,6 +208,7 @@ rec {
    getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
    getSubModules = elemType.getSubModules;
    substSubModules = m: attrsOf (elemType.substSubModules m);
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

  # List or attribute set of ...
@@ -165,18 +227,21 @@ rec {
        def;
    listOnly = listOf elemType;
    attrOnly = attrsOf elemType;
- in mkOptionType {
-   name = "list or attribute set of ${elemType.name}s";
+ in mkOptionType rec {
+   name = "loaOf";
+   description = "list or attribute set of ${elemType.description}s";
    check = x: isList x || isAttrs x;
    merge = loc: defs: attrOnly.merge loc (imap convertIfList defs);
    getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name?>"]);
    getSubModules = elemType.getSubModules;
    substSubModules = m: loaOf (elemType.substSubModules m);
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

  # List or element of ...
- loeOf = elemType: mkOptionType {
-   name = "element or list of ${elemType.name}s";
+ loeOf = elemType: mkOptionType rec {
+   name = "loeOf";
+   description = "element or list of ${elemType.description}s";
    check = x: isList x || elemType.check x;
    merge = loc: defs:
      let
@@ -189,18 +254,22 @@ rec {
        else if !isString res then
          throw "The option `${showOption loc}' does not have a string value, in ${showFiles (getFiles defs)}."
        else res;
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

- uniq = elemType: mkOptionType {
-   inherit (elemType) name check;
+ uniq = elemType: mkOptionType rec {
+   name = "uniq";
+   inherit (elemType) description check;
    merge = mergeOneOption;
    getSubOptions = elemType.getSubOptions;
    getSubModules = elemType.getSubModules;
    substSubModules = m: uniq (elemType.substSubModules m);
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

- nullOr = elemType: mkOptionType {
-   name = "null or ${elemType.name}";
+ nullOr = elemType: mkOptionType rec {
+   name = "nullOr";
+   description = "null or ${elemType.description}";
    check = x: x == null || elemType.check x;
    merge = loc: defs:
      let nrNulls = count (def: def.value == null) defs; in
@@ -211,6 +280,7 @@ rec {
    getSubOptions = elemType.getSubOptions;
    getSubModules = elemType.getSubModules;
    substSubModules = m: nullOr (elemType.substSubModules m);
+   functor = (defaultFunctor name) // { wrapped = elemType; };
  };

  submodule = opts:
@@ -236,6 +306,12 @@ rec {
      args = { name = ""; }; }).options;
    getSubModules = opts';
    substSubModules = m: submodule m;
+   functor = (defaultFunctor name) // {
+     # Merging of submodules is done as part of mergeOptionDecls, as we have to annotate
+     # each submodule with its location.
+     payload = [];
+     binOp = lhs: rhs: [];
+   };
  };

  enum = values:
@@ -245,23 +321,35 @@ rec {
      else if builtins.isInt v then builtins.toString v
      else ''<${builtins.typeOf v}>'';
    in
-   mkOptionType {
-     name = "one of ${concatMapStringsSep ", " show values}";
+   mkOptionType rec {
+     name = "enum";
+     description = "one of ${concatMapStringsSep ", " show values}";
      check = flip elem values;
      merge = mergeOneOption;
+     functor = (defaultFunctor name) // { payload = values; binOp = a: b: unique (a ++ b); };
    };

- either = t1: t2: mkOptionType {
-   name = "${t1.name} or ${t2.name}";
+ either = t1: t2: mkOptionType rec {
+   name = "either";
+   description = "${t1.description} or ${t2.description}";
    check = x: t1.check x || t2.check x;
    merge = mergeOneOption;
+   typeMerge = f':
+     let mt1 = t1.typeMerge (elemAt f'.wrapped 0).functor;
+         mt2 = t2.typeMerge (elemAt f'.wrapped 1).functor;
+     in
+        if (name == f'.name) && (mt1 != null) && (mt2 != null)
+        then functor.type mt1 mt2
+        else null;
+   functor = (defaultFunctor name) // { wrapped = [ t1 t2 ]; };
  };

  # Obsolete alternative to configOf. It takes its option
  # declarations from the ‘options’ attribute of containing option
  # declaration.
  optionSet = mkOptionType {
-   name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "option set";
+   name = builtins.trace "types.optionSet is deprecated; use types.submodule instead" "optionSet";
+   description = "option set";
  };

  # Augment the given type with an additional type check function.
@@ -42,29 +42,30 @@ construction, so without them,
elements.)</para>

<para>Even greater customisation is possible using the function
-<varname>overrideDerivation</varname>. While the
+<varname>overrideAttrs</varname>. While the
<varname>override</varname> mechanism above overrides the arguments of
-a package function, <varname>overrideDerivation</varname> allows
-changing the <emphasis>result</emphasis> of the function. This
-permits changing any aspect of the package, such as the source code.
+a package function, <varname>overrideAttrs</varname> allows
+changing the <emphasis>attributes</emphasis> passed to <literal>mkDerivation</literal>.
+This permits changing any aspect of the package, such as the source code.
For instance, if you want to override the source code of Emacs, you
can say:

<programlisting>
-environment.systemPackages =
-  [ (pkgs.lib.overrideDerivation pkgs.emacs (attrs: {
+environment.systemPackages = [
+  (pkgs.emacs.overrideAttrs (oldAttrs: {
      name = "emacs-25.0-pre";
      src = /path/to/my/emacs/tree;
    }))
  ];
</programlisting>

-Here, <varname>overrideDerivation</varname> takes the Nix derivation
+Here, <varname>overrideAttrs</varname> takes the Nix derivation
specified by <varname>pkgs.emacs</varname> and produces a new
derivation in which the original’s <literal>name</literal> and
<literal>src</literal> attribute have been replaced by the given
-values. The original attributes are accessible via
-<varname>attrs</varname>.</para>
+values by re-calling <literal>stdenv.mkDerivation</literal>.
+The original attributes are accessible via the function argument,
+which is conventionally named <varname>oldAttrs</varname>.</para>

<para>The overrides shown above are not global. They do not affect
the original package; other packages in Nixpkgs continue to depend on
@@ -65,4 +65,92 @@ options = {

</para>

+<section xml:id="sec-option-declarations-eot"><title>Extensible Option
+Types</title>
+
+<para>Extensible option types is a feature that allows extending certain type
+declarations across multiple module files.
+This feature only works with a restricted set of types, namely
+<literal>enum</literal> and <literal>submodules</literal> and any composed
+forms of them.</para>
+
+<para>Extensible option types can be used for <literal>enum</literal> options
+that affect multiple modules, or as an alternative to related
+<literal>enable</literal> options.</para>
+
+<para>As an example, we will take the case of display managers. There is a
+central display manager module for generic display manager options and a
+module file per display manager backend (slim, kdm, gdm ...).
+</para>
+
+<para>There are two approaches to this module structure:
+
+<itemizedlist>
+  <listitem><para>Managing the display managers independently by adding an
+  enable option to every display manager module backend. (NixOS)</para>
+  </listitem>
+  <listitem><para>Managing the display managers in the central module by
+  adding an option to select which display manager backend to use.</para>
+  </listitem>
+</itemizedlist>
+</para>
+
+<para>Both approaches have problems.</para>
+
+<para>Making backends independent can quickly become hard to manage. For
+display managers, there can be only one enabled at a time, but the type
+system cannot enforce this restriction as there is no relation between
+each backend's <literal>enable</literal> option. As a result, this restriction
+has to be done explicitly by adding assertions in each display manager
+backend module.</para>
+
+<para>On the other hand, managing the display manager backends in the
+central module requires changing the central module option every time
+a new backend is added or removed.</para>
+
+<para>By using extensible option types, it is possible to create a placeholder
+option in the central module (<xref linkend='ex-option-declaration-eot-service'
+/>), and to extend it in each backend module (<xref
+linkend='ex-option-declaration-eot-backend-slim' />, <xref
+linkend='ex-option-declaration-eot-backend-kdm' />).</para>
+
+<para>As a result, <literal>displayManager.enable</literal> option values can
+be added without changing the main service module file, and the type system
+automatically enforces that only a single display manager can be
+enabled.</para>
+
+<example xml:id='ex-option-declaration-eot-service'><title>Extensible type
+placeholder in the service module</title>
+<screen>
+services.xserver.displayManager.enable = mkOption {
+  description = "Display manager to use";
+  type = with types; nullOr (enum [ ]);
+};</screen></example>
+
+<example xml:id='ex-option-declaration-eot-backend-slim'><title>Extending
+<literal>services.xserver.displayManager.enable</literal> in the
+<literal>slim</literal> module</title>
+<screen>
+services.xserver.displayManager.enable = mkOption {
+  type = with types; nullOr (enum [ "slim" ]);
+};</screen></example>
+
+<example xml:id='ex-option-declaration-eot-backend-kdm'><title>Extending
+<literal>services.xserver.displayManager.enable</literal> in the
+<literal>kdm</literal> module</title>
+<screen>
+services.xserver.displayManager.enable = mkOption {
+  type = with types; nullOr (enum [ "kdm" ]);
+};</screen></example>
+
+<para>The placeholder declaration is a standard <literal>mkOption</literal>
+declaration, but it is important that extensible option declarations only use
+the <literal>type</literal> argument.</para>
+
+<para>Extensible option types work with any of the composed variants of
+<literal>enum</literal> such as
+<literal>with types; nullOr (enum [ "foo" "bar" ])</literal>
+or <literal>with types; listOf (enum [ "foo" "bar" ])</literal>.</para>
+
+</section>
</section>
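For reference, a minimal sketch of the type-level merge that the documentation above relies on (values illustrative; assumes this branch's library is reachable as <nixpkgs/lib>). The two backend declarations end up as a single nullOr (enum [ "slim" "kdm" ]) type:

    let
      lib    = import <nixpkgs/lib>;
      slim   = lib.types.nullOr (lib.types.enum [ "slim" ]);
      kdm    = lib.types.nullOr (lib.types.enum [ "kdm" ]);
      # nullOr is a composed type: the wrapped enum types are merged first
      # (their payloads are concatenated) and then re-wrapped in nullOr.
      merged = slim.typeMerge kdm.functor;
    in {
      acceptsKdm  = merged.check "kdm";   # true
      acceptsNull = merged.check null;    # true
      rejectsGdm  = merged.check "gdm";   # false, "gdm" was never declared
    }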
@@ -62,6 +62,22 @@
  <listitem><para>A string. Multiple definitions are concatenated with a
  colon <literal>":"</literal>.</para></listitem>
  </varlistentry>
+</variablelist>
+
+</section>
+
+<section><title>Value Types</title>
+
+<para>Value types are types that take a value parameter. The only value type
+in the library is <literal>enum</literal>.</para>
+
+<variablelist>
+  <varlistentry>
+    <term><varname>types.enum</varname> <replaceable>l</replaceable></term>
+    <listitem><para>One element of the list <replaceable>l</replaceable>, e.g.
+    <literal>types.enum [ "left" "right" ]</literal>. Multiple definitions
+    cannot be merged.</para></listitem>
+  </varlistentry>
  <varlistentry>
    <term><varname>types.separatedString</varname>
    <replaceable>sep</replaceable></term>
|
|||||||
<replaceable>sep</replaceable>, e.g. <literal>types.separatedString
|
<replaceable>sep</replaceable>, e.g. <literal>types.separatedString
|
||||||
"|"</literal>.</para></listitem>
|
"|"</literal>.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>types.submodule</varname> <replaceable>o</replaceable></term>
|
||||||
|
<listitem><para>A set of sub options <replaceable>o</replaceable>.
|
||||||
|
<replaceable>o</replaceable> can be an attribute set or a function
|
||||||
|
returning an attribute set. Submodules are used in composed types to
|
||||||
|
create modular options. Submodule are detailed in <xref
|
||||||
|
linkend='section-option-types-submodule' />.</para></listitem>
|
||||||
|
</varlistentry>
|
||||||
</variablelist>
|
</variablelist>
|
||||||
|
|
||||||
</section>
|
</section>
|
||||||
|
|
||||||
<section><title>Composed Types</title>
|
<section><title>Composed Types</title>
|
||||||
|
|
||||||
<para>Composed types allow to create complex types by taking another type(s)
|
<para>Composed types are types that take a type as parameter. <literal>listOf
|
||||||
or value(s) as parameter(s).
|
int</literal> and <literal>either int str</literal> are examples of
|
||||||
It is possible to compose types multiple times, e.g. <literal>with types;
|
composed types.</para>
|
||||||
nullOr (enum [ "left" "right" ])</literal>.</para>
|
|
||||||
|
|
||||||
<variablelist>
|
<variablelist>
|
||||||
<varlistentry>
|
<varlistentry>
|
||||||
@ -99,12 +121,6 @@
|
|||||||
type. Multiple definitions are merged according to the
|
type. Multiple definitions are merged according to the
|
||||||
value.</para></listitem>
|
value.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
<varlistentry>
|
|
||||||
<term><varname>types.loeOf</varname> <replaceable>t</replaceable></term>
|
|
||||||
<listitem><para>A list or an element of <replaceable>t</replaceable> type.
|
|
||||||
Multiple definitions are merged according to the
|
|
||||||
values.</para></listitem>
|
|
||||||
</varlistentry>
|
|
||||||
<varlistentry>
|
<varlistentry>
|
||||||
<term><varname>types.nullOr</varname> <replaceable>t</replaceable></term>
|
<term><varname>types.nullOr</varname> <replaceable>t</replaceable></term>
|
||||||
<listitem><para><literal>null</literal> or type
|
<listitem><para><literal>null</literal> or type
|
||||||
@ -117,12 +133,6 @@
|
|||||||
merged. It is used to ensure option definitions are declared only
|
merged. It is used to ensure option definitions are declared only
|
||||||
once.</para></listitem>
|
once.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
<varlistentry>
|
|
||||||
<term><varname>types.enum</varname> <replaceable>l</replaceable></term>
|
|
||||||
<listitem><para>One element of the list <replaceable>l</replaceable>, e.g.
|
|
||||||
<literal>types.enum [ "left" "right" ]</literal>. Multiple definitions
|
|
||||||
cannot be merged</para></listitem>
|
|
||||||
</varlistentry>
|
|
||||||
<varlistentry>
|
<varlistentry>
|
||||||
<term><varname>types.either</varname> <replaceable>t1</replaceable>
|
<term><varname>types.either</varname> <replaceable>t1</replaceable>
|
||||||
<replaceable>t2</replaceable></term>
|
<replaceable>t2</replaceable></term>
|
||||||
@ -131,14 +141,6 @@
|
|||||||
str</literal>. Multiple definitions cannot be
|
str</literal>. Multiple definitions cannot be
|
||||||
merged.</para></listitem>
|
merged.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
<varlistentry>
|
|
||||||
<term><varname>types.submodule</varname> <replaceable>o</replaceable></term>
|
|
||||||
<listitem><para>A set of sub options <replaceable>o</replaceable>.
|
|
||||||
<replaceable>o</replaceable> can be an attribute set or a function
|
|
||||||
returning an attribute set. Submodules are used in composed types to
|
|
||||||
create modular options. Submodule are detailed in <xref
|
|
||||||
linkend='section-option-types-submodule' />.</para></listitem>
|
|
||||||
</varlistentry>
|
|
||||||
</variablelist>
|
</variablelist>
|
||||||
|
|
||||||
</section>
|
</section>
|
||||||
@ -197,7 +199,6 @@ options.mod = mkOption {
|
|||||||
type = with types; listOf (submodule modOptions);
|
type = with types; listOf (submodule modOptions);
|
||||||
};</screen></example>
|
};</screen></example>
|
||||||
|
|
||||||
|
|
||||||
<section><title>Composed with <literal>listOf</literal></title>
|
<section><title>Composed with <literal>listOf</literal></title>
|
||||||
|
|
||||||
<para>When composed with <literal>listOf</literal>, submodule allows multiple
|
<para>When composed with <literal>listOf</literal>, submodule allows multiple
|
||||||
@ -323,9 +324,13 @@ code before creating a new type.</para>
|
|||||||
<variablelist>
|
<variablelist>
|
||||||
<varlistentry>
|
<varlistentry>
|
||||||
<term><varname>name</varname></term>
|
<term><varname>name</varname></term>
|
||||||
<listitem><para>A string representation of the type function name, name
|
<listitem><para>A string representation of the type function
|
||||||
usually changes accordingly parameters passed to
|
name.</para></listitem>
|
||||||
types.</para></listitem>
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>definition</varname></term>
|
||||||
|
<listitem><para>Description of the type used in documentation. Give
|
||||||
|
information of the type and any of its arguments.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
<varlistentry>
|
<varlistentry>
|
||||||
<term><varname>check</varname></term>
|
<term><varname>check</varname></term>
|
||||||
@ -388,6 +393,53 @@ code before creating a new type.</para>
|
|||||||
type parameter, this function should be defined as <literal>m:
|
type parameter, this function should be defined as <literal>m:
|
||||||
composedType (elemType.substSubModules m)</literal>.</para></listitem>
|
composedType (elemType.substSubModules m)</literal>.</para></listitem>
|
||||||
</varlistentry>
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>typeMerge</varname></term>
|
||||||
|
<listitem><para>A function to merge multiple type declarations. Takes the
|
||||||
|
type to merge <literal>functor</literal> as parameter. A
|
||||||
|
<literal>null</literal> return value means that type cannot be
|
||||||
|
merged.</para>
|
||||||
|
<variablelist>
|
||||||
|
<varlistentry>
|
||||||
|
<term><replaceable>f</replaceable></term>
|
||||||
|
<listitem><para>The type to merge
|
||||||
|
<literal>functor</literal>.</para></listitem>
|
||||||
|
</varlistentry>
|
||||||
|
</variablelist>
|
||||||
|
<para>Note: There is a generic <literal>defaultTypeMerge</literal> that
|
||||||
|
work with most of value and composed types.</para>
|
||||||
|
</listitem>
|
||||||
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>functor</varname></term>
|
||||||
|
<listitem><para>An attribute set representing the type. It is used for type
|
||||||
|
operations and has the following keys:</para>
|
||||||
|
<variablelist>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>type</varname></term>
|
||||||
|
<listitem><para>The type function.</para></listitem>
|
||||||
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>wrapped</varname></term>
|
||||||
|
<listitem><para>Holds the type parameter for composed types.</para>
|
||||||
|
</listitem>
|
||||||
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>payload</varname></term>
|
||||||
|
<listitem><para>Holds the value parameter for value types.
|
||||||
|
The types that have a <literal>payload</literal> are the
|
||||||
|
<literal>enum</literal>, <literal>separatedString</literal> and
|
||||||
|
<literal>submodule</literal> types.</para></listitem>
|
||||||
|
</varlistentry>
|
||||||
|
<varlistentry>
|
||||||
|
<term><varname>binOp</varname></term>
|
||||||
|
<listitem><para>A binary operation that can merge the payloads of two
|
||||||
|
same types. Defined as a function that take two payloads as
|
||||||
|
parameters and return the payloads merged.</para></listitem>
|
||||||
|
</varlistentry>
|
||||||
|
</variablelist>
|
||||||
|
</listitem>
|
||||||
|
</varlistentry>
|
||||||
</variablelist>
|
</variablelist>
|
||||||
|
|
||||||
</section>
|
</section>
|
||||||
|
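As a concrete illustration of the keys documented above, this is roughly what a composed type's functor looks like (a sketch; the field values follow the listOf definition from lib/types.nix in this commit, and <nixpkgs/lib> is assumed to point at this library):

    let
      lib = import <nixpkgs/lib>;
      f   = (lib.types.listOf lib.types.int).functor;
    in {
      name        = f.name;          # "listOf" -- the type function name
      wrappedName = f.wrapped.name;  # "int"    -- the type parameter of the composed type
      payload     = f.payload;       # null     -- only value types (enum, separatedString, submodule) carry one
    }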
@@ -75,7 +75,10 @@ following incompatible changes:</para>

<itemizedlist>
  <listitem>
-   <para></para>
+   <para>The module type system has a new extensible option types feature that
+   allows extending certain types, such as enum, through multiple option
+   declarations of the same option across multiple modules.
+   </para>
  </listitem>
</itemizedlist>

@@ -17,12 +17,10 @@ with lib;
      where tools such as <command>gdb</command> can find them.
      If you need debug symbols for a package that doesn't
      provide them by default, you can enable them as follows:
-     <!-- FIXME: ugly, see #10721 -->
      <programlisting>
      nixpkgs.config.packageOverrides = pkgs: {
-       hello = pkgs.lib.overrideDerivation pkgs.hello (attrs: {
-         outputs = attrs.outputs or ["out"] ++ ["debug"];
-         buildInputs = attrs.buildInputs ++ [<nixpkgs/pkgs/build-support/setup-hooks/separate-debug-info.sh>];
+       hello = pkgs.hello.overrideAttrs (oldAttrs: {
+         separateDebugInfo = true;
        });
      };
      </programlisting>
@@ -41,7 +41,7 @@ in
        strings. The latter is concatenated, interspersed with colon
        characters.
      '';
-     type = types.attrsOf (types.loeOf types.str);
+     type = with types; attrsOf (either str (listOf str));
      apply = mapAttrs (n: v: if isList v then concatStringsSep ":" v else v);
    };

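A small sketch of what the apply function shown above does to definitions accepted by the new either str (listOf str) element type (attribute names are illustrative):

    let
      lib  = import <nixpkgs/lib>;
      join = lib.mapAttrs (name: value:
        if builtins.isList value then lib.concatStringsSep ":" value else value);
    in
      join { PATH = [ "/run/current-system/sw/bin" "/usr/bin" ]; TERM = "xterm"; }
      # => { PATH = "/run/current-system/sw/bin:/usr/bin"; TERM = "xterm"; }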
@@ -23,7 +23,7 @@ in
        strings. The latter is concatenated, interspersed with colon
        characters.
      '';
-     type = types.attrsOf (types.loeOf types.str);
+     type = with types; attrsOf (either str (listOf str));
      apply = mapAttrs (n: v: if isList v then concatStringsSep ":" v else v);
    };

@@ -256,7 +256,7 @@ if isOption opt then
  // optionalAttrs (opt ? default) { inherit (opt) default; }
  // optionalAttrs (opt ? example) { inherit (opt) example; }
  // optionalAttrs (opt ? description) { inherit (opt) description; }
- // optionalAttrs (opt ? type) { typename = opt.type.name; }
+ // optionalAttrs (opt ? type) { typename = opt.type.description; }
  // optionalAttrs (opt ? options) { inherit (opt) options; }
  // {
    # to disambiguate the xml output.
@@ -277,6 +277,7 @@
  gitlab-runner = 257;
  postgrey = 258;
  hound = 259;
+ leaps = 260;

  # When adding a uid, make sure it doesn't match an existing gid. And don't use uids above 399!

@@ -524,6 +525,7 @@
  gitlab-runner = 257;
  postgrey = 258;
  hound = 259;
+ leaps = 260;

  # When adding a gid, make sure it doesn't match an existing
  # uid. Users and groups with the same name should have equal
@@ -251,6 +251,7 @@
  ./services/misc/gitolite.nix
  ./services/misc/gpsd.nix
  ./services/misc/ihaskell.nix
+ ./services/misc/leaps.nix
  ./services/misc/mantisbt.nix
  ./services/misc/mathics.nix
  ./services/misc/matrix-synapse.nix
@@ -356,14 +356,14 @@ https://nixos.org/nixpkgs/manual/#sec-modify-via-packageOverrides
<programlisting><![CDATA[
{ pkgs ? import <nixpkgs> {} }:
let
- myEmacs = pkgs.lib.overrideDerivation (pkgs.emacs.override {
+ myEmacs = (pkgs.emacs.override {
    # Use gtk3 instead of the default gtk2
    withGTK3 = true;
    withGTK2 = false;
- }) (attrs: {
+ }).overrideAttrs (attrs: {
    # I don't want emacs.desktop file because I only use
    # emacsclient.
-   postInstall = attrs.postInstall + ''
+   postInstall = (attrs.postInstall or "") + ''
      rm $out/share/applications/emacs.desktop
    '';
  });
@@ -21,8 +21,7 @@ in

  language = mkOption {
    default = "English";
-   type = types.addCheck types.str
-     (lang: elem lang [ "English" "Spanish" "Russian" "Serbian" "Turkish" ]);
+   type = types.enum [ "English" "Spanish" "Russian" "Serbian" "Turkish" ];
    description = "The language of bot messages: English, Spanish, Russian, Serbian or Turkish.";
  };

@@ -55,9 +55,9 @@ let

  levelOption = mkOption {
    default = "server";
-   type = types.str;
+   type = types.enum [ "workstation" "server" "paranoid" ];
    description = ''
-     Set the logcheck level. Either "workstation", "server", or "paranoid".
+     Set the logcheck level.
    '';
  };

62  nixos/modules/services/misc/leaps.nix  (new file)
@@ -0,0 +1,62 @@
+{ config, pkgs, lib, ... } @ args:
+
+with lib;
+
+let
+  cfg = config.services.leaps;
+  stateDir = "/var/lib/leaps/";
+in
+{
+  options = {
+    services.leaps = {
+      enable = mkEnableOption "leaps";
+      port = mkOption {
+        type = types.int;
+        default = 8080;
+        description = "A port where leaps listens for incoming http requests";
+      };
+      address = mkOption {
+        default = "";
+        type = types.str;
+        example = "127.0.0.1";
+        description = "Hostname or IP-address to listen to. By default it will listen on all interfaces.";
+      };
+      path = mkOption {
+        default = "/";
+        type = types.path;
+        description = "Subdirectory used for reverse proxy setups";
+      };
+    };
+  };
+
+  config = mkIf cfg.enable {
+    users = {
+      users.leaps = {
+        uid = config.ids.uids.leaps;
+        description = "Leaps server user";
+        group = "leaps";
+        home = stateDir;
+        createHome = true;
+      };
+
+      groups.leaps = {
+        gid = config.ids.gids.leaps;
+      };
+    };
+
+    systemd.services.leaps = {
+      description = "leaps service";
+      wantedBy = [ "multi-user.target" ];
+      after = [ "network.target" ];
+
+      serviceConfig = {
+        User = "leaps";
+        Group = "leaps";
+        Restart = "on-failure";
+        WorkingDirectory = stateDir;
+        PrivateTmp = true;
+        ExecStart = "${pkgs.leaps.bin}/bin/leaps -path ${toString cfg.path} -address ${cfg.address}:${toString cfg.port}";
+      };
+    };
+  };
+}
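A minimal usage sketch for the new module (values illustrative), to be placed in a NixOS configuration once this branch is in use:

    services.leaps = {
      enable  = true;        # starts the leaps systemd service as the "leaps" user
      address = "127.0.0.1"; # listen on localhost only
      port    = 8080;        # http port, matches the module default
      path    = "/leaps/";   # subdirectory for reverse-proxy setups
    };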
@@ -292,7 +292,7 @@ in {
      };

      allowedClientIDs = mkOption {
-       type = with types; loeOf (either (enum ["all" "none"]) str);
+       type = with types; either str (listOf str);
        default = [];
        example = [ "[Tt]ask [2-9]+" ];
        description = ''
@@ -306,7 +306,7 @@ in {
      };

      disallowedClientIDs = mkOption {
-       type = with types; loeOf (either (enum ["all" "none"]) str);
+       type = with types; either str (listOf str);
        default = [];
        example = [ "[Tt]ask [2-9]+" ];
        description = ''
@@ -7,11 +7,6 @@ let
  cfg = config.services.bitlbee;
  bitlbeeUid = config.ids.uids.bitlbee;

- authModeCheck = v:
-   v == "Open" ||
-   v == "Closed" ||
-   v == "Registered";
-
  bitlbeeConfig = pkgs.writeText "bitlbee.conf"
    ''
      [settings]
@@ -67,7 +62,7 @@ in

  authMode = mkOption {
    default = "Open";
-   type = types.addCheck types.str authModeCheck;
+   type = types.enum [ "Open" "Closed" "Registered" ];
    description = ''
      The following authentication modes are available:
        Open -- Accept connections from anyone, use NickServ for user authentication.
@@ -5,15 +5,25 @@ let
  apparmorEnabled = config.security.apparmor.enable;
  dnscrypt-proxy = pkgs.dnscrypt-proxy;
  cfg = config.services.dnscrypt-proxy;
+ stateDirectory = "/var/lib/dnscrypt-proxy";

  localAddress = "${cfg.localAddress}:${toString cfg.localPort}";

- daemonArgs =
-   [ "--local-address=${localAddress}"
-     (optionalString cfg.tcpOnly "--tcp-only")
-     (optionalString cfg.ephemeralKeys "-E")
-   ]
-   ++ resolverArgs;
+ # The minisign public key used to sign the upstream resolver list.
+ # This is somewhat more flexible than preloading the key as an
+ # embedded string.
+ upstreamResolverListPubKey = pkgs.fetchurl {
+   url = https://raw.githubusercontent.com/jedisct1/dnscrypt-proxy/master/minisign.pub;
+   sha256 = "18lnp8qr6ghfc2sd46nn1rhcpr324fqlvgsp4zaigw396cd7vnnh";
+ };
+
+ # Internal flag indicating whether the upstream resolver list is used
+ useUpstreamResolverList = cfg.resolverList == null && cfg.customResolver == null;
+
+ resolverList =
+   if (cfg.resolverList != null)
+   then cfg.resolverList
+   else "${stateDirectory}/dnscrypt-resolvers.csv";

  resolverArgs = if (cfg.customResolver != null)
    then
@@ -22,9 +32,16 @@ let
        "--provider-key=${cfg.customResolver.key}"
      ]
    else
-     [ "--resolvers-list=${cfg.resolverList}"
-       "--resolver-name=${toString cfg.resolverName}"
+     [ "--resolvers-list=${resolverList}"
+       "--resolver-name=${cfg.resolverName}"
      ];

+ # The final command line arguments passed to the daemon
+ daemonArgs =
+   [ "--local-address=${localAddress}" ]
+   ++ optional cfg.tcpOnly "--tcp-only"
+   ++ optional cfg.ephemeralKeys "-E"
+   ++ resolverArgs;
in

{
|
|||||||
default = "dnscrypt.eu-nl";
|
default = "dnscrypt.eu-nl";
|
||||||
type = types.nullOr types.str;
|
type = types.nullOr types.str;
|
||||||
description = ''
|
description = ''
|
||||||
The name of the upstream DNSCrypt resolver to use, taken from the
|
The name of the upstream DNSCrypt resolver to use, taken from
|
||||||
list named in the <literal>resolverList</literal> option.
|
<filename>${resolverList}</filename>. The default resolver is
|
||||||
The default resolver is located in Holland, supports DNS security
|
located in Holland, supports DNS security extensions, and
|
||||||
extensions, and claims to not keep logs.
|
<emphasis>claims</emphasis> to not keep logs.
|
||||||
'';
|
'';
|
||||||
};
|
};
|
||||||
|
|
||||||
resolverList = mkOption {
|
resolverList = mkOption {
|
||||||
|
default = null;
|
||||||
|
type = types.nullOr types.path;
|
||||||
description = ''
|
description = ''
|
||||||
The list of upstream DNSCrypt resolvers. By default, we use the most
|
List of DNSCrypt resolvers. The default is to use the list of
|
||||||
recent list published by upstream.
|
public resolvers provided by upstream.
|
||||||
'';
|
'';
|
||||||
example = literalExample "${pkgs.dnscrypt-proxy}/share/dnscrypt-proxy/dnscrypt-resolvers.csv";
|
|
||||||
default = pkgs.fetchurl {
|
|
||||||
url = https://raw.githubusercontent.com/jedisct1/dnscrypt-proxy/master/dnscrypt-resolvers.csv;
|
|
||||||
sha256 = "1i9wzw4zl052h5nyp28bwl8d66cgj0awvjhw5wgwz0warkjl1g8g";
|
|
||||||
};
|
|
||||||
defaultText = "pkgs.fetchurl { url = ...; sha256 = ...; }";
|
|
||||||
};
|
};
|
||||||
|
|
||||||
customResolver = mkOption {
|
customResolver = mkOption {
|
||||||
@ -150,7 +163,7 @@ in
|
|||||||
}
|
}
|
||||||
];
|
];
|
||||||
|
|
||||||
security.apparmor.profiles = mkIf apparmorEnabled (singleton (pkgs.writeText "apparmor-dnscrypt-proxy" ''
|
security.apparmor.profiles = optional apparmorEnabled (pkgs.writeText "apparmor-dnscrypt-proxy" ''
|
||||||
${dnscrypt-proxy}/bin/dnscrypt-proxy {
|
${dnscrypt-proxy}/bin/dnscrypt-proxy {
|
||||||
/dev/null rw,
|
/dev/null rw,
|
||||||
/dev/urandom r,
|
/dev/urandom r,
|
||||||
@ -177,9 +190,9 @@ in
|
|||||||
${getLib pkgs.lz4}/lib/liblz4.so.* mr,
|
${getLib pkgs.lz4}/lib/liblz4.so.* mr,
|
||||||
${getLib pkgs.attr}/lib/libattr.so.* mr,
|
${getLib pkgs.attr}/lib/libattr.so.* mr,
|
||||||
|
|
||||||
${cfg.resolverList} r,
|
${resolverList} r,
|
||||||
}
|
}
|
||||||
''));
|
'');
|
||||||
|
|
||||||
users.users.dnscrypt-proxy = {
|
users.users.dnscrypt-proxy = {
|
||||||
description = "dnscrypt-proxy daemon user";
|
description = "dnscrypt-proxy daemon user";
|
||||||
@ -188,11 +201,61 @@ in
|
|||||||
};
|
};
|
||||||
users.groups.dnscrypt-proxy = {};
|
users.groups.dnscrypt-proxy = {};
|
||||||
|
|
||||||
|
systemd.services.init-dnscrypt-proxy-statedir = optionalAttrs useUpstreamResolverList {
|
||||||
|
description = "Initialize dnscrypt-proxy state directory";
|
||||||
|
script = ''
|
||||||
|
mkdir -pv ${stateDirectory}
|
||||||
|
chown -c dnscrypt-proxy:dnscrypt-proxy ${stateDirectory}
|
||||||
|
cp --preserve=timestamps -uv \
|
||||||
|
${pkgs.dnscrypt-proxy}/share/dnscrypt-proxy/dnscrypt-resolvers.csv \
|
||||||
|
${stateDirectory}
|
||||||
|
'';
|
||||||
|
serviceConfig = {
|
||||||
|
Type = "oneshot";
|
||||||
|
RemainAfterExit = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
systemd.services.update-dnscrypt-resolvers = optionalAttrs useUpstreamResolverList {
|
||||||
|
description = "Update list of DNSCrypt resolvers";
|
||||||
|
|
||||||
|
requires = [ "init-dnscrypt-proxy-statedir.service" ];
|
||||||
|
after = [ "init-dnscrypt-proxy-statedir.service" ];
|
||||||
|
|
||||||
|
path = with pkgs; [ curl minisign ];
|
||||||
|
script = ''
|
||||||
|
cd ${stateDirectory}
|
||||||
|
curl -fSsL -o dnscrypt-resolvers.csv.tmp \
|
||||||
|
https://download.dnscrypt.org/dnscrypt-proxy/dnscrypt-resolvers.csv
|
||||||
|
curl -fSsL -o dnscrypt-resolvers.csv.minisig.tmp \
|
||||||
|
https://download.dnscrypt.org/dnscrypt-proxy/dnscrypt-resolvers.csv.minisig
|
||||||
|
mv dnscrypt-resolvers.csv.minisig{.tmp,}
|
||||||
|
minisign -q -V -p ${upstreamResolverListPubKey} \
|
||||||
|
-m dnscrypt-resolvers.csv.tmp -x dnscrypt-resolvers.csv.minisig
|
||||||
|
mv dnscrypt-resolvers.csv{.tmp,}
|
||||||
|
'';
|
||||||
|
|
||||||
|
serviceConfig = {
|
||||||
|
PrivateTmp = true;
|
||||||
|
PrivateDevices = true;
|
||||||
|
ProtectHome = true;
|
||||||
|
ProtectSystem = true;
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
systemd.timers.update-dnscrypt-resolvers = optionalAttrs useUpstreamResolverList {
|
||||||
|
timerConfig = {
|
||||||
|
OnBootSec = "5min";
|
||||||
|
OnUnitActiveSec = "6h";
|
||||||
|
};
|
||||||
|
wantedBy = [ "timers.target" ];
|
||||||
|
};
|
||||||
|
|
||||||
systemd.sockets.dnscrypt-proxy = {
|
systemd.sockets.dnscrypt-proxy = {
|
||||||
description = "dnscrypt-proxy listening socket";
|
description = "dnscrypt-proxy listening socket";
|
||||||
socketConfig = {
|
socketConfig = {
|
||||||
ListenStream = "${localAddress}";
|
ListenStream = localAddress;
|
||||||
ListenDatagram = "${localAddress}";
|
ListenDatagram = localAddress;
|
||||||
};
|
};
|
||||||
wantedBy = [ "sockets.target" ];
|
wantedBy = [ "sockets.target" ];
|
||||||
};
|
};
|
||||||
@ -200,8 +263,13 @@ in
|
|||||||
systemd.services.dnscrypt-proxy = {
|
systemd.services.dnscrypt-proxy = {
|
||||||
description = "dnscrypt-proxy daemon";
|
description = "dnscrypt-proxy daemon";
|
||||||
|
|
||||||
after = [ "network.target" ] ++ optional apparmorEnabled "apparmor.service";
|
after = [ "network.target" ]
|
||||||
requires = [ "dnscrypt-proxy.socket "] ++ optional apparmorEnabled "apparmor.service";
|
++ optional apparmorEnabled "apparmor.service"
|
||||||
|
++ optional useUpstreamResolverList "init-dnscrypt-proxy-statedir.service";
|
||||||
|
|
||||||
|
requires = [ "dnscrypt-proxy.socket "]
|
||||||
|
++ optional apparmorEnabled "apparmor.service"
|
||||||
|
++ optional useUpstreamResolverList "init-dnscrypt-proxy-statedir.service";
|
||||||
|
|
||||||
serviceConfig = {
|
serviceConfig = {
|
||||||
Type = "simple";
|
Type = "simple";
|
||||||
|
@ -10,7 +10,7 @@ let
|
|||||||
|
|
||||||
extip = "EXTIP=\$(${pkgs.curl.bin}/bin/curl -sf \"http://jsonip.com\" | ${pkgs.gawk}/bin/awk -F'\"' '{print $4}')";
|
extip = "EXTIP=\$(${pkgs.curl.bin}/bin/curl -sf \"http://jsonip.com\" | ${pkgs.gawk}/bin/awk -F'\"' '{print $4}')";
|
||||||
|
|
||||||
toYesNo = b: if b then "yes" else "no";
|
toYesNo = b: if b then "true" else "false";
|
||||||
|
|
||||||
mkEndpointOpt = name: addr: port: {
|
mkEndpointOpt = name: addr: port: {
|
||||||
enable = mkEnableOption name;
|
enable = mkEnableOption name;
|
||||||
@ -31,6 +31,17 @@ let
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
mkKeyedEndpointOpt = name: addr: port: keyFile:
|
||||||
|
(mkEndpointOpt name addr port) // {
|
||||||
|
keys = mkOption {
|
||||||
|
type = types.str;
|
||||||
|
default = "";
|
||||||
|
description = ''
|
||||||
|
File to persist ${lib.toUpper name} keys.
|
||||||
|
'';
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
commonTunOpts = let
|
commonTunOpts = let
|
||||||
i2cpOpts = {
|
i2cpOpts = {
|
||||||
length = mkOption {
|
length = mkOption {
|
||||||
@@ -63,19 +74,49 @@ let
     };
   } // mkEndpointOpt name "127.0.0.1" 0;

-  i2pdConf = pkgs.writeText "i2pd.conf" ''
-    ipv6 = ${toYesNo cfg.enableIPv6}
-    notransit = ${toYesNo cfg.notransit}
-    floodfill = ${toYesNo cfg.floodfill}
-    ${if isNull cfg.port then "" else "port = ${toString cfg.port}"}
-    ${flip concatMapStrings
-      (collect (proto: proto ? port && proto ? address && proto ? name) cfg.proto)
-      (proto: let portStr = toString proto.port; in ''
-        [${proto.name}]
-        address = ${proto.address}
-        port = ${toString proto.port}
-        enabled = ${toYesNo proto.enable}
-      '')
+  i2pdConf = pkgs.writeText "i2pd.conf"
+  ''
+    ipv4 = ${toYesNo cfg.enableIPv4}
+    ipv6 = ${toYesNo cfg.enableIPv6}
+    notransit = ${toYesNo cfg.notransit}
+    floodfill = ${toYesNo cfg.floodfill}
+    netid = ${toString cfg.netid}
+    ${if isNull cfg.bandwidth then "" else "bandwidth = ${toString cfg.bandwidth}" }
+    ${if isNull cfg.port then "" else "port = ${toString cfg.port}"}
+
+    [limits]
+    transittunnels = ${toString cfg.limits.transittunnels}
+
+    [upnp]
+    enabled = ${toYesNo cfg.upnp.enable}
+    name = ${cfg.upnp.name}
+
+    [precomputation]
+    elgamal = ${toYesNo cfg.precomputation.elgamal}
+
+    [reseed]
+    verify = ${toYesNo cfg.reseed.verify}
+    file = ${cfg.reseed.file}
+    urls = ${builtins.concatStringsSep "," cfg.reseed.urls}
+
+    [addressbook]
+    defaulturl = ${cfg.addressbook.defaulturl}
+    subscriptions = ${builtins.concatStringsSep "," cfg.addressbook.subscriptions}
+    ${flip concatMapStrings
+      (collect (proto: proto ? port && proto ? address && proto ? name) cfg.proto)
+      (proto: let portStr = toString proto.port; in
+      ''
+        [${proto.name}]
+        enabled = ${toYesNo proto.enable}
+        address = ${proto.address}
+        port = ${toString proto.port}
+        ${if proto ? keys then "keys = ${proto.keys}" else ""}
+        ${if proto ? auth then "auth = ${toYesNo proto.auth}" else ""}
+        ${if proto ? user then "user = ${proto.user}" else ""}
+        ${if proto ? pass then "pass = ${proto.pass}" else ""}
+        ${if proto ? outproxy then "outproxy = ${proto.outproxy}" else ""}
+        ${if proto ? outproxyPort then "outproxyport = ${toString proto.outproxyPort}" else ""}
+      '')
     }
   '';
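The template above leans on one idiom worth spelling out: interpolating an if-expression that evaluates to the empty string when an option is unset, so the corresponding key simply does not appear in the generated i2pd.conf. A standalone sketch of that behaviour with made-up values:

let
  # Same helper as in the module above.
  toYesNo = b: if b then "true" else "false";
  port = null;                      # pretend cfg.port was left unset
  conf = ''
    floodfill = ${toYesNo false}
    ${if isNull port then "" else "port = ${toString port}"}
  '';
in conf
# Evaluates to "floodfill = false\n\n": the port line is dropped entirely,
# leaving only the blank line produced by the interpolation itself.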
@@ -114,7 +155,7 @@ let
   i2pdSh = pkgs.writeScriptBin "i2pd" ''
     #!/bin/sh
     ${if isNull cfg.extIp then extip else ""}
-    ${pkgs.i2pd}/bin/i2pd --log=1 \
+    ${pkgs.i2pd}/bin/i2pd \
       --host=${if isNull cfg.extIp then "$EXTIP" else cfg.extIp} \
       --conf=${i2pdConf} \
       --tunconf=${i2pdTunnelConf}
@@ -135,6 +176,8 @@ in
       default = false;
       description = ''
         Enables I2Pd as a running service upon activation.
+        Please read http://i2pd.readthedocs.io/en/latest/ for further
+        configuration help.
       '';
     };

@@ -162,6 +205,22 @@ in
       '';
     };

+    netid = mkOption {
+      type = types.int;
+      default = 2;
+      description = ''
+        I2P overlay netid.
+      '';
+    };
+
+    bandwidth = mkOption {
+      type = with types; nullOr int;
+      default = null;
+      description = ''
+        Set a router bandwidth limit integer in kbps or letters: L (32), O (256), P (2048), X (>9000)
+      '';
+    };
+
     port = mkOption {
       type = with types; nullOr int;
       default = null;
@@ -170,6 +229,14 @@ in
       '';
     };

+    enableIPv4 = mkOption {
+      type = types.bool;
+      default = true;
+      description = ''
+        Enables IPv4 connectivity. Enabled by default.
+      '';
+    };
+
     enableIPv6 = mkOption {
       type = types.bool;
       default = false;
@@ -178,12 +245,137 @@ in
       '';
     };

-    proto.http = mkEndpointOpt "http" "127.0.0.1" 7070;
+    upnp = {
+      enable = mkOption {
+        type = types.bool;
+        default = false;
+        description = ''
+          Enables UPnP.
+        '';
+      };
+
+      name = mkOption {
+        type = types.str;
+        default = "I2Pd";
+        description = ''
+          Name i2pd appears in UPnP forwardings list.
+        '';
+      };
+    };
+
+    precomputation.elgamal = mkOption {
+      type = types.bool;
+      default = false;
+      description = ''
+        Use ElGamal precomputated tables.
+      '';
+    };
+
+    reseed = {
+      verify = mkOption {
+        type = types.bool;
+        default = false;
+        description = ''
+          Request SU3 signature verification
+        '';
+      };
+
+      file = mkOption {
+        type = types.str;
+        default = "";
+        description = ''
+          Full path to SU3 file to reseed from
+        '';
+      };
+
+      urls = mkOption {
+        type = with types; listOf str;
+        default = [
+          "https://reseed.i2p-project.de/"
+          "https://i2p.mooo.com/netDb/"
+          "https://netdb.i2p2.no/"
+          "https://us.reseed.i2p2.no:444/"
+          "https://uk.reseed.i2p2.no:444/"
+          "https://i2p.manas.ca:8443/"
+        ];
+        description = ''
+          Reseed URLs
+        '';
+      };
+    };
+
+    addressbook = {
+      defaulturl = mkOption {
+        type = types.str;
+        default = "http://joajgazyztfssty4w2on5oaqksz6tqoxbduy553y34mf4byv6gpq.b32.i2p/export/alive-hosts.txt";
+        description = ''
+          AddressBook subscription URL for initial setup
+        '';
+      };
+      subscriptions = mkOption {
+        type = with types; listOf str;
+        default = [
+          "http://inr.i2p/export/alive-hosts.txt"
+          "http://i2p-projekt.i2p/hosts.txt"
+          "http://stats.i2p/cgi-bin/newhosts.txt"
+        ];
+        description = ''
+          AddressBook subscription URLs
+        '';
+      };
+    };
+
+    limits.transittunnels = mkOption {
+      type = types.int;
+      default = 2500;
+      description = ''
+        Maximum number of active transit sessions
+      '';
+    };
+
+    proto.http = (mkEndpointOpt "http" "127.0.0.1" 7070) // {
+      auth = mkOption {
+        type = types.bool;
+        default = false;
+        description = ''
+          Enable authentication for webconsole.
+        '';
+      };
+      user = mkOption {
+        type = types.str;
+        default = "i2pd";
+        description = ''
+          Username for webconsole access
+        '';
+      };
+      pass = mkOption {
+        type = types.str;
+        default = "i2pd";
+        description = ''
+          Password for webconsole access.
+        '';
+      };
+    };
+
+    proto.httpProxy = mkKeyedEndpointOpt "httpproxy" "127.0.0.1" 4446 "";
+    proto.socksProxy = (mkKeyedEndpointOpt "socksproxy" "127.0.0.1" 4447 "")
+    // {
+      outproxy = mkOption {
+        type = types.str;
+        default = "127.0.0.1";
+        description = "Upstream outproxy bind address.";
+      };
+      outproxyPort = mkOption {
+        type = types.int;
+        default = 4444;
+        description = "Upstream outproxy bind port.";
+      };
+    };
+
     proto.sam = mkEndpointOpt "sam" "127.0.0.1" 7656;
     proto.bob = mkEndpointOpt "bob" "127.0.0.1" 2827;
+    proto.i2cp = mkEndpointOpt "i2cp" "127.0.0.1" 7654;
     proto.i2pControl = mkEndpointOpt "i2pcontrol" "127.0.0.1" 7650;
-    proto.httpProxy = mkEndpointOpt "httpproxy" "127.0.0.1" 4446;
-    proto.socksProxy = mkEndpointOpt "socksproxy" "127.0.0.1" 4447;

     outTunnels = mkOption {
       default = {};
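Taken together, the hunks above turn the i2pd module into a fairly complete front end for i2pd.conf. A hedged usage sketch follows, assuming the options live under services.i2pd as the surrounding code suggests; the attribute path itself is not visible in this diff, so treat it as an assumption.

{
  # Assumed option prefix: services.i2pd (not confirmed by this diff).
  services.i2pd = {
    enable = true;
    floodfill = false;
    netid = 2;                        # default I2P overlay
    upnp.enable = true;
    reseed.verify = true;
    proto.http = {                    # web console with authentication
      enable = true;
      auth = true;
      user = "admin";
      pass = "changeme";
    };
    proto.socksProxy.enable = true;   # SOCKS proxy on 127.0.0.1:4447
  };
}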
@@ -68,7 +68,7 @@ in

     interfaceType = mkOption {
       default = "tun";
-      type = types.addCheck types.str (n: n == "tun" || n == "tap");
+      type = types.enum [ "tun" "tap" ];
       description = ''
         The type of virtual interface used for the network connection
       '';
@@ -21,7 +21,7 @@ in {
     };

     socketType = mkOption {
-      type = types.addCheck types.str (t: t == "unix" || t == "tcp" || t == "tcp6");
+      type = types.enum [ "unix" "tcp" "tcp6" ];
       default = "unix";
       description = "Socket type: 'unix', 'tcp' or 'tcp6'.";
     };
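The switch from types.addCheck types.str to types.enum in these hunks (and the two that follow) keeps the accepted values the same but moves them into the type itself, so an invalid setting fails with an error that names the allowed values instead of a generic predicate failure. A small self-contained sketch; the option name is made up for illustration.

{ lib, ... }:

with lib;

{
  options.example.mode = mkOption {
    # Old style: the type is plain str, and a separate predicate rejects
    # anything other than "tun" or "tap" with an unhelpful message.
    # type = types.addCheck types.str (v: v == "tun" || v == "tap");

    # New style: the allowed values are part of the type declaration.
    type = types.enum [ "tun" "tap" ];
    default = "tun";
    description = "Illustrative option; not part of any real module.";
  };
}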
@@ -324,8 +324,7 @@ in

     fsIdentifier = mkOption {
       default = "uuid";
-      type = types.addCheck types.str
-        (type: type == "uuid" || type == "label" || type == "provided");
+      type = types.enum [ "uuid" "label" "provided" ];
       description = ''
         Determines how GRUB will identify devices when generating the
         configuration file. A value of uuid / label signifies that grub
@@ -245,7 +245,7 @@ let

     virtualType = mkOption {
       default = null;
-      type = types.nullOr (types.addCheck types.str (v: v == "tun" || v == "tap"));
+      type = with types; nullOr (enum [ "tun" "tap" ]);
       description = ''
         The explicit type of interface to create. Accepts tun or tap strings.
         Also accepts null to implicitly detect the type of device.
@@ -275,6 +275,7 @@ in rec {
   tests.networkingProxy = callTest tests/networking-proxy.nix {};
   tests.nfs3 = callTest tests/nfs.nix { version = 3; };
   tests.nfs4 = callTest tests/nfs.nix { version = 4; };
+  tests.leaps = callTest tests/leaps.nix { };
   tests.nsd = callTest tests/nsd.nix {};
   tests.openssh = callTest tests/openssh.nix {};
   #tests.panamax = hydraJob (import tests/panamax.nix { system = "x86_64-linux"; });
@@ -22,8 +22,6 @@ import ./make-test.nix ({ pkgs, ... }: {
     };

   testScript = ''
-    $client->start;
-    $client->waitForUnit("sockets.target");
     $client->waitForUnit("dnsmasq");

     # The daemon is socket activated; sending a single ping should activate it.
nixos/tests/leaps.nix (new file)
@@ -0,0 +1,29 @@
+import ./make-test.nix ({ pkgs, ... }:
+
+{
+  name = "leaps";
+  meta = with pkgs.stdenv.lib.maintainers; {
+    maintainers = [ qknight ];
+  };
+
+  nodes =
+    {
+      client = { };
+
+      server =
+        { services.leaps = {
+            enable = true;
+            port = 6666;
+            path = "/leaps/";
+          };
+          networking.firewall.enable = false;
+        };
+    };
+
+  testScript =
+    ''
+      startAll;
+      $server->waitForOpenPort(6666);
+      $client->succeed("curl http://server:6666/leaps/ | grep -i 'leaps'");
+    '';
+})
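The new test is reachable through the tests.leaps entry added to nixos/release.nix above, so it can be built like any other NixOS VM test. A hedged sketch of selecting it from a nixpkgs checkout; the exact attribute layout depends on callTest/hydraJob, so treat this as illustrative rather than authoritative.

# Illustrative only; evaluated from the root of the nixpkgs tree.
let
  release = import ./nixos/release.nix { };
in
  # callTest wraps each test per supported system, so the 64-bit Linux
  # test derivation should be reachable roughly as:
  release.tests.leaps.x86_64-linux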
@@ -1,5 +1,5 @@
 { stdenv, fetchurl, wxGTK30, pkgconfig, gettext, gtk2, glib, zlib, perl, intltool,
-  libogg, libvorbis, libmad, alsaLib, libsndfile, soxr, flac, lame, fetchpatch,
+  libogg, libvorbis, libmad, libjack2, lv2, lilv, serd, sord, sratom, suil, alsaLib, libsndfile, soxr, flac, lame, fetchpatch,
   expat, libid3tag, ffmpeg, soundtouch /*, portaudio - given up fighting their portaudio.patch */
 }:

@@ -29,7 +29,7 @@ stdenv.mkDerivation rec {

   buildInputs = [
     pkgconfig gettext wxGTK30 expat alsaLib
-    libsndfile soxr libid3tag gtk2
+    libsndfile soxr libid3tag libjack2 lv2 lilv serd sord sratom suil gtk2
     ffmpeg libmad lame libvorbis flac soundtouch
   ]; #ToDo: detach sbsms

@@ -1,22 +1,29 @@
-{ stdenv, fetchurl, fftw, ladspaH, pkgconfig }:
+{ stdenv, fetchurl, autoreconfHook, automake, fftw, ladspaH, libxml2, pkgconfig
+, perlPackages }:
+
+stdenv.mkDerivation rec {
+  name = "swh-plugins-${version}";
+  version = "0.4.17";

-stdenv.mkDerivation {
-  name = "swh-plugins-0.4.15";
   src = fetchurl {
-    url = http://plugin.org.uk/releases/0.4.15/swh-plugins-0.4.15.tar.gz;
-    sha256 = "0h462s4mmqg4iw7zdsihnrmz2vjg0fd49qxw2a284bnryjjfhpnh";
+    url = "https://github.com/swh/ladspa/archive/v${version}.tar.gz";
+    sha256 = "1rqwh8xrw6hnp69dg4gy336bfbfpmbx4fjrk0nb8ypjcxkz91c6i";
   };

-  buildInputs = [fftw ladspaH pkgconfig];
+  buildInputs = [ autoreconfHook fftw ladspaH libxml2 pkgconfig perlPackages.perl perlPackages.XMLParser ];

-  postInstall =
-    ''
-      mkdir -p $out/share/ladspa/
-      ln -sv $out/lib/ladspa $out/share/ladspa/lib
-    '';
+  patchPhase = ''
+    patchShebangs .
+    patchShebangs ./metadata/
+    cp ${automake}/share/automake-*/mkinstalldirs .
+  '';

-  meta = {
+  meta = with stdenv.lib; {
+    homepage = http://plugin.org.uk/;
     description = "LADSPA format audio plugins";
+    license = licenses.gpl2;
+    maintainers = [ maintainers.magnetophon ];
+    platforms = platforms.linux;
   };
 }
@ -1,28 +0,0 @@
|
|||||||
{ stdenv, fetchgit, autoreconfHook, automake, fftw, ladspaH, libxml2, pkgconfig
|
|
||||||
, perl, perlPackages }:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "swh-plugins-git-2015-03-04";
|
|
||||||
|
|
||||||
src = fetchgit {
|
|
||||||
url = https://github.com/swh/ladspa.git;
|
|
||||||
rev = "4b8437e8037cace3d5bf8ce6d1d1da0182aba686";
|
|
||||||
sha256 = "1rmqm4780dhp0pj2scl3k7m8hpp1x6w6ln4wwg954zb9570rqaxx";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = [ autoreconfHook fftw ladspaH libxml2 pkgconfig perl perlPackages.XMLParser ];
|
|
||||||
|
|
||||||
patchPhase = ''
|
|
||||||
patchShebangs .
|
|
||||||
patchShebangs ./metadata/
|
|
||||||
cp ${automake}/share/automake-*/mkinstalldirs .
|
|
||||||
'';
|
|
||||||
|
|
||||||
meta = with stdenv.lib; {
|
|
||||||
homepage = http://plugin.org.uk/;
|
|
||||||
description = "LADSPA format audio plugins";
|
|
||||||
license = licenses.gpl2;
|
|
||||||
maintainers = [ maintainers.magnetophon ];
|
|
||||||
platforms = platforms.linux;
|
|
||||||
};
|
|
||||||
}
|
|
@ -4,11 +4,11 @@
|
|||||||
}:
|
}:
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "snd-15.9";
|
name = "snd-16.9";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "mirror://sourceforge/snd/${name}.tar.gz";
|
url = "mirror://sourceforge/snd/${name}.tar.gz";
|
||||||
sha256 = "0hs9ailgaphgyi3smnrpwksvdww85aa7szqgi6l6d2jwfx9g4bhd";
|
sha256 = "1rw9wrj1f0g413ya32s9mwhvv3c6iasjza22irzf6xlv49b9s5dp";
|
||||||
};
|
};
|
||||||
|
|
||||||
nativeBuildInputs = [ pkgconfig ];
|
nativeBuildInputs = [ pkgconfig ];
|
||||||
|
@ -1,4 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
mkdir -p $out/share/emacs/site-lisp
|
|
||||||
cp $src $out/share/emacs/site-lisp/stratego.el
|
|
@ -1,10 +0,0 @@
|
|||||||
{stdenv, fetchsvn}:
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "stratego-mode";
|
|
||||||
builder = ./builder.sh;
|
|
||||||
src = fetchsvn {
|
|
||||||
url = https://svn.strategoxt.org/repos/StrategoXT/stratego-editors/trunk/emacs/stratego.el;
|
|
||||||
rev = 12678;
|
|
||||||
sha256 = "4ab4ec587550233f29ca08b82fa0a9f7e5b33fc178348037e3ab1816bd60f538";
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,11 +1,11 @@
|
|||||||
{ stdenv, fetchurl, openssl, curl, coreutils, gawk, bash, which }:
|
{ stdenv, fetchurl, openssl, curl, coreutils, gawk, bash, which }:
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
stdenv.mkDerivation {
|
||||||
name = "esniper-2.31.0";
|
name = "esniper-2.32.0";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "mirror://sourceforge/esniper/esniper-2-31-0.tgz";
|
url = "mirror://sourceforge/esniper/esniper-2-32-0.tgz";
|
||||||
sha256 = "0xn6gdyr0c18khwcsi2brp49wkancrsrxxca7hvbawhbf263glih";
|
sha256 = "04lka4d0mnrwc369yzvq28n8qi1qbm8810ykx6d0a4kaghiybqsy";
|
||||||
};
|
};
|
||||||
|
|
||||||
buildInputs = [ openssl curl ];
|
buildInputs = [ openssl curl ];
|
||||||
|
@ -4,7 +4,7 @@
|
|||||||
, libXext, libXfixes, libXi, libXrandr, libXrender, libXtst, nspr, nss, pango
|
, libXext, libXfixes, libXi, libXrandr, libXrender, libXtst, nspr, nss, pango
|
||||||
, systemd, libXScrnSaver }:
|
, systemd, libXScrnSaver }:
|
||||||
|
|
||||||
let version = "0.0.9"; in
|
let version = "0.0.10"; in
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
stdenv.mkDerivation {
|
||||||
|
|
||||||
@ -12,7 +12,7 @@ stdenv.mkDerivation {
|
|||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "https://cdn-canary.discordapp.com/apps/linux/${version}/discord-canary-${version}.tar.gz";
|
url = "https://cdn-canary.discordapp.com/apps/linux/${version}/discord-canary-${version}.tar.gz";
|
||||||
sha256 = "72f692cea62b836220f40d81d110846f9cde9a0fba7a8d47226d89e0980255b9";
|
sha256 = "1wkbbnbqbwgixdbm69dlirhgjnn8llqyzil01nqwpknh1qwd06pr";
|
||||||
};
|
};
|
||||||
|
|
||||||
libPath = stdenv.lib.makeLibraryPath [
|
libPath = stdenv.lib.makeLibraryPath [
|
||||||
|
@ -8,6 +8,7 @@ index 1904ab3..cb3624d 100644
|
|||||||
|
|
||||||
+#include <string.h>
|
+#include <string.h>
|
||||||
+#include <sys/time.h>
|
+#include <sys/time.h>
|
||||||
|
#include <unistd.h>
|
||||||
#include <sys/types.h>
|
#include <sys/types.h>
|
||||||
#include <sys/socket.h>
|
#include <sys/socket.h>
|
||||||
#include <netinet/in.h>
|
#include <netinet/in.h>
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
{ stdenv, fetchurl, libosip }:
|
{ stdenv, fetchurl, libosip }:
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "siproxd-0.8.1";
|
name = "siproxd-0.8.2";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "mirror://sourceforge/siproxd/${name}.tar.gz";
|
url = "mirror://sourceforge/siproxd/${name}.tar.gz";
|
||||||
sha256 = "1bcxl0h5nc28m8lcdhpbl5yc93w98xm53mfzrf04knsvmx7z0bfz";
|
sha256 = "1l6cyxxhra825jiiw9npa7jrbfgbyfpk4966cqkrw66cn28y8v2j";
|
||||||
};
|
};
|
||||||
|
|
||||||
patches = [ ./cheaders.patch ];
|
patches = [ ./cheaders.patch ];
|
||||||
|
@ -1,31 +0,0 @@
|
|||||||
{stdenv, fetchurl, perl, paml}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "pal2nal-12";
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://coot.embl.de/pal2nal/distribution/pal2nal.v12.tar.gz;
|
|
||||||
sha256 = "1qj9sq5skpa7vyccl9gxc5ls85jwiq8j6mr8wvacz4yhyg0afy04";
|
|
||||||
};
|
|
||||||
|
|
||||||
installPhase = ''
|
|
||||||
mkdir -p $out/bin
|
|
||||||
|
|
||||||
cp -v pal2nal.pl $out/bin
|
|
||||||
|
|
||||||
mkdir -p $out/doc
|
|
||||||
|
|
||||||
cp -v README $out/doc
|
|
||||||
'';
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
description = "Program for aligning nucleotide sequences based on an aminoacid alignment";
|
|
||||||
longDescription = ''
|
|
||||||
PAL2NAL is a program that converts a multiple sequence alignment of proteins and the corresponding DNA (or mRNA) sequences into a codon alignment. The program automatically assigns the corresponding codon sequence even if the input DNA sequence has mismatches with the input protein sequence, or contains UTRs, polyA tails. It can also deal with frame shifts in the input alignment, which is suitable for the analysis of pseudogenes. The resulting codon alignment can further be subjected to the calculation of synonymous (KS) and non-synonymous (KA) substitution rates.
|
|
||||||
|
|
||||||
If the input is a pair of sequences, PAL2NAL automatically calculates KS and KA by the codeml program in PAML.
|
|
||||||
'';
|
|
||||||
license = "non-commercial";
|
|
||||||
homepage = http://coot.embl.de/pal2nal/;
|
|
||||||
pkgMaintainer = "Pjotr Prins";
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,11 +1,11 @@
|
|||||||
{stdenv, fetchurl}:
|
{stdenv, fetchurl}:
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
version = "4.2a";
|
version = "4.9c";
|
||||||
name = "paml-${version}";
|
name = "paml-${version}";
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "http://abacus.gene.ucl.ac.uk/software/paml${version}.tar.gz";
|
url = "http://abacus.gene.ucl.ac.uk/software/paml${version}.tgz";
|
||||||
sha256 = "0yywyrjgxrpavp50n00l01pl90b7pykgb2k53yrlykz9dnf583pb";
|
sha256 = "18a1l47223l7jyjavm8a8la84q9k9kbxwmj7kz4z3pdx70qrl04j";
|
||||||
};
|
};
|
||||||
|
|
||||||
preBuild = ''
|
preBuild = ''
|
||||||
|
@ -2,11 +2,11 @@
|
|||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "remotebox-${version}";
|
name = "remotebox-${version}";
|
||||||
version = "2.0";
|
version = "2.1";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "http://remotebox.knobgoblin.org.uk/downloads/RemoteBox-${version}.tar.bz2";
|
url = "http://remotebox.knobgoblin.org.uk/downloads/RemoteBox-${version}.tar.bz2";
|
||||||
sha256 = "0c73i53wdjd2m2sdgq3r3xp30irxh5z5rak2rk79yb686s6bv759";
|
sha256 = "0pyi433pwbpyh58p08q8acav7mk90gchgjghvl9f8wqafx7bp404";
|
||||||
};
|
};
|
||||||
|
|
||||||
buildInputs = with perlPackages; [ perl Glib Gtk2 Pango SOAPLite ];
|
buildInputs = with perlPackages; [ perl Glib Gtk2 Pango SOAPLite ];
|
||||||
|
@ -2,16 +2,16 @@
|
|||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "unifont-${version}";
|
name = "unifont-${version}";
|
||||||
version = "9.0.03";
|
version = "9.0.04";
|
||||||
|
|
||||||
ttf = fetchurl {
|
ttf = fetchurl {
|
||||||
url = "http://fossies.org/linux/unifont/font/precompiled/${name}.ttf";
|
url = "mirror://gnu/unifont/${name}/${name}.ttf";
|
||||||
sha256 = "00j97r658xl33zgi66glgbx2s7j9q52cj4iq7z1rrf3p38xzgbff";
|
sha256 = "052waajjdry67jjl7vy984padyzdrkhf5gylgbnvj90q6d52j02z";
|
||||||
};
|
};
|
||||||
|
|
||||||
pcf = fetchurl {
|
pcf = fetchurl {
|
||||||
url = "http://fossies.org/linux/unifont/font/precompiled/${name}.pcf.gz";
|
url = "mirror://gnu/unifont/${name}/${name}.pcf.gz";
|
||||||
sha256 = "1w3gaz8afc3q7svgm4hmgjhvi9pxkmgsib8sscgi52c7ff0mhq9f";
|
sha256 = "0736qmlzsf4xlipj4vzihafkigc3xjisxnwcqhl9dzkhxfjq9612";
|
||||||
};
|
};
|
||||||
|
|
||||||
buildInputs = [ mkfontscale mkfontdir ];
|
buildInputs = [ mkfontscale mkfontdir ];
|
||||||
|
@ -1,11 +1,14 @@
|
|||||||
{ plasmaPackage, ecm, kdoctools, ki18n, kxmlgui
|
{
|
||||||
, kdbusaddons, kiconthemes, kio, sonnet, kdelibs4support
|
plasmaPackage,
|
||||||
|
ecm, kdoctools,
|
||||||
|
kdbusaddons, kdelibs4support, khotkeys, ki18n, kiconthemes, kio, kxmlgui,
|
||||||
|
sonnet
|
||||||
}:
|
}:
|
||||||
|
|
||||||
plasmaPackage {
|
plasmaPackage {
|
||||||
name = "kmenuedit";
|
name = "kmenuedit";
|
||||||
nativeBuildInputs = [ ecm kdoctools ];
|
nativeBuildInputs = [ ecm kdoctools ];
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
kdelibs4support ki18n kio sonnet kxmlgui kdbusaddons kiconthemes
|
kdbusaddons kdelibs4support khotkeys ki18n kiconthemes kio kxmlgui sonnet
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
@ -1,11 +1,15 @@
|
|||||||
{ plasmaPackage, ecm, kdoctools, kconfig
|
{
|
||||||
, kcoreaddons, kdelibs4support, ki18n, kitemviews, knewstuff
|
plasmaPackage,
|
||||||
, kiconthemes, libksysguard, qtwebkit
|
ecm, kdoctools,
|
||||||
|
lm_sensors,
|
||||||
|
kconfig, kcoreaddons, kdelibs4support, ki18n, kiconthemes, kitemviews,
|
||||||
|
knewstuff, libksysguard, qtwebkit
|
||||||
}:
|
}:
|
||||||
|
|
||||||
plasmaPackage {
|
plasmaPackage {
|
||||||
name = "ksysguard";
|
name = "ksysguard";
|
||||||
nativeBuildInputs = [ ecm kdoctools ];
|
nativeBuildInputs = [ ecm kdoctools ];
|
||||||
|
buildInputs = [ lm_sensors ];
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
kconfig kcoreaddons kitemviews knewstuff kiconthemes libksysguard
|
kconfig kcoreaddons kitemviews knewstuff kiconthemes libksysguard
|
||||||
kdelibs4support ki18n qtwebkit
|
kdelibs4support ki18n qtwebkit
|
||||||
|
@ -1,7 +1,9 @@
|
|||||||
{ fetchpatch, plasmaPackage, ecm, kauth, kcompletion
|
{
|
||||||
, kconfigwidgets, kcoreaddons, kservice, kwidgetsaddons
|
plasmaPackage,
|
||||||
, kwindowsystem, plasma-framework, qtscript, qtx11extras
|
ecm,
|
||||||
, kconfig, ki18n, kiconthemes
|
kauth, kcompletion, kconfig, kconfigwidgets, kcoreaddons, ki18n, kiconthemes,
|
||||||
|
kservice, kwidgetsaddons, kwindowsystem, plasma-framework, qtscript, qtwebkit,
|
||||||
|
qtx11extras
|
||||||
}:
|
}:
|
||||||
|
|
||||||
plasmaPackage {
|
plasmaPackage {
|
||||||
@ -9,11 +11,10 @@ plasmaPackage {
|
|||||||
patches = [
|
patches = [
|
||||||
./0001-qdiriterator-follow-symlinks.patch
|
./0001-qdiriterator-follow-symlinks.patch
|
||||||
];
|
];
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [ ecm ];
|
||||||
ecm
|
|
||||||
];
|
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
kauth kconfig ki18n kiconthemes kwindowsystem plasma-framework qtx11extras
|
kauth kconfig ki18n kiconthemes kwindowsystem kcompletion kconfigwidgets
|
||||||
kcompletion kconfigwidgets kcoreaddons kservice kwidgetsaddons qtscript
|
kcoreaddons kservice kwidgetsaddons plasma-framework qtscript qtx11extras
|
||||||
|
qtwebkit
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
@ -1,16 +1,16 @@
|
|||||||
{ plasmaPackage, ecm, ki18n, kcmutils, kconfig
|
{
|
||||||
, kdecoration, kguiaddons, kwidgetsaddons, kservice, kcompletion
|
plasmaPackage,
|
||||||
, frameworkintegration, kwindowsystem, makeQtWrapper, qtx11extras
|
ecm, makeQtWrapper,
|
||||||
|
frameworkintegration, kcmutils, kcompletion, kconfig, kdecoration, kguiaddons,
|
||||||
|
ki18n, kwidgetsaddons, kservice, kwayland, kwindowsystem, qtx11extras
|
||||||
}:
|
}:
|
||||||
|
|
||||||
plasmaPackage {
|
plasmaPackage {
|
||||||
name = "oxygen";
|
name = "oxygen";
|
||||||
nativeBuildInputs = [
|
nativeBuildInputs = [ ecm makeQtWrapper ];
|
||||||
ecm makeQtWrapper
|
|
||||||
];
|
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [
|
||||||
kcmutils kconfig kdecoration kguiaddons kwidgetsaddons kservice kcompletion
|
frameworkintegration kcmutils kcompletion kconfig kdecoration kguiaddons
|
||||||
frameworkintegration ki18n kwindowsystem qtx11extras
|
ki18n kservice kwayland kwidgetsaddons kwindowsystem qtx11extras
|
||||||
];
|
];
|
||||||
postInstall = ''
|
postInstall = ''
|
||||||
wrapQtProgram "$out/bin/oxygen-demo5"
|
wrapQtProgram "$out/bin/oxygen-demo5"
|
||||||
|
@ -1,20 +1,20 @@
|
|||||||
{ plasmaPackage, substituteAll, ecm, kdoctools
|
{
|
||||||
, attica, baloo, boost, fontconfig, kactivities, kactivities-stats
|
plasmaPackage, substituteAll,
|
||||||
, kauth, kcmutils, kdbusaddons, kdeclarative, kded, kdelibs4support, kemoticons
|
ecm, kdoctools,
|
||||||
, kglobalaccel, ki18n, kitemmodels, knewstuff, knotifications
|
attica, baloo, boost, fontconfig, ibus, kactivities, kactivities-stats, kauth,
|
||||||
, knotifyconfig, kpeople, krunner, kwallet, kwin, phonon
|
kcmutils, kdbusaddons, kdeclarative, kded, kdelibs4support, kemoticons,
|
||||||
, plasma-framework, plasma-workspace, qtdeclarative, qtx11extras
|
kglobalaccel, ki18n, kitemmodels, knewstuff, knotifications, knotifyconfig,
|
||||||
, qtsvg, libXcursor, libXft, libxkbfile, xf86inputevdev
|
kpeople, krunner, ksysguard, kwallet, kwin, libXcursor, libXft,
|
||||||
, xf86inputsynaptics, xinput, xkeyboard_config, xorgserver
|
libcanberra_kde, libpulseaudio, libxkbfile, phonon, plasma-framework,
|
||||||
, libcanberra_kde, libpulseaudio, utillinux
|
plasma-workspace, qtdeclarative, qtquickcontrols, qtsvg, qtx11extras, xf86inputevdev,
|
||||||
, qtquickcontrols, ksysguard
|
xf86inputsynaptics, xinput, xkeyboard_config, xorgserver, utillinux
|
||||||
}:
|
}:
|
||||||
|
|
||||||
plasmaPackage rec {
|
plasmaPackage rec {
|
||||||
name = "plasma-desktop";
|
name = "plasma-desktop";
|
||||||
nativeBuildInputs = [ ecm kdoctools ];
|
nativeBuildInputs = [ ecm kdoctools ];
|
||||||
buildInputs = [
|
buildInputs = [
|
||||||
attica boost fontconfig kcmutils kdbusaddons kded kitemmodels knewstuff
|
attica boost fontconfig ibus kcmutils kdbusaddons kded kitemmodels knewstuff
|
||||||
knotifications knotifyconfig kwallet libcanberra_kde libXcursor
|
knotifications knotifyconfig kwallet libcanberra_kde libXcursor
|
||||||
libpulseaudio libXft libxkbfile phonon qtsvg xf86inputevdev
|
libpulseaudio libXft libxkbfile phonon qtsvg xf86inputevdev
|
||||||
xf86inputsynaptics xkeyboard_config xinput baloo kactivities
|
xf86inputsynaptics xkeyboard_config xinput baloo kactivities
|
||||||
|
@ -4,7 +4,7 @@
|
|||||||
with stdenv.lib;
|
with stdenv.lib;
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
p_name = "xfce4-whiskermenu-plugin";
|
p_name = "xfce4-whiskermenu-plugin";
|
||||||
version = "1.5.3";
|
version = "1.6.1";
|
||||||
|
|
||||||
name = "${p_name}-${version}";
|
name = "${p_name}-${version}";
|
||||||
|
|
||||||
@ -12,7 +12,7 @@ stdenv.mkDerivation rec {
|
|||||||
owner = "gottcode";
|
owner = "gottcode";
|
||||||
repo = "xfce4-whiskermenu-plugin";
|
repo = "xfce4-whiskermenu-plugin";
|
||||||
rev = "v${version}";
|
rev = "v${version}";
|
||||||
sha256 = "07gmf9x3pw6xajklj0idahbnv0psnkhiqhb88bmkp344jirsx6ba";
|
sha256 = "19hldrrgy7qmrncv5rfsclybycjp9rjfnslhm996h62d2p675qpc";
|
||||||
};
|
};
|
||||||
|
|
||||||
nativeBuildInputs = [ cmake pkgconfig intltool ];
|
nativeBuildInputs = [ cmake pkgconfig intltool ];
|
||||||
|
@ -1,4 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
mkdir -p $out/jars
|
|
||||||
cp $src $out/jars/$jarname.jar
|
|
@ -1,40 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
tar zxvf $src
|
|
||||||
|
|
||||||
cd abc-*
|
|
||||||
|
|
||||||
for p in $patches; do
|
|
||||||
echo "applying patch $p"
|
|
||||||
patch -p1 < $p
|
|
||||||
done
|
|
||||||
|
|
||||||
cat > ant.settings <<EOF
|
|
||||||
polyglot.loc=$polyglot/jars/polyglot.jar
|
|
||||||
polyglot.cupclasses.loc=$polyglot/jars/java_cup.jar
|
|
||||||
jflex.loc=
|
|
||||||
soot.loc=$soot/jars/soot.jar
|
|
||||||
jasmin.loc=$jasmin/jars/jasmin.jar
|
|
||||||
xact.loc=$xact/jars/xact.jar
|
|
||||||
paddle.loc=$paddle/jars/paddle.jar
|
|
||||||
jedd.runtime.jar=$jedd/jars/jedd.runtime.jar
|
|
||||||
javabdd.jar=$javabdd/jars/javabdd.jar
|
|
||||||
EOF
|
|
||||||
|
|
||||||
$apacheAnt/bin/ant jars
|
|
||||||
|
|
||||||
mkdir -p $out/jars
|
|
||||||
|
|
||||||
cp lib/abc.jar $out/jars
|
|
||||||
cp lib/abc-runtime.jar $out/jars
|
|
||||||
cp lib/abc-testing.jar $out/jars
|
|
||||||
cp lib/abc-complete.jar $out/jars
|
|
||||||
|
|
||||||
# Create the executable abc script
|
|
||||||
mkdir -p $out/bin
|
|
||||||
cat > $out/bin/abc <<EOF
|
|
||||||
#! $SHELL -e
|
|
||||||
|
|
||||||
exec $jre/bin/java -classpath $out/jars/abc-complete.jar -Xmx256M -Dabc.home=$out/jars abc.main.Main \$@
|
|
||||||
EOF
|
|
||||||
chmod +x $out/bin/abc
|
|
@ -1,79 +0,0 @@
|
|||||||
{stdenv, fetchurl, javaCup, jre, apacheAnt}:
|
|
||||||
|
|
||||||
let
|
|
||||||
soot =
|
|
||||||
import ./soot {
|
|
||||||
inherit stdenv fetchurl apacheAnt polyglot jasmin;
|
|
||||||
};
|
|
||||||
|
|
||||||
jasmin =
|
|
||||||
import ./jasmin {
|
|
||||||
inherit stdenv fetchurl apacheAnt javaCup;
|
|
||||||
};
|
|
||||||
|
|
||||||
polyglot =
|
|
||||||
import ./polyglot {
|
|
||||||
inherit stdenv fetchurl apacheAnt;
|
|
||||||
};
|
|
||||||
|
|
||||||
jedd =
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "jedd-runtime-snapshot";
|
|
||||||
jarname = "jedd.runtime";
|
|
||||||
builder = ./builder-binjar.sh;
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/lib/jedd-runtime-snapshot.jar;
|
|
||||||
md5 = "595c5ac2f6384f4c34f1854891b5e422";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
javabdd =
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "javabdd-0.6";
|
|
||||||
jarname = "javabdd";
|
|
||||||
builder = ./builder-binjar.sh;
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/lib/javabdd_0.6.jar;
|
|
||||||
md5 = "6e0246e891b7431f4e7265b5b1471307";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
paddle =
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "paddle-snapshot";
|
|
||||||
jarname = "paddle";
|
|
||||||
builder = ./builder-binjar.sh;
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/lib/paddle-snapshot.jar;
|
|
||||||
md5 = "a8e032310137945124a2850cd8cfc277";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
xact =
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "xact-complete-1.0-1";
|
|
||||||
jarname = "xact";
|
|
||||||
builder = ./builder-binjar.sh;
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/lib/xact-complete-1.0-1.jar;
|
|
||||||
md5 = "9810ad8762101ea691a895f0a6b7a5c3";
|
|
||||||
};
|
|
||||||
};
|
|
||||||
in
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "abc-1.2.0";
|
|
||||||
builder = ./builder.sh;
|
|
||||||
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/abc-1.2.0-src.tar.gz;
|
|
||||||
md5 = "aef9e8eac860f904f2a841e18770dc47";
|
|
||||||
};
|
|
||||||
|
|
||||||
inherit apacheAnt polyglot soot javaCup xact jasmin jre javabdd paddle jedd;
|
|
||||||
patches = [];
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
description = "The AspectBench Compiler for AspectJ";
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,24 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
tar zxvf $src
|
|
||||||
cd jasmin-*
|
|
||||||
|
|
||||||
sed -e 's/<javac/<javac source="1.4"/' build.xml > build-tmp.xml
|
|
||||||
mv build-tmp.xml build.xml
|
|
||||||
|
|
||||||
cat > ant.settings <<EOF
|
|
||||||
java_cup.jar=$javaCup
|
|
||||||
|
|
||||||
# Location in which to generate Jasmin release tarballs
|
|
||||||
release.loc=lib
|
|
||||||
|
|
||||||
# Version of Jasmin for tagging tarballs
|
|
||||||
jasmin.version=foo
|
|
||||||
|
|
||||||
build.compiler=javac1.4
|
|
||||||
EOF
|
|
||||||
|
|
||||||
ant jasmin-jar
|
|
||||||
|
|
||||||
mkdir -p $out/jars/
|
|
||||||
mv lib/jasminclasses-foo.jar $out/jars/jasmin.jar
|
|
@ -1,14 +0,0 @@
|
|||||||
{stdenv, fetchurl, apacheAnt, javaCup}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "jasmin-dev-20060422015512";
|
|
||||||
builder = ./builder.sh;
|
|
||||||
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/jasmin-dev-20060422015512.tar.gz;
|
|
||||||
md5 = "0e49e532b4658c959582c5d5b5e28bf1";
|
|
||||||
};
|
|
||||||
|
|
||||||
inherit javaCup;
|
|
||||||
buildInputs = [apacheAnt];
|
|
||||||
}
|
|
@ -1,11 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
tar zxvf $src
|
|
||||||
cd polyglot-*
|
|
||||||
|
|
||||||
ant polyglot-jar
|
|
||||||
ant cup
|
|
||||||
|
|
||||||
mkdir -p $out/jars/
|
|
||||||
mv lib/java_cup.jar $out/jars/
|
|
||||||
mv lib/polyglot*.jar $out/jars/
|
|
@ -1,13 +0,0 @@
|
|||||||
{stdenv, fetchurl, apacheAnt}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "polyglot-dev-20060422015512";
|
|
||||||
builder = ./builder.sh;
|
|
||||||
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/polyglot-dev-20060422015512.tar.gz;
|
|
||||||
md5 = "6972fe537b4edd41872ed1cf24d24b50";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = [apacheAnt];
|
|
||||||
}
|
|
@ -1,19 +0,0 @@
|
|||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
tar zxvf $src
|
|
||||||
cd soot-*
|
|
||||||
|
|
||||||
export NIX_ANT_OPTS="$ANT_OPTS -Xmx200m"
|
|
||||||
|
|
||||||
cat > ant.settings <<EOF
|
|
||||||
polyglot.jar=$polyglot/jars/polyglot.jar
|
|
||||||
jasmin.jar=$jasmin/jars/jasmin.jar
|
|
||||||
soot.version=foo
|
|
||||||
release.loc=lib
|
|
||||||
javaapi.url=http://java.sun.com/j2se/1.4.2/docs/api/
|
|
||||||
EOF
|
|
||||||
|
|
||||||
ant classesjar
|
|
||||||
|
|
||||||
mkdir -p $out/jars/
|
|
||||||
mv lib/sootclasses-foo.jar $out/jars/soot.jar
|
|
@ -1,14 +0,0 @@
|
|||||||
{stdenv, fetchurl, apacheAnt, polyglot, jasmin}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "soot-dev-20060422015512";
|
|
||||||
builder = ./builder.sh;
|
|
||||||
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://abc.comlab.ox.ac.uk/dists/1.2.0/files/soot-dev-20060422015512.tar.gz;
|
|
||||||
md5 = "20dae3e31215b7ec88e3ff32a107d713";
|
|
||||||
};
|
|
||||||
|
|
||||||
inherit polyglot jasmin;
|
|
||||||
buildInputs = [apacheAnt];
|
|
||||||
}
|
|
pkgs/development/compilers/dmd/2.067.1.nix (new file)
@@ -0,0 +1,74 @@
|
|||||||
|
{ stdenv, fetchurl, unzip, makeWrapper }:
|
||||||
|
|
||||||
|
stdenv.mkDerivation {
|
||||||
|
name = "dmd-2.067.1";
|
||||||
|
|
||||||
|
src = fetchurl {
|
||||||
|
url = http://downloads.dlang.org/releases/2015/dmd.2.067.1.zip;
|
||||||
|
sha256 = "0ny99vfllvvgcl79pwisxcdnb3732i827k9zg8c0j4s0n79k5z94";
|
||||||
|
};
|
||||||
|
|
||||||
|
nativeBuildInputs = [ unzip makeWrapper ];
|
||||||
|
|
||||||
|
postPatch = stdenv.lib.optionalString stdenv.isDarwin ''
|
||||||
|
# Allow to use "clang++", commented in Makefile
|
||||||
|
substituteInPlace src/dmd/posix.mak \
|
||||||
|
--replace g++ clang++ \
|
||||||
|
--replace MACOSX_DEPLOYMENT_TARGET MACOSX_DEPLOYMENT_TARGET_
|
||||||
|
|
||||||
|
# Was not able to compile on darwin due to "__inline_isnanl"
|
||||||
|
# being undefined.
|
||||||
|
substituteInPlace src/dmd/root/port.c --replace __inline_isnanl __inline_isnan
|
||||||
|
'';
|
||||||
|
|
||||||
|
# Buid and install are based on http://wiki.dlang.org/Building_DMD
|
||||||
|
buildPhase = ''
|
||||||
|
cd src/dmd
|
||||||
|
make -f posix.mak INSTALL_DIR=$out
|
||||||
|
export DMD=$PWD/dmd
|
||||||
|
cd ../druntime
|
||||||
|
make -f posix.mak INSTALL_DIR=$out DMD=$DMD
|
||||||
|
cd ../phobos
|
||||||
|
make -f posix.mak INSTALL_DIR=$out DMD=$DMD
|
||||||
|
cd ../..
|
||||||
|
'';
|
||||||
|
|
||||||
|
installPhase = ''
|
||||||
|
cd src/dmd
|
||||||
|
mkdir $out
|
||||||
|
mkdir $out/bin
|
||||||
|
cp dmd $out/bin
|
||||||
|
|
||||||
|
cd ../druntime
|
||||||
|
mkdir $out/include
|
||||||
|
mkdir $out/include/d2
|
||||||
|
cp -r import/* $out/include/d2
|
||||||
|
|
||||||
|
cd ../phobos
|
||||||
|
mkdir $out/lib
|
||||||
|
${let bits = if stdenv.is64bit then "64" else "32";
|
||||||
|
osname = if stdenv.isDarwin then "osx" else "linux"; in
|
||||||
|
"cp generated/${osname}/release/${bits}/libphobos2.a $out/lib"
|
||||||
|
}
|
||||||
|
|
||||||
|
cp -r std $out/include/d2
|
||||||
|
cp -r etc $out/include/d2
|
||||||
|
|
||||||
|
wrapProgram $out/bin/dmd \
|
||||||
|
--prefix PATH ":" "${stdenv.cc}/bin" \
|
||||||
|
--set CC "$""{CC:-$CC""}"
|
||||||
|
|
||||||
|
cd $out/bin
|
||||||
|
tee dmd.conf << EOF
|
||||||
|
[Environment]
|
||||||
|
DFLAGS=-I$out/include/d2 -L-L$out/lib ${stdenv.lib.optionalString (!stdenv.cc.isClang) "-L--no-warn-search-mismatch -L--export-dynamic"}
|
||||||
|
EOF
|
||||||
|
'';
|
||||||
|
|
||||||
|
meta = with stdenv.lib; {
|
||||||
|
description = "D language compiler";
|
||||||
|
homepage = http://dlang.org/;
|
||||||
|
license = licenses.free; # parts under different licenses
|
||||||
|
platforms = platforms.unix;
|
||||||
|
};
|
||||||
|
}
|
@@ -1,14 +1,19 @@
-{ stdenv, fetchurl, unzip, curl, makeWrapper }:
+{ stdenv, fetchurl
+, makeWrapper, unzip, which

-stdenv.mkDerivation {
-  name = "dmd-2.067.1";
+# Versions 2.070.2 and up require a working dmd compiler to build:
+, bootstrapDmd }:
+
+stdenv.mkDerivation rec {
+  name = "dmd-${version}";
+  version = "2.070.2";

   src = fetchurl {
-    url = http://downloads.dlang.org/releases/2015/dmd.2.067.1.zip;
-    sha256 = "0ny99vfllvvgcl79pwisxcdnb3732i827k9zg8c0j4s0n79k5z94";
+    url = "http://downloads.dlang.org/releases/2.x/${version}/dmd.${version}.zip";
+    sha256 = "1pbhxxf41v816j0aky3q2pcd8a6phy3363l7vr5r5pg8ps3gl701";
   };

-  buildInputs = [ unzip curl makeWrapper ];
+  nativeBuildInputs = [ bootstrapDmd makeWrapper unzip which ];

   postPatch = stdenv.lib.optionalString stdenv.isDarwin ''
     # Allow to use "clang++", commented in Makefile
@ -1,7 +0,0 @@
|
|||||||
set -e
|
|
||||||
|
|
||||||
source $stdenv/setup
|
|
||||||
|
|
||||||
unzip $src
|
|
||||||
mkdir $out
|
|
||||||
mv eclipse $out/
|
|
@ -1,15 +0,0 @@
|
|||||||
{stdenv, fetchurl, unzip}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation ( rec {
|
|
||||||
pname = "eclipse-JDT-SDK";
|
|
||||||
version = "3.3.2";
|
|
||||||
name = "${pname}-${version}";
|
|
||||||
|
|
||||||
builder = ./builder.sh;
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://sunsite.informatik.rwth-aachen.de/eclipse/downloads/drops/R-3.3.2-200802211800/eclipse-JDT-SDK-3.3.2.zip;
|
|
||||||
md5 = "f9e513b7e3b609feef28651c07807b17";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = [unzip];
|
|
||||||
})
|
|
@@ -291,7 +291,6 @@ self: super: {
   bitcoin-api-extra = dontCheck super.bitcoin-api-extra;
   bitx-bitcoin = dontCheck super.bitx-bitcoin; # http://hydra.cryp.to/build/926187/log/raw
   concurrent-dns-cache = dontCheck super.concurrent-dns-cache;
-  dbus = dontCheck super.dbus; # http://hydra.cryp.to/build/498404/log/raw
   digitalocean-kzs = dontCheck super.digitalocean-kzs; # https://github.com/KazumaSATO/digitalocean-kzs/issues/1
   github-types = dontCheck super.github-types; # http://hydra.cryp.to/build/1114046/nixlog/1/raw
   hadoop-rpc = dontCheck super.hadoop-rpc; # http://hydra.cryp.to/build/527461/nixlog/2/raw
@@ -1059,12 +1058,14 @@ self: super: {
   dataenc = doJailbreak super.dataenc;

   libsystemd-journal = overrideCabal super.libsystemd-journal (old: {
-    # https://github.com/ocharles/libsystemd-journal/pull/17
-    jailbreak = true;
     librarySystemDepends = old.librarySystemDepends or [] ++ [ pkgs.systemd ];
   });

   # horribly outdated (X11 interface changed a lot)
   sindre = markBroken super.sindre;

+  # https://github.com/jmillikin/haskell-dbus/pull/7
+  # http://hydra.cryp.to/build/498404/log/raw
+  dbus = dontCheck (appendPatch super.dbus ./patches/hdbus-semicolons.patch);
+
 }
@ -0,0 +1,34 @@
|
|||||||
|
From 8fd84b4d6ba257ac93a61bce3378777840e8bf80 Mon Sep 17 00:00:00 2001
|
||||||
|
From: Nikolay Amiantov <ab@fmap.me>
|
||||||
|
Date: Sat, 5 Nov 2016 14:27:04 +0300
|
||||||
|
Subject: [PATCH] getSessionAddress: take first bus address from
|
||||||
|
semicolon-separated variable
|
||||||
|
|
||||||
|
---
|
||||||
|
lib/DBus/Address.hs | 3 ++-
|
||||||
|
1 file changed, 2 insertions(+), 1 deletion(-)
|
||||||
|
|
||||||
|
diff --git a/lib/DBus/Address.hs b/lib/DBus/Address.hs
|
||||||
|
index 72ac99d..596b18c 100644
|
||||||
|
--- a/lib/DBus/Address.hs
|
||||||
|
+++ b/lib/DBus/Address.hs
|
||||||
|
@@ -18,6 +18,7 @@ module DBus.Address where
|
||||||
|
import qualified Control.Exception
|
||||||
|
import Data.Char (digitToInt, ord, chr)
|
||||||
|
import Data.List (intercalate)
|
||||||
|
+import Data.Maybe (listToMaybe)
|
||||||
|
import qualified Data.Map
|
||||||
|
import Data.Map (Map)
|
||||||
|
import qualified System.Environment
|
||||||
|
@@ -152,7 +153,7 @@ getSystemAddress = do
|
||||||
|
getSessionAddress :: IO (Maybe Address)
|
||||||
|
getSessionAddress = do
|
||||||
|
env <- getenv "DBUS_SESSION_BUS_ADDRESS"
|
||||||
|
- return (env >>= parseAddress)
|
||||||
|
+ return $ maybe Nothing listToMaybe (env >>= parseAddresses)
|
||||||
|
|
||||||
|
-- | Returns the address in the environment variable
|
||||||
|
-- @DBUS_STARTER_ADDRESS@, which must be set.
|
||||||
|
--
|
||||||
|
2.10.1
|
||||||
|
|
@ -25,9 +25,9 @@ stdenv.mkDerivation rec {
|
|||||||
sha256 = "11y2w6jgngj4rxiy136mkcs02l52rxk60kapyfc4rgrxz5hli3ym";
|
sha256 = "11y2w6jgngj4rxiy136mkcs02l52rxk60kapyfc4rgrxz5hli3ym";
|
||||||
};
|
};
|
||||||
|
|
||||||
buildInputs = [ gfortran readline ncurses perl flex texinfo qhull libX11
|
buildInputs = [ gfortran readline ncurses perl flex texinfo qhull
|
||||||
graphicsmagick pcre pkgconfig mesa fltk zlib curl openblas libsndfile
|
graphicsmagick pcre pkgconfig fltk zlib curl openblas libsndfile fftw
|
||||||
fftw fftwSinglePrec qrupdate arpack libwebp ]
|
fftwSinglePrec qrupdate arpack libwebp ]
|
||||||
++ (stdenv.lib.optional (qt != null) qt)
|
++ (stdenv.lib.optional (qt != null) qt)
|
||||||
++ (stdenv.lib.optional (qscintilla != null) qscintilla)
|
++ (stdenv.lib.optional (qscintilla != null) qscintilla)
|
||||||
++ (stdenv.lib.optional (ghostscript != null) ghostscript)
|
++ (stdenv.lib.optional (ghostscript != null) ghostscript)
|
||||||
@ -38,7 +38,7 @@ stdenv.mkDerivation rec {
|
|||||||
++ (stdenv.lib.optional (jdk != null) jdk)
|
++ (stdenv.lib.optional (jdk != null) jdk)
|
||||||
++ (stdenv.lib.optional (gnuplot != null) gnuplot)
|
++ (stdenv.lib.optional (gnuplot != null) gnuplot)
|
||||||
++ (stdenv.lib.optional (python != null) python)
|
++ (stdenv.lib.optional (python != null) python)
|
||||||
++ (stdenv.lib.optionals (!stdenv.isDarwin) [mesa libX11])
|
++ (stdenv.lib.optionals (!stdenv.isDarwin) [ mesa libX11 ])
|
||||||
;
|
;
|
||||||
|
|
||||||
doCheck = !stdenv.isDarwin;
|
doCheck = !stdenv.isDarwin;
|
||||||
|
@ -1,19 +0,0 @@
|
|||||||
{
|
|
||||||
stdenv, fetchurl, zlib,
|
|
||||||
libpng, libjpeg, libvorbis, libogg,
|
|
||||||
libX11, xf86vidmodeproto, libXxf86vm, libXmu, mesa
|
|
||||||
}:
|
|
||||||
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "clanlib-0.8.0";
|
|
||||||
src = fetchurl {
|
|
||||||
url = http://www.clanlib.org/download/releases-0.8/ClanLib-0.8.0.tgz;
|
|
||||||
sha256 = "1rjr601h3hisrhvpkrj00wirx5hyfbppv9rla400wx7a42xvvyfy";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = [zlib libpng libjpeg
|
|
||||||
libvorbis libogg libX11
|
|
||||||
xf86vidmodeproto libXmu
|
|
||||||
mesa libXxf86vm
|
|
||||||
];
|
|
||||||
}
|
|
@ -1,7 +1,7 @@
|
|||||||
{stdenv, fetchurl}:
|
{stdenv, fetchurl}:
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
version = "2.5.0.0-1";
|
version = "3.12.2";
|
||||||
name = "dxflib-${version}";
|
name = "dxflib-${version}";
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "http://www.qcad.org/archives/dxflib/${name}.src.tar.gz";
|
url = "http://www.qcad.org/archives/dxflib/${name}.src.tar.gz";
|
||||||
|
@ -21,6 +21,8 @@ composableDerivation.composableDerivation {} {
|
|||||||
--replace 'class Fl_XFont_On_Demand' 'class FL_EXPORT Fl_XFont_On_Demand'
|
--replace 'class Fl_XFont_On_Demand' 'class FL_EXPORT Fl_XFont_On_Demand'
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
patches = stdenv.lib.optionals stdenv.isDarwin [ ./nsosv.patch ];
|
||||||
|
|
||||||
nativeBuildInputs = [ pkgconfig ];
|
nativeBuildInputs = [ pkgconfig ];
|
||||||
propagatedBuildInputs = [ inputproto ]
|
propagatedBuildInputs = [ inputproto ]
|
||||||
++ (if stdenv.isDarwin
|
++ (if stdenv.isDarwin
|
||||||
|
pkgs/development/libraries/fltk/nsosv.patch (new file)
@@ -0,0 +1,20 @@
|
|||||||
|
diff --git a/src/Fl_cocoa.mm b/src/Fl_cocoa.mm
|
||||||
|
index 6f5b8b1..2c7763d 100644
|
||||||
|
--- a/src/Fl_cocoa.mm
|
||||||
|
+++ b/src/Fl_cocoa.mm
|
||||||
|
@@ -4074,15 +4074,6 @@ Window fl_xid(const Fl_Window* w)
|
||||||
|
static int calc_mac_os_version() {
|
||||||
|
int M, m, b = 0;
|
||||||
|
NSAutoreleasePool *localPool = [[NSAutoreleasePool alloc] init];
|
||||||
|
-#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_10
|
||||||
|
- if ([NSProcessInfo instancesRespondToSelector:@selector(operatingSystemVersion)]) {
|
||||||
|
- NSOperatingSystemVersion version = [[NSProcessInfo processInfo] operatingSystemVersion];
|
||||||
|
- M = version.majorVersion;
|
||||||
|
- m = version.minorVersion;
|
||||||
|
- b = version.patchVersion;
|
||||||
|
- }
|
||||||
|
- else
|
||||||
|
-#endif
|
||||||
|
{
|
||||||
|
NSDictionary * sv = [NSDictionary dictionaryWithContentsOfFile:@"/System/Library/CoreServices/SystemVersion.plist"];
|
||||||
|
const char *s = [[sv objectForKey:@"ProductVersion"] UTF8String];
|
@ -2,11 +2,11 @@
|
|||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "gsoap-${version}";
|
name = "gsoap-${version}";
|
||||||
version = "2.8.16";
|
version = "2.8.37";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "mirror://sourceforge/project/gsoap2/gSOAP/gsoap_${version}.zip";
|
url = "mirror://sourceforge/project/gsoap2/gsoap-2.8/gsoap_${version}.zip";
|
||||||
sha256 = "00lhhysa9f9ychkvn1ij0ngr54l1dl9ww801yrliwq5c05gql7a6";
|
sha256 = "1nvf5hgwff1agqdzbn3qc5569jzm14qkwqws0673z6hv2l3lijx3";
|
||||||
};
|
};
|
||||||
|
|
||||||
buildInputs = [ unzip m4 bison flex openssl zlib ];
|
buildInputs = [ unzip m4 bison flex openssl zlib ];
|
||||||
|
@ -1,24 +0,0 @@
|
|||||||
{ stdenv, fetchurl, pkgconfig, gnutls, zlib }:
|
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
|
||||||
name = "iksemel-${version}";
|
|
||||||
version = "1.4";
|
|
||||||
|
|
||||||
src = fetchurl {
|
|
||||||
url = "https://iksemel.googlecode.com/files/${name}.tar.gz";
|
|
||||||
sha1 = "722910b99ce794fd3f6f0e5f33fa804732cf46db";
|
|
||||||
};
|
|
||||||
|
|
||||||
preConfigure = ''
|
|
||||||
sed -i -e '/if.*gnutls_check_version/,/return 1;/c return 0;' configure
|
|
||||||
export LIBGNUTLS_CONFIG="${pkgconfig}/bin/pkg-config gnutls"
|
|
||||||
'';
|
|
||||||
|
|
||||||
buildInputs = [ pkgconfig gnutls zlib ];
|
|
||||||
|
|
||||||
meta = {
|
|
||||||
homepage = "https://code.google.com/p/iksemel/";
|
|
||||||
license = stdenv.lib.licenses.lgpl21Plus;
|
|
||||||
description = "Fast and portable XML parser and Jabber protocol library";
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,27 +1,13 @@
|
|||||||
{ stdenv, fetchurl, fetchpatch, libjpeg, autoreconfHook }:
|
{ stdenv, fetchurl, fetchpatch, libjpeg, autoreconfHook }:
|
||||||
|
|
||||||
stdenv.mkDerivation rec {
|
stdenv.mkDerivation rec {
|
||||||
name = "jasper-1.900.2";
|
name = "jasper-1.900.21";
|
||||||
|
|
||||||
src = fetchurl {
|
src = fetchurl {
|
||||||
url = "http://www.ece.uvic.ca/~mdadams/jasper/software/${name}.tar.gz";
|
url = "http://www.ece.uvic.ca/~mdadams/jasper/software/${name}.tar.gz";
|
||||||
sha256 = "0bkibjhq3js2ldxa2f9pss84lcx4f5d3v0qis3ifi11ciy7a6c9a";
|
sha256 = "1cypmlzq5vmbacsn8n3ls9p7g64scv3fzx88qf8c270dz10s5j79";
|
||||||
};
|
};
|
||||||
|
|
||||||
patches = [
|
|
||||||
./jasper-CVE-2014-8137-variant2.diff
|
|
||||||
./jasper-CVE-2014-8137-noabort.diff
|
|
||||||
|
|
||||||
(fetchpatch { # CVE-2016-2089
|
|
||||||
url = "https://github.com/mdadams/jasper/commit/aa6d9c2bbae9155f8e1466295373a68fa97291c3.patch";
|
|
||||||
sha256 = "1pxnm86zmbq6brfwsm5wx3iv7s92n4xilc52lzp61q266jmlggrf";
|
|
||||||
})
|
|
||||||
(fetchpatch { # CVE-2015-5203
|
|
||||||
url = "https://github.com/mdadams/jasper/commit/e73bb58f99fec0bf9c5d8866e010fcf736a53b9a.patch";
|
|
||||||
sha256 = "1r6hxbnhpnb7q6p2kbdxc1cpph3ic851x2hy477yv5c3qmrbx9bk";
|
|
||||||
})
|
|
||||||
];
|
|
||||||
|
|
||||||
# newer reconf to recognize a multiout flag
|
# newer reconf to recognize a multiout flag
|
||||||
nativeBuildInputs = [ autoreconfHook ];
|
nativeBuildInputs = [ autoreconfHook ];
|
||||||
propagatedBuildInputs = [ libjpeg ];
|
propagatedBuildInputs = [ libjpeg ];
|
||||||
|
@ -1,16 +0,0 @@
|
|||||||
From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967284&action=diff
|
|
||||||
|
|
||||||
--- jasper-1.900.1.orig/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:30:54.193209780 +0100
|
|
||||||
+++ jasper-1.900.1/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:36:46.313217814 +0100
|
|
||||||
@@ -291,7 +291,10 @@ jas_image_t *jp2_decode(jas_stream_t *in
|
|
||||||
case JP2_COLR_ICC:
|
|
||||||
iccprof = jas_iccprof_createfrombuf(dec->colr->data.colr.iccp,
|
|
||||||
dec->colr->data.colr.iccplen);
|
|
||||||
- assert(iccprof);
|
|
||||||
+ if (!iccprof) {
|
|
||||||
+ jas_eprintf("error: failed to parse ICC profile\n");
|
|
||||||
+ goto error;
|
|
||||||
+ }
|
|
||||||
jas_iccprof_gethdr(iccprof, &icchdr);
|
|
||||||
jas_eprintf("ICC Profile CS %08x\n", icchdr.colorspc);
|
|
||||||
jas_image_setclrspc(dec->image, fromiccpcs(icchdr.colorspc));
|
|
@@ -1,45 +0,0 @@
From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967283&action=diff

--- jasper-1.900.1.orig/src/libjasper/base/jas_icc.c 2014-12-11 14:06:44.000000000 +0100
+++ jasper-1.900.1/src/libjasper/base/jas_icc.c 2014-12-11 15:16:37.971272386 +0100
@@ -1009,7 +1009,6 @@ static int jas_icccurv_input(jas_iccattr
return 0;

error:
- jas_icccurv_destroy(attrval);
return -1;
}

@@ -1127,7 +1126,6 @@ static int jas_icctxtdesc_input(jas_icca
#endif
return 0;
error:
- jas_icctxtdesc_destroy(attrval);
return -1;
}

@@ -1206,8 +1204,6 @@ static int jas_icctxt_input(jas_iccattrv
goto error;
return 0;
error:
- if (txt->string)
- jas_free(txt->string);
return -1;
}

@@ -1328,7 +1324,6 @@ static int jas_icclut8_input(jas_iccattr
goto error;
return 0;
error:
- jas_icclut8_destroy(attrval);
return -1;
}

@@ -1497,7 +1492,6 @@ static int jas_icclut16_input(jas_iccatt
goto error;
return 0;
error:
- jas_icclut16_destroy(attrval);
return -1;
}

@@ -1,16 +0,0 @@
From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=967280&action=diff

--- jasper-1.900.1.orig/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:06:44.000000000 +0100
+++ jasper-1.900.1/src/libjasper/jp2/jp2_dec.c 2014-12-11 14:06:26.000000000 +0100
@@ -386,6 +386,11 @@ jas_image_t *jp2_decode(jas_stream_t *in
/* Determine the type of each component. */
if (dec->cdef) {
for (i = 0; i < dec->numchans; ++i) {
+ /* Is the channel number reasonable? */
+ if (dec->cdef->data.cdef.ents[i].channo >= dec->numchans) {
+ jas_eprintf("error: invalid channel number in CDEF box\n");
+ goto error;
+ }
jas_image_setcmpttype(dec->image,
dec->chantocmptlut[dec->cdef->data.cdef.ents[i].channo],
jp2_getct(jas_image_clrspc(dec->image),
@@ -1,12 +0,0 @@
diff -up jasper-1.900.1/src/libjasper/jpc/jpc_dec.c.CVE-2014-8157 jasper-1.900.1/src/libjasper/jpc/jpc_dec.c
--- jasper-1.900.1/src/libjasper/jpc/jpc_dec.c.CVE-2014-8157 2015-01-19 16:59:36.000000000 +0100
+++ jasper-1.900.1/src/libjasper/jpc/jpc_dec.c 2015-01-19 17:07:41.609863268 +0100
@@ -489,7 +489,7 @@ static int jpc_dec_process_sot(jpc_dec_t
dec->curtileendoff = 0;
}

- if (JAS_CAST(int, sot->tileno) > dec->numtiles) {
+ if (JAS_CAST(int, sot->tileno) >= dec->numtiles) {
jas_eprintf("invalid tile number in SOT marker segment\n");
return -1;
}
@@ -1,329 +0,0 @@
diff -up jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c.CVE-2014-8158 jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c
--- jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c.CVE-2014-8158 2015-01-19 17:25:28.730195502 +0100
+++ jasper-1.900.1/src/libjasper/jpc/jpc_qmfb.c 2015-01-19 17:27:20.214663127 +0100
@@ -306,11 +306,7 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
{

int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -318,7 +314,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
register int m;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -326,7 +321,6 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
abort();
}
}
-#endif

if (numcols >= 2) {
hstartcol = (numcols + 1 - parity) >> 1;
@@ -360,12 +354,10 @@ void jpc_qmfb_split_row(jpc_fix_t *a, in
}
}

-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif

}

@@ -374,11 +366,7 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE];
-#else
- jpc_fix_t splitbuf[bufsize];
-#endif
jpc_fix_t *buf = splitbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
@@ -386,7 +374,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
register int m;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -394,7 +381,6 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
abort();
}
}
-#endif

if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -428,12 +414,10 @@ void jpc_qmfb_split_col(jpc_fix_t *a, in
}
}

-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif

}

@@ -442,11 +426,7 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -457,7 +437,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
int m;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -465,7 +444,6 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif

if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -517,12 +495,10 @@ void jpc_qmfb_split_colgrp(jpc_fix_t *a,
}
}

-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif

}

@@ -531,11 +507,7 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t splitbuf[QMFB_SPLITBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t splitbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = splitbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -546,7 +518,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
int m;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Get a buffer. */
if (bufsize > QMFB_SPLITBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -554,7 +525,6 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
abort();
}
}
-#endif

if (numrows >= 2) {
hstartcol = (numrows + 1 - parity) >> 1;
@@ -606,12 +576,10 @@ void jpc_qmfb_split_colres(jpc_fix_t *a,
}
}

-#if !defined(HAVE_VLA)
/* If the split buffer was allocated on the heap, free this memory. */
if (buf != splitbuf) {
jas_free(buf);
}
-#endif

}

@@ -619,18 +587,13 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
{

int bufsize = JPC_CEILDIVPOW2(numcols, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -638,7 +601,6 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
abort();
}
}
-#endif

hstartcol = (numcols + 1 - parity) >> 1;

@@ -670,12 +632,10 @@ void jpc_qmfb_join_row(jpc_fix_t *a, int
++srcptr;
}

-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif

}

@@ -684,18 +644,13 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE];
-#else
- jpc_fix_t joinbuf[bufsize];
-#endif
jpc_fix_t *buf = joinbuf;
register jpc_fix_t *srcptr;
register jpc_fix_t *dstptr;
register int n;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, sizeof(jpc_fix_t)))) {
@@ -703,7 +658,6 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
abort();
}
}
-#endif

hstartcol = (numrows + 1 - parity) >> 1;

@@ -735,12 +689,10 @@ void jpc_qmfb_join_col(jpc_fix_t *a, int
++srcptr;
}

-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif

}

@@ -749,11 +701,7 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * JPC_QMFB_COLGRPSIZE];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -763,7 +711,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
register int i;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc2(bufsize, JPC_QMFB_COLGRPSIZE * sizeof(jpc_fix_t)))) {
@@ -771,7 +718,6 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
abort();
}
}
-#endif

hstartcol = (numrows + 1 - parity) >> 1;

@@ -821,12 +767,10 @@ void jpc_qmfb_join_colgrp(jpc_fix_t *a,
srcptr += JPC_QMFB_COLGRPSIZE;
}

-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif

}

@@ -835,11 +779,7 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
{

int bufsize = JPC_CEILDIVPOW2(numrows, 1);
-#if !defined(HAVE_VLA)
jpc_fix_t joinbuf[QMFB_JOINBUFSIZE * JPC_QMFB_COLGRPSIZE];
-#else
- jpc_fix_t joinbuf[bufsize * numcols];
-#endif
jpc_fix_t *buf = joinbuf;
jpc_fix_t *srcptr;
jpc_fix_t *dstptr;
@@ -849,7 +789,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
register int i;
int hstartcol;

-#if !defined(HAVE_VLA)
/* Allocate memory for the join buffer from the heap. */
if (bufsize > QMFB_JOINBUFSIZE) {
if (!(buf = jas_alloc3(bufsize, numcols, sizeof(jpc_fix_t)))) {
@@ -857,7 +796,6 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
abort();
}
}
-#endif

hstartcol = (numrows + 1 - parity) >> 1;

@@ -907,12 +845,10 @@ void jpc_qmfb_join_colres(jpc_fix_t *a,
srcptr += numcols;
}

-#if !defined(HAVE_VLA)
/* If the join buffer was allocated on the heap, free this memory. */
if (buf != joinbuf) {
jas_free(buf);
}
-#endif

}

@@ -1,31 +0,0 @@
From RedHat: https://bugzilla.redhat.com/attachment.cgi?id=961994&action=diff

--- jasper-1.900.1.orig/src/libjasper/jpc/jpc_dec.c 2014-11-27 12:45:44.000000000 +0100
+++ jasper-1.900.1/src/libjasper/jpc/jpc_dec.c 2014-11-27 12:44:58.000000000 +0100
@@ -1281,7 +1281,7 @@ static int jpc_dec_process_coc(jpc_dec_t
jpc_coc_t *coc = &ms->parms.coc;
jpc_dec_tile_t *tile;

- if (JAS_CAST(int, coc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, coc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in COC marker segment\n");
return -1;
}
@@ -1307,7 +1307,7 @@ static int jpc_dec_process_rgn(jpc_dec_t
jpc_rgn_t *rgn = &ms->parms.rgn;
jpc_dec_tile_t *tile;

- if (JAS_CAST(int, rgn->compno) > dec->numcomps) {
+ if (JAS_CAST(int, rgn->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in RGN marker segment\n");
return -1;
}
@@ -1356,7 +1356,7 @@ static int jpc_dec_process_qcc(jpc_dec_t
jpc_qcc_t *qcc = &ms->parms.qcc;
jpc_dec_tile_t *tile;

- if (JAS_CAST(int, qcc->compno) > dec->numcomps) {
+ if (JAS_CAST(int, qcc->compno) >= dec->numcomps) {
jas_eprintf("invalid component number in QCC marker segment\n");
return -1;
}
@@ -1,11 +0,0 @@
--- jasper-1.900.1/src/libjasper/jpc/jpc_t2cod.c 2007-01-19 22:43:07.000000000 +0100
+++ jasper-1.900.1/src/libjasper/jpc/jpc_t2cod.c 2016-01-14 14:22:24.569056412 +0100
@@ -429,7 +429,7 @@
}

for (pi->compno = pchg->compnostart, pi->picomp =
- &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend); ++pi->compno,
+ &pi->picomps[pi->compno]; pi->compno < JAS_CAST(int, pchg->compnoend) && pi->compno < pi->numcomps; ++pi->compno,
++pi->picomp) {
pirlvl = pi->picomp->pirlvls;
pi->xstep = pi->picomp->hsamp * (1 << (pirlvl->prcwidthexpn +
@@ -1,13 +0,0 @@
{stdenv, fetchurl}:

stdenv.mkDerivation {
  name = "jetty-gwt-6.1.14";
  src = fetchurl {
    url = http://repository.codehaus.org/org/mortbay/jetty/jetty-gwt/6.1.14/jetty-gwt-6.1.14.jar;
    sha256 = "17x8ss75rx9xjn93rq861mdn9d6gw87rbrf24blawa6ahhb56ppf";
  };
  buildCommand = ''
    mkdir -p $out/share/java
    cp $src $out/share/java/$name.jar
  '';
}
@@ -1,13 +0,0 @@
{stdenv, fetchurl}:

stdenv.mkDerivation {
  name = "jetty-util-6.1.16";
  src = fetchurl {
    url = http://repository.codehaus.org/org/mortbay/jetty/jetty-util/6.1.16/jetty-util-6.1.16.jar;
    sha256 = "1ld94lb5dk7y6sjg1rq8zdk97wiy56ik5vbgy7yjj4f6rz5pxbyq";
  };
  buildCommand = ''
    mkdir -p $out/share/java
    cp $src $out/share/java/$name.jar
  '';
}
@@ -1,4 +1,4 @@
-{stdenv, fetchurl, unzip}:
+{ stdenv, fetchurl, unzip }:

 stdenv.mkDerivation {
   name = "saxon-6.5.3";
@@ -8,8 +8,13 @@ stdenv.mkDerivation {
     md5 = "7b8c7c187473c04d2abdb40d8ddab5c6";
   };

-  inherit unzip;
-  buildInputs = [unzip];
+  nativeBuildInputs = [ unzip ];
+
+  # still leaving in root as well, in case someone is relying on that
+  preFixup = ''
+    mkdir -p "$out/share/java"
+    cp -s "$out"/*.jar "$out/share/java/"
+  '';

   meta = {
     platforms = stdenv.lib.platforms.unix;

@@ -1,3 +1,6 @@
 source $stdenv/setup

 unzip $src -d $out
+
+fixupPhase
+
@@ -1,11 +1,14 @@
-{ kdeFramework, lib
-, ecm
-, ilmbase
+{
+  kdeFramework, lib,
+  ecm,
+  ilmbase, karchive
 }:

 kdeFramework {
   name = "kimageformats";
   meta = { maintainers = [ lib.maintainers.ttuegel ]; };
   nativeBuildInputs = [ ecm ];
+  buildInputs = [ ilmbase ];
+  propagatedBuildInputs = [ karchive ];
   NIX_CFLAGS_COMPILE = "-I${ilmbase.dev}/include/OpenEXR";
 }
@@ -1,14 +1,19 @@
-{ stdenv, fetchurl, libtiff }:
+{ stdenv, fetchurl, libtiff, libjpeg, proj, zlib}:

-stdenv.mkDerivation {
-  name = "libgeotiff-1.2.5";
+stdenv.mkDerivation rec {
+  version = "1.4.2";
+  name = "libgeotiff-${version}";

   src = fetchurl {
-    url = http://download.osgeo.org/geotiff/libgeotiff/libgeotiff-1.2.5.tar.gz;
-    sha256 = "0z2yx77pm0zs81hc0b4lwzdd5s0rxcbylnscgq80b649src1fyzj";
+    url = "http://download.osgeo.org/geotiff/libgeotiff/${name}.tar.gz";
+    sha256 = "0vjy3bwfhljjx66p9w999i4mdhsf7vjshx29yc3pn5livf5091xd";
   };

-  buildInputs = [ libtiff ];
+  configureFlags = [
+    "--with-jpeg=${libjpeg.dev}"
+    "--with-zlib=${zlib.dev}"
+  ];
+  buildInputs = [ libtiff proj ];

   hardeningDisable = [ "format" ];

@@ -1,15 +1,15 @@
 { stdenv, fetchurl, pkgconfig, curl, openssl }:

 stdenv.mkDerivation rec {
-  name = "liblastfm-SF-0.3.2";
+  name = "liblastfm-SF-0.5";

   buildInputs = [ pkgconfig ];

   propagatedBuildInputs = [ curl openssl ];

   src = fetchurl {
-    url = "mirror://sourceforge/liblastfm/liblastfm-0.3.2.tar.gz";
-    sha256 = "1hk62giysi96h6cyjyph69nlv1v4vw45w3sx7i2m89i9aysd6qp7";
+    url = "mirror://sourceforge/liblastfm/libclastfm-0.5.tar.gz";
+    sha256 = "0hpfflvfx6r4vvsbvdc564gkby8kr07p8ma7hgpxiy2pnlbpian9";
   };

   meta = {
@@ -1,19 +0,0 @@
{ stdenv, fetchurl, cmake, openssl }:

stdenv.mkDerivation rec {
  name = "minmay-${version}";
  version = "1.0.0";

  src = fetchurl {
    url = "https://github.com/mazhe/minmay/archive/1.0.0.tar.gz";
    sha256 = "1amycxvhbd0lv6j5zsvxiwrx29jvndcy856j3b3bisys24h95zw2";
  };

  buildInputs = [ cmake openssl ];

  meta = {
    homepage = "https://github.com/mazhe/minmay";
    license = stdenv.lib.licenses.lgpl21Plus;
    description = "An XMPP library (forked from the iksemel project)";
  };
}
@@ -1,43 +1,41 @@
-{ stdenv, fetchsvn, cmake, libpng, ilmbase, libtiff, zlib, libjpeg
-, mesa, libX11
-}:
+{ stdenv, fetchFromGitHub, cmake }:

 stdenv.mkDerivation rec {
-  # No support yet for cg, cuda, glew, glut, openexr.
-
-  name = "nvidia-texture-tools-1388";
-
-  src = fetchsvn {
-    url = "http://nvidia-texture-tools.googlecode.com/svn/trunk";
-    rev = "1388";
-    sha256 = "0pwxqx5l16nqidzm6mwd3rd4gbbknkz6q8cxnvf7sggjpbcvm2d6";
+  name = "nvidia-texture-tools-${version}";
+  version = "2.1.0";
+
+  src = fetchFromGitHub {
+    owner = "castano";
+    repo = "nvidia-texture-tools";
+    rev = version;
+    sha256 = "0p8ja0k323nkgm07z0qlslg6743vimy9rf3wad2968az0vwzjjyx";
   };

-  buildInputs = [ cmake libpng ilmbase libtiff zlib libjpeg mesa libX11 ];
+  nativeBuildInputs = [ cmake ];

-  hardeningDisable = [ "format" ];
+  outputs = [ "out" "dev" "lib" ];

-  patchPhase = ''
-    # Fix build due to missing dependnecies.
-    echo 'target_link_libraries(bc7 nvmath)' >> src/nvtt/bc7/CMakeLists.txt
-    echo 'target_link_libraries(bc6h nvmath)' >> src/nvtt/bc6h/CMakeLists.txt
-
+  postPatch = ''
     # Make a recently added pure virtual function just virtual,
     # to keep compatibility.
     sed -i 's/virtual void endImage() = 0;/virtual void endImage() {}/' src/nvtt/nvtt.h
-
-    # Fix building shared libraries.
-    sed -i 's/SET(NVIMAGE_SHARED TRUE)/SET(NVIMAGE_SHARED TRUE)\nSET(NVTHREAD_SHARED TRUE)/' CMakeLists.txt
   '';

   cmakeFlags = [
     "-DNVTT_SHARED=TRUE"
   ];

-  meta = {
+  postInstall = ''
+    moveToOutput include "$dev"
+    moveToOutput lib "$lib"
+  '';
+
+  enableParallelBuilding = true;
+
+  meta = with stdenv.lib; {
     description = "A set of cuda-enabled texture tools and compressors";
-    homepage = "http://developer.nvidia.com/object/texture_tools.html";
-    license = stdenv.lib.licenses.mit;
-    platforms = stdenv.lib.platforms.linux;
+    homepage = "https://github.com/castano/nvidia-texture-tools";
+    license = licenses.mit;
+    platforms = platforms.linux;
   };
 }
@@ -1,8 +0,0 @@
{ callPackage, ... } @ args:

callPackage ./generic.nix (args // rec {
  version = "2.0.1";
  branch = "2";
  revision = "version.2.0.1";
  sha256 = "03d0r8x66cxri9i20nr9gm1jnkp85yyd8mkrbmawv5nvybd0r7wv";
})
@@ -1,11 +1,14 @@
-{ stdenv, fetchurl, cmake, qt4 }:
+{ stdenv, fetchFromGitHub, cmake, qt4 }:

 stdenv.mkDerivation rec {
-  name = "qjson-0.8.1";
+  version = "0.8.1";
+  name = "qjson-${version}";

-  src = fetchurl {
-    url = "mirror://sourceforge/qjson/${name}.tar.bz2";
-    sha256 = "1n8lr2ph08yhcgimf4q1pnkd4z15v895bsf3m68ljz14aswvakfd";
+  src = fetchFromGitHub {
+    owner = "flavio";
+    repo = "qjson";
+    rev = "${version}";
+    sha256 = "1rb3ydrhyd4bczqzfv0kqpi2mx4hlpq1k8jvnwpcmvyaypqpqg59";
   };

   buildInputs = [ cmake qt4 ];
@@ -94,7 +94,7 @@ let
    qttranslations = callPackage ./qttranslations.nix {};
    qtwayland = callPackage ./qtwayland.nix {};
    qtwebchannel = callPackage ./qtwebchannel.nix {};
-   qtwebengine = callPackage ./qtwebengine.nix {};
+   qtwebengine = callPackage ./qtwebengine {};
    qtwebkit = callPackage ./qtwebkit {};
    qtwebsockets = callPackage ./qtwebsockets.nix {};
    /* qtwinextras = not packaged */
@@ -0,0 +1,874 @@
--- a/src/3rdparty/chromium/tools/clang/scripts/update.py 2016-05-26 04:58:54.000000000 -0800
|
||||||
|
+++ b/src/3rdparty/chromium/tools/clang/scripts/update.py 2016-11-04 08:35:34.956154012 -0800
|
||||||
|
@@ -3,12 +3,12 @@
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
-"""Windows can't run .sh files, so this is a Python implementation of
|
||||||
|
-update.sh. This script should replace update.sh on all platforms eventually."""
|
||||||
|
+"""This script is used to download prebuilt clang binaries.
|
||||||
|
+
|
||||||
|
+It is also used by package.py to build the prebuilt clang binaries."""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
-import contextlib
|
||||||
|
-import cStringIO
|
||||||
|
+import distutils.spawn
|
||||||
|
import glob
|
||||||
|
import os
|
||||||
|
import pipes
|
||||||
|
@@ -18,6 +18,7 @@
|
||||||
|
import stat
|
||||||
|
import sys
|
||||||
|
import tarfile
|
||||||
|
+import tempfile
|
||||||
|
import time
|
||||||
|
import urllib2
|
||||||
|
import zipfile
|
||||||
|
@@ -25,19 +26,16 @@
|
||||||
|
# Do NOT CHANGE this if you don't know what you're doing -- see
|
||||||
|
# https://code.google.com/p/chromium/wiki/UpdatingClang
|
||||||
|
# Reverting problematic clang rolls is safe, though.
|
||||||
|
-# Note: this revision is only used for Windows. Other platforms use update.sh.
|
||||||
|
-# TODO(thakis): Use the same revision on Windows and non-Windows.
|
||||||
|
-# TODO(thakis): Remove update.sh, use update.py everywhere.
|
||||||
|
-LLVM_WIN_REVISION = '239674'
|
||||||
|
+CLANG_REVISION = '239674'
|
||||||
|
|
||||||
|
use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
|
||||||
|
if use_head_revision:
|
||||||
|
- LLVM_WIN_REVISION = 'HEAD'
|
||||||
|
+ CLANG_REVISION = 'HEAD'
|
||||||
|
|
||||||
|
# This is incremented when pushing a new build of Clang at the same revision.
|
||||||
|
CLANG_SUB_REVISION=1
|
||||||
|
|
||||||
|
-PACKAGE_VERSION = "%s-%s" % (LLVM_WIN_REVISION, CLANG_SUB_REVISION)
|
||||||
|
+PACKAGE_VERSION = "%s-%s" % (CLANG_REVISION, CLANG_SUB_REVISION)
|
||||||
|
|
||||||
|
# Path constants. (All of these should be absolute paths.)
|
||||||
|
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
@@ -50,17 +48,26 @@
|
||||||
|
CHROME_TOOLS_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'chrometools')
|
||||||
|
LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build',
|
||||||
|
'Release+Asserts')
|
||||||
|
-COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, '32bit-compiler-rt')
|
||||||
|
+COMPILER_RT_BUILD_DIR = os.path.join(LLVM_BUILD_DIR, 'compiler-rt')
|
||||||
|
CLANG_DIR = os.path.join(LLVM_DIR, 'tools', 'clang')
|
||||||
|
LLD_DIR = os.path.join(LLVM_DIR, 'tools', 'lld')
|
||||||
|
-COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt')
|
||||||
|
+# compiler-rt is built as part of the regular LLVM build on Windows to get
|
||||||
|
+# the 64-bit runtime, and out-of-tree elsewhere.
|
||||||
|
+# TODO(thakis): Try to unify this.
|
||||||
|
+if sys.platform == 'win32':
|
||||||
|
+ COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'projects', 'compiler-rt')
|
||||||
|
+else:
|
||||||
|
+ COMPILER_RT_DIR = os.path.join(LLVM_DIR, 'compiler-rt')
|
||||||
|
LIBCXX_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxx')
|
||||||
|
LIBCXXABI_DIR = os.path.join(LLVM_DIR, 'projects', 'libcxxabi')
|
||||||
|
LLVM_BUILD_TOOLS_DIR = os.path.abspath(
|
||||||
|
os.path.join(LLVM_DIR, '..', 'llvm-build-tools'))
|
||||||
|
-STAMP_FILE = os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision')
|
||||||
|
+STAMP_FILE = os.path.normpath(
|
||||||
|
+ os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision'))
|
||||||
|
BINUTILS_DIR = os.path.join(THIRD_PARTY_DIR, 'binutils')
|
||||||
|
-VERSION = '3.7.0'
|
||||||
|
+VERSION = '3.8.0'
|
||||||
|
+ANDROID_NDK_DIR = os.path.join(
|
||||||
|
+ CHROMIUM_DIR, 'third_party', 'android_tools', 'ndk')
|
||||||
|
|
||||||
|
# URL for pre-built binaries.
|
||||||
|
CDS_URL = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
|
||||||
|
@@ -74,40 +81,75 @@
|
||||||
|
"""Download url into output_file."""
|
||||||
|
CHUNK_SIZE = 4096
|
||||||
|
TOTAL_DOTS = 10
|
||||||
|
- sys.stdout.write('Downloading %s ' % url)
|
||||||
|
- sys.stdout.flush()
|
||||||
|
- response = urllib2.urlopen(url)
|
||||||
|
- total_size = int(response.info().getheader('Content-Length').strip())
|
||||||
|
- bytes_done = 0
|
||||||
|
- dots_printed = 0
|
||||||
|
+ num_retries = 3
|
||||||
|
+ retry_wait_s = 5 # Doubled at each retry.
|
||||||
|
+
|
||||||
|
while True:
|
||||||
|
- chunk = response.read(CHUNK_SIZE)
|
||||||
|
- if not chunk:
|
||||||
|
- break
|
||||||
|
- output_file.write(chunk)
|
||||||
|
- bytes_done += len(chunk)
|
||||||
|
- num_dots = TOTAL_DOTS * bytes_done / total_size
|
||||||
|
- sys.stdout.write('.' * (num_dots - dots_printed))
|
||||||
|
- sys.stdout.flush()
|
||||||
|
- dots_printed = num_dots
|
||||||
|
- print ' Done.'
|
||||||
|
+ try:
|
||||||
|
+ sys.stdout.write('Downloading %s ' % url)
|
||||||
|
+ sys.stdout.flush()
|
||||||
|
+ response = urllib2.urlopen(url)
|
||||||
|
+ total_size = int(response.info().getheader('Content-Length').strip())
|
||||||
|
+ bytes_done = 0
|
||||||
|
+ dots_printed = 0
|
||||||
|
+ while True:
|
||||||
|
+ chunk = response.read(CHUNK_SIZE)
|
||||||
|
+ if not chunk:
|
||||||
|
+ break
|
||||||
|
+ output_file.write(chunk)
|
||||||
|
+ bytes_done += len(chunk)
|
||||||
|
+ num_dots = TOTAL_DOTS * bytes_done / total_size
|
||||||
|
+ sys.stdout.write('.' * (num_dots - dots_printed))
|
||||||
|
+ sys.stdout.flush()
|
||||||
|
+ dots_printed = num_dots
|
||||||
|
+ if bytes_done != total_size:
|
||||||
|
+ raise urllib2.URLError("only got %d of %d bytes" %
|
||||||
|
+ (bytes_done, total_size))
|
||||||
|
+ print ' Done.'
|
||||||
|
+ return
|
||||||
|
+ except urllib2.URLError as e:
|
||||||
|
+ sys.stdout.write('\n')
|
||||||
|
+ print e
|
||||||
|
+ if num_retries == 0 or isinstance(e, urllib2.HTTPError) and e.code == 404:
|
||||||
|
+ raise e
|
||||||
|
+ num_retries -= 1
|
||||||
|
+ print 'Retrying in %d s ...' % retry_wait_s
|
||||||
|
+ time.sleep(retry_wait_s)
|
||||||
|
+ retry_wait_s *= 2
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+def EnsureDirExists(path):
|
||||||
|
+ if not os.path.exists(path):
|
||||||
|
+ print "Creating directory %s" % path
|
||||||
|
+ os.makedirs(path)
|
||||||
|
+
|
||||||
|
+
|
||||||
|
+def DownloadAndUnpack(url, output_dir):
|
||||||
|
+ with tempfile.TemporaryFile() as f:
|
||||||
|
+ DownloadUrl(url, f)
|
||||||
|
+ f.seek(0)
|
||||||
|
+ EnsureDirExists(output_dir)
|
||||||
|
+ if url.endswith('.zip'):
|
||||||
|
+ zipfile.ZipFile(f).extractall(path=output_dir)
|
||||||
|
+ else:
|
||||||
|
+ tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir)
|
||||||
|
|
||||||
|
|
||||||
|
def ReadStampFile():
|
||||||
|
"""Return the contents of the stamp file, or '' if it doesn't exist."""
|
||||||
|
try:
|
||||||
|
with open(STAMP_FILE, 'r') as f:
|
||||||
|
- return f.read()
|
||||||
|
+ return f.read().rstrip()
|
||||||
|
except IOError:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
|
||||||
|
def WriteStampFile(s):
|
||||||
|
"""Write s to the stamp file."""
|
||||||
|
- if not os.path.exists(os.path.dirname(STAMP_FILE)):
|
||||||
|
- os.makedirs(os.path.dirname(STAMP_FILE))
|
||||||
|
+ EnsureDirExists(os.path.dirname(STAMP_FILE))
|
||||||
|
with open(STAMP_FILE, 'w') as f:
|
||||||
|
f.write(s)
|
||||||
|
+ f.write('\n')
|
||||||
|
|
||||||
|
|
||||||
|
def GetSvnRevision(svn_repo):
|
||||||
|
@@ -129,6 +171,13 @@
|
||||||
|
shutil.rmtree(dir, onerror=ChmodAndRetry)
|
||||||
|
|
||||||
|
|
||||||
|
+def RmCmakeCache(dir):
|
||||||
|
+ """Delete CMakeCache.txt files under dir recursively."""
|
||||||
|
+ for dirpath, _, files in os.walk(dir):
|
||||||
|
+ if 'CMakeCache.txt' in files:
|
||||||
|
+ os.remove(os.path.join(dirpath, 'CMakeCache.txt'))
|
||||||
|
+
|
||||||
|
+
|
||||||
|
def RunCommand(command, msvc_arch=None, env=None, fail_hard=True):
|
||||||
|
"""Run command and return success (True) or failure; or if fail_hard is
|
||||||
|
True, exit on failure. If msvc_arch is set, runs the command in a
|
||||||
|
@@ -170,8 +219,8 @@
|
||||||
|
def CopyDirectoryContents(src, dst, filename_filter=None):
|
||||||
|
"""Copy the files from directory src to dst
|
||||||
|
with an optional filename filter."""
|
||||||
|
- if not os.path.exists(dst):
|
||||||
|
- os.makedirs(dst)
|
||||||
|
+ dst = os.path.realpath(dst) # realpath() in case dst ends in /..
|
||||||
|
+ EnsureDirExists(dst)
|
||||||
|
for root, _, files in os.walk(src):
|
||||||
|
for f in files:
|
||||||
|
if filename_filter and not re.match(filename_filter, f):
|
||||||
|
@@ -181,9 +230,9 @@
|
||||||
|
|
||||||
|
def Checkout(name, url, dir):
|
||||||
|
"""Checkout the SVN module at url into dir. Use name for the log message."""
|
||||||
|
- print "Checking out %s r%s into '%s'" % (name, LLVM_WIN_REVISION, dir)
|
||||||
|
+ print "Checking out %s r%s into '%s'" % (name, CLANG_REVISION, dir)
|
||||||
|
|
||||||
|
- command = ['svn', 'checkout', '--force', url + '@' + LLVM_WIN_REVISION, dir]
|
||||||
|
+ command = ['svn', 'checkout', '--force', url + '@' + CLANG_REVISION, dir]
|
||||||
|
if RunCommand(command, fail_hard=False):
|
||||||
|
return
|
||||||
|
|
||||||
|
@@ -195,120 +244,9 @@
|
||||||
|
RunCommand(command)
|
||||||
|
|
||||||
|
|
||||||
|
-def RevertPreviouslyPatchedFiles():
|
||||||
|
- print 'Reverting previously patched files'
|
||||||
|
- files = [
|
||||||
|
- '%(clang)s/test/Index/crash-recovery-modules.m',
|
||||||
|
- '%(clang)s/unittests/libclang/LibclangTest.cpp',
|
||||||
|
- '%(compiler_rt)s/lib/asan/asan_rtl.cc',
|
||||||
|
- '%(compiler_rt)s/test/asan/TestCases/Linux/new_array_cookie_test.cc',
|
||||||
|
- '%(llvm)s/test/DebugInfo/gmlt.ll',
|
||||||
|
- '%(llvm)s/lib/CodeGen/SpillPlacement.cpp',
|
||||||
|
- '%(llvm)s/lib/CodeGen/SpillPlacement.h',
|
||||||
|
- '%(llvm)s/lib/Transforms/Instrumentation/MemorySanitizer.cpp',
|
||||||
|
- '%(clang)s/test/Driver/env.c',
|
||||||
|
- '%(clang)s/lib/Frontend/InitPreprocessor.cpp',
|
||||||
|
- '%(clang)s/test/Frontend/exceptions.c',
|
||||||
|
- '%(clang)s/test/Preprocessor/predefined-exceptions.m',
|
||||||
|
- '%(llvm)s/test/Bindings/Go/go.test',
|
||||||
|
- '%(clang)s/lib/Parse/ParseExpr.cpp',
|
||||||
|
- '%(clang)s/lib/Parse/ParseTemplate.cpp',
|
||||||
|
- '%(clang)s/lib/Sema/SemaDeclCXX.cpp',
|
||||||
|
- '%(clang)s/lib/Sema/SemaExprCXX.cpp',
|
||||||
|
- '%(clang)s/test/SemaCXX/default2.cpp',
|
||||||
|
- '%(clang)s/test/SemaCXX/typo-correction-delayed.cpp',
|
||||||
|
- '%(compiler_rt)s/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc',
|
||||||
|
- '%(compiler_rt)s/test/tsan/signal_segv_handler.cc',
|
||||||
|
- '%(compiler_rt)s/lib/sanitizer_common/sanitizer_coverage_libcdep.cc',
|
||||||
|
- '%(compiler_rt)s/cmake/config-ix.cmake',
|
||||||
|
- '%(compiler_rt)s/CMakeLists.txt',
|
||||||
|
- '%(compiler_rt)s/lib/ubsan/ubsan_platform.h',
|
||||||
|
- ]
|
||||||
|
- for f in files:
|
||||||
|
- f = f % {
|
||||||
|
- 'clang': CLANG_DIR,
|
||||||
|
- 'compiler_rt': COMPILER_RT_DIR,
|
||||||
|
- 'llvm': LLVM_DIR,
|
||||||
|
- }
|
||||||
|
- if os.path.exists(f):
|
||||||
|
- os.remove(f) # For unversioned files.
|
||||||
|
- RunCommand(['svn', 'revert', f])
|
||||||
|
-
|
||||||
|
-
|
||||||
|
-def ApplyLocalPatches():
|
||||||
|
- # There's no patch program on Windows by default. We don't need patches on
|
||||||
|
- # Windows yet, and maybe this not working on Windows will motivate us to
|
||||||
|
- # remove patches over time.
|
||||||
|
- assert sys.platform != 'win32'
|
||||||
|
-
|
||||||
|
- # Apply patch for tests failing with --disable-pthreads (llvm.org/PR11974)
|
||||||
|
- clang_patches = [ r"""\
|
||||||
|
---- test/Index/crash-recovery-modules.m (revision 202554)
|
||||||
|
-+++ test/Index/crash-recovery-modules.m (working copy)
|
||||||
|
-@@ -12,6 +12,8 @@
|
||||||
|
-
|
||||||
|
- // REQUIRES: crash-recovery
|
||||||
|
- // REQUIRES: shell
|
||||||
|
-+// XFAIL: *
|
||||||
|
-+// (PR11974)
|
||||||
|
-
|
||||||
|
- @import Crash;
|
||||||
|
-""", r"""\
|
||||||
|
---- unittests/libclang/LibclangTest.cpp (revision 215949)
|
||||||
|
-+++ unittests/libclang/LibclangTest.cpp (working copy)
|
||||||
|
-@@ -431,7 +431,7 @@
|
||||||
|
- EXPECT_EQ(0U, clang_getNumDiagnostics(ClangTU));
|
||||||
|
- }
|
||||||
|
-
|
||||||
|
--TEST_F(LibclangReparseTest, ReparseWithModule) {
|
||||||
|
-+TEST_F(LibclangReparseTest, DISABLED_ReparseWithModule) {
|
||||||
|
- const char *HeaderTop = "#ifndef H\n#define H\nstruct Foo { int bar;";
|
||||||
|
- const char *HeaderBottom = "\n};\n#endif\n";
|
||||||
|
- const char *MFile = "#include \"HeaderFile.h\"\nint main() {"
|
||||||
|
-"""
|
||||||
|
- ]
|
||||||
|
-
|
||||||
|
- # This Go bindings test doesn't work after bootstrap on Linux, PR21552.
|
||||||
|
- llvm_patches = [ r"""\
|
||||||
|
---- test/Bindings/Go/go.test (revision 223109)
|
||||||
|
-+++ test/Bindings/Go/go.test (working copy)
|
||||||
|
-@@ -1,3 +1,3 @@
|
||||||
|
--; RUN: llvm-go test llvm.org/llvm/bindings/go/llvm
|
||||||
|
-+; RUN: true
|
||||||
|
-
|
||||||
|
- ; REQUIRES: shell
|
||||||
|
-"""
|
||||||
|
- ]
|
||||||
|
-
|
||||||
|
- # The UBSan run-time, which is now bundled with the ASan run-time, doesn't
|
||||||
|
- # work on Mac OS X 10.8 (PR23539).
|
||||||
|
- compiler_rt_patches = [ r"""\
|
||||||
|
---- CMakeLists.txt (revision 241602)
|
||||||
|
-+++ CMakeLists.txt (working copy)
|
||||||
|
-@@ -305,6 +305,7 @@
|
||||||
|
- list(APPEND SANITIZER_COMMON_SUPPORTED_OS iossim)
|
||||||
|
- endif()
|
||||||
|
- endif()
|
||||||
|
-+ set(SANITIZER_MIN_OSX_VERSION "10.7")
|
||||||
|
- if(SANITIZER_MIN_OSX_VERSION VERSION_LESS "10.7")
|
||||||
|
- message(FATAL_ERROR "Too old OS X version: ${SANITIZER_MIN_OSX_VERSION}")
|
||||||
|
- endif()
|
||||||
|
-"""
|
||||||
|
- ]
|
||||||
|
-
|
||||||
|
- for path, patches in [(LLVM_DIR, llvm_patches),
|
||||||
|
- (CLANG_DIR, clang_patches),
|
||||||
|
- (COMPILER_RT_DIR, compiler_rt_patches)]:
|
||||||
|
- print 'Applying patches in', path
|
||||||
|
- for patch in patches:
|
||||||
|
- print patch
|
||||||
|
- p = subprocess.Popen( ['patch', '-p0', '-d', path], stdin=subprocess.PIPE)
|
||||||
|
- (stdout, stderr) = p.communicate(input=patch)
|
||||||
|
- if p.returncode != 0:
|
||||||
|
- raise RuntimeError('stdout %s, stderr %s' % (stdout, stderr))
|
||||||
|
-
|
||||||
|
-
|
||||||
|
def DeleteChromeToolsShim():
|
||||||
|
+ OLD_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'zzz-chrometools')
|
||||||
|
+ shutil.rmtree(OLD_SHIM_DIR, ignore_errors=True)
|
||||||
|
shutil.rmtree(CHROME_TOOLS_SHIM_DIR, ignore_errors=True)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -337,6 +275,25 @@
|
||||||
|
f.write('endif (CHROMIUM_TOOLS_SRC)\n')
|
||||||
|
|
||||||
|
|
||||||
|
+def MaybeDownloadHostGcc(args):
|
||||||
|
+ """Downloads gcc 4.8.2 if needed and makes sure args.gcc_toolchain is set."""
|
||||||
|
+ if not sys.platform.startswith('linux') or args.gcc_toolchain:
|
||||||
|
+ return
|
||||||
|
+
|
||||||
|
+ if subprocess.check_output(['gcc', '-dumpversion']).rstrip() < '4.7.0':
|
||||||
|
+ # We need a newer gcc version.
|
||||||
|
+ gcc_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gcc482precise')
|
||||||
|
+ if not os.path.exists(gcc_dir):
|
||||||
|
+ print 'Downloading pre-built GCC 4.8.2...'
|
||||||
|
+ DownloadAndUnpack(
|
||||||
|
+ CDS_URL + '/tools/gcc482precise.tgz', LLVM_BUILD_TOOLS_DIR)
|
||||||
|
+ args.gcc_toolchain = gcc_dir
|
||||||
|
+ else:
|
||||||
|
+ # Always set gcc_toolchain; llvm-symbolizer needs the bundled libstdc++.
|
||||||
|
+ args.gcc_toolchain = \
|
||||||
|
+ os.path.dirname(os.path.dirname(distutils.spawn.find_executable('gcc')))
|
||||||
|
+
|
||||||
|
+
|
||||||
|
def AddCMakeToPath():
|
||||||
|
"""Download CMake and add it to PATH."""
|
||||||
|
if sys.platform == 'win32':
|
||||||
|
@@ -345,20 +302,10 @@
|
||||||
|
'cmake-3.2.2-win32-x86', 'bin')
|
||||||
|
else:
|
||||||
|
suffix = 'Darwin' if sys.platform == 'darwin' else 'Linux'
|
||||||
|
- zip_name = 'cmake310_%s.tgz' % suffix
|
||||||
|
- cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'cmake310', 'bin')
|
||||||
|
+ zip_name = 'cmake322_%s.tgz' % suffix
|
||||||
|
+ cmake_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'cmake322', 'bin')
|
||||||
|
if not os.path.exists(cmake_dir):
|
||||||
|
- if not os.path.exists(LLVM_BUILD_TOOLS_DIR):
|
||||||
|
- os.makedirs(LLVM_BUILD_TOOLS_DIR)
|
||||||
|
- # The cmake archive is smaller than 20 MB, small enough to keep in memory:
|
||||||
|
- with contextlib.closing(cStringIO.StringIO()) as f:
|
||||||
|
- DownloadUrl(CDS_URL + '/tools/' + zip_name, f)
|
||||||
|
- f.seek(0)
|
||||||
|
- if zip_name.endswith('.zip'):
|
||||||
|
- zipfile.ZipFile(f).extractall(path=LLVM_BUILD_TOOLS_DIR)
|
||||||
|
- else:
|
||||||
|
- tarfile.open(mode='r:gz', fileobj=f).extractall(path=
|
||||||
|
- LLVM_BUILD_TOOLS_DIR)
|
||||||
|
+ DownloadAndUnpack(CDS_URL + '/tools/' + zip_name, LLVM_BUILD_TOOLS_DIR)
|
||||||
|
os.environ['PATH'] = cmake_dir + os.pathsep + os.environ.get('PATH', '')
|
||||||
|
|
||||||
|
vs_version = None
|
||||||
|
@@ -383,37 +330,61 @@
|
||||||
|
|
||||||
|
def UpdateClang(args):
|
||||||
|
print 'Updating Clang to %s...' % PACKAGE_VERSION
|
||||||
|
- if ReadStampFile() == PACKAGE_VERSION:
|
||||||
|
- print 'Already up to date.'
|
||||||
|
- return 0
|
||||||
|
+
|
||||||
|
+ need_gold_plugin = 'LLVM_DOWNLOAD_GOLD_PLUGIN' in os.environ or (
|
||||||
|
+ sys.platform.startswith('linux') and
|
||||||
|
+ 'buildtype=Official' in os.environ.get('GYP_DEFINES', '') and
|
||||||
|
+ 'branding=Chrome' in os.environ.get('GYP_DEFINES', ''))
|
||||||
|
+
|
||||||
|
+ if ReadStampFile() == PACKAGE_VERSION and not args.force_local_build:
|
||||||
|
+ print 'Clang is already up to date.'
|
||||||
|
+ if not need_gold_plugin or os.path.exists(
|
||||||
|
+ os.path.join(LLVM_BUILD_DIR, "lib/LLVMgold.so")):
|
||||||
|
+ return 0
|
||||||
|
|
||||||
|
# Reset the stamp file in case the build is unsuccessful.
|
||||||
|
WriteStampFile('')
|
||||||
|
|
||||||
|
if not args.force_local_build:
|
||||||
|
cds_file = "clang-%s.tgz" % PACKAGE_VERSION
|
||||||
|
- cds_full_url = CDS_URL + '/Win/' + cds_file
|
||||||
|
+ if sys.platform == 'win32':
|
||||||
|
+ cds_full_url = CDS_URL + '/Win/' + cds_file
|
||||||
|
+ elif sys.platform == 'darwin':
|
||||||
|
+ cds_full_url = CDS_URL + '/Mac/' + cds_file
|
||||||
|
+ else:
|
||||||
|
+ assert sys.platform.startswith('linux')
|
||||||
|
+ cds_full_url = CDS_URL + '/Linux_x64/' + cds_file
|
||||||
|
|
||||||
|
- # Check if there's a prebuilt binary and if so just fetch that. That's
|
||||||
|
- # faster, and goma relies on having matching binary hashes on client and
|
||||||
|
- # server too.
|
||||||
|
- print 'Trying to download prebuilt clang'
|
||||||
|
-
|
||||||
|
- # clang packages are smaller than 50 MB, small enough to keep in memory.
|
||||||
|
- with contextlib.closing(cStringIO.StringIO()) as f:
|
||||||
|
- try:
|
||||||
|
- DownloadUrl(cds_full_url, f)
|
||||||
|
- f.seek(0)
|
||||||
|
- tarfile.open(mode='r:gz', fileobj=f).extractall(path=LLVM_BUILD_DIR)
|
||||||
|
- print 'clang %s unpacked' % PACKAGE_VERSION
|
||||||
|
- WriteStampFile(PACKAGE_VERSION)
|
||||||
|
- return 0
|
||||||
|
- except urllib2.HTTPError:
|
||||||
|
- print 'Did not find prebuilt clang %s, building locally' % cds_file
|
||||||
|
+ print 'Downloading prebuilt clang'
|
||||||
|
+ if os.path.exists(LLVM_BUILD_DIR):
|
||||||
|
+ RmTree(LLVM_BUILD_DIR)
|
||||||
|
+ try:
|
||||||
|
+ DownloadAndUnpack(cds_full_url, LLVM_BUILD_DIR)
|
||||||
|
+ print 'clang %s unpacked' % PACKAGE_VERSION
|
||||||
|
+ # Download the gold plugin if requested to by an environment variable.
|
||||||
|
+ # This is used by the CFI ClusterFuzz bot, and it's required for official
|
||||||
|
+ # builds on linux.
|
||||||
|
+ if need_gold_plugin:
|
||||||
|
+ RunCommand(['python', CHROMIUM_DIR+'/build/download_gold_plugin.py'])
|
||||||
|
+ WriteStampFile(PACKAGE_VERSION)
|
||||||
|
+ return 0
|
||||||
|
+ except urllib2.URLError:
|
||||||
|
+ print 'Failed to download prebuilt clang %s' % cds_file
|
||||||
|
+ print 'Use --force-local-build if you want to build locally.'
|
||||||
|
+ print 'Exiting.'
|
||||||
|
+ return 1
|
||||||
|
+
|
||||||
|
+ if args.with_android and not os.path.exists(ANDROID_NDK_DIR):
|
||||||
|
+ print 'Android NDK not found at ' + ANDROID_NDK_DIR
|
||||||
|
+ print 'The Android NDK is needed to build a Clang whose -fsanitize=address'
|
||||||
|
+ print 'works on Android. See '
|
||||||
|
+ print 'http://code.google.com/p/chromium/wiki/AndroidBuildInstructions'
|
||||||
|
+ print 'for how to install the NDK, or pass --without-android.'
|
||||||
|
+ return 1
|
||||||
|
|
||||||
|
+ MaybeDownloadHostGcc(args)
|
||||||
|
AddCMakeToPath()
|
||||||
|
|
||||||
|
- RevertPreviouslyPatchedFiles()
|
||||||
|
DeleteChromeToolsShim()
|
||||||
|
|
||||||
|
Checkout('LLVM', LLVM_REPO_URL + '/llvm/trunk', LLVM_DIR)
|
||||||
|
@@ -429,10 +400,24 @@
|
||||||
|
# into it too (since OS X 10.6 doesn't have libc++abi.dylib either).
|
||||||
|
Checkout('libcxxabi', LLVM_REPO_URL + '/libcxxabi/trunk', LIBCXXABI_DIR)
|
||||||
|
|
||||||
|
- if args.with_patches and sys.platform != 'win32':
|
||||||
|
- ApplyLocalPatches()
|
||||||
|
-
|
||||||
|
cc, cxx = None, None
|
||||||
|
+ libstdcpp = None
|
||||||
|
+ if args.gcc_toolchain: # This option is only used on Linux.
|
||||||
|
+ # Use the specified gcc installation for building.
|
||||||
|
+ cc = os.path.join(args.gcc_toolchain, 'bin', 'gcc')
|
||||||
|
+ cxx = os.path.join(args.gcc_toolchain, 'bin', 'g++')
|
||||||
|
+
|
||||||
|
+ if not os.access(cc, os.X_OK):
|
||||||
|
+ print 'Invalid --gcc-toolchain: "%s"' % args.gcc_toolchain
|
||||||
|
+ print '"%s" does not appear to be valid.' % cc
|
||||||
|
+ return 1
|
||||||
|
+
|
||||||
|
+ # Set LD_LIBRARY_PATH to make auxiliary targets (tablegen, bootstrap
|
||||||
|
+ # compiler, etc.) find the .so.
|
||||||
|
+ libstdcpp = subprocess.check_output(
|
||||||
|
+ [cxx, '-print-file-name=libstdc++.so.6']).rstrip()
|
||||||
|
+ os.environ['LD_LIBRARY_PATH'] = os.path.dirname(libstdcpp)
|
||||||
|
+
|
||||||
|
cflags = cxxflags = ldflags = []
|
||||||
|
|
||||||
|
# LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
|
||||||
|
@@ -462,8 +447,7 @@
|
||||||
|
|
||||||
|
if args.bootstrap:
|
||||||
|
print 'Building bootstrap compiler'
|
||||||
|
- if not os.path.exists(LLVM_BOOTSTRAP_DIR):
|
||||||
|
- os.makedirs(LLVM_BOOTSTRAP_DIR)
|
||||||
|
+ EnsureDirExists(LLVM_BOOTSTRAP_DIR)
|
||||||
|
os.chdir(LLVM_BOOTSTRAP_DIR)
|
||||||
|
bootstrap_args = base_cmake_args + [
|
||||||
|
'-DLLVM_TARGETS_TO_BUILD=host',
|
||||||
|
@@ -473,11 +457,16 @@
|
||||||
|
]
|
||||||
|
if cc is not None: bootstrap_args.append('-DCMAKE_C_COMPILER=' + cc)
|
||||||
|
if cxx is not None: bootstrap_args.append('-DCMAKE_CXX_COMPILER=' + cxx)
|
||||||
|
+ RmCmakeCache('.')
|
||||||
|
RunCommand(['cmake'] + bootstrap_args + [LLVM_DIR], msvc_arch='x64')
|
||||||
|
RunCommand(['ninja'], msvc_arch='x64')
|
||||||
|
if args.run_tests:
|
||||||
|
RunCommand(['ninja', 'check-all'], msvc_arch='x64')
|
||||||
|
RunCommand(['ninja', 'install'], msvc_arch='x64')
|
||||||
|
+ if args.gcc_toolchain:
|
||||||
|
+ # Copy that gcc's stdlibc++.so.6 to the build dir, so the bootstrap
|
||||||
|
+ # compiler can start.
|
||||||
|
+ CopyFile(libstdcpp, os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'lib'))
|
||||||
|
|
||||||
|
if sys.platform == 'win32':
|
||||||
|
cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang-cl.exe')
|
||||||
|
@@ -489,6 +478,12 @@
|
||||||
|
else:
|
||||||
|
cc = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang')
|
||||||
|
cxx = os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'bin', 'clang++')
|
||||||
|
+
|
||||||
|
+ if args.gcc_toolchain:
|
||||||
|
+ # Tell the bootstrap compiler to use a specific gcc prefix to search
|
||||||
|
+ # for standard library headers and shared object files.
|
||||||
|
+ cflags = ['--gcc-toolchain=' + args.gcc_toolchain]
|
||||||
|
+ cxxflags = ['--gcc-toolchain=' + args.gcc_toolchain]
|
||||||
|
print 'Building final compiler'
|
||||||
|
|
||||||
|
if sys.platform == 'darwin':
|
||||||
|
@@ -543,7 +538,7 @@
|
||||||
|
binutils_incdir = os.path.join(BINUTILS_DIR, 'Linux_x64/Release/include')
|
||||||
|
|
||||||
|
# If building at head, define a macro that plugins can use for #ifdefing
|
||||||
|
- # out code that builds at head, but not at LLVM_WIN_REVISION or vice versa.
|
||||||
|
+ # out code that builds at head, but not at CLANG_REVISION or vice versa.
|
||||||
|
if use_head_revision:
|
||||||
|
cflags += ['-DLLVM_FORCE_HEAD_REVISION']
|
||||||
|
cxxflags += ['-DLLVM_FORCE_HEAD_REVISION']
|
||||||
|
@@ -555,8 +550,15 @@
|
||||||
|
deployment_env = os.environ.copy()
|
||||||
|
deployment_env['MACOSX_DEPLOYMENT_TARGET'] = deployment_target
|
||||||
|
|
||||||
|
- cmake_args = base_cmake_args + [
|
||||||
|
+ cmake_args = []
|
||||||
|
+ # TODO(thakis): Unconditionally append this to base_cmake_args instead once
|
||||||
|
+ # compiler-rt can build with clang-cl on Windows (http://llvm.org/PR23698)
|
||||||
|
+ cc_args = base_cmake_args if sys.platform != 'win32' else cmake_args
|
||||||
|
+ if cc is not None: cc_args.append('-DCMAKE_C_COMPILER=' + cc)
|
||||||
|
+ if cxx is not None: cc_args.append('-DCMAKE_CXX_COMPILER=' + cxx)
|
||||||
|
+ cmake_args += base_cmake_args + [
|
||||||
|
'-DLLVM_BINUTILS_INCDIR=' + binutils_incdir,
|
||||||
|
+ '-DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly',
|
||||||
|
'-DCMAKE_C_FLAGS=' + ' '.join(cflags),
|
||||||
|
'-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags),
|
||||||
|
'-DCMAKE_EXE_LINKER_FLAGS=' + ' '.join(ldflags),
|
||||||
|
@@ -565,35 +567,44 @@
|
||||||
|
'-DCMAKE_INSTALL_PREFIX=' + LLVM_BUILD_DIR,
|
||||||
|
'-DCHROMIUM_TOOLS_SRC=%s' % os.path.join(CHROMIUM_DIR, 'tools', 'clang'),
|
||||||
|
'-DCHROMIUM_TOOLS=%s' % ';'.join(args.tools)]
|
||||||
|
- # TODO(thakis): Unconditionally append this to base_cmake_args instead once
|
||||||
|
- # compiler-rt can build with clang-cl on Windows (http://llvm.org/PR23698)
|
||||||
|
- cc_args = base_cmake_args if sys.platform != 'win32' else cmake_args
|
||||||
|
- if cc is not None: cc_args.append('-DCMAKE_C_COMPILER=' + cc)
|
||||||
|
- if cxx is not None: cc_args.append('-DCMAKE_CXX_COMPILER=' + cxx)
|
||||||
|
|
||||||
|
- if not os.path.exists(LLVM_BUILD_DIR):
|
||||||
|
- os.makedirs(LLVM_BUILD_DIR)
|
||||||
|
+ EnsureDirExists(LLVM_BUILD_DIR)
|
||||||
|
os.chdir(LLVM_BUILD_DIR)
|
||||||
|
+ RmCmakeCache('.')
|
||||||
|
RunCommand(['cmake'] + cmake_args + [LLVM_DIR],
|
||||||
|
msvc_arch='x64', env=deployment_env)
|
||||||
|
- RunCommand(['ninja'], msvc_arch='x64')
|
||||||
|
+
|
||||||
|
+ if args.gcc_toolchain:
|
||||||
|
+ # Copy in the right stdlibc++.so.6 so clang can start.
|
||||||
|
+ if not os.path.exists(os.path.join(LLVM_BUILD_DIR, 'lib')):
|
||||||
|
+ os.mkdir(os.path.join(LLVM_BUILD_DIR, 'lib'))
|
||||||
|
+ libstdcpp = subprocess.check_output(
|
||||||
|
+ [cxx] + cxxflags + ['-print-file-name=libstdc++.so.6']).rstrip()
|
||||||
|
+ CopyFile(libstdcpp, os.path.join(LLVM_BUILD_DIR, 'lib'))
|
||||||
|
+
|
||||||
|
+ # TODO(thakis): Remove "-d explain" once http://crbug.com/569337 is fixed.
|
||||||
|
+ RunCommand(['ninja', '-d', 'explain'], msvc_arch='x64')
|
||||||
|
|
||||||
|
if args.tools:
|
||||||
|
# If any Chromium tools were built, install those now.
|
||||||
|
RunCommand(['ninja', 'cr-install'], msvc_arch='x64')
|
||||||
|
|
||||||
|
if sys.platform == 'darwin':
|
||||||
|
- CopyFile(os.path.join(LLVM_BUILD_DIR, 'libc++.1.dylib'),
|
||||||
|
+ CopyFile(os.path.join(libcxxbuild, 'libc++.1.dylib'),
|
||||||
|
os.path.join(LLVM_BUILD_DIR, 'bin'))
|
||||||
|
# See http://crbug.com/256342
|
||||||
|
RunCommand(['strip', '-x', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
|
||||||
|
elif sys.platform.startswith('linux'):
|
||||||
|
RunCommand(['strip', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
|
||||||
|
|
||||||
|
- # Do an x86 build of compiler-rt to get the 32-bit ASan run-time.
|
||||||
|
+ # Do an out-of-tree build of compiler-rt.
|
||||||
|
+ # On Windows, this is used to get the 32-bit ASan run-time.
|
||||||
|
# TODO(hans): Remove once the regular build above produces this.
|
||||||
|
- if not os.path.exists(COMPILER_RT_BUILD_DIR):
|
||||||
|
- os.makedirs(COMPILER_RT_BUILD_DIR)
|
||||||
|
+ # On Mac and Linux, this is used to get the regular 64-bit run-time.
|
||||||
|
+  # Do a clobbered build due to cmake changes.
+  if os.path.isdir(COMPILER_RT_BUILD_DIR):
+    RmTree(COMPILER_RT_BUILD_DIR)
+  os.makedirs(COMPILER_RT_BUILD_DIR)
   os.chdir(COMPILER_RT_BUILD_DIR)
   # TODO(thakis): Add this once compiler-rt can build with clang-cl (see
   # above).
@@ -606,11 +617,17 @@
       '-DCMAKE_CXX_FLAGS=' + ' '.join(cxxflags)]
   if sys.platform != 'win32':
     compiler_rt_args += ['-DLLVM_CONFIG_PATH=' +
-                         os.path.join(LLVM_BUILD_DIR, 'bin', 'llvm-config')]
-  RunCommand(['cmake'] + compiler_rt_args + [LLVM_DIR],
-             msvc_arch='x86', env=deployment_env)
+                         os.path.join(LLVM_BUILD_DIR, 'bin', 'llvm-config'),
+                         '-DSANITIZER_MIN_OSX_VERSION="10.7"']
+  # compiler-rt is part of the llvm checkout on Windows but a stand-alone
+  # directory elsewhere, see the TODO above COMPILER_RT_DIR.
+  RmCmakeCache('.')
+  RunCommand(['cmake'] + compiler_rt_args +
+             [LLVM_DIR if sys.platform == 'win32' else COMPILER_RT_DIR],
+             msvc_arch='x86', env=deployment_env)
   RunCommand(['ninja', 'compiler-rt'], msvc_arch='x86')

+  # Copy select output to the main tree.
   # TODO(hans): Make this (and the .gypi and .isolate files) version number
   # independent.
   if sys.platform == 'win32':
@@ -620,17 +637,35 @@
   else:
     assert sys.platform.startswith('linux')
     platform = 'linux'
-  asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang',
-                                     VERSION, 'lib', platform)
+  asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', platform)
+  if sys.platform == 'win32':
+    # TODO(thakis): This too is due to compiler-rt being part of the checkout
+    # on Windows, see TODO above COMPILER_RT_DIR.
+    asan_rt_lib_src_dir = os.path.join(COMPILER_RT_BUILD_DIR, 'lib', 'clang',
+                                       VERSION, 'lib', platform)
   asan_rt_lib_dst_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
                                      VERSION, 'lib', platform)
-  CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir,
-                        r'^.*-i386\.lib$')
-  CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir,
-                        r'^.*-i386\.dll$')
+  # Blacklists:
+  CopyDirectoryContents(os.path.join(asan_rt_lib_src_dir, '..', '..'),
+                        os.path.join(asan_rt_lib_dst_dir, '..', '..'),
+                        r'^.*blacklist\.txt$')
+  # Headers:
+  if sys.platform != 'win32':
+    CopyDirectoryContents(
+        os.path.join(COMPILER_RT_BUILD_DIR, 'include/sanitizer'),
+        os.path.join(LLVM_BUILD_DIR, 'lib/clang', VERSION, 'include/sanitizer'))
+  # Static and dynamic libraries:
+  CopyDirectoryContents(asan_rt_lib_src_dir, asan_rt_lib_dst_dir)
+  if sys.platform == 'darwin':
+    for dylib in glob.glob(os.path.join(asan_rt_lib_dst_dir, '*.dylib')):
+      # Fix LC_ID_DYLIB for the ASan dynamic libraries to be relative to
+      # @executable_path.
+      # TODO(glider): this is transitional. We'll need to fix the dylib
+      # name either in our build system, or in Clang. See also
+      # http://crbug.com/344836.
+      subprocess.call(['install_name_tool', '-id',
+                       '@executable_path/' + os.path.basename(dylib), dylib])

-  CopyFile(os.path.join(asan_rt_lib_src_dir, '..', '..', 'asan_blacklist.txt'),
-           os.path.join(asan_rt_lib_dst_dir, '..', '..'))

   if sys.platform == 'win32':
     # Make an extra copy of the sanitizer headers, to be put on the include path
@@ -640,22 +675,67 @@
     aux_sanitizer_include_dir = os.path.join(LLVM_BUILD_DIR, 'lib', 'clang',
                                              VERSION, 'include_sanitizer',
                                              'sanitizer')
-    if not os.path.exists(aux_sanitizer_include_dir):
-      os.makedirs(aux_sanitizer_include_dir)
+    EnsureDirExists(aux_sanitizer_include_dir)
     for _, _, files in os.walk(sanitizer_include_dir):
       for f in files:
         CopyFile(os.path.join(sanitizer_include_dir, f),
                  aux_sanitizer_include_dir)

+  if args.with_android:
+    make_toolchain = os.path.join(
+        ANDROID_NDK_DIR, 'build', 'tools', 'make-standalone-toolchain.sh')
+    for target_arch in ['aarch64', 'arm', 'i686']:
+      # Make standalone Android toolchain for target_arch.
+      toolchain_dir = os.path.join(
+          LLVM_BUILD_DIR, 'android-toolchain-' + target_arch)
+      RunCommand([
+          make_toolchain,
+          '--platform=android-' + ('21' if target_arch == 'aarch64' else '19'),
+          '--install-dir="%s"' % toolchain_dir,
+          '--system=linux-x86_64',
+          '--stl=stlport',
+          '--toolchain=' + {
+              'aarch64': 'aarch64-linux-android-4.9',
+              'arm': 'arm-linux-androideabi-4.9',
+              'i686': 'x86-4.9',
+          }[target_arch]])
+      # Android NDK r9d copies a broken unwind.h into the toolchain, see
+      # http://crbug.com/357890
+      for f in glob.glob(os.path.join(toolchain_dir, 'include/c++/*/unwind.h')):
+        os.remove(f)
+
+      # Build ASan runtime for Android in a separate build tree.
+      build_dir = os.path.join(LLVM_BUILD_DIR, 'android-' + target_arch)
+      if not os.path.exists(build_dir):
+        os.mkdir(os.path.join(build_dir))
+      os.chdir(build_dir)
+      cflags = ['--target=%s-linux-androideabi' % target_arch,
+                '--sysroot=%s/sysroot' % toolchain_dir,
+                '-B%s' % toolchain_dir]
+      android_args = base_cmake_args + [
+        '-DCMAKE_C_COMPILER=' + os.path.join(LLVM_BUILD_DIR, 'bin/clang'),
+        '-DCMAKE_CXX_COMPILER=' + os.path.join(LLVM_BUILD_DIR, 'bin/clang++'),
+        '-DLLVM_CONFIG_PATH=' + os.path.join(LLVM_BUILD_DIR, 'bin/llvm-config'),
+        '-DCMAKE_C_FLAGS=' + ' '.join(cflags),
+        '-DCMAKE_CXX_FLAGS=' + ' '.join(cflags),
+        '-DANDROID=1']
+      RmCmakeCache('.')
+      RunCommand(['cmake'] + android_args + [COMPILER_RT_DIR])
+      RunCommand(['ninja', 'libclang_rt.asan-%s-android.so' % target_arch])
+
+      # And copy it into the main build tree.
+      runtime = 'libclang_rt.asan-%s-android.so' % target_arch
+      for root, _, files in os.walk(build_dir):
+        if runtime in files:
+          shutil.copy(os.path.join(root, runtime), asan_rt_lib_dst_dir)
+
   # Run tests.
   if args.run_tests or use_head_revision:
     os.chdir(LLVM_BUILD_DIR)
-    RunCommand(GetVSVersion().SetupScript('x64') +
-               ['&&', 'ninja', 'cr-check-all'])
+    RunCommand(['ninja', 'cr-check-all'], msvc_arch='x64')
   if args.run_tests:
     os.chdir(LLVM_BUILD_DIR)
-    RunCommand(GetVSVersion().SetupScript('x64') +
-               ['&&', 'ninja', 'check-all'])
+    RunCommand(['ninja', 'check-all'], msvc_arch='x64')

   WriteStampFile(PACKAGE_VERSION)
   print 'Clang update was successful.'
@@ -663,31 +743,6 @@


 def main():
-  if not sys.platform in ['win32', 'cygwin']:
-    # For non-Windows, fall back to update.sh.
-    # TODO(hans): Make update.py replace update.sh completely.
-
-    # This script is called by gclient. gclient opens its hooks subprocesses
-    # with (stdout=subprocess.PIPE, stderr=subprocess.STDOUT) and then does
-    # custom output processing that breaks printing '\r' characters for
-    # single-line updating status messages as printed by curl and wget.
-    # Work around this by setting stderr of the update.sh process to stdin (!):
-    # gclient doesn't redirect stdin, and while stdin itself is read-only, a
-    # dup()ed sys.stdin is writable, try
-    #   fd2 = os.dup(sys.stdin.fileno()); os.write(fd2, 'hi')
-    # TODO: Fix gclient instead, http://crbug.com/95350
-    if '--no-stdin-hack' in sys.argv:
-      sys.argv.remove('--no-stdin-hack')
-      stderr = None
-    else:
-      try:
-        stderr = os.fdopen(os.dup(sys.stdin.fileno()))
-      except:
-        stderr = sys.stderr
-    return subprocess.call(
-        [os.path.join(os.path.dirname(__file__), 'update.sh')] + sys.argv[1:],
-        stderr=stderr)
-
   parser = argparse.ArgumentParser(description='Build Clang.')
   parser.add_argument('--bootstrap', action='store_true',
                       help='first build clang with CC, then with itself.')
@@ -695,26 +750,24 @@
                       help="run only if the script thinks clang is needed")
   parser.add_argument('--force-local-build', action='store_true',
                       help="don't try to download prebuild binaries")
+  parser.add_argument('--gcc-toolchain', help='set the version for which gcc '
+                      'version be used for building; --gcc-toolchain=/opt/foo '
+                      'picks /opt/foo/bin/gcc')
   parser.add_argument('--print-revision', action='store_true',
                       help='print current clang revision and exit.')
+  parser.add_argument('--print-clang-version', action='store_true',
+                      help='print current clang version (e.g. x.y.z) and exit.')
   parser.add_argument('--run-tests', action='store_true',
                       help='run tests after building; only for local builds')
   parser.add_argument('--tools', nargs='*',
                       help='select which chrome tools to build',
                       default=['plugins', 'blink_gc_plugin'])
-  parser.add_argument('--without-patches', action='store_false',
-                      help="don't apply patches (default)", dest='with_patches',
-                      default=True)
-
-  # For now, these flags are only used for the non-Windows flow, but argparser
-  # gets mad if it sees a flag it doesn't recognize.
-  parser.add_argument('--no-stdin-hack', action='store_true')
-
+  parser.add_argument('--without-android', action='store_false',
+                      help='don\'t build Android ASan runtime (linux only)',
+                      dest='with_android',
+                      default=sys.platform.startswith('linux'))
   args = parser.parse_args()

-  if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')):
-    print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).'
-    return 0
   if args.if_needed:
     is_clang_required = False
     # clang is always used on Mac and Linux.
@@ -730,8 +783,16 @@
       is_clang_required = True
     if not is_clang_required:
       return 0
+  if re.search(r'\b(make_clang_dir)=', os.environ.get('GYP_DEFINES', '')):
+    print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).'
+    return 0
+
+  if use_head_revision:
+    # TODO(hans): Remove after the next roll.
+    global VERSION
+    VERSION = '3.9.0'

-  global LLVM_WIN_REVISION, PACKAGE_VERSION
+  global CLANG_REVISION, PACKAGE_VERSION
   if args.print_revision:
     if use_head_revision:
       print GetSvnRevision(LLVM_DIR)
@@ -739,6 +800,10 @@
       print PACKAGE_VERSION
     return 0

+  if args.print_clang_version:
+    sys.stdout.write(VERSION)
+    return 0
+
   # Don't buffer stdout, so that print statements are immediately flushed.
   # Do this only after --print-revision has been handled, else we'll get
   # an error message when this script is run from gn for some reason.
@@ -747,12 +812,13 @@
   if use_head_revision:
     # Use a real revision number rather than HEAD to make sure that the stamp
     # file logic works.
-    LLVM_WIN_REVISION = GetSvnRevision(LLVM_REPO_URL)
-    PACKAGE_VERSION = LLVM_WIN_REVISION + '-0'
+    CLANG_REVISION = GetSvnRevision(LLVM_REPO_URL)
+    PACKAGE_VERSION = CLANG_REVISION + '-0'

     args.force_local_build = True
-    # Skip local patches when using HEAD: they probably don't apply anymore.
-    args.with_patches = False
+    if 'OS=android' not in os.environ.get('GYP_DEFINES', ''):
+      # Only build the Android ASan rt on ToT bots when targetting Android.
+      args.with_android = False

   return UpdateClang(args)
@@ -53,6 +53,9 @@ qtSubmodule {
     libcap
     pciutils
   ];
+  patches = [
+    ./chromium-clang-update-py.patch
+  ];
   postInstall = ''
     cat > $out/libexec/qt.conf <<EOF
     [Paths]
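
Aside, a minimal sketch of the mechanism used above: files listed in a derivation's `patches` attribute are applied by stdenv's generic patch phase, in order, with `patch -p1` by default; the `./chromium-clang-update-py.patch` entry presumably carries the update.py changes shown earlier in this commit. The package name and patch file below are placeholders for illustration, not part of this commit:

    { stdenv }:

    stdenv.mkDerivation {
      name = "patch-phase-sketch";
      src = ./.;                         # placeholder source
      # Every file listed here is applied during patchPhase, in order.
      patches = [ ./fix-build.patch ];   # hypothetical patch file
      patchFlags = "-p1";                # the default, shown for clarity
    }
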
@@ -54,7 +54,7 @@ stdenv.mkDerivation {
   makeFlags =
     (if local then localFlags else genericFlags)
     ++
-    optionals stdenv.isDarwin ["MACOSX_DEPLOYMENT_TARGET=10.9"]
+    optionals stdenv.isDarwin ["MACOSX_DEPLOYMENT_TARGET=10.7"]
     ++
     [
       "FC=gfortran"
@@ -25,6 +25,7 @@ stdenv.mkDerivation rec {

   configurePhase = stdenv.lib.optionalString stdenv.isDarwin ''
     ln -s /usr/bin/xcodebuild $TMPDIR
+    ln -s /usr/bin/libtool $TMPDIR
     export PATH=$TMPDIR:$PATH
   '' + ''
     PYTHONPATH="tools/generate_shim_headers:$PYTHONPATH" \
@@ -57,8 +58,8 @@ stdenv.mkDerivation rec {

   installPhase = ''
     install -vD out/Release/d8 "$out/bin/d8"
-    ${if stdenv.system == "x86_64-darwin" then ''
-      install -vD out/Release/lib.target/libv8.dylib "$out/lib/libv8.dylib"
+    ${if stdenv.isDarwin then ''
+      install -vD out/Release/libv8.dylib "$out/lib/libv8.dylib"
     '' else ''
       install -vD out/Release/lib.target/libv8.so "$out/lib/libv8.so"
     ''}
43  pkgs/development/ocaml-modules/astring/default.nix  Normal file
@@ -0,0 +1,43 @@
+{stdenv, fetchurl, buildOcaml, ocaml, findlib, ocamlbuild, topkg, opam}:
+
+buildOcaml rec {
+  version = "0.8.3";
+  name = "astring";
+
+  src = fetchurl {
+    url = "http://erratique.ch/software/astring/releases/astring-${version}.tbz";
+    sha256 = "0ixjwc3plrljvj24za3l9gy0w30lsbggp8yh02lwrzw61ls4cri0";
+  };
+
+  unpackCmd = "tar -xf $curSrc";
+
+  buildInputs = [ ocaml findlib ocamlbuild topkg opam ];
+
+  buildPhase = ''
+    ocaml -I ${findlib}/lib/ocaml/${ocaml.version}/site-lib/ pkg/pkg.ml build
+  '';
+
+  installPhase = ''
+    opam-installer --script --prefix=$out astring.install | sh
+    ln -s $out/lib/astring $out/lib/ocaml/${ocaml.version}/site-lib/
+  '';
+
+  meta = {
+    homepage = http://erratique.ch/software/astring;
+    description = "Alternative String module for OCaml";
+    longDescription = ''
+      Astring exposes an alternative String module for OCaml. This module tries
+      to balance minimality and expressiveness for basic, index-free, string
+      processing and provides types and functions for substrings, string sets
+      and string maps.
+
+      Remaining compatible with the OCaml String module is a non-goal.
+      The String module exposed by Astring has exception safe functions, removes
+      deprecated and rarely used functions, alters some signatures and names,
+      adds a few missing functions and fully exploits OCaml's newfound string
+      immutability.
+    '';
+    license = stdenv.lib.licenses.isc;
+    maintainers = with stdenv.lib.maintainers; [ sternenseemann ];
+  };
+}
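
Aside, a sketch of how the new astring package would typically be consumed, assuming it ends up exposed as `ocamlPackages.astring` (the callPackage wiring is outside this diff, so the attribute path is an assumption):

    # Hypothetical nix-shell environment using the package added above.
    { pkgs ? import <nixpkgs> {} }:

    pkgs.stdenv.mkDerivation {
      name = "astring-env";
      buildInputs = with pkgs; [
        ocaml
        ocamlPackages.findlib
        ocamlPackages.astring   # attribute path assumed, not shown in this diff
      ];
    }
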
@@ -2,10 +2,10 @@

 stdenv.mkDerivation rec {
   name = "dub-${version}";
-  version = "1.0.0";
+  version = "1.1.0";

   src = fetchFromGitHub {
-    sha256 = "07s52hmh9jc3i4jfx4j4a91m44qrr933pwfwczzijhybj2wmpjhh";
+    sha256 = "1smzlfs5gjmrlghccdgn04qzy5b8l0xm8y2virayb2adrwqviscm";
     rev = "v${version}";
     repo = "dub";
     owner = "D-Programming-Language";
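
Aside, one common workflow for bumps like the dub update above (a sketch, not prescribed by this commit): update `rev` first and leave the old `sha256` in place; the fixed-output fetch then fails with a hash mismatch that prints the correct new value (the "1smzlf..." hash recorded in the hunk), which replaces the stale one:

    { fetchFromGitHub }:

    fetchFromGitHub {
      owner = "D-Programming-Language";
      repo = "dub";
      rev = "v1.1.0";
      # stale on purpose: the failed fetch reports the hash it actually got
      sha256 = "07s52hmh9jc3i4jfx4j4a91m44qrr933pwfwczzijhybj2wmpjhh";
    }
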
@@ -2,11 +2,13 @@

 stdenv.mkDerivation rec {
   name = "remake-${version}";
-  version = "3.82+dbg-0.6";
+  remakeVersion = "4.1";
+  dbgVersion = "1.1";
+  version = "${remakeVersion}+dbg-${dbgVersion}";

   src = fetchurl {
-    url = "mirror://sourceforge/project/bashdb/remake/${version}/${name}.tar.bz2";
-    sha256 = "0i2g6vi9zya78d9zpigfnmzg2qcl93myjfibh3kfmjk7b9lajfyz";
+    url = "mirror://sourceforge/project/bashdb/remake/${version}/remake-${remakeVersion}+dbg${dbgVersion}.tar.bz2";
+    sha256 = "1zi16pl7sqn1aa8b7zqm9qnd9vjqyfywqm8s6iap4clf86l7kss2";
   };

   buildInputs = [ readline ];
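
Aside, the remake hunk splits the version into two components; the interpolation composes as follows (a small sketch that can be checked with `nix-instantiate --eval --strict`):

    let
      remakeVersion = "4.1";
      dbgVersion = "1.1";
      version = "${remakeVersion}+dbg-${dbgVersion}";
    in {
      inherit version;                   # "4.1+dbg-1.1"
      name = "remake-${version}";        # "remake-4.1+dbg-1.1"
    }
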
@@ -1,26 +1,26 @@
-{ stdenv, buildGoPackage, fetchgit, fetchhg, fetchbzr, fetchsvn }:
+{ stdenv, buildGoPackage, fetchFromGitHub, fetchhg, fetchbzr, fetchsvn }:

 buildGoPackage rec {
   name = "leaps-${version}";
-  version = "20160626-${stdenv.lib.strings.substring 0 7 rev}";
-  rev = "5cf7328a8c498041d2a887e89f22f138498f4621";
+  version = "0.5.1";

   goPackagePath = "github.com/jeffail/leaps";

-  src = fetchgit {
-    inherit rev;
-    url = "https://github.com/jeffail/leaps";
-    sha256 = "1qbgz48x9yi0w9yz39zsnnhx5nx2xmrns9v8hx28jah2bvag6sq7";
-    fetchSubmodules = false;
+  src = fetchFromGitHub {
+    owner = "jeffail";
+    repo = "leaps";
+    sha256 = "0w63y777h5qc8fwnkrbawn3an9px0l1zz3649x0n8lhk125fvchj";
+    rev = "v${version}";
   };

   goDeps = ./deps.nix;

   meta = {
     description = "A pair programming tool and library written in Golang";
     homepage = "https://github.com/jeffail/leaps/";
     license = "MIT";
     maintainers = with stdenv.lib.maintainers; [ qknight ];
     meta.platforms = stdenv.lib.platforms.linux;
-    broken = true;
   };
 }

@@ -1,11 +1,94 @@
 [
   {
-    goPackagePath = "golang.org/x/net";
+    goPackagePath = "github.com/amir/raidman";
     fetch = {
       type = "git";
-      url = "https://go.googlesource.com/net";
-      rev = "07b51741c1d6423d4a6abab1c49940ec09cb1aaf";
-      sha256 = "12lvdj0k2gww4hw5f79qb9yswqpy4i3bgv1likmf3mllgdxfm20w";
+      url = "https://github.com/amir/raidman";
+      rev = "91c20f3f475cab75bb40ad7951d9bbdde357ade7";
+      sha256 = "0pkqy5hzjkk04wj1ljq8jsyla358ilxi4lkmvkk73b3dh2wcqvpp";
+    };
+  }
+  {
+    goPackagePath = "github.com/elazarl/go-bindata-assetfs";
+    fetch = {
+      type = "git";
+      url = "https://github.com/elazarl/go-bindata-assetfs";
+      rev = "57eb5e1fc594ad4b0b1dbea7b286d299e0cb43c2";
+      sha256 = "1za29pa15y2xsa1lza97jlkax9qj93ks4a2j58xzmay6rczfkb9i";
+    };
+  }
+  {
+    goPackagePath = "github.com/garyburd/redigo";
+    fetch = {
+      type = "git";
+      url = "https://github.com/garyburd/redigo";
+      rev = "8873b2f1995f59d4bcdd2b0dc9858e2cb9bf0c13";
+      sha256 = "1lzhb99pcwwf5ddcs0bw00fwf9m1d0k7b92fqz2a01jlij4pm5l2";
+    };
+  }
+  {
+    goPackagePath = "github.com/go-sql-driver/mysql";
+    fetch = {
+      type = "git";
+      url = "https://github.com/go-sql-driver/mysql";
+      rev = "7ebe0a500653eeb1859664bed5e48dec1e164e73";
+      sha256 = "1gyan3lyn2j00di9haq7zm3zcwckn922iigx3fvml6s2bsp6ljas";
+    };
+  }
+  {
+    goPackagePath = "github.com/golang/protobuf";
+    fetch = {
+      type = "git";
+      url = "https://github.com/golang/protobuf";
+      rev = "bf531ff1a004f24ee53329dfd5ce0b41bfdc17df";
+      sha256 = "10lnvmq28jp2wk1xc32mdk4745lal2bmdvbjirckb9wlv07zzzf0";
+    };
+  }
+  {
+    goPackagePath = "github.com/jeffail/gabs";
+    fetch = {
+      type = "git";
+      url = "https://github.com/jeffail/gabs";
+      rev = "ee1575a53249b51d636e62464ca43a13030afdb5";
+      sha256 = "0svv57193n8m86r7v7n0y9lny0p6nzr7xvz98va87h00mg146351";
+    };
+  }
+  {
+    goPackagePath = "github.com/jeffail/util";
+    fetch = {
+      type = "git";
+      url = "https://github.com/jeffail/util";
+      rev = "48ada8ff9fcae546b5986f066720daa9033ad523";
+      sha256 = "0k8zz7gdv4hb691fdyb5mhlixppcq8x4ny84fanflypnv258a3i0";
+    };
+  }
+  {
+    goPackagePath = "github.com/lib/pq";
+    fetch = {
+      type = "git";
+      url = "https://github.com/lib/pq";
+      rev = "3cd0097429be7d611bb644ef85b42bfb102ceea4";
+      sha256 = "1q7qfzyfgjk6rvid548r43fi4jhvsh4dhfvfjbp2pz4xqsvpsm7a";
+    };
+  }
+  {
+    goPackagePath = "github.com/satori/go.uuid";
+    fetch = {
+      type = "git";
+      url = "https://github.com/satori/go.uuid";
+      rev = "f9ab0dce87d815821e221626b772e3475a0d2749";
+      sha256 = "0z18j6zxq9kw4lgcpmhh3k7jrb9gy1lx252xz5qhs4ywi9w77xwi";
+    };
+  }
+
+  {
+    goPackagePath = "golang.org/x/net";
+    fetch = {
+      type = "git";
+      url = "https://go.googlesource.com/net";
+      rev = "07b51741c1d6423d4a6abab1c49940ec09cb1aaf";
+      sha256 = "12lvdj0k2gww4hw5f79qb9yswqpy4i3bgv1likmf3mllgdxfm20w";
     };
   }
 ]

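
Aside, each entry in the regenerated deps.nix above is a pin that buildGoPackage roughly resolves through the corresponding fetcher (here fetchgit) and places in the build's vendored GOPATH. A standalone fetch equivalent to the first entry, with the values copied from the hunk and the callPackage-style wrapper added only for illustration:

    { fetchgit }:

    # Standalone equivalent of the github.com/amir/raidman pin above.
    fetchgit {
      url = "https://github.com/amir/raidman";
      rev = "91c20f3f475cab75bb40ad7951d9bbdde357ade7";
      sha256 = "0pkqy5hzjkk04wj1ljq8jsyla358ilxi4lkmvkk73b3dh2wcqvpp";
    }
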
@@ -4,12 +4,12 @@
 # prebuilt binary subversion snapshots on sourceforge.

 stdenv.mkDerivation rec {
-  version = "748"; # latest @ 2013-10-26
+  version = "787";
   name = "xc3sprog-${version}";

   src = fetchsvn rec {
     url = "https://svn.code.sf.net/p/xc3sprog/code/trunk";
-    sha256 = "0wkz6094kkqz91qpa24pzlbhndc47sjmqhwk3p7ccabv0041rzk0";
+    sha256 = "1rfhms3i7375kdlg0sdg5k52ix3xv5llj2dr30vamyg7pk74y8rx";
     rev = "${version}";
   };

Some files were not shown because too many files have changed in this diff.