
treewide: format all inactive Nix files

After final improvements to the official formatter implementation,
this commit now performs the first treewide reformat of Nix files using it.
This is part of the implementation of RFC 166.

Only "inactive" files are reformatted, meaning only files that
aren't being touched by any PR with activity in the past 2 months.
This is to avoid conflicts for PRs that might soon be merged.
Later we can do a full treewide reformat to get the rest,
which should not cause as many conflicts.

A CI check has been running for some time to ensure that new and
already-formatted files are kept formatted, so the files reformatted here
should stay formatted as well.
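
For reference, a single file can also be formatted locally; a minimal
sketch, assuming the RFC 166 formatter is the nixfmt binary provided by
the nixfmt-rfc-style package:

    # reformat one file in place using the RFC 166 formatter
    nix-shell -p nixfmt-rfc-style --run 'nixfmt maintainers/scripts/all-tarballs.nix'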

This commit was automatically created and can be verified using

    nix-build https://github.com/infinisil/treewide-nixpkgs-reformat-script/archive/a08b3a4d199c6124ac5b36a889d9099b4383463f.tar.gz \
      --argstr baseRev b32a094368
    result/bin/apply-formatting $NIXPKGS_PATH
Silvan Mosberger 2024-12-10 20:26:33 +01:00
parent b32a094368
commit 4f0dadbf38
21293 changed files with 701351 additions and 428307 deletions

@@ -1,16 +1,17 @@
/* Helper expression for copy-tarballs. This returns (nearly) all
tarballs used the free packages in Nixpkgs.
/*
Helper expression for copy-tarballs. This returns (nearly) all
tarballs used the free packages in Nixpkgs.
Typical usage:
Typical usage:
$ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
$ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
*/
import ../../pkgs/top-level/release.nix
{ # Don't apply hydraJob to jobs, because then we can't get to the
# dependency graph.
scrubJobs = false;
# No need to evaluate on i686.
supportedSystems = [ "x86_64-linux" ];
bootstrapConfigs = [];
}
import ../../pkgs/top-level/release.nix {
# Don't apply hydraJob to jobs, because then we can't get to the
# dependency graph.
scrubJobs = false;
# No need to evaluate on i686.
supportedSystems = [ "x86_64-linux" ];
bootstrapConfigs = [ ];
}

@@ -1,8 +1,11 @@
{ maintainer
, localSystem ? { system = args.system or builtins.currentSystem; }
, system ? localSystem.system
, crossSystem ? localSystem
, ...
{
maintainer,
localSystem ? {
system = args.system or builtins.currentSystem;
},
system ? localSystem.system,
crossSystem ? localSystem,
...
}@args:
# based on update.nix
@@ -27,49 +30,51 @@ let
file: if builtins.pathExists file then super._internalCallByNamePackageFile file else null;
};
nixpkgsArgs = removeAttrs args [ "maintainer" "overlays" ] // {
overlays = args.overlays or [] ++ [ overlay ];
};
nixpkgsArgs =
removeAttrs args [
"maintainer"
"overlays"
]
// {
overlays = args.overlays or [ ] ++ [ overlay ];
};
pkgs = import ./../../default.nix nixpkgsArgs;
maintainer_ = pkgs.lib.maintainers.${maintainer};
packagesWith = cond: return: set:
(pkgs.lib.flatten
(pkgs.lib.mapAttrsToList
(name: pkg:
let
result = builtins.tryEval
(
if pkgs.lib.isDerivation pkg && cond name pkg then
# Skip packages whose closure fails on evaluation.
# This happens for pkgs like `python27Packages.djangoql`
# that have disabled Python pkgs as dependencies.
builtins.seq pkg.outPath
[ (return name pkg) ]
else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
then packagesWith cond return pkg
else [ ]
);
in
if result.success then result.value
else [ ]
)
set
)
);
packagesWith =
cond: return: set:
(pkgs.lib.flatten (
pkgs.lib.mapAttrsToList (
name: pkg:
let
result = builtins.tryEval (
if pkgs.lib.isDerivation pkg && cond name pkg then
# Skip packages whose closure fails on evaluation.
# This happens for pkgs like `python27Packages.djangoql`
# that have disabled Python pkgs as dependencies.
builtins.seq pkg.outPath [ (return name pkg) ]
else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false then
packagesWith cond return pkg
else
[ ]
);
in
if result.success then result.value else [ ]
) set
));
in
packagesWith
(name: pkg:
(
if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
then (
if builtins.isList pkg.meta.maintainers
then builtins.elem maintainer_ pkg.meta.maintainers
else maintainer_ == pkg.meta.maintainers
packagesWith (
name: pkg:
(
if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta then
(
if builtins.isList pkg.meta.maintainers then
builtins.elem maintainer_ pkg.meta.maintainers
else
maintainer_ == pkg.meta.maintainers
)
else false
)
else
false
)
(name: pkg: pkg)
pkgs
) (name: pkg: pkg) pkgs

@@ -5,48 +5,45 @@ let
};
inherit (pkgs) lib;
maintainer_ = pkgs.lib.maintainers.${maintainer};
packagesWith = cond: return: prefix: set:
(lib.flatten
(lib.mapAttrsToList
(name: pkg:
let
result = builtins.tryEval
(
if lib.isDerivation pkg && cond name pkg then
# Skip packages whose closure fails on evaluation.
# This happens for pkgs like `python27Packages.djangoql`
# that have disabled Python pkgs as dependencies.
builtins.seq pkg.outPath
[ (return "${prefix}${name}") ]
else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
# then packagesWith cond return pkg
then packagesWith cond return "${name}." pkg
else [ ]
);
in
if result.success then result.value
else [ ]
)
set
)
);
packagesWith =
cond: return: prefix: set:
(lib.flatten (
lib.mapAttrsToList (
name: pkg:
let
result = builtins.tryEval (
if lib.isDerivation pkg && cond name pkg then
# Skip packages whose closure fails on evaluation.
# This happens for pkgs like `python27Packages.djangoql`
# that have disabled Python pkgs as dependencies.
builtins.seq pkg.outPath [ (return "${prefix}${name}") ]
else if
pkg.recurseForDerivations or false || pkg.recurseForRelease or false
# then packagesWith cond return pkg
then
packagesWith cond return "${name}." pkg
else
[ ]
);
in
if result.success then result.value else [ ]
) set
));
packages = packagesWith
(name: pkg:
(
if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
then
(
if builtins.isList pkg.meta.maintainers
then builtins.elem maintainer_ pkg.meta.maintainers
else maintainer_ == pkg.meta.maintainers
)
else false
)
packages = packagesWith (
name: pkg:
(
if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta then
(
if builtins.isList pkg.meta.maintainers then
builtins.elem maintainer_ pkg.meta.maintainers
else
maintainer_ == pkg.meta.maintainers
)
else
false
)
(name: name)
""
pkgs;
) (name: name) "" pkgs;
in
pkgs.stdenv.mkDerivation {

@@ -1,38 +1,68 @@
let
lib = import ../../lib;
inherit (lib)
add attrNames elemAt foldl' genList length replaceStrings sort toLower trace;
add
attrNames
elemAt
foldl'
genList
length
replaceStrings
sort
toLower
trace
;
maintainers = import ../maintainer-list.nix;
simplify = replaceStrings [ "-" "_" ] [ "" "" ];
compare = a: b: simplify (toLower a) < simplify (toLower b);
namesSorted =
sort
(a: b: a.key < b.key)
(map
(n: let pos = builtins.unsafeGetAttrPos n maintainers;
in assert pos == null -> throw "maintainers entry ${n} is malformed";
{ name = n; line = pos.line; key = toLower (simplify n); })
(attrNames maintainers));
before = { name, line, key }:
foldl'
(acc: n: if n.key < key && (acc == null || n.key > acc.key) then n else acc)
null
namesSorted;
errors = foldl' add 0
(map
(i: let a = elemAt namesSorted i;
b = elemAt namesSorted (i + 1);
lim = let t = before a; in if t == null then "the initial {" else t.name;
in if a.line >= b.line
then trace
("maintainer ${a.name} (line ${toString a.line}) should be listed "
+ "after ${lim}, not after ${b.name} (line ${toString b.line})")
1
else 0)
(genList (i: i) (length namesSorted - 1)));
namesSorted = sort (a: b: a.key < b.key) (
map (
n:
let
pos = builtins.unsafeGetAttrPos n maintainers;
in
assert pos == null -> throw "maintainers entry ${n} is malformed";
{
name = n;
line = pos.line;
key = toLower (simplify n);
}
) (attrNames maintainers)
);
before =
{
name,
line,
key,
}:
foldl' (
acc: n: if n.key < key && (acc == null || n.key > acc.key) then n else acc
) null namesSorted;
errors = foldl' add 0 (
map (
i:
let
a = elemAt namesSorted i;
b = elemAt namesSorted (i + 1);
lim =
let
t = before a;
in
if t == null then "the initial {" else t.name;
in
if a.line >= b.line then
trace (
"maintainer ${a.name} (line ${toString a.line}) should be listed "
+ "after ${lim}, not after ${b.name} (line ${toString b.line})"
) 1
else
0
) (genList (i: i) (length namesSorted - 1))
);
in
assert errors == 0; "all good!"
assert errors == 0;
"all good!"
# generate edit commands to sort the list.
# may everything following the last current entry (closing } ff) in the wrong place

@@ -5,7 +5,8 @@ rustPlatform.buildRustPackage {
src = lib.cleanSourceWith {
src = ./.;
filter = name: type:
filter =
name: type:
let
name' = builtins.baseNameOf name;
in

@@ -5,20 +5,36 @@ let
trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" ];
rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [
"tarball"
"unstable"
];
# Add the recurseForDerivations attribute to ensure that
# nix-instantiate recurses into nested attribute sets.
recurse = path: attrs:
recurse =
path: attrs:
if (builtins.tryEval attrs).success then
if isDerivation attrs
then
if (builtins.tryEval attrs.drvPath).success
then { inherit (attrs) name drvPath; }
else { failed = true; }
else if attrs == null then {}
else { recurseForDerivations = true; } //
mapAttrs (n: v: let path' = path ++ [n]; in trace path' (recurse path' v)) attrs
else { };
if isDerivation attrs then
if (builtins.tryEval attrs.drvPath).success then
{ inherit (attrs) name drvPath; }
else
{ failed = true; }
else if attrs == null then
{ }
else
{
recurseForDerivations = true;
}
// mapAttrs (
n: v:
let
path' = path ++ [ n ];
in
trace path' (recurse path' v)
) attrs
else
{ };
in recurse [] rel
in
recurse [ ] rel

@@ -1,6 +1,9 @@
# This expression returns a list of all fetchurl calls used by expr.
{ expr, lib ? import ../../lib }:
{
expr,
lib ? import ../../lib,
}:
let
inherit (lib)
@@ -21,41 +24,82 @@ let
root = expr;
uniqueFiles = map (x: x.file) (genericClosure {
startSet = map (file: { key = with file; (if type == null then "" else type + "+") + hash; inherit file; }) files;
startSet = map (file: {
key = with file; (if type == null then "" else type + "+") + hash;
inherit file;
}) files;
operator = const [ ];
});
files = map (drv: { urls = drv.urls or [ drv.url ]; hash = drv.outputHash; isPatch = (drv?postFetch && drv.postFetch != ""); type = drv.outputHashAlgo; name = drv.name; }) fetchurlDependencies;
files = map (drv: {
urls = drv.urls or [ drv.url ];
hash = drv.outputHash;
isPatch = (drv ? postFetch && drv.postFetch != "");
type = drv.outputHashAlgo;
name = drv.name;
}) fetchurlDependencies;
fetchurlDependencies =
filter
(drv: drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat"
&& (drv ? url || drv ? urls))
dependencies;
fetchurlDependencies = filter (
drv:
drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat" && (drv ? url || drv ? urls)
) dependencies;
dependencies = map (x: x.value) (genericClosure {
startSet = map keyDrv (derivationsIn' root);
operator = { key, value }: map keyDrv (immediateDependenciesOf value);
});
derivationsIn' = x:
if !canEval x then []
else if isDerivation x then optional (canEval x.drvPath) x
else if isList x then concatLists (map derivationsIn' x)
else if isAttrs x then concatLists (mapAttrsToList (n: v: addErrorContext "while finding tarballs in '${n}':" (derivationsIn' v)) x)
else [ ];
derivationsIn' =
x:
if !canEval x then
[ ]
else if isDerivation x then
optional (canEval x.drvPath) x
else if isList x then
concatLists (map derivationsIn' x)
else if isAttrs x then
concatLists (
mapAttrsToList (n: v: addErrorContext "while finding tarballs in '${n}':" (derivationsIn' v)) x
)
else
[ ];
keyDrv = drv: if canEval drv.drvPath then { key = drv.drvPath; value = drv; } else { };
keyDrv =
drv:
if canEval drv.drvPath then
{
key = drv.drvPath;
value = drv;
}
else
{ };
immediateDependenciesOf = drv:
concatLists (mapAttrsToList (n: v: derivationsIn v) (removeAttrs drv (["meta" "passthru"] ++ optionals (drv?passthru) (attrNames drv.passthru))));
immediateDependenciesOf =
drv:
concatLists (
mapAttrsToList (n: v: derivationsIn v) (
removeAttrs drv (
[
"meta"
"passthru"
]
++ optionals (drv ? passthru) (attrNames drv.passthru)
)
)
);
derivationsIn = x:
if !canEval x then []
else if isDerivation x then optional (canEval x.drvPath) x
else if isList x then concatLists (map derivationsIn x)
else [ ];
derivationsIn =
x:
if !canEval x then
[ ]
else if isDerivation x then
optional (canEval x.drvPath) x
else if isList x then
concatLists (map derivationsIn x)
else
[ ];
canEval = val: (builtins.tryEval val).success;
in uniqueFiles
in
uniqueFiles

@@ -1,12 +1,17 @@
# Nix script to calculate the Haskell dependencies of every haskellPackage. Used by ./hydra-report.hs.
let
pkgs = import ../../.. {};
pkgs = import ../../.. { };
inherit (pkgs) lib;
getDeps = _: pkg: let
pname = pkg.pname or null;
in {
deps = builtins.filter (x: x != null && x != pname) (map (x: x.pname or null) (pkg.propagatedBuildInputs or []));
broken = (pkg.meta.hydraPlatforms or [null]) == [];
};
getDeps =
_: pkg:
let
pname = pkg.pname or null;
in
{
deps = builtins.filter (x: x != null && x != pname) (
map (x: x.pname or null) (pkg.propagatedBuildInputs or [ ])
);
broken = (pkg.meta.hydraPlatforms or [ null ]) == [ ];
};
in
lib.mapAttrs getDeps pkgs.haskellPackages
lib.mapAttrs getDeps pkgs.haskellPackages

@@ -1,15 +1,20 @@
let
nixpkgs = import ../../..;
inherit (nixpkgs {}) haskellPackages lib;
inherit (nixpkgs { }) haskellPackages lib;
maintainedPkgs = lib.filterAttrs (
_: v: builtins.length (v.meta.maintainers or []) > 0
_: v: builtins.length (v.meta.maintainers or [ ]) > 0
) haskellPackages;
brokenPkgs = lib.filterAttrs (_: v: v.meta.broken) maintainedPkgs;
transitiveBrokenPkgs = lib.filterAttrs
(_: v: !(builtins.tryEval (v.outPath or null)).success && !v.meta.broken)
maintainedPkgs;
infoList = pkgs: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: drv: "${name} ${(builtins.elemAt drv.meta.maintainers 0).github}") pkgs);
in {
transitiveBrokenPkgs = lib.filterAttrs (
_: v: !(builtins.tryEval (v.outPath or null)).success && !v.meta.broken
) maintainedPkgs;
infoList =
pkgs:
lib.concatStringsSep "\n" (
lib.mapAttrsToList (name: drv: "${name} ${(builtins.elemAt drv.meta.maintainers 0).github}") pkgs
);
in
{
report = ''
BROKEN:
${infoList brokenPkgs}
@@ -17,6 +22,5 @@ in {
TRANSITIVE BROKEN:
${infoList transitiveBrokenPkgs}
'';
transitiveErrors =
builtins.attrValues transitiveBrokenPkgs;
transitiveErrors = builtins.attrValues transitiveBrokenPkgs;
}

@@ -10,12 +10,14 @@
# This mapping contains all maintainers in ../../mainatainer-list.nix, but it
# ignores maintainers who don't have a GitHub account or an email address.
let
pkgs = import ../../.. {};
pkgs = import ../../.. { };
maintainers = import ../../maintainer-list.nix;
inherit (pkgs) lib;
mkMailGithubPair = _: maintainer:
mkMailGithubPair =
_: maintainer:
if (maintainer ? email) && (maintainer ? github) then
{ "${maintainer.email}" = maintainer.github; }
else
{};
in lib.zipAttrsWith (_: builtins.head) (lib.mapAttrsToList mkMailGithubPair maintainers)
{ };
in
lib.zipAttrsWith (_: builtins.head) (lib.mapAttrsToList mkMailGithubPair maintainers)

@@ -1,60 +1,63 @@
/* Nix expression to test for regressions in the Haskell configuration overlays.
/*
Nix expression to test for regressions in the Haskell configuration overlays.
test-configurations.nix determines all attributes touched by given Haskell
configuration overlays (i. e. pkgs/development/haskell-modules/configuration-*.nix)
and builds all derivations (or at least a reasonable subset) affected by
these overrides.
test-configurations.nix determines all attributes touched by given Haskell
configuration overlays (i. e. pkgs/development/haskell-modules/configuration-*.nix)
and builds all derivations (or at least a reasonable subset) affected by
these overrides.
By default, it checks `configuration-{common,nix,ghc-8.10.x}.nix`. You can
invoke it like this:
By default, it checks `configuration-{common,nix,ghc-8.10.x}.nix`. You can
invoke it like this:
nix-build maintainers/scripts/haskell/test-configurations.nix --keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix --keep-going
It is possible to specify other configurations:
It is possible to specify other configurations:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg files '[ "configuration-ghc-9.0.x.nix" "configuration-ghc-9.2.x.nix" ]' \
--keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg files '[ "configuration-ghc-9.0.x.nix" "configuration-ghc-9.2.x.nix" ]' \
--keep-going
You can also just supply a single string:
You can also just supply a single string:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files "configuration-arm.nix" --keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files "configuration-arm.nix" --keep-going
You can even supply full paths which is handy, as it allows for tab-completing
the configurations:
You can even supply full paths which is handy, as it allows for tab-completing
the configurations:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files pkgs/development/haskell-modules/configuration-arm.nix \
--keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix \
--argstr files pkgs/development/haskell-modules/configuration-arm.nix \
--keep-going
By default, derivation that fail to evaluate are skipped, unless they are
just marked as broken. You can check for other eval errors like this:
By default, derivation that fail to evaluate are skipped, unless they are
just marked as broken. You can check for other eval errors like this:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipEvalErrors false --keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipEvalErrors false --keep-going
You can also disable checking broken packages by passing a nixpkgs config:
You can also disable checking broken packages by passing a nixpkgs config:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg config '{ allowBroken = false; }' --keep-going
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg config '{ allowBroken = false; }' --keep-going
By default the haskell.packages.ghc*Binary sets used for bootstrapping GHC
are _not_ tested. You can change this using:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipBinaryGHCs false --keep-going
By default the haskell.packages.ghc*Binary sets used for bootstrapping GHC
are _not_ tested. You can change this using:
nix-build maintainers/scripts/haskell/test-configurations.nix \
--arg skipBinaryGHCs false --keep-going
*/
{ files ? [
{
files ? [
"configuration-common.nix"
"configuration-nix.nix"
"configuration-ghc-8.10.x.nix"
]
, nixpkgsPath ? ../../..
, config ? { allowBroken = true; }
, skipEvalErrors ? true
, skipBinaryGHCs ? true
],
nixpkgsPath ? ../../..,
config ? {
allowBroken = true;
},
skipEvalErrors ? true,
skipBinaryGHCs ? true,
}:
let
@@ -62,97 +65,99 @@ let
inherit (pkgs) lib;
# see usage explanation for the input format `files` allows
files' = builtins.map builtins.baseNameOf (
if !builtins.isList files then [ files ] else files
);
files' = builtins.map builtins.baseNameOf (if !builtins.isList files then [ files ] else files);
packageSetsWithVersionedHead = pkgs.haskell.packages // (
let
headSet = pkgs.haskell.packages.ghcHEAD;
# Determine the next GHC release version following GHC HEAD.
# GHC HEAD always has an uneven, tentative version number, e.g. 9.7.
# GHC releases always have even numbers, i.e. GHC 9.8 is branched off from
# GHC HEAD 9.7. Since we use the to be release number for GHC HEAD's
# configuration file, we need to calculate this here.
headVersion = lib.pipe headSet.ghc.version [
lib.versions.splitVersion
(lib.take 2)
lib.concatStrings
lib.strings.toInt
(builtins.add 1)
toString
];
in
{
"ghc${headVersion}" = headSet;
}
);
packageSetsWithVersionedHead =
pkgs.haskell.packages
// (
let
headSet = pkgs.haskell.packages.ghcHEAD;
# Determine the next GHC release version following GHC HEAD.
# GHC HEAD always has an uneven, tentative version number, e.g. 9.7.
# GHC releases always have even numbers, i.e. GHC 9.8 is branched off from
# GHC HEAD 9.7. Since we use the to be release number for GHC HEAD's
# configuration file, we need to calculate this here.
headVersion = lib.pipe headSet.ghc.version [
lib.versions.splitVersion
(lib.take 2)
lib.concatStrings
lib.strings.toInt
(builtins.add 1)
toString
];
in
{
"ghc${headVersion}" = headSet;
}
);
setsForFile = fileName:
setsForFile =
fileName:
let
# extract the unique part of the config's file name
configName = builtins.head (
builtins.match "configuration-(.+).nix" fileName
);
configName = builtins.head (builtins.match "configuration-(.+).nix" fileName);
# match the major and minor version of the GHC the config is intended for, if any
configVersion = lib.concatStrings (
builtins.match "ghc-([0-9]+).([0-9]+).x" configName
);
configVersion = lib.concatStrings (builtins.match "ghc-([0-9]+).([0-9]+).x" configName);
# return all package sets under haskell.packages matching the version components
setsForVersion = builtins.map (name: packageSetsWithVersionedHead.${name}) (
builtins.filter (setName:
lib.hasPrefix "ghc${configVersion}" setName
&& (skipBinaryGHCs -> !(lib.hasInfix "Binary" setName))
) (
builtins.attrNames packageSetsWithVersionedHead
)
setsForVersion = builtins.map (name: packageSetsWithVersionedHead.${name}) (
builtins.filter (
setName:
lib.hasPrefix "ghc${configVersion}" setName && (skipBinaryGHCs -> !(lib.hasInfix "Binary" setName))
) (builtins.attrNames packageSetsWithVersionedHead)
);
defaultSets = [ pkgs.haskellPackages ];
in {
in
{
# use plain haskellPackages for the version-agnostic files
# TODO(@sternenseemann): also consider currently selected versioned sets
"common" = defaultSets;
"nix" = defaultSets;
"arm" = defaultSets;
"darwin" = defaultSets;
}.${configName} or setsForVersion;
}
.${configName} or setsForVersion;
# attribute set that has all the attributes of haskellPackages set to null
availableHaskellPackages = builtins.listToAttrs (
builtins.map (attr: lib.nameValuePair attr null) (
builtins.attrNames pkgs.haskellPackages
)
builtins.map (attr: lib.nameValuePair attr null) (builtins.attrNames pkgs.haskellPackages)
);
# evaluate a configuration and only return the attributes changed by it,
# pass availableHaskellPackages as super in case intersectAttrs is used
overriddenAttrs = fileName: builtins.attrNames (
lib.fix (self:
import (nixpkgsPath + "/pkgs/development/haskell-modules/${fileName}") {
haskellLib = pkgs.haskell.lib.compose;
inherit pkgs;
} self availableHaskellPackages
)
);
overriddenAttrs =
fileName:
builtins.attrNames (
lib.fix (
self:
import (nixpkgsPath + "/pkgs/development/haskell-modules/${fileName}") {
haskellLib = pkgs.haskell.lib.compose;
inherit pkgs;
} self availableHaskellPackages
)
);
# list of derivations that are affected by overrides in the given configuration
# overlays. For common, nix, darwin etc. only the derivation from the default
# package set will be emitted.
packages = builtins.filter (v:
lib.warnIf (v.meta.broken or false) "${v.pname} is marked as broken" (
v != null
&& (skipEvalErrors -> (builtins.tryEval (v.outPath or v)).success)
)
) (
lib.concatMap (fileName:
let
sets = setsForFile fileName;
attrs = overriddenAttrs fileName;
in
lib.concatMap (set: builtins.map (attr: set.${attr}) attrs) sets
) files'
);
packages =
builtins.filter
(
v:
lib.warnIf (v.meta.broken or false) "${v.pname} is marked as broken" (
v != null && (skipEvalErrors -> (builtins.tryEval (v.outPath or v)).success)
)
)
(
lib.concatMap (
fileName:
let
sets = setsForFile fileName;
attrs = overriddenAttrs fileName;
in
lib.concatMap (set: builtins.map (attr: set.${attr}) attrs) sets
) files'
);
in
packages

@@ -1,15 +1,16 @@
let
nixpkgs = import ../../..;
inherit (nixpkgs {}) pkgs lib;
getEvaluating = x:
inherit (nixpkgs { }) pkgs lib;
getEvaluating =
x:
builtins.attrNames (
lib.filterAttrs (
_: v: (builtins.tryEval (v.outPath or null)).success && lib.isDerivation v && !v.meta.broken
) x
);
brokenDeps = lib.subtractLists
(getEvaluating pkgs.haskellPackages)
(getEvaluating (nixpkgs { config.allowBroken = true; }).haskellPackages);
brokenDeps = lib.subtractLists (getEvaluating pkgs.haskellPackages) (
getEvaluating (nixpkgs { config.allowBroken = true; }).haskellPackages
);
in
''
${lib.concatMapStringsSep "\n" (x: " - ${x}") brokenDeps}

@@ -1,4 +1,10 @@
{ stdenv, lib, makeWrapper, perl, perlPackages }:
{
stdenv,
lib,
makeWrapper,
perl,
perlPackages,
}:
stdenv.mkDerivation {
name = "nix-generate-from-cpan-3";
@@ -6,7 +12,11 @@ stdenv.mkDerivation {
nativeBuildInputs = [ makeWrapper ];
buildInputs = with perlPackages; [
perl GetoptLongDescriptive CPANPLUS Readonly LogLog4perl
perl
GetoptLongDescriptive
CPANPLUS
Readonly
LogLog4perl
];
dontUnpack = true;

@@ -1,24 +1,32 @@
{ stdenv, lib, makeWrapper, perl, perlPackages }:
{
stdenv,
lib,
makeWrapper,
perl,
perlPackages,
}:
stdenv.mkDerivation {
pname = "nixpkgs-lint";
version = "1";
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ perl perlPackages.XMLSimple ];
buildInputs = [
perl
perlPackages.XMLSimple
];
dontUnpack = true;
dontBuild = true;
installPhase =
''
mkdir -p $out/bin
cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
# make the built version hermetic
substituteInPlace $out/bin/nixpkgs-lint \
--replace-fail "#! /usr/bin/env nix-shell" "#! ${lib.getExe perl}"
wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
'';
installPhase = ''
mkdir -p $out/bin
cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
# make the built version hermetic
substituteInPlace $out/bin/nixpkgs-lint \
--replace-fail "#! /usr/bin/env nix-shell" "#! ${lib.getExe perl}"
wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
'';
meta = with lib; {
maintainers = [ maintainers.eelco ];

@@ -1,8 +1,16 @@
{ nixpkgs ? import ../.. { }
{
nixpkgs ? import ../.. { },
}:
with nixpkgs;
let
pyEnv = python3.withPackages(ps: with ps; [ packaging requests toolz pyyaml ]);
pyEnv = python3.withPackages (
ps: with ps; [
packaging
requests
toolz
pyyaml
]
);
in
mkShell {
packages = [

@@ -5,42 +5,50 @@
See https://nixos.org/manual/nixpkgs/unstable/#var-passthru-updateScript
*/
{ package ? null
, maintainer ? null
, predicate ? null
, get-script ? pkg: pkg.updateScript or null
, path ? null
, max-workers ? null
, include-overlays ? false
, keep-going ? null
, commit ? null
, skip-prompt ? null
{
package ? null,
maintainer ? null,
predicate ? null,
get-script ? pkg: pkg.updateScript or null,
path ? null,
max-workers ? null,
include-overlays ? false,
keep-going ? null,
commit ? null,
skip-prompt ? null,
}:
let
pkgs = import ./../../default.nix ((
if include-overlays == false then
{ overlays = []; }
else if include-overlays == true then
{ } # Let Nixpkgs include overlays impurely.
else { overlays = include-overlays; }
) // { config.allowAliases = false; });
pkgs = import ./../../default.nix (
(
if include-overlays == false then
{ overlays = [ ]; }
else if include-overlays == true then
{ } # Let Nixpkgs include overlays impurely.
else
{ overlays = include-overlays; }
)
// {
config.allowAliases = false;
}
);
inherit (pkgs) lib;
/* Remove duplicate elements from the list based on some extracted value. O(n^2) complexity.
*/
nubOn = f: list:
if list == [] then
[]
# Remove duplicate elements from the list based on some extracted value. O(n^2) complexity.
nubOn =
f: list:
if list == [ ] then
[ ]
else
let
x = lib.head list;
xs = lib.filter (p: f x != f p) (lib.drop 1 list);
in
[x] ++ nubOn f xs;
[ x ] ++ nubOn f xs;
/* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
/*
Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
Type: packagesWithPath :: AttrPath (AttrPath derivation bool) AttrSet List<AttrSet{attrPath :: str; package :: derivation; }>
AttrPath :: [str]
@@ -48,15 +56,18 @@ let
The packages will be returned as a list of named pairs comprising of:
- attrPath: stringified attribute path (based on `rootPath`)
- package: corresponding derivation
*/
packagesWithPath = rootPath: cond: pkgs:
*/
packagesWithPath =
rootPath: cond: pkgs:
let
packagesWithPathInner = path: pathContent:
packagesWithPathInner =
path: pathContent:
let
result = builtins.tryEval pathContent;
somewhatUniqueRepresentant =
{ package, attrPath }: {
{ package, attrPath }:
{
updateScript = (get-script package);
# Some updaters use the same `updateScript` value for all packages.
# Also compare `meta.description`.
@@ -67,79 +78,95 @@ let
dedupResults = lst: nubOn somewhatUniqueRepresentant (lib.concatLists lst);
in
if result.success then
let
evaluatedPathContent = result.value;
in
if lib.isDerivation evaluatedPathContent then
lib.optional (cond path evaluatedPathContent) { attrPath = lib.concatStringsSep "." path; package = evaluatedPathContent; }
else if lib.isAttrs evaluatedPathContent then
# If user explicitly points to an attrSet or it is marked for recursion, we recur.
if path == rootPath || evaluatedPathContent.recurseForDerivations or false || evaluatedPathContent.recurseForRelease or false then
dedupResults (lib.mapAttrsToList (name: elem: packagesWithPathInner (path ++ [name]) elem) evaluatedPathContent)
else []
else []
else [];
if result.success then
let
evaluatedPathContent = result.value;
in
if lib.isDerivation evaluatedPathContent then
lib.optional (cond path evaluatedPathContent) {
attrPath = lib.concatStringsSep "." path;
package = evaluatedPathContent;
}
else if lib.isAttrs evaluatedPathContent then
# If user explicitly points to an attrSet or it is marked for recursion, we recur.
if
path == rootPath
|| evaluatedPathContent.recurseForDerivations or false
|| evaluatedPathContent.recurseForRelease or false
then
dedupResults (
lib.mapAttrsToList (name: elem: packagesWithPathInner (path ++ [ name ]) elem) evaluatedPathContent
)
else
[ ]
else
[ ]
else
[ ];
in
packagesWithPathInner rootPath pkgs;
packagesWithPathInner rootPath pkgs;
/* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
*/
packagesWith = packagesWithPath [];
# Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
packagesWith = packagesWithPath [ ];
/* Recursively find all packages in `pkgs` with updateScript matching given predicate.
*/
packagesWithUpdateScriptMatchingPredicate = cond:
packagesWith (path: pkg: (get-script pkg != null) && cond path pkg);
# Recursively find all packages in `pkgs` with updateScript matching given predicate.
packagesWithUpdateScriptMatchingPredicate =
cond: packagesWith (path: pkg: (get-script pkg != null) && cond path pkg);
/* Recursively find all packages in `pkgs` with updateScript by given maintainer.
*/
packagesWithUpdateScriptAndMaintainer = maintainer':
# Recursively find all packages in `pkgs` with updateScript by given maintainer.
packagesWithUpdateScriptAndMaintainer =
maintainer':
let
maintainer =
if ! builtins.hasAttr maintainer' lib.maintainers then
if !builtins.hasAttr maintainer' lib.maintainers then
builtins.throw "Maintainer with name `${maintainer'} does not exist in `maintainers/maintainer-list.nix`."
else
builtins.getAttr maintainer' lib.maintainers;
in
packagesWithUpdateScriptMatchingPredicate (path: pkg:
(if builtins.hasAttr "maintainers" pkg.meta
then (if builtins.isList pkg.meta.maintainers
then builtins.elem maintainer pkg.meta.maintainers
else maintainer == pkg.meta.maintainers
)
else false
)
);
packagesWithUpdateScriptMatchingPredicate (
path: pkg:
(
if builtins.hasAttr "maintainers" pkg.meta then
(
if builtins.isList pkg.meta.maintainers then
builtins.elem maintainer pkg.meta.maintainers
else
maintainer == pkg.meta.maintainers
)
else
false
)
);
/* Recursively find all packages under `path` in `pkgs` with updateScript.
*/
packagesWithUpdateScript = path: pkgs:
# Recursively find all packages under `path` in `pkgs` with updateScript.
packagesWithUpdateScript =
path: pkgs:
let
prefix = lib.splitString "." path;
pathContent = lib.attrByPath prefix null pkgs;
in
if pathContent == null then
builtins.throw "Attribute path `${path}` does not exist."
else
packagesWithPath prefix (path: pkg: (get-script pkg != null))
pathContent;
if pathContent == null then
builtins.throw "Attribute path `${path}` does not exist."
else
packagesWithPath prefix (path: pkg: (get-script pkg != null)) pathContent;
/* Find a package under `path` in `pkgs` and require that it has an updateScript.
*/
packageByName = path: pkgs:
# Find a package under `path` in `pkgs` and require that it has an updateScript.
packageByName =
path: pkgs:
let
package = lib.attrByPath (lib.splitString "." path) null pkgs;
package = lib.attrByPath (lib.splitString "." path) null pkgs;
in
if package == null then
builtins.throw "Package with an attribute name `${path}` does not exist."
else if get-script package == null then
builtins.throw "Package with an attribute name `${path}` does not have a `passthru.updateScript` attribute defined."
else
{ attrPath = path; inherit package; };
if package == null then
builtins.throw "Package with an attribute name `${path}` does not exist."
else if get-script package == null then
builtins.throw "Package with an attribute name `${path}` does not have a `passthru.updateScript` attribute defined."
else
{
attrPath = path;
inherit package;
};
/* List of packages matched based on the CLI arguments.
*/
# List of packages matched based on the CLI arguments.
packages =
if package != null then
[ (packageByName package pkgs) ]
@@ -192,19 +219,22 @@ let
--argstr skip-prompt true
'';
/* Transform a matched package into an object for update.py.
*/
packageData = { package, attrPath }: let updateScript = get-script package; in {
name = package.name;
pname = lib.getName package;
oldVersion = lib.getVersion package;
updateScript = map builtins.toString (lib.toList (updateScript.command or updateScript));
supportedFeatures = updateScript.supportedFeatures or [];
attrPath = updateScript.attrPath or attrPath;
};
# Transform a matched package into an object for update.py.
packageData =
{ package, attrPath }:
let
updateScript = get-script package;
in
{
name = package.name;
pname = lib.getName package;
oldVersion = lib.getVersion package;
updateScript = map builtins.toString (lib.toList (updateScript.command or updateScript));
supportedFeatures = updateScript.supportedFeatures or [ ];
attrPath = updateScript.attrPath or attrPath;
};
/* JSON file with data for update.py.
*/
# JSON file with data for update.py.
packagesJson = pkgs.writeText "packages.json" (builtins.toJSON (map packageData packages));
optionalArgs =
@@ -215,7 +245,8 @@ let
args = [ packagesJson ] ++ optionalArgs;
in pkgs.stdenv.mkDerivation {
in
pkgs.stdenv.mkDerivation {
name = "nixpkgs-update-script";
buildCommand = ''
echo ""
@@ -231,5 +262,9 @@ in pkgs.stdenv.mkDerivation {
unset shellHook # do not contaminate nested shells
exec ${pkgs.python3.interpreter} ${./update.py} ${builtins.concatStringsSep " " args}
'';
nativeBuildInputs = [ pkgs.git pkgs.nix pkgs.cacert ];
nativeBuildInputs = [
pkgs.git
pkgs.nix
pkgs.cacert
];
}