Mirror of https://github.com/NixOS/nix (synced 2025-06-24 22:11:15 +02:00)

Commit 6a3b4afc0a: Merge remote-tracking branch 'upstream/master' into lfs
347 changed files with 8407 additions and 5795 deletions
.mergify.yml (11 additions)

@@ -106,3 +106,14 @@ pull_request_rules:
         labels:
           - automatic backport
           - merge-queue
+
+  - name: backport patches to 2.26
+    conditions:
+      - label=backport 2.26-maintenance
+    actions:
+      backport:
+        branches:
+          - "2.26-maintenance"
+        labels:
+          - automatic backport
+          - merge-queue
.version (2 lines changed)

@@ -1 +1 @@
-2.26.0
+2.27.0
default.nix (19 lines changed)

@@ -1,10 +1,9 @@
-(import
-  (
-    let lock = builtins.fromJSON (builtins.readFile ./flake.lock); in
-    fetchTarball {
-      url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
-      sha256 = lock.nodes.flake-compat.locked.narHash;
-    }
-  )
-  { src = ./.; }
-).defaultNix
+(import (
+  let
+    lock = builtins.fromJSON (builtins.readFile ./flake.lock);
+  in
+  fetchTarball {
+    url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
+    sha256 = lock.nodes.flake-compat.locked.narHash;
+  }
+) { src = ./.; }).defaultNix

@ -5,7 +5,15 @@ in
|
|||
|
||||
builtinsInfo:
|
||||
let
|
||||
showBuiltin = name: { doc, type ? null, args ? [ ], experimental-feature ? null, impure-only ? false }:
|
||||
showBuiltin =
|
||||
name:
|
||||
{
|
||||
doc,
|
||||
type ? null,
|
||||
args ? [ ],
|
||||
experimental-feature ? null,
|
||||
impure-only ? false,
|
||||
}:
|
||||
let
|
||||
type' = optionalString (type != null) " (${type})";
|
||||
|
||||
|
|
|
@ -32,7 +32,13 @@ let
|
|||
|
||||
commandInfo = fromJSON commandDump;
|
||||
|
||||
showCommand = { command, details, filename, toplevel }:
|
||||
showCommand =
|
||||
{
|
||||
command,
|
||||
details,
|
||||
filename,
|
||||
toplevel,
|
||||
}:
|
||||
let
|
||||
|
||||
result = ''
|
||||
|
@ -56,26 +62,27 @@ let
|
|||
${maybeOptions}
|
||||
'';
|
||||
|
||||
showSynopsis = command: args:
|
||||
showSynopsis =
|
||||
command: args:
|
||||
let
|
||||
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "...";
|
||||
showArgument = arg: "*${arg.label}*" + optionalString (!arg ? arity) "...";
|
||||
arguments = concatStringsSep " " (map showArgument args);
|
||||
in ''
|
||||
in
|
||||
''
|
||||
`${command}` [*option*...] ${arguments}
|
||||
'';
|
||||
|
||||
maybeSubcommands = optionalString (details ? commands && details.commands != {})
|
||||
''
|
||||
where *subcommand* is one of the following:
|
||||
maybeSubcommands = optionalString (details ? commands && details.commands != { }) ''
|
||||
where *subcommand* is one of the following:
|
||||
|
||||
${subcommands}
|
||||
'';
|
||||
${subcommands}
|
||||
'';
|
||||
|
||||
subcommands = if length categories > 1
|
||||
then listCategories
|
||||
else listSubcommands details.commands;
|
||||
subcommands = if length categories > 1 then listCategories else listSubcommands details.commands;
|
||||
|
||||
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands)));
|
||||
categories = sort (x: y: x.id < y.id) (
|
||||
unique (map (cmd: cmd.category) (attrValues details.commands))
|
||||
);
|
||||
|
||||
listCategories = concatStrings (map showCategory categories);
|
||||
|
||||
|
@ -99,38 +106,39 @@ let
|
|||
|
||||
${allStores}
|
||||
'';
|
||||
index = replaceStrings
|
||||
[ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
|
||||
[ storesOverview "#local-store" "#local-daemon-store" ]
|
||||
details.doc;
|
||||
index =
|
||||
replaceStrings
|
||||
[ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
|
||||
[ storesOverview "#local-store" "#local-daemon-store" ]
|
||||
details.doc;
|
||||
storesOverview =
|
||||
let
|
||||
showEntry = store:
|
||||
"- [${store.name}](#${store.slug})";
|
||||
showEntry = store: "- [${store.name}](#${store.slug})";
|
||||
in
|
||||
concatStringsSep "\n" (map showEntry storesList) + "\n";
|
||||
allStores = concatStringsSep "\n" (attrValues storePages);
|
||||
storePages = listToAttrs
|
||||
(map (s: { name = s.filename; value = s.page; }) storesList);
|
||||
storePages = listToAttrs (
|
||||
map (s: {
|
||||
name = s.filename;
|
||||
value = s.page;
|
||||
}) storesList
|
||||
);
|
||||
storesList = showStoreDocs {
|
||||
storeInfo = commandInfo.stores;
|
||||
inherit inlineHTML;
|
||||
};
|
||||
hasInfix = infix: content:
|
||||
hasInfix =
|
||||
infix: content:
|
||||
builtins.stringLength content != builtins.stringLength (replaceStrings [ infix ] [ "" ] content);
|
||||
in
|
||||
optionalString (details ? doc) (
|
||||
# An alternate implementation with builtins.match stack overflowed on some systems.
|
||||
if hasInfix "@store-types@" details.doc
|
||||
then help-stores
|
||||
else details.doc
|
||||
if hasInfix "@store-types@" details.doc then help-stores else details.doc
|
||||
);
|
||||
|
||||
maybeOptions =
|
||||
let
|
||||
allVisibleOptions = filterAttrs
|
||||
(_: o: ! o.hiddenCategory)
|
||||
(details.flags // toplevel.flags);
|
||||
allVisibleOptions = filterAttrs (_: o: !o.hiddenCategory) (details.flags // toplevel.flags);
|
||||
in
|
||||
optionalString (allVisibleOptions != { }) ''
|
||||
# Options
|
||||
|
@ -142,55 +150,73 @@ let
|
|||
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
|
||||
'';
|
||||
|
||||
showOptions = inlineHTML: allOptions:
|
||||
showOptions =
|
||||
inlineHTML: allOptions:
|
||||
let
|
||||
showCategory = cat: opts: ''
|
||||
${optionalString (cat != "") "## ${cat}"}
|
||||
|
||||
${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
|
||||
'';
|
||||
showOption = name: option:
|
||||
showOption =
|
||||
name: option:
|
||||
let
|
||||
result = trim ''
|
||||
- ${item}
|
||||
|
||||
${option.description}
|
||||
'';
|
||||
item = if inlineHTML
|
||||
then ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
|
||||
else "`--${name}` ${shortName} ${labels}";
|
||||
shortName = optionalString
|
||||
(option ? shortName)
|
||||
("/ `-${option.shortName}`");
|
||||
labels = optionalString
|
||||
(option ? labels)
|
||||
(concatStringsSep " " (map (s: "*${s}*") option.labels));
|
||||
in result;
|
||||
categories = mapAttrs
|
||||
# Convert each group from a list of key-value pairs back to an attrset
|
||||
(_: listToAttrs)
|
||||
(groupBy
|
||||
(cmd: cmd.value.category)
|
||||
(attrsToList allOptions));
|
||||
in concatStrings (attrValues (mapAttrs showCategory categories));
|
||||
in squash result;
|
||||
item =
|
||||
if inlineHTML then
|
||||
''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
|
||||
else
|
||||
"`--${name}` ${shortName} ${labels}";
|
||||
shortName = optionalString (option ? shortName) ("/ `-${option.shortName}`");
|
||||
labels = optionalString (option ? labels) (concatStringsSep " " (map (s: "*${s}*") option.labels));
|
||||
in
|
||||
result;
|
||||
categories =
|
||||
mapAttrs
|
||||
# Convert each group from a list of key-value pairs back to an attrset
|
||||
(_: listToAttrs)
|
||||
(groupBy (cmd: cmd.value.category) (attrsToList allOptions));
|
||||
in
|
||||
concatStrings (attrValues (mapAttrs showCategory categories));
|
||||
in
|
||||
squash result;
|
||||
|
||||
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
|
||||
|
||||
processCommand = { command, details, filename, toplevel }:
|
||||
processCommand =
|
||||
{
|
||||
command,
|
||||
details,
|
||||
filename,
|
||||
toplevel,
|
||||
}:
|
||||
let
|
||||
cmd = {
|
||||
inherit command;
|
||||
name = filename + ".md";
|
||||
value = showCommand { inherit command details filename toplevel; };
|
||||
value = showCommand {
|
||||
inherit
|
||||
command
|
||||
details
|
||||
filename
|
||||
toplevel
|
||||
;
|
||||
};
|
||||
};
|
||||
subcommand = subCmd: processCommand {
|
||||
command = command + " " + subCmd;
|
||||
details = details.commands.${subCmd};
|
||||
filename = appendName filename subCmd;
|
||||
inherit toplevel;
|
||||
};
|
||||
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {});
|
||||
subcommand =
|
||||
subCmd:
|
||||
processCommand {
|
||||
command = command + " " + subCmd;
|
||||
details = details.commands.${subCmd};
|
||||
filename = appendName filename subCmd;
|
||||
inherit toplevel;
|
||||
};
|
||||
in
|
||||
[ cmd ] ++ concatMap subcommand (attrNames details.commands or { });
|
||||
|
||||
manpages = processCommand {
|
||||
command = "nix";
|
||||
|
@ -199,9 +225,11 @@ let
|
|||
toplevel = commandInfo.args;
|
||||
};
|
||||
|
||||
tableOfContents = let
|
||||
showEntry = page:
|
||||
" - [${page.command}](command-ref/new-cli/${page.name})";
|
||||
in concatStringsSep "\n" (map showEntry manpages) + "\n";
|
||||
tableOfContents =
|
||||
let
|
||||
showEntry = page: " - [${page.command}](command-ref/new-cli/${page.name})";
|
||||
in
|
||||
concatStringsSep "\n" (map showEntry manpages) + "\n";
|
||||
|
||||
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
|
||||
in
|
||||
(listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }
|
||||
|
|
|
@ -1,67 +1,99 @@
|
|||
let
|
||||
inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs;
|
||||
inherit (import <nix/utils.nix>) concatStrings indent optionalString squash;
|
||||
inherit (builtins)
|
||||
attrValues
|
||||
concatStringsSep
|
||||
isAttrs
|
||||
isBool
|
||||
mapAttrs
|
||||
;
|
||||
inherit (import <nix/utils.nix>)
|
||||
concatStrings
|
||||
indent
|
||||
optionalString
|
||||
squash
|
||||
;
|
||||
in
|
||||
|
||||
# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`
|
||||
{ prefix, inlineHTML ? true }: settingsInfo:
|
||||
{
|
||||
prefix,
|
||||
inlineHTML ? true,
|
||||
}:
|
||||
settingsInfo:
|
||||
|
||||
let
|
||||
|
||||
showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }:
|
||||
showSetting =
|
||||
prefix: setting:
|
||||
{
|
||||
description,
|
||||
documentDefault,
|
||||
defaultValue,
|
||||
aliases,
|
||||
value,
|
||||
experimentalFeature,
|
||||
}:
|
||||
let
|
||||
result = squash ''
|
||||
- ${item}
|
||||
- ${item}
|
||||
|
||||
${indent " " body}
|
||||
'';
|
||||
item = if inlineHTML
|
||||
then ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
|
||||
else "`${setting}`";
|
||||
${indent " " body}
|
||||
'';
|
||||
item =
|
||||
if inlineHTML then
|
||||
''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
|
||||
else
|
||||
"`${setting}`";
|
||||
# separate body to cleanly handle indentation
|
||||
body = ''
|
||||
${experimentalFeatureNote}
|
||||
${experimentalFeatureNote}
|
||||
|
||||
${description}
|
||||
${description}
|
||||
|
||||
**Default:** ${showDefault documentDefault defaultValue}
|
||||
**Default:** ${showDefault documentDefault defaultValue}
|
||||
|
||||
${showAliases aliases}
|
||||
'';
|
||||
${showAliases aliases}
|
||||
'';
|
||||
|
||||
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
||||
> **Warning**
|
||||
>
|
||||
> This setting is part of an
|
||||
> [experimental feature](@docroot@/development/experimental-features.md).
|
||||
>
|
||||
> To change this setting, make sure the
|
||||
> [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature})
|
||||
> is enabled.
|
||||
> For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
|
||||
>
|
||||
> ```
|
||||
> extra-experimental-features = ${experimentalFeature}
|
||||
> ${setting} = ...
|
||||
> ```
|
||||
'';
|
||||
> **Warning**
|
||||
>
|
||||
> This setting is part of an
|
||||
> [experimental feature](@docroot@/development/experimental-features.md).
|
||||
>
|
||||
> To change this setting, make sure the
|
||||
> [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature})
|
||||
> is enabled.
|
||||
> For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
|
||||
>
|
||||
> ```
|
||||
> extra-experimental-features = ${experimentalFeature}
|
||||
> ${setting} = ...
|
||||
> ```
|
||||
'';
|
||||
|
||||
showDefault = documentDefault: defaultValue:
|
||||
showDefault =
|
||||
documentDefault: defaultValue:
|
||||
if documentDefault then
|
||||
# a StringMap value type is specified as a string, but
|
||||
# this shows the value type. The empty stringmap is `null` in
|
||||
# JSON, but that converts to `{ }` here.
|
||||
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue
|
||||
then "*empty*"
|
||||
else if isBool defaultValue then
|
||||
if defaultValue then "`true`" else "`false`"
|
||||
else "`${toString defaultValue}`"
|
||||
else "*machine-specific*";
|
||||
if defaultValue == "" || defaultValue == [ ] || isAttrs defaultValue then
|
||||
"*empty*"
|
||||
else if isBool defaultValue then
|
||||
if defaultValue then "`true`" else "`false`"
|
||||
else
|
||||
"`${toString defaultValue}`"
|
||||
else
|
||||
"*machine-specific*";
|
||||
|
||||
showAliases = aliases:
|
||||
optionalString (aliases != [])
|
||||
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
|
||||
showAliases =
|
||||
aliases:
|
||||
optionalString (aliases != [ ])
|
||||
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
|
||||
|
||||
in result;
|
||||
in
|
||||
result;
|
||||
|
||||
in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))
|
||||
in
|
||||
concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))
|
||||
|
|
|
@ -1,6 +1,20 @@
|
|||
let
|
||||
inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
|
||||
inherit (import <nix/utils.nix>) optionalString filterAttrs trim squash toLower unique indent;
|
||||
inherit (builtins)
|
||||
attrNames
|
||||
listToAttrs
|
||||
concatStringsSep
|
||||
readFile
|
||||
replaceStrings
|
||||
;
|
||||
inherit (import <nix/utils.nix>)
|
||||
optionalString
|
||||
filterAttrs
|
||||
trim
|
||||
squash
|
||||
toLower
|
||||
unique
|
||||
indent
|
||||
;
|
||||
showSettings = import <nix/generate-settings.nix>;
|
||||
in
|
||||
|
||||
|
@ -14,7 +28,13 @@ in
|
|||
|
||||
let
|
||||
|
||||
showStore = { name, slug }: { settings, doc, experimentalFeature }:
|
||||
showStore =
|
||||
{ name, slug }:
|
||||
{
|
||||
settings,
|
||||
doc,
|
||||
experimentalFeature,
|
||||
}:
|
||||
let
|
||||
result = squash ''
|
||||
# ${name}
|
||||
|
@ -25,7 +45,10 @@ let
|
|||
|
||||
## Settings
|
||||
|
||||
${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
|
||||
${showSettings {
|
||||
prefix = "store-${slug}";
|
||||
inherit inlineHTML;
|
||||
} settings}
|
||||
'';
|
||||
|
||||
experimentalFeatureNote = optionalString (experimentalFeature != null) ''
|
||||
|
@ -43,15 +66,15 @@ let
|
|||
> extra-experimental-features = ${experimentalFeature}
|
||||
> ```
|
||||
'';
|
||||
in result;
|
||||
in
|
||||
result;
|
||||
|
||||
storesList = map
|
||||
(name: rec {
|
||||
inherit name;
|
||||
slug = replaceStrings [ " " ] [ "-" ] (toLower name);
|
||||
filename = "${slug}.md";
|
||||
page = showStore { inherit name slug; } storeInfo.${name};
|
||||
})
|
||||
(attrNames storeInfo);
|
||||
storesList = map (name: rec {
|
||||
inherit name;
|
||||
slug = replaceStrings [ " " ] [ "-" ] (toLower name);
|
||||
filename = "${slug}.md";
|
||||
page = showStore { inherit name slug; } storeInfo.${name};
|
||||
}) (attrNames storeInfo);
|
||||
|
||||
in storesList
|
||||
in
|
||||
storesList
|
||||
|
|
|
@ -1,5 +1,11 @@
|
|||
let
|
||||
inherit (builtins) attrNames listToAttrs concatStringsSep readFile replaceStrings;
|
||||
inherit (builtins)
|
||||
attrNames
|
||||
listToAttrs
|
||||
concatStringsSep
|
||||
readFile
|
||||
replaceStrings
|
||||
;
|
||||
showSettings = import <nix/generate-settings.nix>;
|
||||
showStoreDocs = import <nix/generate-store-info.nix>;
|
||||
in
|
||||
|
@ -14,26 +20,28 @@ let
|
|||
|
||||
index =
|
||||
let
|
||||
showEntry = store:
|
||||
"- [${store.name}](./${store.filename})";
|
||||
showEntry = store: "- [${store.name}](./${store.filename})";
|
||||
in
|
||||
concatStringsSep "\n" (map showEntry storesList);
|
||||
|
||||
"index.md" = replaceStrings
|
||||
[ "@store-types@" ] [ index ]
|
||||
(readFile ./source/store/types/index.md.in);
|
||||
"index.md" =
|
||||
replaceStrings [ "@store-types@" ] [ index ]
|
||||
(readFile ./source/store/types/index.md.in);
|
||||
|
||||
tableOfContents =
|
||||
let
|
||||
showEntry = store:
|
||||
" - [${store.name}](store/types/${store.filename})";
|
||||
showEntry = store: " - [${store.name}](store/types/${store.filename})";
|
||||
in
|
||||
concatStringsSep "\n" (map showEntry storesList) + "\n";
|
||||
|
||||
"SUMMARY.md" = tableOfContents;
|
||||
|
||||
storePages = listToAttrs
|
||||
(map (s: { name = s.filename; value = s.page; }) storesList);
|
||||
storePages = listToAttrs (
|
||||
map (s: {
|
||||
name = s.filename;
|
||||
value = s.page;
|
||||
}) storesList
|
||||
);
|
||||
|
||||
in
|
||||
storePages // { inherit "index.md" "SUMMARY.md"; }
|
||||
|
|
|
@ -2,8 +2,8 @@ with builtins;
|
|||
with import <nix/utils.nix>;
|
||||
|
||||
let
|
||||
showExperimentalFeature = name: doc:
|
||||
''
|
||||
- [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name})
|
||||
'';
|
||||
in xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))
|
||||
showExperimentalFeature = name: doc: ''
|
||||
- [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name})
|
||||
'';
|
||||
in
|
||||
xps: indent " " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))
|
||||
|
|
|
@ -2,7 +2,8 @@ with builtins;
|
|||
with import <nix/utils.nix>;
|
||||
|
||||
let
|
||||
showExperimentalFeature = name: doc:
|
||||
showExperimentalFeature =
|
||||
name: doc:
|
||||
squash ''
|
||||
## [`${name}`]{#xp-feature-${name}}
|
||||
|
||||
|
|
|
@ -1,19 +1,20 @@
|
|||
{ lib
|
||||
, mkMesonDerivation
|
||||
{
|
||||
lib,
|
||||
mkMesonDerivation,
|
||||
|
||||
, meson
|
||||
, ninja
|
||||
, lowdown-unsandboxed
|
||||
, mdbook
|
||||
, mdbook-linkcheck
|
||||
, jq
|
||||
, python3
|
||||
, rsync
|
||||
, nix-cli
|
||||
meson,
|
||||
ninja,
|
||||
lowdown-unsandboxed,
|
||||
mdbook,
|
||||
mdbook-linkcheck,
|
||||
jq,
|
||||
python3,
|
||||
rsync,
|
||||
nix-cli,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -25,18 +26,22 @@ mkMesonDerivation (finalAttrs: {
|
|||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = fileset.difference
|
||||
(fileset.unions [
|
||||
../../.version
|
||||
# Too many different types of files to filter for now
|
||||
../../doc/manual
|
||||
./.
|
||||
])
|
||||
# Do a blacklist instead
|
||||
../../doc/manual/package.nix;
|
||||
fileset =
|
||||
fileset.difference
|
||||
(fileset.unions [
|
||||
../../.version
|
||||
# Too many different types of files to filter for now
|
||||
../../doc/manual
|
||||
./.
|
||||
])
|
||||
# Do a blacklist instead
|
||||
../../doc/manual/package.nix;
|
||||
|
||||
# TODO the man pages should probably be separate
|
||||
outputs = [ "out" "man" ];
|
||||
outputs = [
|
||||
"out"
|
||||
"man"
|
||||
];
|
||||
|
||||
# Hack for sake of the dev shell
|
||||
passthru.externalNativeBuildInputs = [
|
||||
|
@ -54,11 +59,10 @@ mkMesonDerivation (finalAttrs: {
|
|||
nix-cli
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
preConfigure = ''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p ''$out/nix-support
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
---
|
||||
synopsis: "Flake lock file generation now ignores local registries"
|
||||
prs: [12019]
|
||||
---
|
||||
|
||||
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
|
||||
|
||||
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
|
||||
|
||||
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
|
||||
```nix
|
||||
{
|
||||
outputs = { self, nixpkgs }: { ... };
|
||||
}
|
||||
```
|
||||
write
|
||||
```nix
|
||||
{
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
|
||||
outputs = { self, nixpkgs }: { ... };
|
||||
}
|
||||
```
|
|
@ -1,18 +0,0 @@
|
|||
---
|
||||
synopsis: "`nix copy` supports `--profile` and `--out-link`"
|
||||
prs: [11657]
|
||||
---
|
||||
|
||||
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the top-level store path, while `--out-link` creates symlinks to the top-level store paths.
|
||||
|
||||
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
|
||||
```
|
||||
# nix copy --from ssh://server $path
|
||||
# nix build --profile /nix/var/nix/profiles/system $path
|
||||
```
|
||||
you can now do
|
||||
```
|
||||
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
|
||||
```
|
||||
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.
|
|
@ -1,8 +0,0 @@
|
|||
---
|
||||
synopsis: "`nix-instantiate --eval` now supports `--raw`"
|
||||
prs: [12119]
|
||||
---
|
||||
|
||||
The `nix-instantiate --eval` command now supports a `--raw` flag. When used, the evaluation result must be a string, which is printed verbatim, without quotation marks or escaping.
|
|
@ -1,21 +0,0 @@
|
|||
---
|
||||
synopsis: "Improved `NIX_SSHOPTS` parsing for better SSH option handling"
|
||||
issues: [5181]
|
||||
prs: [12020]
|
||||
---
|
||||
|
||||
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
|
||||
Previously, incorrectly split SSH options could cause failures in CLIs like `nix-copy-closure`,
|
||||
especially when using complex ssh invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
|
||||
|
||||
This change introduces a `shellSplitString` function to ensure
|
||||
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
|
||||
behavior, addressing common parsing errors.
|
||||
|
||||
For example, the following now works as expected:
|
||||
|
||||
```bash
|
||||
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
|
||||
```
|
||||
|
||||
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.
|
|
@ -1,12 +0,0 @@
|
|||
---
|
||||
synopsis: "Support for relative path inputs"
|
||||
prs: [10089]
|
||||
---
|
||||
|
||||
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
|
||||
```nix
|
||||
inputs.foo.url = "path:./foo";
|
||||
```
|
||||
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
|
||||
|
||||
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.
|
|
@ -130,6 +130,7 @@
|
|||
- [Contributing](development/contributing.md)
|
||||
- [Releases](release-notes/index.md)
|
||||
{{#include ./SUMMARY-rl-next.md}}
|
||||
- [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md)
|
||||
- [Release 2.25 (2024-11-07)](release-notes/rl-2.25.md)
|
||||
- [Release 2.24 (2024-07-31)](release-notes/rl-2.24.md)
|
||||
- [Release 2.23 (2024-06-03)](release-notes/rl-2.23.md)
|
||||
|
|
|
@@ -79,7 +79,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im
 To get a shell with one of the other [supported compilation environments](#compilation-environments):

 ```console
-$ nix develop .#native-clangStdenvPackages
+$ nix develop .#native-clangStdenv
 ```

 > **Note**
@ -261,7 +261,8 @@ See [supported compilation environments](#compilation-environments) and instruct
|
|||
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
|
||||
Meson's configure always produces this inside the build directory.
|
||||
|
||||
Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages` shell. You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
||||
Configure your editor to use the `clangd` from the `.#native-clangStdenv` shell.
|
||||
You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
||||
|
||||
> **Note**
|
||||
>
|
||||
|
@ -277,6 +278,8 @@ You may run the formatters as a one-off using:
|
|||
./maintainers/format.sh
|
||||
```
|
||||
|
||||
### Pre-commit hooks
|
||||
|
||||
If you'd like to run the formatters before every commit, install the hooks:
|
||||
|
||||
```
|
||||
|
@ -291,3 +294,30 @@ If it fails, run `git add --patch` to approve the suggestions _and commit again_
|
|||
To refresh pre-commit hook's config file, do the following:
|
||||
1. Exit the development shell and start it again by running `nix develop`.
|
||||
2. If you also use the pre-commit hook, also run `pre-commit-hooks-install` again.
|
||||
|
||||
### VSCode
|
||||
|
||||
Insert the following json into your `.vscode/settings.json` file to configure `nixfmt`.
|
||||
This will be picked up by the _Format Document_ command, `"editor.formatOnSave"`, etc.
|
||||
|
||||
```json
|
||||
{
|
||||
"nix.formatterPath": "nixfmt",
|
||||
"nix.serverSettings": {
|
||||
"nixd": {
|
||||
"formatting": {
|
||||
"command": [
|
||||
"nixfmt"
|
||||
],
|
||||
},
|
||||
},
|
||||
"nil": {
|
||||
"formatting": {
|
||||
"command": [
|
||||
"nixfmt"
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
|
doc/manual/source/release-notes/rl-2.26.md (new file, 128 lines)
|
@ -0,0 +1,128 @@
|
|||
# Release 2.26.0 (2025-01-22)
|
||||
|
||||
- Support for relative path inputs [#10089](https://github.com/NixOS/nix/pull/10089)
|
||||
|
||||
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
|
||||
```nix
|
||||
inputs.foo.url = "path:./foo";
|
||||
```
|
||||
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
|
||||
|
||||
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.
|
||||
|
||||
- Flake lock file generation now ignores local registries [#12019](https://github.com/NixOS/nix/pull/12019)
|
||||
|
||||
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
|
||||
|
||||
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
|
||||
|
||||
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
|
||||
```nix
|
||||
{
|
||||
outputs = { self, nixpkgs }: { ... };
|
||||
}
|
||||
```
|
||||
write
|
||||
```nix
|
||||
{
|
||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
|
||||
outputs = { self, nixpkgs }: { ... };
|
||||
}
|
||||
```
|
||||
|
||||
- `nix copy` supports `--profile` and `--out-link` [#11657](https://github.com/NixOS/nix/pull/11657)
|
||||
|
||||
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the top-level store path, while `--out-link` creates symlinks to the top-level store paths.
|
||||
|
||||
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
|
||||
```
|
||||
# nix copy --from ssh://server $path
|
||||
# nix build --profile /nix/var/nix/profiles/system $path
|
||||
```
|
||||
you can now do
|
||||
```
|
||||
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
|
||||
```
|
||||
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.
|
||||
|
||||
- `nix-instantiate --eval` now supports `--raw` [#12119](https://github.com/NixOS/nix/pull/12119)
|
||||
|
||||
The `nix-instantiate --eval` command now supports a `--raw` flag. When used, the evaluation result must be a string, which is printed verbatim, without quotation marks or escaping.
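As a minimal illustration (the expression below is a made-up example, not taken from the release notes):

```console
$ nix-instantiate --eval --expr '"Hello, " + "world"'
"Hello, world"
$ nix-instantiate --eval --raw --expr '"Hello, " + "world"'
Hello, world
```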
|
||||
|
||||
- Improved `NIX_SSHOPTS` parsing for better SSH option handling [#5181](https://github.com/NixOS/nix/issues/5181) [#12020](https://github.com/NixOS/nix/pull/12020)
|
||||
|
||||
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
|
||||
Previously, incorrectly split SSH options could cause failures in commands like `nix-copy-closure`,
|
||||
especially when using complex SSH invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
|
||||
|
||||
This change introduces a `shellSplitString` function to ensure
|
||||
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
|
||||
behavior, addressing common parsing errors.
|
||||
|
||||
For example, the following now works as expected:
|
||||
|
||||
```bash
|
||||
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
|
||||
```
|
||||
|
||||
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.
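For instance, one possible usage (the jump host, target machine, and `$path` are placeholders):

```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p jumphost"'
nix-copy-closure --to user@target $path
```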
|
||||
|
||||
- Nix is now built using Meson
|
||||
|
||||
As proposed in [RFC 132](https://github.com/NixOS/rfcs/pull/132), Nix's build system now uses Meson/Ninja. The old Make-based build system has been removed.
|
||||
|
||||
- Evaluation caching now works for dirty Git workdirs [#11992](https://github.com/NixOS/nix/pull/11992)
|
||||
|
||||
# Contributors
|
||||
|
||||
This release was made possible by the following 45 contributors:
|
||||
|
||||
- Anatoli Babenia [**(@abitrolly)**](https://github.com/abitrolly)
|
||||
- Domagoj Mišković [**(@allrealmsoflife)**](https://github.com/allrealmsoflife)
|
||||
- Yaroslav Bolyukin [**(@CertainLach)**](https://github.com/CertainLach)
|
||||
- bryango [**(@bryango)**](https://github.com/bryango)
|
||||
- tomberek [**(@tomberek)**](https://github.com/tomberek)
|
||||
- Matej Urbas [**(@mupdt)**](https://github.com/mupdt)
|
||||
- elikoga [**(@elikoga)**](https://github.com/elikoga)
|
||||
- wh0 [**(@wh0)**](https://github.com/wh0)
|
||||
- Félix [**(@picnoir)**](https://github.com/picnoir)
|
||||
- Valentin Gagarin [**(@fricklerhandwerk)**](https://github.com/fricklerhandwerk)
|
||||
- Gavin John [**(@Pandapip1)**](https://github.com/Pandapip1)
|
||||
- Travis A. Everett [**(@abathur)**](https://github.com/abathur)
|
||||
- Vladimir Panteleev [**(@CyberShadow)**](https://github.com/CyberShadow)
|
||||
- Ilja [**(@suruaku)**](https://github.com/suruaku)
|
||||
- Jason Yundt [**(@Jayman2000)**](https://github.com/Jayman2000)
|
||||
- Mike Kusold [**(@kusold)**](https://github.com/kusold)
|
||||
- Andy Hamon [**(@andrewhamon)**](https://github.com/andrewhamon)
|
||||
- Brian McKenna [**(@puffnfresh)**](https://github.com/puffnfresh)
|
||||
- Greg Curtis [**(@gcurtis)**](https://github.com/gcurtis)
|
||||
- Andrew Poelstra [**(@apoelstra)**](https://github.com/apoelstra)
|
||||
- Linus Heckemann [**(@lheckemann)**](https://github.com/lheckemann)
|
||||
- Tristan Ross [**(@RossComputerGuy)**](https://github.com/RossComputerGuy)
|
||||
- Dominique Martinet [**(@martinetd)**](https://github.com/martinetd)
|
||||
- h0nIg [**(@h0nIg)**](https://github.com/h0nIg)
|
||||
- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra)
|
||||
- Shahar "Dawn" Or [**(@mightyiam)**](https://github.com/mightyiam)
|
||||
- NAHO [**(@trueNAHO)**](https://github.com/trueNAHO)
|
||||
- Ryan Hendrickson [**(@rhendric)**](https://github.com/rhendric)
|
||||
- the-sun-will-rise-tomorrow [**(@the-sun-will-rise-tomorrow)**](https://github.com/the-sun-will-rise-tomorrow)
|
||||
- Connor Baker [**(@ConnorBaker)**](https://github.com/ConnorBaker)
|
||||
- Cole Helbling [**(@cole-h)**](https://github.com/cole-h)
|
||||
- Jack Wilsdon [**(@jackwilsdon)**](https://github.com/jackwilsdon)
|
||||
- rekcäH nitraM [**(@dwt)**](https://github.com/dwt)
|
||||
- Martin Fischer [**(@not-my-profile)**](https://github.com/not-my-profile)
|
||||
- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314)
|
||||
- Graham Christensen [**(@grahamc)**](https://github.com/grahamc)
|
||||
- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium)
|
||||
- Siddarth Kumar [**(@siddarthkay)**](https://github.com/siddarthkay)
|
||||
- Sergei Trofimovich [**(@trofi)**](https://github.com/trofi)
|
||||
- Robert Hensing [**(@roberth)**](https://github.com/roberth)
|
||||
- Mutsuha Asada [**(@momeemt)**](https://github.com/momeemt)
|
||||
- Parker Jones [**(@knotapun)**](https://github.com/knotapun)
|
||||
- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92)
|
||||
- dbdr [**(@dbdr)**](https://github.com/dbdr)
|
||||
- myclevorname [**(@myclevorname)**](https://github.com/myclevorname)
|
||||
- Philipp Otterbein
|
|
@ -11,10 +11,15 @@ rec {
|
|||
|
||||
concatStrings = concatStringsSep "";
|
||||
|
||||
attrsToList = a:
|
||||
map (name: { inherit name; value = a.${name}; }) (builtins.attrNames a);
|
||||
attrsToList =
|
||||
a:
|
||||
map (name: {
|
||||
inherit name;
|
||||
value = a.${name};
|
||||
}) (builtins.attrNames a);
|
||||
|
||||
replaceStringsRec = from: to: string:
|
||||
replaceStringsRec =
|
||||
from: to: string:
|
||||
# recursively replace occurrences of `from` with `to` within `string`
|
||||
# example:
|
||||
# replaceStringRec "--" "-" "hello-----world"
|
||||
|
@ -22,16 +27,18 @@ rec {
|
|||
let
|
||||
replaced = replaceStrings [ from ] [ to ] string;
|
||||
in
|
||||
if replaced == string then string else replaceStringsRec from to replaced;
|
||||
if replaced == string then string else replaceStringsRec from to replaced;
|
||||
|
||||
toLower = replaceStrings upperChars lowerChars;
|
||||
|
||||
squash = replaceStringsRec "\n\n\n" "\n\n";
|
||||
|
||||
trim = string:
|
||||
trim =
|
||||
string:
|
||||
# trim trailing spaces and squash non-leading spaces
|
||||
let
|
||||
trimLine = line:
|
||||
trimLine =
|
||||
line:
|
||||
let
|
||||
# separate leading spaces from the rest
|
||||
parts = split "(^ *)" line;
|
||||
|
@ -39,19 +46,30 @@ rec {
|
|||
rest = elemAt parts 2;
|
||||
# drop trailing spaces
|
||||
body = head (split " *$" rest);
|
||||
in spaces + replaceStringsRec " " " " body;
|
||||
in concatStringsSep "\n" (map trimLine (splitLines string));
|
||||
in
|
||||
spaces + replaceStringsRec " " " " body;
|
||||
in
|
||||
concatStringsSep "\n" (map trimLine (splitLines string));
|
||||
|
||||
# FIXME: O(n^2)
|
||||
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
|
||||
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ];
|
||||
|
||||
nameValuePair = name: value: { inherit name value; };
|
||||
|
||||
filterAttrs = pred: set:
|
||||
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
|
||||
filterAttrs =
|
||||
pred: set:
|
||||
listToAttrs (
|
||||
concatMap (
|
||||
name:
|
||||
let
|
||||
v = set.${name};
|
||||
in
|
||||
if pred name v then [ (nameValuePair name v) ] else [ ]
|
||||
) (attrNames set)
|
||||
);
|
||||
|
||||
optionalString = cond: string: if cond then string else "";
|
||||
|
||||
indent = prefix: s:
|
||||
concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
|
||||
indent =
|
||||
prefix: s: concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
|
||||
}
|
||||
|
|
docker.nix (438 lines changed)
|
@ -1,112 +1,113 @@
|
|||
{ pkgs ? import <nixpkgs> { }
|
||||
, lib ? pkgs.lib
|
||||
, name ? "nix"
|
||||
, tag ? "latest"
|
||||
, bundleNixpkgs ? true
|
||||
, channelName ? "nixpkgs"
|
||||
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable"
|
||||
, extraPkgs ? []
|
||||
, maxLayers ? 100
|
||||
, nixConf ? {}
|
||||
, flake-registry ? null
|
||||
, uid ? 0
|
||||
, gid ? 0
|
||||
, uname ? "root"
|
||||
, gname ? "root"
|
||||
{
|
||||
pkgs ? import <nixpkgs> { },
|
||||
lib ? pkgs.lib,
|
||||
name ? "nix",
|
||||
tag ? "latest",
|
||||
bundleNixpkgs ? true,
|
||||
channelName ? "nixpkgs",
|
||||
channelURL ? "https://nixos.org/channels/nixpkgs-unstable",
|
||||
extraPkgs ? [ ],
|
||||
maxLayers ? 100,
|
||||
nixConf ? { },
|
||||
flake-registry ? null,
|
||||
uid ? 0,
|
||||
gid ? 0,
|
||||
uname ? "root",
|
||||
gname ? "root",
|
||||
}:
|
||||
let
|
||||
defaultPkgs = with pkgs; [
|
||||
nix
|
||||
bashInteractive
|
||||
coreutils-full
|
||||
gnutar
|
||||
gzip
|
||||
gnugrep
|
||||
which
|
||||
curl
|
||||
less
|
||||
wget
|
||||
man
|
||||
cacert.out
|
||||
findutils
|
||||
iana-etc
|
||||
git
|
||||
openssh
|
||||
] ++ extraPkgs;
|
||||
defaultPkgs =
|
||||
with pkgs;
|
||||
[
|
||||
nix
|
||||
bashInteractive
|
||||
coreutils-full
|
||||
gnutar
|
||||
gzip
|
||||
gnugrep
|
||||
which
|
||||
curl
|
||||
less
|
||||
wget
|
||||
man
|
||||
cacert.out
|
||||
findutils
|
||||
iana-etc
|
||||
git
|
||||
openssh
|
||||
]
|
||||
++ extraPkgs;
|
||||
|
||||
users = {
|
||||
users =
|
||||
{
|
||||
|
||||
root = {
|
||||
uid = 0;
|
||||
shell = "${pkgs.bashInteractive}/bin/bash";
|
||||
home = "/root";
|
||||
gid = 0;
|
||||
groups = [ "root" ];
|
||||
description = "System administrator";
|
||||
root = {
|
||||
uid = 0;
|
||||
shell = "${pkgs.bashInteractive}/bin/bash";
|
||||
home = "/root";
|
||||
gid = 0;
|
||||
groups = [ "root" ];
|
||||
description = "System administrator";
|
||||
};
|
||||
|
||||
nobody = {
|
||||
uid = 65534;
|
||||
shell = "${pkgs.shadow}/bin/nologin";
|
||||
home = "/var/empty";
|
||||
gid = 65534;
|
||||
groups = [ "nobody" ];
|
||||
description = "Unprivileged account (don't use!)";
|
||||
};
|
||||
|
||||
}
|
||||
// lib.optionalAttrs (uid != 0) {
|
||||
"${uname}" = {
|
||||
uid = uid;
|
||||
shell = "${pkgs.bashInteractive}/bin/bash";
|
||||
home = "/home/${uname}";
|
||||
gid = gid;
|
||||
groups = [ "${gname}" ];
|
||||
description = "Nix user";
|
||||
};
|
||||
}
|
||||
// lib.listToAttrs (
|
||||
map (n: {
|
||||
name = "nixbld${toString n}";
|
||||
value = {
|
||||
uid = 30000 + n;
|
||||
gid = 30000;
|
||||
groups = [ "nixbld" ];
|
||||
description = "Nix build user ${toString n}";
|
||||
};
|
||||
}) (lib.lists.range 1 32)
|
||||
);
|
||||
|
||||
groups =
|
||||
{
|
||||
root.gid = 0;
|
||||
nixbld.gid = 30000;
|
||||
nobody.gid = 65534;
|
||||
}
|
||||
// lib.optionalAttrs (gid != 0) {
|
||||
"${gname}".gid = gid;
|
||||
};
|
||||
|
||||
nobody = {
|
||||
uid = 65534;
|
||||
shell = "${pkgs.shadow}/bin/nologin";
|
||||
home = "/var/empty";
|
||||
gid = 65534;
|
||||
groups = [ "nobody" ];
|
||||
description = "Unprivileged account (don't use!)";
|
||||
};
|
||||
|
||||
} // lib.optionalAttrs (uid != 0) {
|
||||
"${uname}" = {
|
||||
uid = uid;
|
||||
shell = "${pkgs.bashInteractive}/bin/bash";
|
||||
home = "/home/${uname}";
|
||||
gid = gid;
|
||||
groups = [ "${gname}" ];
|
||||
description = "Nix user";
|
||||
};
|
||||
} // lib.listToAttrs (
|
||||
map
|
||||
(
|
||||
n: {
|
||||
name = "nixbld${toString n}";
|
||||
value = {
|
||||
uid = 30000 + n;
|
||||
gid = 30000;
|
||||
groups = [ "nixbld" ];
|
||||
description = "Nix build user ${toString n}";
|
||||
};
|
||||
}
|
||||
)
|
||||
(lib.lists.range 1 32)
|
||||
);
|
||||
|
||||
groups = {
|
||||
root.gid = 0;
|
||||
nixbld.gid = 30000;
|
||||
nobody.gid = 65534;
|
||||
} // lib.optionalAttrs (gid != 0) {
|
||||
"${gname}".gid = gid;
|
||||
};
|
||||
|
||||
userToPasswd = (
|
||||
k:
|
||||
{ uid
|
||||
, gid ? 65534
|
||||
, home ? "/var/empty"
|
||||
, description ? ""
|
||||
, shell ? "/bin/false"
|
||||
, groups ? [ ]
|
||||
}: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
|
||||
);
|
||||
passwdContents = (
|
||||
lib.concatStringsSep "\n"
|
||||
(lib.attrValues (lib.mapAttrs userToPasswd users))
|
||||
{
|
||||
uid,
|
||||
gid ? 65534,
|
||||
home ? "/var/empty",
|
||||
description ? "",
|
||||
shell ? "/bin/false",
|
||||
groups ? [ ],
|
||||
}:
|
||||
"${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
|
||||
);
|
||||
passwdContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToPasswd users)));
|
||||
|
||||
userToShadow = k: { ... }: "${k}:!:1::::::";
|
||||
shadowContents = (
|
||||
lib.concatStringsSep "\n"
|
||||
(lib.attrValues (lib.mapAttrs userToShadow users))
|
||||
);
|
||||
shadowContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToShadow users)));
|
||||
|
||||
# Map groups to members
|
||||
# {
|
||||
|
@ -116,42 +117,35 @@ let
|
|||
let
|
||||
# Create a flat list of user/group mappings
|
||||
mappings = (
|
||||
builtins.foldl'
|
||||
(
|
||||
acc: user:
|
||||
let
|
||||
groups = users.${user}.groups or [ ];
|
||||
in
|
||||
acc ++ map
|
||||
(group: {
|
||||
inherit user group;
|
||||
})
|
||||
groups
|
||||
)
|
||||
[ ]
|
||||
(lib.attrNames users)
|
||||
builtins.foldl' (
|
||||
acc: user:
|
||||
let
|
||||
groups = users.${user}.groups or [ ];
|
||||
in
|
||||
acc
|
||||
++ map (group: {
|
||||
inherit user group;
|
||||
}) groups
|
||||
) [ ] (lib.attrNames users)
|
||||
);
|
||||
in
|
||||
(
|
||||
builtins.foldl'
|
||||
(
|
||||
acc: v: acc // {
|
||||
${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
|
||||
}
|
||||
)
|
||||
{ }
|
||||
mappings)
|
||||
(builtins.foldl' (
|
||||
acc: v:
|
||||
acc
|
||||
// {
|
||||
${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
|
||||
}
|
||||
) { } mappings)
|
||||
);
|
||||
|
||||
groupToGroup = k: { gid }:
|
||||
groupToGroup =
|
||||
k:
|
||||
{ gid }:
|
||||
let
|
||||
members = groupMemberMap.${k} or [ ];
|
||||
in
|
||||
"${k}:x:${toString gid}:${lib.concatStringsSep "," members}";
|
||||
groupContents = (
|
||||
lib.concatStringsSep "\n"
|
||||
(lib.attrValues (lib.mapAttrs groupToGroup groups))
|
||||
);
|
||||
groupContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs groupToGroup groups)));
|
||||
|
||||
defaultNixConf = {
|
||||
sandbox = "false";
|
||||
|
@ -159,11 +153,17 @@ let
|
|||
trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
|
||||
};
|
||||
|
||||
nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v:
|
||||
let
|
||||
vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
|
||||
in
|
||||
"${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n";
|
||||
nixConfContents =
|
||||
(lib.concatStringsSep "\n" (
|
||||
lib.mapAttrsFlatten (
|
||||
n: v:
|
||||
let
|
||||
vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
|
||||
in
|
||||
"${n} = ${vStr}"
|
||||
) (defaultNixConf // nixConf)
|
||||
))
|
||||
+ "\n";
|
||||
|
||||
userHome = if uid == 0 then "/root" else "/home/${uname}";
|
||||
|
||||
|
@ -184,21 +184,29 @@ let
|
|||
manifest = pkgs.buildPackages.runCommand "manifest.nix" { } ''
|
||||
cat > $out <<EOF
|
||||
[
|
||||
${lib.concatStringsSep "\n" (builtins.map (drv: let
|
||||
outputs = drv.outputsToInstall or [ "out" ];
|
||||
in ''
|
||||
{
|
||||
${lib.concatStringsSep "\n" (builtins.map (output: ''
|
||||
${output} = { outPath = "${lib.getOutput output drv}"; };
|
||||
'') outputs)}
|
||||
outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ];
|
||||
name = "${drv.name}";
|
||||
outPath = "${drv}";
|
||||
system = "${drv.system}";
|
||||
type = "derivation";
|
||||
meta = { };
|
||||
}
|
||||
'') defaultPkgs)}
|
||||
${lib.concatStringsSep "\n" (
|
||||
builtins.map (
|
||||
drv:
|
||||
let
|
||||
outputs = drv.outputsToInstall or [ "out" ];
|
||||
in
|
||||
''
|
||||
{
|
||||
${lib.concatStringsSep "\n" (
|
||||
builtins.map (output: ''
|
||||
${output} = { outPath = "${lib.getOutput output drv}"; };
|
||||
'') outputs
|
||||
)}
|
||||
outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ];
|
||||
name = "${drv.name}";
|
||||
outPath = "${drv}";
|
||||
system = "${drv.system}";
|
||||
type = "derivation";
|
||||
meta = { };
|
||||
}
|
||||
''
|
||||
) defaultPkgs
|
||||
)}
|
||||
]
|
||||
EOF
|
||||
'';
|
||||
|
@ -207,16 +215,22 @@ let
|
|||
cp -a ${rootEnv}/* $out/
|
||||
ln -s ${manifest} $out/manifest.nix
|
||||
'';
|
||||
flake-registry-path = if (flake-registry == null) then
|
||||
null
|
||||
else if (builtins.readFileType (toString flake-registry)) == "directory" then
|
||||
"${flake-registry}/flake-registry.json"
|
||||
else
|
||||
flake-registry;
|
||||
flake-registry-path =
|
||||
if (flake-registry == null) then
|
||||
null
|
||||
else if (builtins.readFileType (toString flake-registry)) == "directory" then
|
||||
"${flake-registry}/flake-registry.json"
|
||||
else
|
||||
flake-registry;
|
||||
in
|
||||
pkgs.runCommand "base-system"
|
||||
{
|
||||
inherit passwdContents groupContents shadowContents nixConfContents;
|
||||
inherit
|
||||
passwdContents
|
||||
groupContents
|
||||
shadowContents
|
||||
nixConfContents
|
||||
;
|
||||
passAsFile = [
|
||||
"passwdContents"
|
||||
"groupContents"
|
||||
|
@ -225,67 +239,79 @@ let
|
|||
];
|
||||
allowSubstitutes = false;
|
||||
preferLocalBuild = true;
|
||||
} (''
|
||||
env
|
||||
set -x
|
||||
mkdir -p $out/etc
|
||||
}
|
||||
(
|
||||
''
|
||||
env
|
||||
set -x
|
||||
mkdir -p $out/etc
|
||||
|
||||
mkdir -p $out/etc/ssl/certs
|
||||
ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs
|
||||
mkdir -p $out/etc/ssl/certs
|
||||
ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs
|
||||
|
||||
cat $passwdContentsPath > $out/etc/passwd
|
||||
echo "" >> $out/etc/passwd
|
||||
cat $passwdContentsPath > $out/etc/passwd
|
||||
echo "" >> $out/etc/passwd
|
||||
|
||||
cat $groupContentsPath > $out/etc/group
|
||||
echo "" >> $out/etc/group
|
||||
cat $groupContentsPath > $out/etc/group
|
||||
echo "" >> $out/etc/group
|
||||
|
||||
cat $shadowContentsPath > $out/etc/shadow
|
||||
echo "" >> $out/etc/shadow
|
||||
cat $shadowContentsPath > $out/etc/shadow
|
||||
echo "" >> $out/etc/shadow
|
||||
|
||||
mkdir -p $out/usr
|
||||
ln -s /nix/var/nix/profiles/share $out/usr/
|
||||
mkdir -p $out/usr
|
||||
ln -s /nix/var/nix/profiles/share $out/usr/
|
||||
|
||||
mkdir -p $out/nix/var/nix/gcroots
|
||||
mkdir -p $out/nix/var/nix/gcroots
|
||||
|
||||
mkdir $out/tmp
|
||||
mkdir $out/tmp
|
||||
|
||||
mkdir -p $out/var/tmp
|
||||
mkdir -p $out/var/tmp
|
||||
|
||||
mkdir -p $out/etc/nix
|
||||
cat $nixConfContentsPath > $out/etc/nix/nix.conf
|
||||
mkdir -p $out/etc/nix
|
||||
cat $nixConfContentsPath > $out/etc/nix/nix.conf
|
||||
|
||||
mkdir -p $out${userHome}
|
||||
mkdir -p $out/nix/var/nix/profiles/per-user/${uname}
|
||||
mkdir -p $out${userHome}
|
||||
mkdir -p $out/nix/var/nix/profiles/per-user/${uname}
|
||||
|
||||
ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
|
||||
ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
|
||||
ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile
|
||||
ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
|
||||
ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
|
||||
ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile
|
||||
|
||||
ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link
|
||||
ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels
|
||||
ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link
|
||||
ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels
|
||||
|
||||
mkdir -p $out${userHome}/.nix-defexpr
|
||||
ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels
|
||||
echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels
|
||||
mkdir -p $out${userHome}/.nix-defexpr
|
||||
ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels
|
||||
echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels
|
||||
|
||||
mkdir -p $out/bin $out/usr/bin
|
||||
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
|
||||
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
|
||||
mkdir -p $out/bin $out/usr/bin
|
||||
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
|
||||
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
|
||||
|
||||
'' + (lib.optionalString (flake-registry-path != null) ''
|
||||
nixCacheDir="${userHome}/.cache/nix"
|
||||
mkdir -p $out$nixCacheDir
|
||||
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
|
||||
ln -s ${flake-registry-path} $out$globalFlakeRegistryPath
|
||||
mkdir -p $out/nix/var/nix/gcroots/auto
|
||||
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
|
||||
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
|
||||
''));
|
||||
''
|
||||
+ (lib.optionalString (flake-registry-path != null) ''
|
||||
nixCacheDir="${userHome}/.cache/nix"
|
||||
mkdir -p $out$nixCacheDir
|
||||
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
|
||||
ln -s ${flake-registry-path} $out$globalFlakeRegistryPath
|
||||
mkdir -p $out/nix/var/nix/gcroots/auto
|
||||
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
|
||||
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
|
||||
'')
|
||||
);
|
||||
|
||||
in
|
||||
pkgs.dockerTools.buildLayeredImageWithNixDb {
|
||||
|
||||
inherit name tag maxLayers uid gid uname gname;
|
||||
inherit
|
||||
name
|
||||
tag
|
||||
maxLayers
|
||||
uid
|
||||
gid
|
||||
uname
|
||||
gname
|
||||
;
|
||||
|
||||
contents = [ baseSystem ];
|
||||
|
||||
|
@ -305,15 +331,19 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
|
|||
User = "${toString uid}:${toString gid}";
|
||||
Env = [
|
||||
"USER=${uname}"
|
||||
"PATH=${lib.concatStringsSep ":" [
|
||||
"${userHome}/.nix-profile/bin"
|
||||
"/nix/var/nix/profiles/default/bin"
|
||||
"/nix/var/nix/profiles/default/sbin"
|
||||
]}"
|
||||
"MANPATH=${lib.concatStringsSep ":" [
|
||||
"${userHome}/.nix-profile/share/man"
|
||||
"/nix/var/nix/profiles/default/share/man"
|
||||
]}"
|
||||
"PATH=${
|
||||
lib.concatStringsSep ":" [
|
||||
"${userHome}/.nix-profile/bin"
|
||||
"/nix/var/nix/profiles/default/bin"
|
||||
"/nix/var/nix/profiles/default/sbin"
|
||||
]
|
||||
}"
|
||||
"MANPATH=${
|
||||
lib.concatStringsSep ":" [
|
||||
"${userHome}/.nix-profile/share/man"
|
||||
"/nix/var/nix/profiles/default/share/man"
|
||||
]
|
||||
}"
|
||||
"SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||
"GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||
"NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
|
||||
|
|
flake.lock (generated, 52 lines changed)
|
@ -36,6 +36,24 @@
|
|||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flake-utils": {
|
||||
"inputs": {
|
||||
"systems": "systems"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1710146030,
|
||||
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "flake-utils",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"git-hooks-nix": {
|
||||
"inputs": {
|
||||
"flake-compat": [],
|
||||
|
@ -61,6 +79,24 @@
|
|||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixfmt": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1736283758,
|
||||
"narHash": "sha256-hrKhUp2V2fk/dvzTTHFqvtOg000G1e+jyIam+D4XqhA=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixfmt",
|
||||
"rev": "8d4bd690c247004d90d8554f0b746b1231fe2436",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"repo": "nixfmt",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1734359947,
|
||||
|
@ -114,10 +150,26 @@
|
|||
"flake-compat": "flake-compat",
|
||||
"flake-parts": "flake-parts",
|
||||
"git-hooks-nix": "git-hooks-nix",
|
||||
"nixfmt": "nixfmt",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"nixpkgs-23-11": "nixpkgs-23-11",
|
||||
"nixpkgs-regression": "nixpkgs-regression"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
|
|
flake.nix (428 lines changed)
|
@ -5,7 +5,10 @@
|
|||
|
||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||
inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
|
||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||
inputs.flake-compat = {
|
||||
url = "github:edolstra/flake-compat";
|
||||
flake = false;
|
||||
};
|
||||
|
||||
# dev tooling
|
||||
inputs.flake-parts.url = "github:hercules-ci/flake-parts";
|
||||
|
@ -17,9 +20,15 @@
|
|||
# work around 7730 and https://github.com/NixOS/nix/issues/7807
|
||||
inputs.git-hooks-nix.inputs.flake-compat.follows = "";
|
||||
inputs.git-hooks-nix.inputs.gitignore.follows = "";
|
||||
inputs.nixfmt.url = "github:NixOS/nixfmt";
|
||||
|
||||
outputs = inputs@{ self, nixpkgs, nixpkgs-regression, ... }:
|
||||
|
||||
outputs =
|
||||
inputs@{
|
||||
self,
|
||||
nixpkgs,
|
||||
nixpkgs-regression,
|
||||
...
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (nixpkgs) lib;
|
||||
|
@ -27,9 +36,15 @@
|
|||
officialRelease = false;
|
||||
|
||||
linux32BitSystems = [ "i686-linux" ];
|
||||
linux64BitSystems = [ "x86_64-linux" "aarch64-linux" ];
|
||||
linux64BitSystems = [
|
||||
"x86_64-linux"
|
||||
"aarch64-linux"
|
||||
];
|
||||
linuxSystems = linux32BitSystems ++ linux64BitSystems;
|
||||
darwinSystems = [ "x86_64-darwin" "aarch64-darwin" ];
|
||||
darwinSystems = [
|
||||
"x86_64-darwin"
|
||||
"aarch64-darwin"
|
||||
];
|
||||
systems = linuxSystems ++ darwinSystems;
|
||||
|
||||
crossSystems = [
|
||||
|
@ -59,63 +74,66 @@
|
|||
(Provided that the names are unique.)
|
||||
|
||||
See https://nixos.org/manual/nixpkgs/stable/index.html#function-library-lib.attrsets.concatMapAttrs
|
||||
*/
|
||||
*/
|
||||
flatMapAttrs = attrs: f: lib.concatMapAttrs f attrs;
|
||||
|
||||
forAllSystems = lib.genAttrs systems;
|
||||
|
||||
forAllCrossSystems = lib.genAttrs crossSystems;
|
||||
|
||||
forAllStdenvs = f:
|
||||
lib.listToAttrs
|
||||
(map
|
||||
(stdenvName: {
|
||||
name = "${stdenvName}Packages";
|
||||
value = f stdenvName;
|
||||
})
|
||||
stdenvs);
|
||||
|
||||
forAllStdenvs = lib.genAttrs stdenvs;
|
||||
|
||||
# We don't apply flake-parts to the whole flake so that non-development attributes
|
||||
# load without fetching any development inputs.
|
||||
devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } {
|
||||
imports = [ ./maintainers/flake-module.nix ];
|
||||
systems = lib.subtractLists crossSystems systems;
|
||||
perSystem = { system, ... }: {
|
||||
_module.args.pkgs = nixpkgsFor.${system}.native;
|
||||
};
|
||||
perSystem =
|
||||
{ system, ... }:
|
||||
{
|
||||
_module.args.pkgs = nixpkgsFor.${system}.native;
|
||||
};
|
||||
};
|
||||
|
||||
# Memoize nixpkgs for different platforms for efficiency.
|
||||
nixpkgsFor = forAllSystems
|
||||
(system: let
|
||||
make-pkgs = crossSystem: stdenv: import nixpkgs {
|
||||
localSystem = {
|
||||
inherit system;
|
||||
};
|
||||
crossSystem = if crossSystem == null then null else {
|
||||
config = crossSystem;
|
||||
} // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
|
||||
useLLVM = true;
|
||||
};
|
||||
overlays = [
|
||||
(overlayFor (p: p.${stdenv}))
|
||||
];
|
||||
};
|
||||
stdenvs = forAllStdenvs (make-pkgs null);
|
||||
native = stdenvs.stdenvPackages;
|
||||
in {
|
||||
inherit stdenvs native;
|
||||
static = native.pkgsStatic;
|
||||
llvm = native.pkgsLLVM;
|
||||
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
|
||||
});
|
||||
nixpkgsFor = forAllSystems (
|
||||
system:
|
||||
let
|
||||
make-pkgs =
|
||||
crossSystem:
|
||||
forAllStdenvs (
|
||||
stdenv:
|
||||
import nixpkgs {
|
||||
localSystem = {
|
||||
inherit system;
|
||||
};
|
||||
crossSystem =
|
||||
if crossSystem == null then
|
||||
null
|
||||
else
|
||||
{
|
||||
config = crossSystem;
|
||||
}
|
||||
// lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
|
||||
useLLVM = true;
|
||||
};
|
||||
overlays = [
|
||||
(overlayFor (pkgs: pkgs.${stdenv}))
|
||||
];
|
||||
}
|
||||
);
|
||||
in
|
||||
rec {
|
||||
nativeForStdenv = make-pkgs null;
|
||||
crossForStdenv = forAllCrossSystems make-pkgs;
|
||||
# Alias for convenience
|
||||
native = nativeForStdenv.stdenv;
|
||||
cross = forAllCrossSystems (crossSystem: crossForStdenv.${crossSystem}.stdenv);
|
||||
}
|
||||
);
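# Not part of the upstream flake.nix — an illustrative note on how the memoized
# package sets are addressed after this refactor (paths inferred from the code above):
#   nixpkgsFor.${system}.native                          # host stdenv packages
#   nixpkgsFor.${system}.nativeForStdenv.<stdenvName>    # per-stdenv variants
#   nixpkgsFor.${system}.cross.<crossSystem>             # one entry per crossSystems element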
|
||||
|
||||
binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix {
|
||||
inherit nix;
|
||||
};
|
||||
|
||||
overlayFor = getStdenv: final: prev:
|
||||
overlayFor =
|
||||
getStdenv: final: prev:
|
||||
let
|
||||
stdenv = getStdenv final;
|
||||
in
|
||||
|
@@ -162,12 +180,19 @@
|
|||
# See https://github.com/NixOS/nixpkgs/pull/214409
|
||||
# Remove when fixed in this flake's nixpkgs
|
||||
pre-commit =
|
||||
if prev.stdenv.hostPlatform.system == "i686-linux"
|
||||
then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; })
|
||||
else prev.pre-commit;
|
||||
if prev.stdenv.hostPlatform.system == "i686-linux" then
|
||||
(prev.pre-commit.override (o: {
|
||||
dotnet-sdk = "";
|
||||
})).overridePythonAttrs
|
||||
(o: {
|
||||
doCheck = false;
|
||||
})
|
||||
else
|
||||
prev.pre-commit;
|
||||
};
|
||||
|
||||
in {
|
||||
in
|
||||
{
|
||||
# A Nixpkgs overlay that overrides the 'nix' and
|
||||
# 'nix-perl-bindings' packages.
|
||||
overlays.default = overlayFor (p: p.stdenv);
|
||||
|
@@ -175,7 +200,6 @@
|
|||
hydraJobs = import ./packaging/hydra.nix {
|
||||
inherit
|
||||
inputs
|
||||
binaryTarball
|
||||
forAllCrossSystems
|
||||
forAllSystems
|
||||
lib
|
||||
|
@@ -186,53 +210,69 @@
|
|||
;
|
||||
};
|
||||
|
||||
checks = forAllSystems (system: {
|
||||
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
|
||||
installTests = self.hydraJobs.installTests.${system};
|
||||
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
|
||||
rl-next =
|
||||
let pkgs = nixpkgsFor.${system}.native;
|
||||
in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
|
||||
LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out
|
||||
'';
|
||||
repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { };
|
||||
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
||||
dockerImage = self.hydraJobs.dockerImage.${system};
|
||||
} // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
|
||||
# Some perl dependencies are broken on i686-linux.
|
||||
# Since the support is only best-effort there, disable the perl
|
||||
# bindings
|
||||
perlBindings = self.hydraJobs.perlBindings.${system};
|
||||
}
|
||||
# Add "passthru" tests
|
||||
// flatMapAttrs ({
|
||||
"" = nixpkgsFor.${system}.native;
|
||||
} // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
|
||||
# TODO: enable static builds for darwin, blocked on:
|
||||
# https://github.com/NixOS/nixpkgs/issues/320448
|
||||
# TODO: disabled to speed up GHA CI.
|
||||
#"static-" = nixpkgsFor.${system}.static;
|
||||
})
|
||||
(nixpkgsPrefix: nixpkgs:
|
||||
flatMapAttrs nixpkgs.nixComponents
|
||||
(pkgName: pkg:
|
||||
flatMapAttrs pkg.tests or {}
|
||||
(testName: test: {
|
||||
"${nixpkgsPrefix}${pkgName}-${testName}" = test;
|
||||
})
|
||||
checks = forAllSystems (
|
||||
system:
|
||||
{
|
||||
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
|
||||
installTests = self.hydraJobs.installTests.${system};
|
||||
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
|
||||
rl-next =
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
in
|
||||
pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
|
||||
LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out
|
||||
'';
|
||||
repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { };
|
||||
}
|
||||
// (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
||||
dockerImage = self.hydraJobs.dockerImage.${system};
|
||||
}
|
||||
// (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
|
||||
# Some perl dependencies are broken on i686-linux.
|
||||
# Since the support is only best-effort there, disable the perl
|
||||
# bindings
|
||||
perlBindings = self.hydraJobs.perlBindings.${system};
|
||||
}
|
||||
# Add "passthru" tests
|
||||
//
|
||||
flatMapAttrs
|
||||
(
|
||||
{
|
||||
"" = nixpkgsFor.${system}.native;
|
||||
}
|
||||
// lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
|
||||
# TODO: enable static builds for darwin, blocked on:
|
||||
# https://github.com/NixOS/nixpkgs/issues/320448
|
||||
# TODO: disabled to speed up GHA CI.
|
||||
#"static-" = nixpkgsFor.${system}.native.pkgsStatic;
|
||||
}
|
||||
)
|
||||
// lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) {
|
||||
"${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests;
|
||||
}
|
||||
)
|
||||
// devFlake.checks.${system} or {}
|
||||
(
|
||||
nixpkgsPrefix: nixpkgs:
|
||||
flatMapAttrs nixpkgs.nixComponents (
|
||||
pkgName: pkg:
|
||||
flatMapAttrs pkg.tests or { } (
|
||||
testName: test: {
|
||||
"${nixpkgsPrefix}${pkgName}-${testName}" = test;
|
||||
}
|
||||
)
|
||||
)
|
||||
// lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) {
|
||||
"${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests;
|
||||
}
|
||||
)
|
||||
// devFlake.checks.${system} or { }
|
||||
);
|
||||
|
||||
packages = forAllSystems (system:
|
||||
{ # Here we put attributes that map 1:1 into packages.<system>, ie
|
||||
packages = forAllSystems (
|
||||
system:
|
||||
{
|
||||
# Here we put attributes that map 1:1 into packages.<system>, ie
|
||||
# for which we don't apply the full build matrix such as cross or static.
|
||||
inherit (nixpkgsFor.${system}.native)
|
||||
changelog-d;
|
||||
changelog-d
|
||||
;
|
||||
default = self.packages.${system}.nix;
|
||||
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
|
||||
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
||||
|
@@ -243,96 +283,144 @@
|
|||
nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs;
|
||||
}
|
||||
# We need to flatten recursive attribute sets of derivations to pass `flake check`.
|
||||
// flatMapAttrs
|
||||
{ # Components we'll iterate over in the upcoming lambda
|
||||
"nix-util" = { };
|
||||
"nix-util-c" = { };
|
||||
"nix-util-test-support" = { };
|
||||
"nix-util-tests" = { };
|
||||
//
|
||||
flatMapAttrs
|
||||
{
|
||||
# Components we'll iterate over in the upcoming lambda
|
||||
"nix-util" = { };
|
||||
"nix-util-c" = { };
|
||||
"nix-util-test-support" = { };
|
||||
"nix-util-tests" = { };
|
||||
|
||||
"nix-store" = { };
|
||||
"nix-store-c" = { };
|
||||
"nix-store-test-support" = { };
|
||||
"nix-store-tests" = { };
|
||||
"nix-store" = { };
|
||||
"nix-store-c" = { };
|
||||
"nix-store-test-support" = { };
|
||||
"nix-store-tests" = { };
|
||||
|
||||
"nix-fetchers" = { };
|
||||
"nix-fetchers-tests" = { };
|
||||
"nix-fetchers" = { };
|
||||
"nix-fetchers-tests" = { };
|
||||
|
||||
"nix-expr" = { };
|
||||
"nix-expr-c" = { };
|
||||
"nix-expr-test-support" = { };
|
||||
"nix-expr-tests" = { };
|
||||
"nix-expr" = { };
|
||||
"nix-expr-c" = { };
|
||||
"nix-expr-test-support" = { };
|
||||
"nix-expr-tests" = { };
|
||||
|
||||
"nix-flake" = { };
|
||||
"nix-flake-tests" = { };
|
||||
"nix-flake" = { };
|
||||
"nix-flake-tests" = { };
|
||||
|
||||
"nix-main" = { };
|
||||
"nix-main-c" = { };
|
||||
"nix-main" = { };
|
||||
"nix-main-c" = { };
|
||||
|
||||
"nix-cmd" = { };
|
||||
"nix-cmd" = { };
|
||||
|
||||
"nix-cli" = { };
|
||||
"nix-cli" = { };
|
||||
|
||||
"nix-everything" = { };
|
||||
"nix-everything" = { };
|
||||
|
||||
"nix-functional-tests" = { supportsCross = false; };
|
||||
"nix-functional-tests" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
|
||||
"nix-perl-bindings" = { supportsCross = false; };
|
||||
}
|
||||
(pkgName: { supportsCross ? true }: {
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
|
||||
"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName};
|
||||
"${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName};
|
||||
"nix-perl-bindings" = {
|
||||
supportsCross = false;
|
||||
};
|
||||
}
|
||||
// lib.optionalAttrs supportsCross (flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: {
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName};
|
||||
}))
|
||||
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (stdenvName: {}: {
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName};
|
||||
})
|
||||
)
|
||||
(
|
||||
pkgName:
|
||||
{
|
||||
supportsCross ? true,
|
||||
}:
|
||||
{
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
|
||||
"${pkgName}-static" = nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName};
|
||||
"${pkgName}-llvm" = nixpkgsFor.${system}.native.pkgsLLVM.nixComponents.${pkgName};
|
||||
}
|
||||
// lib.optionalAttrs supportsCross (
|
||||
flatMapAttrs (lib.genAttrs crossSystems (_: { })) (
|
||||
crossSystem:
|
||||
{ }:
|
||||
{
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName};
|
||||
}
|
||||
)
|
||||
)
|
||||
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (
|
||||
stdenvName:
|
||||
{ }:
|
||||
{
|
||||
# These attributes go right into `packages.<system>`.
|
||||
"${pkgName}-${stdenvName}" =
|
||||
nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.nixComponents.${pkgName};
|
||||
}
|
||||
)
|
||||
)
|
||||
// lib.optionalAttrs (builtins.elem system linux64BitSystems) {
|
||||
dockerImage =
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
image = import ./docker.nix { inherit pkgs; tag = pkgs.nix.version; };
|
||||
in
|
||||
pkgs.runCommand
|
||||
"docker-image-tarball-${pkgs.nix.version}"
|
||||
{ meta.description = "Docker image with Nix for ${system}"; }
|
||||
''
|
||||
mkdir -p $out/nix-support
|
||||
image=$out/image.tar.gz
|
||||
ln -s ${image} $image
|
||||
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
});
|
||||
dockerImage =
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
image = import ./docker.nix {
|
||||
inherit pkgs;
|
||||
tag = pkgs.nix.version;
|
||||
};
|
||||
in
|
||||
pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
|
||||
{ meta.description = "Docker image with Nix for ${system}"; }
|
||||
''
|
||||
mkdir -p $out/nix-support
|
||||
image=$out/image.tar.gz
|
||||
ln -s ${image} $image
|
||||
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
}
|
||||
);
|
||||
|
||||
devShells = let
|
||||
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; };
|
||||
prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; });
|
||||
in
|
||||
forAllSystems (system:
|
||||
prefixAttrs "native" (forAllStdenvs (stdenvName: makeShell {
|
||||
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages";
|
||||
})) //
|
||||
lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) (
|
||||
prefixAttrs "static" (forAllStdenvs (stdenvName: makeShell {
|
||||
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic;
|
||||
})) //
|
||||
prefixAttrs "llvm" (forAllStdenvs (stdenvName: makeShell {
|
||||
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM;
|
||||
})) //
|
||||
prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell {
|
||||
pkgs = nixpkgsFor.${system}.cross.${crossSystem};
|
||||
}))
|
||||
) //
|
||||
{
|
||||
default = self.devShells.${system}.native-stdenvPackages;
|
||||
devShells =
|
||||
let
|
||||
makeShell = import ./packaging/dev-shell.nix { inherit inputs lib devFlake; };
|
||||
prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; });
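# Not part of the upstream flake.nix — a minimal sketch of the helper above, with
# placeholder values: prefixAttrs "native" { stdenv = shellA; clangStdenv = shellB; }
# yields { "native-stdenv" = shellA; "native-clangStdenv" = shellB; }, which is how
# the per-stdenv shells below end up under names like `native-stdenv`.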
|
||||
in
|
||||
forAllSystems (
|
||||
system:
|
||||
prefixAttrs "native" (
|
||||
forAllStdenvs (
|
||||
stdenvName:
|
||||
makeShell {
|
||||
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName};
|
||||
}
|
||||
)
|
||||
)
|
||||
// lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) (
|
||||
prefixAttrs "static" (
|
||||
forAllStdenvs (
|
||||
stdenvName:
|
||||
makeShell {
|
||||
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsStatic;
|
||||
}
|
||||
)
|
||||
)
|
||||
// prefixAttrs "llvm" (
|
||||
forAllStdenvs (
|
||||
stdenvName:
|
||||
makeShell {
|
||||
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsLLVM;
|
||||
}
|
||||
)
|
||||
)
|
||||
// prefixAttrs "cross" (
|
||||
forAllCrossSystems (
|
||||
crossSystem:
|
||||
makeShell {
|
||||
pkgs = nixpkgsFor.${system}.cross.${crossSystem};
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
// {
|
||||
native = self.devShells.${system}.native-stdenv;
|
||||
default = self.devShells.${system}.native;
|
||||
}
|
||||
);
|
||||
};
|
||||
};
|
||||
}
|
||||
|
|
|
@@ -98,5 +98,39 @@
|
|||
"aks.kenji@protonmail.com": "a-kenji",
|
||||
"54070204+0x5a4@users.noreply.github.com": "0x5a4",
|
||||
"brian@bmcgee.ie": "brianmcgee",
|
||||
"squalus@squalus.net": "squalus"
|
||||
"squalus@squalus.net": "squalus",
|
||||
"kusold@users.noreply.github.com": "kusold",
|
||||
"37929162+mergify[bot]@users.noreply.github.com": "mergify[bot]",
|
||||
"ilja@mailbox.org": "suruaku",
|
||||
"and.ham95@gmail.com": "andrewhamon",
|
||||
"andy.hamon@discordapp.com": "andrewhamon",
|
||||
"siddarthkay@gmail.com": "siddarthkay",
|
||||
"apoelstra@wpsoftware.net": "apoelstra",
|
||||
"asmadeus@codewreck.org": "martinetd",
|
||||
"tristan.ross@midstall.com": "RossComputerGuy",
|
||||
"bryanlais@gmail.com": "bryango",
|
||||
"157494086+allrealmsoflife@users.noreply.github.com": "allrealmsoflife",
|
||||
"ConnorBaker01@gmail.com": "ConnorBaker",
|
||||
"me@momee.mt": "momeemt",
|
||||
"martin@push-f.com": "not-my-profile",
|
||||
"90870942+trueNAHO@users.noreply.github.com": "trueNAHO",
|
||||
"49885263+knotapun@users.noreply.github.com": "knotapun",
|
||||
"iam@lach.pw": "CertainLach",
|
||||
"elikowa@gmail.com": "elikoga",
|
||||
"greg.curtis@jetpack.io": "gcurtis",
|
||||
"git@sphalerite.org": "lheckemann",
|
||||
"mightyiampresence@gmail.com": "mightyiam",
|
||||
"spamfaenger@gmx.de": "dwt",
|
||||
"graham@grahamc.com": "grahamc",
|
||||
"wh0@users.noreply.github.com": "wh0",
|
||||
"25388474+mupdt@users.noreply.github.com": "mupdt",
|
||||
"anatoli@rainforce.org": "abitrolly",
|
||||
"h0nIg@users.noreply.github.com": "h0nIg",
|
||||
"CyberShadow@users.noreply.github.com": "CyberShadow",
|
||||
"gavinnjohn@gmail.com": "Pandapip1",
|
||||
"picnoir@alternativebit.fr": "picnoir",
|
||||
"140354451+myclevorname@users.noreply.github.com": "myclevorname",
|
||||
"bonniot@gmail.com": "dbdr",
|
||||
"jack@wilsdon.me": "jackwilsdon",
|
||||
"143541718+WxNzEMof@users.noreply.github.com": "the-sun-will-rise-tomorrow"
|
||||
}
|
|
@@ -86,5 +86,37 @@
|
|||
"Aleksanaa": "Aleksana",
|
||||
"YorikSar": "Yuriy Taraday",
|
||||
"kjeremy": "Jeremy Kolb",
|
||||
"artemist": "Artemis Tosini"
|
||||
"artemist": "Artemis Tosini",
|
||||
"the-sun-will-rise-tomorrow": null,
|
||||
"gcurtis": "Greg Curtis",
|
||||
"ConnorBaker": "Connor Baker",
|
||||
"abitrolly": "Anatoli Babenia",
|
||||
"allrealmsoflife": "Domagoj Mi\u0161kovi\u0107",
|
||||
"andrewhamon": "Andy Hamon",
|
||||
"picnoir": "F\u00e9lix",
|
||||
"dbdr": null,
|
||||
"suruaku": "Ilja",
|
||||
"jackwilsdon": "Jack Wilsdon",
|
||||
"mergify[bot]": null,
|
||||
"kusold": "Mike Kusold",
|
||||
"lheckemann": "Linus Heckemann",
|
||||
"h0nIg": null,
|
||||
"grahamc": "Graham Christensen",
|
||||
"not-my-profile": "Martin Fischer",
|
||||
"CyberShadow": "Vladimir Panteleev",
|
||||
"Pandapip1": "Gavin John",
|
||||
"RossComputerGuy": "Tristan Ross",
|
||||
"elikoga": null,
|
||||
"martinetd": "Dominique Martinet",
|
||||
"knotapun": "Parker Jones",
|
||||
"mightyiam": "Shahar \"Dawn\" Or",
|
||||
"siddarthkay": "Siddarth Kumar",
|
||||
"apoelstra": "Andrew Poelstra",
|
||||
"myclevorname": null,
|
||||
"CertainLach": "Yaroslav Bolyukin",
|
||||
"trueNAHO": "NAHO",
|
||||
"wh0": null,
|
||||
"mupdt": "Matej Urbas",
|
||||
"momeemt": "Mutsuha Asada",
|
||||
"dwt": "\u202erekc\u00e4H nitraM\u202e"
|
||||
}
|
|
@@ -144,12 +144,10 @@ release:
  Make a pull request and auto-merge it.

* Create a milestone for the next release, move all unresolved issues
  from the previous milestone, and close the previous milestone. Set
  the date for the next milestone 6 weeks from now.

* Create a backport label.

* Add the new backport label to `.mergify.yml`.

* Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of
  `rl-$VERSION.md`.

@@ -42,7 +42,7 @@ my $flakeUrl = $evalInfo->{flake};
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;

my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix.x86_64-linux", 'application/json'));
my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix-everything.x86_64-linux", 'application/json'));
#print Dumper($buildInfo);

my $releaseName = $buildInfo->{nixname};

@@ -91,7 +91,7 @@ sub getStorePath {
sub copyManual {
    my $manual;
    eval {
        $manual = getStorePath("build.nix.x86_64-linux", "doc");
        $manual = getStorePath("manual");
    };
    if ($@) {
        warn "$@";

@@ -240,12 +240,12 @@ if ($haveDocker) {
# Upload nix-fallback-paths.nix.
write_file("$tmpDir/fallback-paths.nix",
    "{\n" .
    " x86_64-linux = \"" . getStorePath("build.nix.x86_64-linux") . "\";\n" .
    " i686-linux = \"" . getStorePath("build.nix.i686-linux") . "\";\n" .
    " aarch64-linux = \"" . getStorePath("build.nix.aarch64-linux") . "\";\n" .
    " riscv64-linux = \"" . getStorePath("buildCross.nix.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" .
    " x86_64-darwin = \"" . getStorePath("build.nix.x86_64-darwin") . "\";\n" .
    " aarch64-darwin = \"" . getStorePath("build.nix.aarch64-darwin") . "\";\n" .
    " x86_64-linux = \"" . getStorePath("build.nix-everything.x86_64-linux") . "\";\n" .
    " i686-linux = \"" . getStorePath("build.nix-everything.i686-linux") . "\";\n" .
    " aarch64-linux = \"" . getStorePath("build.nix-everything.aarch64-linux") . "\";\n" .
    " riscv64-linux = \"" . getStorePath("buildCross.nix-everything.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" .
    " x86_64-darwin = \"" . getStorePath("build.nix-everything.x86_64-darwin") . "\";\n" .
    " aarch64-darwin = \"" . getStorePath("build.nix-everything.aarch64-darwin") . "\";\n" .
    "}\n");

# Upload release files to S3.
|
|
|
@@ -1,14 +1,18 @@
|
|||
{ runCommand
|
||||
, system
|
||||
, buildPackages
|
||||
, cacert
|
||||
, nix
|
||||
{
|
||||
runCommand,
|
||||
system,
|
||||
buildPackages,
|
||||
cacert,
|
||||
nix,
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
installerClosureInfo = buildPackages.closureInfo {
|
||||
rootPaths = [ nix cacert ];
|
||||
rootPaths = [
|
||||
nix
|
||||
cacert
|
||||
];
|
||||
};
|
||||
|
||||
inherit (nix) version;
|
||||
|
@@ -22,18 +26,18 @@ in
|
|||
|
||||
runCommand "nix-binary-tarball-${version}" env ''
|
||||
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
||||
cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
||||
substitute ${./install-nix-from-tarball.sh} $TMPDIR/install \
|
||||
cp ${../scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
||||
substitute ${../scripts/install-nix-from-tarball.sh} $TMPDIR/install \
|
||||
--subst-var-by nix ${nix} \
|
||||
--subst-var-by cacert ${cacert}
|
||||
|
||||
substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
||||
substitute ${../scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
||||
--subst-var-by nix ${nix} \
|
||||
--subst-var-by cacert ${cacert}
|
||||
substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
||||
substitute ${../scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
||||
--subst-var-by nix ${nix} \
|
||||
--subst-var-by cacert ${cacert}
|
||||
substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \
|
||||
substitute ${../scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
||||
--subst-var-by nix ${nix} \
|
||||
--subst-var-by cacert ${cacert}
|
||||
|
|
@@ -13,9 +13,11 @@ let
|
|||
|
||||
versionSuffix = lib.optionalString (!officialRelease) "pre";
|
||||
|
||||
fineVersionSuffix = lib.optionalString
|
||||
(!officialRelease)
|
||||
"pre${builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")}_${src.shortRev or "dirty"}";
|
||||
fineVersionSuffix =
|
||||
lib.optionalString (!officialRelease)
|
||||
"pre${
|
||||
builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")
|
||||
}_${src.shortRev or "dirty"}";
|
||||
|
||||
fineVersion = baseVersion + fineVersionSuffix;
|
||||
in
|
||||
|
@@ -54,7 +56,9 @@ in
|
|||
|
||||
nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; };
|
||||
|
||||
nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { version = fineVersion; };
|
||||
nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix {
|
||||
version = fineVersion;
|
||||
};
|
||||
|
||||
nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; };
|
||||
nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; };
|
||||
|
|
|
@@ -19,9 +19,7 @@ let
|
|||
|
||||
root = ../.;
|
||||
|
||||
stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64
|
||||
then darwinStdenv
|
||||
else prevStdenv;
|
||||
stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 then darwinStdenv else prevStdenv;
|
||||
|
||||
# Fix the following error with the default x86_64-darwin SDK:
|
||||
#
|
||||
|
@@ -38,11 +36,14 @@ let
|
|||
# Indirection for Nixpkgs to override when package.nix files are vendored
|
||||
filesetToSource = lib.fileset.toSource;
|
||||
|
||||
/** Given a set of layers, create a mkDerivation-like function */
|
||||
mkPackageBuilder = exts: userFn:
|
||||
stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
|
||||
/**
|
||||
Given a set of layers, create a mkDerivation-like function
|
||||
*/
|
||||
mkPackageBuilder =
|
||||
exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
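# Not part of the upstream file — a hedged sketch of how the builder is meant to be
# used (the pname below is illustrative):
#   mkPackageBuilder [ miscGoodPractice mesonLayer ] (finalAttrs: { pname = "example"; })
# composes the listed layers with lib.composeManyExtensions and applies them on top of
# the user-supplied function, so the result is an ordinary stdenv.mkDerivation call.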
|
||||
|
||||
localSourceLayer = finalAttrs: prevAttrs:
|
||||
localSourceLayer =
|
||||
finalAttrs: prevAttrs:
|
||||
let
|
||||
workDirPath =
|
||||
# Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has
|
||||
|
@@ -51,8 +52,13 @@ let
|
|||
prevAttrs.workDir;
|
||||
|
||||
workDirSubpath = lib.path.removePrefix root workDirPath;
|
||||
sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset;
|
||||
src = lib.fileset.toSource { fileset = sources; inherit root; };
|
||||
sources =
|
||||
assert prevAttrs.fileset._type == "fileset";
|
||||
prevAttrs.fileset;
|
||||
src = lib.fileset.toSource {
|
||||
fileset = sources;
|
||||
inherit root;
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
|
@@ -64,115 +70,129 @@ let
|
|||
workDir = null;
|
||||
};
|
||||
|
||||
mesonLayer = finalAttrs: prevAttrs:
|
||||
{
|
||||
# NOTE:
|
||||
# As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
|
||||
# `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
|
||||
# More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype.
|
||||
mesonBuildType = "release";
|
||||
# NOTE:
|
||||
# Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
|
||||
# guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
|
||||
# For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
|
||||
preConfigure = prevAttrs.preConfigure or "" + lib.optionalString (
|
||||
!stdenv.hostPlatform.isWindows
|
||||
# build failure
|
||||
&& !stdenv.hostPlatform.isStatic
|
||||
) ''
|
||||
case "$mesonBuildType" in
|
||||
release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
|
||||
*) appendToVar mesonFlags "-Db_lto=false" ;;
|
||||
esac
|
||||
'';
|
||||
nativeBuildInputs = [
|
||||
pkgs.buildPackages.meson
|
||||
pkgs.buildPackages.ninja
|
||||
] ++ prevAttrs.nativeBuildInputs or [];
|
||||
mesonCheckFlags = prevAttrs.mesonCheckFlags or [] ++ [
|
||||
"--print-errorlogs"
|
||||
];
|
||||
};
|
||||
mesonLayer = finalAttrs: prevAttrs: {
|
||||
# NOTE:
|
||||
# As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
|
||||
# `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
|
||||
# More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype.
|
||||
mesonBuildType = "release";
|
||||
# NOTE:
|
||||
# Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
|
||||
# guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
|
||||
# For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
|
||||
preConfigure =
|
||||
prevAttrs.preConfigure or ""
|
||||
+
|
||||
lib.optionalString
|
||||
(
|
||||
!stdenv.hostPlatform.isWindows
|
||||
# build failure
|
||||
&& !stdenv.hostPlatform.isStatic
|
||||
# LTO breaks exception handling on x86-64-darwin.
|
||||
&& stdenv.system != "x86_64-darwin"
|
||||
)
|
||||
''
|
||||
case "$mesonBuildType" in
|
||||
release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
|
||||
*) appendToVar mesonFlags "-Db_lto=false" ;;
|
||||
esac
|
||||
'';
|
||||
nativeBuildInputs = [
|
||||
pkgs.buildPackages.meson
|
||||
pkgs.buildPackages.ninja
|
||||
] ++ prevAttrs.nativeBuildInputs or [ ];
|
||||
mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [
|
||||
"--print-errorlogs"
|
||||
];
|
||||
};
|
||||
|
||||
mesonBuildLayer = finalAttrs: prevAttrs:
|
||||
{
|
||||
nativeBuildInputs = prevAttrs.nativeBuildInputs or [] ++ [
|
||||
pkgs.buildPackages.pkg-config
|
||||
];
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
env = prevAttrs.env or {}
|
||||
// lib.optionalAttrs
|
||||
(stdenv.isLinux
|
||||
&& !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
|
||||
&& !(stdenv.hostPlatform.useLLVM or false))
|
||||
{ LDFLAGS = "-fuse-ld=gold"; };
|
||||
};
|
||||
mesonBuildLayer = finalAttrs: prevAttrs: {
|
||||
nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [
|
||||
pkgs.buildPackages.pkg-config
|
||||
];
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
env =
|
||||
prevAttrs.env or { }
|
||||
// lib.optionalAttrs (
|
||||
stdenv.isLinux
|
||||
&& !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
|
||||
&& !(stdenv.hostPlatform.useLLVM or false)
|
||||
) { LDFLAGS = "-fuse-ld=gold"; };
|
||||
};
|
||||
|
||||
mesonLibraryLayer = finalAttrs: prevAttrs:
|
||||
{
|
||||
outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ];
|
||||
};
|
||||
mesonLibraryLayer = finalAttrs: prevAttrs: {
|
||||
outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ];
|
||||
};
|
||||
|
||||
# Work around weird `--as-needed` linker behavior with BSD, see
|
||||
# https://github.com/mesonbuild/meson/issues/3593
|
||||
bsdNoLinkAsNeeded = finalAttrs: prevAttrs:
|
||||
bsdNoLinkAsNeeded =
|
||||
finalAttrs: prevAttrs:
|
||||
lib.optionalAttrs stdenv.hostPlatform.isBSD {
|
||||
mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [];
|
||||
mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ];
|
||||
};
|
||||
|
||||
miscGoodPractice = finalAttrs: prevAttrs:
|
||||
{
|
||||
strictDeps = prevAttrs.strictDeps or true;
|
||||
enableParallelBuilding = true;
|
||||
};
|
||||
miscGoodPractice = finalAttrs: prevAttrs: {
|
||||
strictDeps = prevAttrs.strictDeps or true;
|
||||
enableParallelBuilding = true;
|
||||
};
|
||||
in
|
||||
scope: {
|
||||
inherit stdenv;
|
||||
|
||||
aws-sdk-cpp = (pkgs.aws-sdk-cpp.override {
|
||||
apis = [ "s3" "transfer" ];
|
||||
customMemoryManagement = false;
|
||||
}).overrideAttrs {
|
||||
# only a stripped down version is built, which takes a lot less resources
|
||||
# to build, so we don't need a "big-parallel" machine.
|
||||
requiredSystemFeatures = [ ];
|
||||
};
|
||||
aws-sdk-cpp =
|
||||
(pkgs.aws-sdk-cpp.override {
|
||||
apis = [
|
||||
"s3"
|
||||
"transfer"
|
||||
];
|
||||
customMemoryManagement = false;
|
||||
}).overrideAttrs
|
||||
{
|
||||
# only a stripped down version is built, which takes a lot less resources
|
||||
# to build, so we don't need a "big-parallel" machine.
|
||||
requiredSystemFeatures = [ ];
|
||||
};
|
||||
|
||||
boehmgc = pkgs.boehmgc.override {
|
||||
enableLargeConfig = true;
|
||||
};
|
||||
|
||||
# TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed.
|
||||
boost = (pkgs.boost.override {
|
||||
extraB2Args = [
|
||||
"--with-container"
|
||||
"--with-context"
|
||||
"--with-coroutine"
|
||||
];
|
||||
}).overrideAttrs (old: {
|
||||
# Need to remove `--with-*` to use `--with-libraries=...`
|
||||
buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase;
|
||||
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase;
|
||||
});
|
||||
boost =
|
||||
(pkgs.boost.override {
|
||||
extraB2Args = [
|
||||
"--with-container"
|
||||
"--with-context"
|
||||
"--with-coroutine"
|
||||
];
|
||||
}).overrideAttrs
|
||||
(old: {
|
||||
# Need to remove `--with-*` to use `--with-libraries=...`
|
||||
buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase;
|
||||
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase;
|
||||
});
|
||||
|
||||
libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
|
||||
cmakeFlags = attrs.cmakeFlags or []
|
||||
++ [ "-DUSE_SSH=exec" ];
|
||||
nativeBuildInputs = attrs.nativeBuildInputs or []
|
||||
cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ];
|
||||
nativeBuildInputs =
|
||||
attrs.nativeBuildInputs or [ ]
|
||||
# gitMinimal does not build on Windows. See packbuilder patch.
|
||||
++ lib.optionals (!stdenv.hostPlatform.isWindows) [
|
||||
# Needed for `git apply`; see `prePatch`
|
||||
pkgs.buildPackages.gitMinimal
|
||||
];
|
||||
# Only `git apply` can handle git binary patches
|
||||
prePatch = attrs.prePatch or ""
|
||||
prePatch =
|
||||
attrs.prePatch or ""
|
||||
+ lib.optionalString (!stdenv.hostPlatform.isWindows) ''
|
||||
patch() {
|
||||
git apply
|
||||
}
|
||||
'';
|
||||
patches = attrs.patches or []
|
||||
patches =
|
||||
attrs.patches or [ ]
|
||||
++ [
|
||||
./patches/libgit2-mempack-thin-packfile.patch
|
||||
]
|
||||
|
@@ -186,27 +206,24 @@ scope: {
|
|||
|
||||
inherit resolvePath filesetToSource;
|
||||
|
||||
mkMesonDerivation =
|
||||
mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
];
|
||||
mkMesonExecutable =
|
||||
mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
bsdNoLinkAsNeeded
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
mesonBuildLayer
|
||||
];
|
||||
mkMesonLibrary =
|
||||
mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
bsdNoLinkAsNeeded
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
mesonBuildLayer
|
||||
mesonLibraryLayer
|
||||
];
|
||||
mkMesonDerivation = mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
];
|
||||
mkMesonExecutable = mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
bsdNoLinkAsNeeded
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
mesonBuildLayer
|
||||
];
|
||||
mkMesonLibrary = mkPackageBuilder [
|
||||
miscGoodPractice
|
||||
bsdNoLinkAsNeeded
|
||||
localSourceLayer
|
||||
mesonLayer
|
||||
mesonBuildLayer
|
||||
mesonLibraryLayer
|
||||
];
|
||||
}
|
||||
|
|
|
@@ -1,128 +1,141 @@
|
|||
{ lib, devFlake }:
|
||||
{
|
||||
lib,
|
||||
inputs,
|
||||
devFlake,
|
||||
}:
|
||||
|
||||
{ pkgs }:
|
||||
|
||||
pkgs.nixComponents.nix-util.overrideAttrs (attrs:
|
||||
pkgs.nixComponents.nix-util.overrideAttrs (
|
||||
attrs:
|
||||
|
||||
let
|
||||
stdenv = pkgs.nixDependencies.stdenv;
|
||||
buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
||||
modular = devFlake.getSystem stdenv.buildPlatform.system;
|
||||
transformFlag = prefix: flag:
|
||||
assert builtins.isString flag;
|
||||
let
|
||||
rest = builtins.substring 2 (builtins.stringLength flag) flag;
|
||||
in
|
||||
let
|
||||
stdenv = pkgs.nixDependencies.stdenv;
|
||||
buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
||||
modular = devFlake.getSystem stdenv.buildPlatform.system;
|
||||
transformFlag =
|
||||
prefix: flag:
|
||||
assert builtins.isString flag;
|
||||
let
|
||||
rest = builtins.substring 2 (builtins.stringLength flag) flag;
|
||||
in
|
||||
"-D${prefix}:${rest}";
|
||||
havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix;
|
||||
ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags;
|
||||
in {
|
||||
pname = "shell-for-" + attrs.pname;
|
||||
havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix;
|
||||
ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags;
|
||||
in
|
||||
{
|
||||
pname = "shell-for-" + attrs.pname;
|
||||
|
||||
# Remove the version suffix to avoid unnecessary attempts to substitute in nix develop
|
||||
version = lib.fileContents ../.version;
|
||||
name = attrs.pname;
|
||||
# Remove the version suffix to avoid unnecessary attempts to substitute in nix develop
|
||||
version = lib.fileContents ../.version;
|
||||
name = attrs.pname;
|
||||
|
||||
installFlags = "sysconfdir=$(out)/etc";
|
||||
shellHook = ''
|
||||
PATH=$prefix/bin:$PATH
|
||||
unset PYTHONPATH
|
||||
export MANPATH=$out/share/man:$MANPATH
|
||||
installFlags = "sysconfdir=$(out)/etc";
|
||||
shellHook = ''
|
||||
PATH=$prefix/bin:$PATH
|
||||
unset PYTHONPATH
|
||||
export MANPATH=$out/share/man:$MANPATH
|
||||
|
||||
# Make bash completion work.
|
||||
XDG_DATA_DIRS+=:$out/share
|
||||
# Make bash completion work.
|
||||
XDG_DATA_DIRS+=:$out/share
|
||||
|
||||
# Make the default phases do the right thing.
|
||||
# FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase.
|
||||
# FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C <dir>' to ninja.
|
||||
# Make the default phases do the right thing.
|
||||
# FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase.
|
||||
# FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C <dir>' to ninja.
|
||||
|
||||
cdToBuildDir() {
|
||||
if [[ ! -e build.ninja ]]; then
|
||||
cd build
|
||||
fi
|
||||
}
|
||||
cdToBuildDir() {
|
||||
if [[ ! -e build.ninja ]]; then
|
||||
cd build
|
||||
fi
|
||||
}
|
||||
|
||||
configurePhase() {
|
||||
mesonConfigurePhase
|
||||
}
|
||||
configurePhase() {
|
||||
mesonConfigurePhase
|
||||
}
|
||||
|
||||
buildPhase() {
|
||||
cdToBuildDir
|
||||
ninjaBuildPhase
|
||||
}
|
||||
buildPhase() {
|
||||
cdToBuildDir
|
||||
ninjaBuildPhase
|
||||
}
|
||||
|
||||
checkPhase() {
|
||||
cdToBuildDir
|
||||
mesonCheckPhase
|
||||
}
|
||||
checkPhase() {
|
||||
cdToBuildDir
|
||||
mesonCheckPhase
|
||||
}
|
||||
|
||||
installPhase() {
|
||||
cdToBuildDir
|
||||
ninjaInstallPhase
|
||||
}
|
||||
'';
|
||||
installPhase() {
|
||||
cdToBuildDir
|
||||
ninjaInstallPhase
|
||||
}
|
||||
'';
|
||||
|
||||
# We use this shell with the local checkout, not unpackPhase.
|
||||
src = null;
|
||||
# We use this shell with the local checkout, not unpackPhase.
|
||||
src = null;
|
||||
|
||||
env = {
|
||||
# Needed for Meson to find Boost.
|
||||
# https://github.com/NixOS/nixpkgs/issues/86131.
|
||||
BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include";
|
||||
BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib";
|
||||
# For `make format`, to work without installing pre-commit
|
||||
_NIX_PRE_COMMIT_HOOKS_CONFIG =
|
||||
"${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}";
|
||||
};
|
||||
env = {
|
||||
# Needed for Meson to find Boost.
|
||||
# https://github.com/NixOS/nixpkgs/issues/86131.
|
||||
BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include";
|
||||
BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib";
|
||||
# For `make format`, to work without installing pre-commit
|
||||
_NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml"
|
||||
modular.pre-commit.settings.rawConfig
|
||||
}";
|
||||
};
|
||||
|
||||
mesonFlags =
|
||||
map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags)
|
||||
++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags)
|
||||
++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags)
|
||||
++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags))
|
||||
++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags)
|
||||
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags)
|
||||
;
|
||||
mesonFlags =
|
||||
map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags)
|
||||
++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags)
|
||||
++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags)
|
||||
++ lib.optionals havePerl (
|
||||
map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)
|
||||
)
|
||||
++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags)
|
||||
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags);
|
||||
|
||||
nativeBuildInputs = attrs.nativeBuildInputs or []
|
||||
++ pkgs.nixComponents.nix-util.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-store.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-expr.nativeBuildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs
|
||||
++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs
|
||||
++ lib.optional
|
||||
(!buildCanExecuteHost
|
||||
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
||||
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
|
||||
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
|
||||
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages))
|
||||
pkgs.buildPackages.mesonEmulatorHook
|
||||
++ [
|
||||
pkgs.buildPackages.cmake
|
||||
pkgs.buildPackages.shellcheck
|
||||
pkgs.buildPackages.changelog-d
|
||||
modular.pre-commit.settings.package
|
||||
(pkgs.writeScriptBin "pre-commit-hooks-install"
|
||||
modular.pre-commit.settings.installationScript)
|
||||
]
|
||||
# TODO: Remove the darwin check once
|
||||
# https://github.com/NixOS/nixpkgs/pull/291814 is available
|
||||
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
|
||||
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (lib.hiPrio pkgs.buildPackages.clang-tools);
|
||||
nativeBuildInputs =
|
||||
attrs.nativeBuildInputs or [ ]
|
||||
++ pkgs.nixComponents.nix-util.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-store.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-expr.nativeBuildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs
|
||||
++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs
|
||||
++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs
|
||||
++ lib.optional (
|
||||
!buildCanExecuteHost
|
||||
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
|
||||
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
|
||||
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
|
||||
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
|
||||
) pkgs.buildPackages.mesonEmulatorHook
|
||||
++ [
|
||||
pkgs.buildPackages.cmake
|
||||
pkgs.buildPackages.shellcheck
|
||||
pkgs.buildPackages.changelog-d
|
||||
modular.pre-commit.settings.package
|
||||
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
|
||||
inputs.nixfmt.packages.${pkgs.hostPlatform.system}.default
|
||||
]
|
||||
# TODO: Remove the darwin check once
|
||||
# https://github.com/NixOS/nixpkgs/pull/291814 is available
|
||||
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
|
||||
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (
|
||||
lib.hiPrio pkgs.buildPackages.clang-tools
|
||||
);
|
||||
|
||||
buildInputs = attrs.buildInputs or []
|
||||
++ pkgs.nixComponents.nix-util.buildInputs
|
||||
++ pkgs.nixComponents.nix-store.buildInputs
|
||||
++ pkgs.nixComponents.nix-store-tests.externalBuildInputs
|
||||
++ pkgs.nixComponents.nix-fetchers.buildInputs
|
||||
++ pkgs.nixComponents.nix-expr.buildInputs
|
||||
++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs
|
||||
++ pkgs.nixComponents.nix-cmd.buildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs
|
||||
++ lib.optional havePerl pkgs.perl
|
||||
;
|
||||
})
|
||||
buildInputs =
|
||||
attrs.buildInputs or [ ]
|
||||
++ pkgs.nixComponents.nix-util.buildInputs
|
||||
++ pkgs.nixComponents.nix-store.buildInputs
|
||||
++ pkgs.nixComponents.nix-store-tests.externalBuildInputs
|
||||
++ pkgs.nixComponents.nix-fetchers.buildInputs
|
||||
++ pkgs.nixComponents.nix-expr.buildInputs
|
||||
++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs
|
||||
++ pkgs.nixComponents.nix-cmd.buildInputs
|
||||
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs
|
||||
++ lib.optional havePerl pkgs.perl;
|
||||
}
|
||||
)
|
||||
|
|
|
@@ -42,27 +42,31 @@
|
|||
}:
|
||||
|
||||
let
|
||||
libs = {
|
||||
inherit
|
||||
nix-util
|
||||
nix-util-c
|
||||
nix-store
|
||||
nix-store-c
|
||||
nix-fetchers
|
||||
nix-expr
|
||||
nix-expr-c
|
||||
nix-flake
|
||||
nix-flake-c
|
||||
nix-main
|
||||
nix-main-c
|
||||
nix-cmd
|
||||
;
|
||||
} // lib.optionalAttrs (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) {
|
||||
# Currently fails in static build
|
||||
inherit
|
||||
nix-perl-bindings
|
||||
;
|
||||
};
|
||||
libs =
|
||||
{
|
||||
inherit
|
||||
nix-util
|
||||
nix-util-c
|
||||
nix-store
|
||||
nix-store-c
|
||||
nix-fetchers
|
||||
nix-expr
|
||||
nix-expr-c
|
||||
nix-flake
|
||||
nix-flake-c
|
||||
nix-main
|
||||
nix-main-c
|
||||
nix-cmd
|
||||
;
|
||||
}
|
||||
// lib.optionalAttrs
|
||||
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
|
||||
{
|
||||
# Currently fails in static build
|
||||
inherit
|
||||
nix-perl-bindings
|
||||
;
|
||||
};
|
||||
|
||||
dev = stdenv.mkDerivation (finalAttrs: {
|
||||
name = "nix-${nix-cli.version}-dev";
|
||||
|
@@ -77,10 +81,9 @@ let
|
|||
'';
|
||||
passthru = {
|
||||
tests = {
|
||||
pkg-config =
|
||||
testers.hasPkgConfigModules {
|
||||
package = finalAttrs.finalPackage;
|
||||
};
|
||||
pkg-config = testers.hasPkgConfigModules {
|
||||
package = finalAttrs.finalPackage;
|
||||
};
|
||||
};
|
||||
|
||||
# If we were to fully emulate output selection here, we'd confuse the Nix CLIs,
|
||||
|
@@ -123,70 +126,84 @@ in
|
|||
];
|
||||
|
||||
meta.mainProgram = "nix";
|
||||
}).overrideAttrs (finalAttrs: prevAttrs: {
|
||||
doCheck = true;
|
||||
doInstallCheck = true;
|
||||
}).overrideAttrs
|
||||
(
|
||||
finalAttrs: prevAttrs: {
|
||||
doCheck = true;
|
||||
doInstallCheck = true;
|
||||
|
||||
checkInputs = [
|
||||
# Make sure the unit tests have passed
|
||||
nix-util-tests.tests.run
|
||||
nix-store-tests.tests.run
|
||||
nix-expr-tests.tests.run
|
||||
nix-fetchers-tests.tests.run
|
||||
nix-flake-tests.tests.run
|
||||
checkInputs =
|
||||
[
|
||||
# Make sure the unit tests have passed
|
||||
nix-util-tests.tests.run
|
||||
nix-store-tests.tests.run
|
||||
nix-expr-tests.tests.run
|
||||
nix-fetchers-tests.tests.run
|
||||
nix-flake-tests.tests.run
|
||||
|
||||
# Make sure the functional tests have passed
|
||||
nix-functional-tests
|
||||
# Make sure the functional tests have passed
|
||||
nix-functional-tests
|
||||
|
||||
# dev bundle is ok
|
||||
# (checkInputs must be empty paths??)
|
||||
(runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out")
|
||||
] ++ lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) [
|
||||
# Perl currently fails in static build
|
||||
# TODO: Split out tests into a separate derivation?
|
||||
nix-perl-bindings
|
||||
];
|
||||
passthru = prevAttrs.passthru // {
|
||||
inherit (nix-cli) version;
|
||||
# dev bundle is ok
|
||||
# (checkInputs must be empty paths??)
|
||||
(runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out")
|
||||
]
|
||||
++ lib.optionals
|
||||
(!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
|
||||
[
|
||||
# Perl currently fails in static build
|
||||
# TODO: Split out tests into a separate derivation?
|
||||
nix-perl-bindings
|
||||
];
|
||||
passthru = prevAttrs.passthru // {
|
||||
inherit (nix-cli) version;
|
||||
|
||||
/**
|
||||
These are the libraries that are part of the Nix project. They are used
|
||||
by the Nix CLI and other tools.
|
||||
/**
|
||||
These are the libraries that are part of the Nix project. They are used
|
||||
by the Nix CLI and other tools.
|
||||
|
||||
If you need to use these libraries in your project, we recommend to use
|
||||
the `-c` C API libraries exclusively, if possible.
|
||||
If you need to use these libraries in your project, we recommend to use
|
||||
the `-c` C API libraries exclusively, if possible.
|
||||
|
||||
We also recommend that you build the complete package to ensure that the unit tests pass.
|
||||
You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call:
|
||||
We also recommend that you build the complete package to ensure that the unit tests pass.
|
||||
You could do this in CI, or by passing it in an unused environment variable. e.g in a `mkDerivation` call:
|
||||
|
||||
```nix
|
||||
buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ];
|
||||
# Make sure the nix libs we use are ok
|
||||
unusedInputsForTests = [ nix ];
|
||||
disallowedReferences = nix.all;
|
||||
```
|
||||
*/
|
||||
inherit libs;
|
||||
```nix
|
||||
buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ];
|
||||
# Make sure the nix libs we use are ok
|
||||
unusedInputsForTests = [ nix ];
|
||||
disallowedReferences = nix.all;
|
||||
```
|
||||
*/
|
||||
inherit libs;
|
||||
|
||||
tests = prevAttrs.passthru.tests or {} // {
|
||||
# TODO: create a proper fixpoint and:
|
||||
# pkg-config =
|
||||
# testers.hasPkgConfigModules {
|
||||
# package = finalPackage;
|
||||
# };
|
||||
};
|
||||
tests = prevAttrs.passthru.tests or { } // {
|
||||
# TODO: create a proper fixpoint and:
|
||||
# pkg-config =
|
||||
# testers.hasPkgConfigModules {
|
||||
# package = finalPackage;
|
||||
# };
|
||||
};
|
||||
|
||||
/**
|
||||
A derivation referencing the `dev` outputs of the Nix libraries.
|
||||
*/
|
||||
inherit dev;
|
||||
inherit devdoc;
|
||||
doc = nix-manual;
|
||||
outputs = [ "out" "dev" "devdoc" "doc" ];
|
||||
all = lib.attrValues (lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName}));
|
||||
};
|
||||
meta = prevAttrs.meta // {
|
||||
description = "The Nix package manager";
|
||||
pkgConfigModules = dev.meta.pkgConfigModules;
|
||||
};
|
||||
})
|
||||
/**
|
||||
A derivation referencing the `dev` outputs of the Nix libraries.
|
||||
*/
|
||||
inherit dev;
|
||||
inherit devdoc;
|
||||
doc = nix-manual;
|
||||
outputs = [
|
||||
"out"
|
||||
"dev"
|
||||
"devdoc"
|
||||
"doc"
|
||||
];
|
||||
all = lib.attrValues (
|
||||
lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName})
|
||||
);
|
||||
};
|
||||
meta = prevAttrs.meta // {
|
||||
description = "The Nix package manager";
|
||||
pkgConfigModules = dev.meta.pkgConfigModules;
|
||||
};
|
||||
}
|
||||
)
|
||||
|
|
|
@@ -1,22 +1,24 @@
|
|||
{ inputs
|
||||
, binaryTarball
|
||||
, forAllCrossSystems
|
||||
, forAllSystems
|
||||
, lib
|
||||
, linux64BitSystems
|
||||
, nixpkgsFor
|
||||
, self
|
||||
, officialRelease
|
||||
{
|
||||
inputs,
|
||||
forAllCrossSystems,
|
||||
forAllSystems,
|
||||
lib,
|
||||
linux64BitSystems,
|
||||
nixpkgsFor,
|
||||
self,
|
||||
officialRelease,
|
||||
}:
|
||||
let
|
||||
inherit (inputs) nixpkgs nixpkgs-regression;
|
||||
|
||||
installScriptFor = tarballs:
|
||||
nixpkgsFor.x86_64-linux.native.callPackage ../scripts/installer.nix {
|
||||
installScriptFor =
|
||||
tarballs:
|
||||
nixpkgsFor.x86_64-linux.native.callPackage ./installer {
|
||||
inherit tarballs;
|
||||
};
|
||||
|
||||
testNixVersions = pkgs: daemon:
|
||||
testNixVersions =
|
||||
pkgs: daemon:
|
||||
pkgs.nixComponents.nix-functional-tests.override {
|
||||
pname = "nix-daemon-compat-tests";
|
||||
version = "${pkgs.nix.version}-with-daemon-${daemon.version}";
|
||||
|
@@ -54,44 +56,72 @@ let
|
|||
in
|
||||
{
|
||||
# Binary package for various platforms.
|
||||
build = forAllPackages (pkgName:
|
||||
forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName}));
|
||||
build = forAllPackages (
|
||||
pkgName: forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})
|
||||
);
|
||||
|
||||
shellInputs = removeAttrs
|
||||
(forAllSystems (system: self.devShells.${system}.default.inputDerivation))
|
||||
[ "i686-linux" ];
|
||||
shellInputs = removeAttrs (forAllSystems (
|
||||
system: self.devShells.${system}.default.inputDerivation
|
||||
)) [ "i686-linux" ];
|
||||
|
||||
buildStatic = forAllPackages (pkgName:
|
||||
lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName}));
|
||||
buildStatic = forAllPackages (
|
||||
pkgName:
|
||||
lib.genAttrs linux64BitSystems (
|
||||
system: nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName}
|
||||
)
|
||||
);
|
||||
|
||||
buildCross = forAllPackages (pkgName:
|
||||
buildCross = forAllPackages (
|
||||
pkgName:
|
||||
# Hack to avoid non-evaling package
|
||||
(if pkgName == "nix-functional-tests" then lib.flip builtins.removeAttrs ["x86_64-w64-mingw32"] else lib.id)
|
||||
(forAllCrossSystems (crossSystem:
|
||||
lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}))));
|
||||
(
|
||||
if pkgName == "nix-functional-tests" then
|
||||
lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ]
|
||||
else
|
||||
lib.id
|
||||
)
|
||||
(
|
||||
forAllCrossSystems (
|
||||
crossSystem:
|
||||
lib.genAttrs [ "x86_64-linux" ] (
|
||||
system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
buildNoGc = let
|
||||
components = forAllSystems (system:
|
||||
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: {
|
||||
nix-expr = super.nix-expr.override { enableGC = false; };
|
||||
})
|
||||
);
|
||||
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
|
||||
buildNoGc =
|
||||
let
|
||||
components = forAllSystems (
|
||||
system:
|
||||
nixpkgsFor.${system}.native.nixComponents.overrideScope (
|
||||
self: super: {
|
||||
nix-expr = super.nix-expr.override { enableGC = false; };
|
||||
}
|
||||
)
|
||||
);
|
||||
in
|
||||
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
|
||||
|
||||
buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli);
|
||||
|
||||
# Toggles some settings for better coverage. Windows needs these
|
||||
# library combinations, and Debian build Nix with GNU readline too.
|
||||
buildReadlineNoMarkdown = let
|
||||
components = forAllSystems (system:
|
||||
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: {
|
||||
nix-cmd = super.nix-cmd.override {
|
||||
enableMarkdown = false;
|
||||
readlineFlavor = "readline";
|
||||
};
|
||||
})
|
||||
);
|
||||
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
|
||||
buildReadlineNoMarkdown =
|
||||
let
|
||||
components = forAllSystems (
|
||||
system:
|
||||
nixpkgsFor.${system}.native.nixComponents.overrideScope (
|
||||
self: super: {
|
||||
nix-cmd = super.nix-cmd.override {
|
||||
enableMarkdown = false;
|
||||
readlineFlavor = "readline";
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
in
|
||||
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
|
||||
|
||||
# Perl bindings for various platforms.
|
||||
perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings);
|
||||
|
@ -99,13 +129,16 @@ in
|
|||
# Binary tarball for various platforms, containing a Nix store
|
||||
# with the closure of 'nix' package, and the second half of
|
||||
# the installation script.
|
||||
binaryTarball = forAllSystems (system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native);
|
||||
binaryTarball = forAllSystems (
|
||||
system: nixpkgsFor.${system}.native.callPackage ./binary-tarball.nix { }
|
||||
);
|
||||
|
||||
binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (system:
|
||||
forAllCrossSystems (crossSystem:
|
||||
binaryTarball
|
||||
nixpkgsFor.${system}.cross.${crossSystem}.nix
|
||||
nixpkgsFor.${system}.cross.${crossSystem}));
|
||||
binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (
|
||||
system:
|
||||
forAllCrossSystems (
|
||||
crossSystem: nixpkgsFor.${system}.cross.${crossSystem}.callPackage ./binary-tarball.nix { }
|
||||
)
|
||||
);
|
||||
|
||||
# The first half of the installation script. This is uploaded
|
||||
# to https://nixos.org/nix/install. It downloads the binary
|
||||
|
@ -124,9 +157,12 @@ in
|
|||
self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu"
|
||||
];
|
||||
|
||||
installerScriptForGHA = forAllSystems (system: nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix {
|
||||
tarballs = [ self.hydraJobs.binaryTarball.${system} ];
|
||||
});
|
||||
installerScriptForGHA = forAllSystems (
|
||||
system:
|
||||
nixpkgsFor.${system}.native.callPackage ./installer {
|
||||
tarballs = [ self.hydraJobs.binaryTarball.${system} ];
|
||||
}
|
||||
);
|
||||
|
||||
# docker image with Nix inside
|
||||
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
|
||||
|
@ -147,16 +183,20 @@ in
|
|||
external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs;
|
||||
|
||||
# System tests.
|
||||
tests = import ../tests/nixos { inherit lib nixpkgs nixpkgsFor self; } // {
|
||||
tests =
|
||||
import ../tests/nixos {
|
||||
inherit lib nixpkgs nixpkgsFor;
|
||||
inherit (self.inputs) nixpkgs-23-11;
|
||||
}
|
||||
// {
|
||||
|
||||
# Make sure that nix-env still produces the exact same result
|
||||
# on a particular version of Nixpkgs.
|
||||
evalNixpkgs =
|
||||
let
|
||||
inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
|
||||
in
|
||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||
''
|
||||
# Make sure that nix-env still produces the exact same result
|
||||
# on a particular version of Nixpkgs.
|
||||
evalNixpkgs =
|
||||
let
|
||||
inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
|
||||
in
|
||||
runCommand "eval-nixos" { buildInputs = [ nix ]; } ''
|
||||
type -p nix-env
|
||||
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||
(
|
||||
|
@ -167,36 +207,36 @@ in
|
|||
mkdir $out
|
||||
'';
|
||||
|
||||
nixpkgsLibTests =
|
||||
forAllSystems (system:
|
||||
import (nixpkgs + "/lib/tests/test-with-nix.nix")
|
||||
{
|
||||
lib = nixpkgsFor.${system}.native.lib;
|
||||
nix = self.packages.${system}.nix-cli;
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
}
|
||||
nixpkgsLibTests = forAllSystems (
|
||||
system:
|
||||
import (nixpkgs + "/lib/tests/test-with-nix.nix") {
|
||||
lib = nixpkgsFor.${system}.native.lib;
|
||||
nix = self.packages.${system}.nix-cli;
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
}
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
|
||||
pkgs = nixpkgsFor.x86_64-linux.native;
|
||||
nixpkgs = nixpkgs-regression;
|
||||
};
|
||||
|
||||
installTests = forAllSystems (system:
|
||||
let pkgs = nixpkgsFor.${system}.native; in
|
||||
pkgs.runCommand "install-tests"
|
||||
{
|
||||
againstSelf = testNixVersions pkgs pkgs.nix;
|
||||
againstCurrentLatest =
|
||||
# FIXME: temporarily disable this on macOS because of #3605.
|
||||
if system == "x86_64-linux"
|
||||
then testNixVersions pkgs pkgs.nixVersions.latest
|
||||
else null;
|
||||
# Disabled because the latest stable version doesn't handle
|
||||
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
|
||||
# againstLatestStable = testNixVersions pkgs pkgs.nixStable;
|
||||
} "touch $out");
|
||||
installTests = forAllSystems (
|
||||
system:
|
||||
let
|
||||
pkgs = nixpkgsFor.${system}.native;
|
||||
in
|
||||
pkgs.runCommand "install-tests" {
|
||||
againstSelf = testNixVersions pkgs pkgs.nix;
|
||||
againstCurrentLatest =
|
||||
# FIXME: temporarily disable this on macOS because of #3605.
|
||||
if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null;
|
||||
# Disabled because the latest stable version doesn't handle
|
||||
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
|
||||
# againstLatestStable = testNixVersions pkgs pkgs.nixStable;
|
||||
} "touch $out"
|
||||
);
|
||||
|
||||
installerTests = import ../tests/installer {
|
||||
binaryTarballs = self.hydraJobs.binaryTarball;
|
||||
|
|
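The hydraJobs hunks above are almost entirely a nixfmt reindentation; the one structural point worth keeping in mind is the nesting order of the helpers: `forAllPackages` wraps `forAllSystems`, so jobs end up addressed as `build.<package>.<system>`. A minimal sketch of that shape, assuming the helpers are plain `lib.genAttrs` wrappers (the system and package lists below are illustrative, not the flake's real ones):

```nix
let
  lib = (import <nixpkgs> { }).lib;
  forAllSystems = lib.genAttrs [ "x86_64-linux" "aarch64-darwin" ];
  forAllPackages = lib.genAttrs [ "nix-cli" "nix-store" ];
  # Same nesting as `build` above: package name first, then system.
  build = forAllPackages (pkgName: forAllSystems (system: "${pkgName} built for ${system}"));
in
build.nix-cli.x86_64-linux # => "nix-cli built for x86_64-linux"
```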
42
packaging/installer/default.nix
Normal file
|
@ -0,0 +1,42 @@
|
|||
{
|
||||
lib,
|
||||
runCommand,
|
||||
nix,
|
||||
tarballs,
|
||||
}:
|
||||
|
||||
runCommand "installer-script"
|
||||
{
|
||||
buildInputs = [ nix ];
|
||||
}
|
||||
''
|
||||
mkdir -p $out/nix-support
|
||||
|
||||
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
|
||||
tarballPath() {
|
||||
# Remove the store prefix
|
||||
local path=''${1#${builtins.storeDir}/}
|
||||
# Get the path relative to the derivation root
|
||||
local rest=''${path#*/}
|
||||
# Get the derivation hash
|
||||
local drvHash=''${path%%-*}
|
||||
echo "$drvHash/$rest"
|
||||
}
|
||||
|
||||
substitute ${./install.in} $out/install \
|
||||
${
|
||||
lib.concatMapStrings (
|
||||
tarball:
|
||||
let
|
||||
inherit (tarball.stdenv.hostPlatform) system;
|
||||
in
|
||||
''
|
||||
\
|
||||
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
|
||||
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
|
||||
''
|
||||
) tarballs
|
||||
} --replace '@nixVersion@' ${nix.version}
|
||||
|
||||
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
||||
''
|
|
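The new `packaging/installer/default.nix` shown above is an ordinary `callPackage`-style expression. A hedged sketch of a standalone invocation (the relative path and the empty `tarballs` list are placeholders; nothing in the repo calls it this way):

```nix
let
  pkgs = import <nixpkgs> { };
in
pkgs.callPackage ./packaging/installer {
  # Each element is expected to be a binary-tarball derivation; the
  # concatMapStrings loop reads its stdenv.hostPlatform.system to fill the
  # @tarballHash_<system>@ / @tarballPath_<system>@ placeholders.
  # With an empty list only the @nixVersion@ substitution remains.
  tarballs = [ ];
}
```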
@ -1,36 +0,0 @@
|
|||
{ lib
|
||||
, runCommand
|
||||
, nix
|
||||
, tarballs
|
||||
}:
|
||||
|
||||
runCommand "installer-script" {
|
||||
buildInputs = [ nix ];
|
||||
} ''
|
||||
mkdir -p $out/nix-support
|
||||
|
||||
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
|
||||
tarballPath() {
|
||||
# Remove the store prefix
|
||||
local path=''${1#${builtins.storeDir}/}
|
||||
# Get the path relative to the derivation root
|
||||
local rest=''${path#*/}
|
||||
# Get the derivation hash
|
||||
local drvHash=''${path%%-*}
|
||||
echo "$drvHash/$rest"
|
||||
}
|
||||
|
||||
substitute ${./install.in} $out/install \
|
||||
${lib.concatMapStrings
|
||||
(tarball: let
|
||||
inherit (tarball.stdenv.hostPlatform) system;
|
||||
in '' \
|
||||
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
|
||||
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
|
||||
''
|
||||
)
|
||||
tarballs
|
||||
} --replace '@nixVersion@' ${nix.version}
|
||||
|
||||
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
||||
''
|
|
@ -1,11 +1,12 @@
|
|||
{ lib
|
||||
, mkMesonDerivation
|
||||
{
|
||||
lib,
|
||||
mkMesonDerivation,
|
||||
|
||||
, doxygen
|
||||
doxygen,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -39,11 +40,10 @@ mkMesonDerivation (finalAttrs: {
|
|||
doxygen
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
preConfigure = ''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p ''${!outputDoc}/nix-support
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
{ lib
|
||||
, mkMesonDerivation
|
||||
{
|
||||
lib,
|
||||
mkMesonDerivation,
|
||||
|
||||
, doxygen
|
||||
doxygen,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -17,27 +18,28 @@ mkMesonDerivation (finalAttrs: {
|
|||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = let
|
||||
cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
|
||||
in fileset.unions [
|
||||
./.version
|
||||
../../.version
|
||||
./meson.build
|
||||
./doxygen.cfg.in
|
||||
# Source is not compiled, but still must be available for Doxygen
|
||||
# to gather comments.
|
||||
(cpp ../.)
|
||||
];
|
||||
fileset =
|
||||
let
|
||||
cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
|
||||
in
|
||||
fileset.unions [
|
||||
./.version
|
||||
../../.version
|
||||
./meson.build
|
||||
./doxygen.cfg.in
|
||||
# Source is not compiled, but still must be available for Doxygen
|
||||
# to gather comments.
|
||||
(cpp ../.)
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
doxygen
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
preConfigure = ''
|
||||
chmod u+w ./.version
|
||||
echo ${finalAttrs.version} > ./.version
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
mkdir -p ''${!outputDoc}/nix-support
|
||||
|
|
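Several of the reformatted `package.nix` files above select their sources through `lib.fileset`. A small sketch of the combinators involved, using nixpkgs' `lib.fileset` API (the concrete paths are placeholders standing in for this repo's layout):

```nix
let
  pkgs = import <nixpkgs> { };
  fs = pkgs.lib.fileset;
  # Partially applied, as in the doc package above: a filter for C++ sources.
  cpp = fs.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
in
fs.toSource {
  root = ./.;
  # Everything listed explicitly, plus whatever the filter matches under ./src.
  fileset = fs.unions [
    ./meson.build
    (cpp ./src)
  ];
}
```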
|
@ -347,7 +347,7 @@ struct MixEnvironment : virtual Args
|
|||
void setEnviron();
|
||||
};
|
||||
|
||||
void completeFlakeInputPath(
|
||||
void completeFlakeInputAttrPath(
|
||||
AddCompletions & completions,
|
||||
ref<EvalState> evalState,
|
||||
const std::vector<FlakeRef> & flakeRefs,
|
||||
|
|
|
@ -33,7 +33,7 @@ namespace nix {
|
|||
|
||||
namespace fs { using namespace std::filesystem; }
|
||||
|
||||
void completeFlakeInputPath(
|
||||
void completeFlakeInputAttrPath(
|
||||
AddCompletions & completions,
|
||||
ref<EvalState> evalState,
|
||||
const std::vector<FlakeRef> & flakeRefs,
|
||||
|
@ -117,10 +117,10 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
.labels = {"input-path"},
|
||||
.handler = {[&](std::string s) {
|
||||
warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
|
||||
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
|
||||
lockFlags.inputUpdates.insert(flake::parseInputAttrPath(s));
|
||||
}},
|
||||
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
|
||||
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -129,15 +129,15 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
|
||||
.category = category,
|
||||
.labels = {"input-path", "flake-url"},
|
||||
.handler = {[&](std::string inputPath, std::string flakeRef) {
|
||||
.handler = {[&](std::string inputAttrPath, std::string flakeRef) {
|
||||
lockFlags.writeLockFile = false;
|
||||
lockFlags.inputOverrides.insert_or_assign(
|
||||
flake::parseInputPath(inputPath),
|
||||
flake::parseInputAttrPath(inputAttrPath),
|
||||
parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true));
|
||||
}},
|
||||
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
|
||||
if (n == 0) {
|
||||
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
|
||||
} else if (n == 1) {
|
||||
completeFlakeRef(completions, getEvalState()->store, prefix);
|
||||
}
|
||||
|
|
|
@ -1,32 +1,33 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nix-fetchers
|
||||
, nix-expr
|
||||
, nix-flake
|
||||
, nix-main
|
||||
, editline
|
||||
, readline
|
||||
, lowdown
|
||||
, nlohmann_json
|
||||
nix-util,
|
||||
nix-store,
|
||||
nix-fetchers,
|
||||
nix-expr,
|
||||
nix-flake,
|
||||
nix-main,
|
||||
editline,
|
||||
readline,
|
||||
lowdown,
|
||||
nlohmann_json,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
|
||||
# Whether to enable Markdown rendering in the Nix binary.
|
||||
, enableMarkdown ? !stdenv.hostPlatform.isWindows
|
||||
# Whether to enable Markdown rendering in the Nix binary.
|
||||
enableMarkdown ? !stdenv.hostPlatform.isWindows,
|
||||
|
||||
# Which interactive line editor library to use for Nix's repl.
|
||||
#
|
||||
# Currently supported choices are:
|
||||
#
|
||||
# - editline (default)
|
||||
# - readline
|
||||
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
|
||||
# Which interactive line editor library to use for Nix's repl.
|
||||
#
|
||||
# Currently supported choices are:
|
||||
#
|
||||
# - editline (default)
|
||||
# - readline
|
||||
readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline",
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-store-c
|
||||
, nix-expr
|
||||
nix-store-c,
|
||||
nix-expr,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-store-test-support
|
||||
, nix-expr
|
||||
, nix-expr-c
|
||||
nix-store-test-support,
|
||||
nix-expr,
|
||||
nix-expr-c,
|
||||
|
||||
, rapidcheck
|
||||
rapidcheck,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,20 +1,21 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-expr
|
||||
, nix-expr-c
|
||||
, nix-expr-test-support
|
||||
nix-expr,
|
||||
nix-expr-c,
|
||||
nix-expr-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -58,16 +59,22 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -20,77 +20,77 @@ let
|
|||
# Resolve an input spec into a node name. An input spec is
|
||||
# either a node name, or a 'follows' path from the root
|
||||
# node.
|
||||
resolveInput = inputSpec:
|
||||
if builtins.isList inputSpec
|
||||
then getInputByPath lockFile.root inputSpec
|
||||
else inputSpec;
|
||||
resolveInput =
|
||||
inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;
|
||||
|
||||
# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
|
||||
# Follow an input attrpath (e.g. ["dwarffs" "nixpkgs"]) from the
|
||||
# root node, returning the final node.
|
||||
getInputByPath = nodeName: path:
|
||||
if path == []
|
||||
then nodeName
|
||||
getInputByPath =
|
||||
nodeName: path:
|
||||
if path == [ ] then
|
||||
nodeName
|
||||
else
|
||||
getInputByPath
|
||||
# Since this could be a 'follows' input, call resolveInput.
|
||||
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
|
||||
(builtins.tail path);
|
||||
|
||||
allNodes =
|
||||
builtins.mapAttrs
|
||||
(key: node:
|
||||
let
|
||||
allNodes = builtins.mapAttrs (
|
||||
key: node:
|
||||
let
|
||||
|
||||
parentNode = allNodes.${getInputByPath lockFile.root node.parent};
|
||||
parentNode = allNodes.${getInputByPath lockFile.root node.parent};
|
||||
|
||||
sourceInfo =
|
||||
if overrides ? ${key}
|
||||
then
|
||||
overrides.${key}.sourceInfo
|
||||
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/"
|
||||
then
|
||||
parentNode.sourceInfo // {
|
||||
outPath = parentNode.outPath + ("/" + node.locked.path);
|
||||
}
|
||||
else
|
||||
# FIXME: remove obsolete node.info.
|
||||
# Note: lock file entries are always final.
|
||||
fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]);
|
||||
sourceInfo =
|
||||
if overrides ? ${key} then
|
||||
overrides.${key}.sourceInfo
|
||||
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then
|
||||
parentNode.sourceInfo
|
||||
// {
|
||||
outPath = parentNode.outPath + ("/" + node.locked.path);
|
||||
}
|
||||
else
|
||||
# FIXME: remove obsolete node.info.
|
||||
# Note: lock file entries are always final.
|
||||
fetchTreeFinal (node.info or { } // removeAttrs node.locked [ "dir" ]);
|
||||
|
||||
subdir = overrides.${key}.dir or node.locked.dir or "";
|
||||
subdir = overrides.${key}.dir or node.locked.dir or "";
|
||||
|
||||
outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
|
||||
outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
|
||||
|
||||
flake = import (outPath + "/flake.nix");
|
||||
flake = import (outPath + "/flake.nix");
|
||||
|
||||
inputs = builtins.mapAttrs
|
||||
(inputName: inputSpec: allNodes.${resolveInput inputSpec})
|
||||
(node.inputs or {});
|
||||
inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (
|
||||
node.inputs or { }
|
||||
);
|
||||
|
||||
outputs = flake.outputs (inputs // { self = result; });
|
||||
outputs = flake.outputs (inputs // { self = result; });
|
||||
|
||||
result =
|
||||
outputs
|
||||
# We add the sourceInfo attribute for its metadata, as they are
|
||||
# relevant metadata for the flake. However, the outPath of the
|
||||
# sourceInfo does not necessarily match the outPath of the flake,
|
||||
# as the flake may be in a subdirectory of a source.
|
||||
# This is shadowed in the next //
|
||||
// sourceInfo
|
||||
// {
|
||||
# This shadows the sourceInfo.outPath
|
||||
inherit outPath;
|
||||
result =
|
||||
outputs
|
||||
# We add the sourceInfo attribute for its metadata, as they are
|
||||
# relevant metadata for the flake. However, the outPath of the
|
||||
# sourceInfo does not necessarily match the outPath of the flake,
|
||||
# as the flake may be in a subdirectory of a source.
|
||||
# This is shadowed in the next //
|
||||
// sourceInfo
|
||||
// {
|
||||
# This shadows the sourceInfo.outPath
|
||||
inherit outPath;
|
||||
|
||||
inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake";
|
||||
};
|
||||
inherit inputs;
|
||||
inherit outputs;
|
||||
inherit sourceInfo;
|
||||
_type = "flake";
|
||||
};
|
||||
|
||||
in
|
||||
if node.flake or true then
|
||||
assert builtins.isFunction flake.outputs;
|
||||
result
|
||||
else
|
||||
sourceInfo
|
||||
)
|
||||
lockFile.nodes;
|
||||
in
|
||||
if node.flake or true then
|
||||
assert builtins.isFunction flake.outputs;
|
||||
result
|
||||
else
|
||||
sourceInfo
|
||||
) lockFile.nodes;
|
||||
|
||||
in allNodes.${lockFile.root}
|
||||
in
|
||||
allNodes.${lockFile.root}
|
||||
|
|
|
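The `call-flake.nix` hunk above only reflows `resolveInput` and `getInputByPath`; their behaviour is unchanged. A self-contained toy of that resolution walk (the node names and the graph below are made up, not a real lock file):

```nix
let
  # Made-up lock graph: the root has inputs `dwarffs` and `nixpkgs`, and
  # dwarffs' own `nixpkgs` input is a 'follows' path resolved via the root.
  nodes = {
    root.inputs = { dwarffs = "dwarffs_node"; nixpkgs = "nixpkgs_node"; };
    dwarffs_node.inputs = { nixpkgs = [ "nixpkgs" ]; };
    nixpkgs_node.inputs = { };
  };

  # Same shape as the code above, with `lockFile.root` fixed to "root".
  resolveInput =
    inputSpec: if builtins.isList inputSpec then getInputByPath "root" inputSpec else inputSpec;

  getInputByPath =
    nodeName: path:
    if path == [ ] then
      nodeName
    else
      getInputByPath (resolveInput nodes.${nodeName}.inputs.${builtins.head path}) (builtins.tail path);
in
resolveInput nodes.dwarffs_node.inputs.nixpkgs # => "nixpkgs_node"
```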
@ -3114,7 +3114,7 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
|
|||
}
|
||||
}
|
||||
|
||||
if (path.pathExists())
|
||||
if (path.resolveSymlinks().pathExists())
|
||||
return finish(std::move(path));
|
||||
else {
|
||||
logWarning({
|
||||
|
|
|
@ -1,40 +1,72 @@
|
|||
{ system ? "" # obsolete
|
||||
, url
|
||||
, hash ? "" # an SRI hash
|
||||
{
|
||||
system ? "", # obsolete
|
||||
url,
|
||||
hash ? "", # an SRI hash
|
||||
|
||||
# Legacy hash specification
|
||||
, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? ""
|
||||
, outputHash ?
|
||||
if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
|
||||
, outputHashAlgo ?
|
||||
if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
|
||||
# Legacy hash specification
|
||||
md5 ? "",
|
||||
sha1 ? "",
|
||||
sha256 ? "",
|
||||
sha512 ? "",
|
||||
outputHash ?
|
||||
if hash != "" then
|
||||
hash
|
||||
else if sha512 != "" then
|
||||
sha512
|
||||
else if sha1 != "" then
|
||||
sha1
|
||||
else if md5 != "" then
|
||||
md5
|
||||
else
|
||||
sha256,
|
||||
outputHashAlgo ?
|
||||
if hash != "" then
|
||||
""
|
||||
else if sha512 != "" then
|
||||
"sha512"
|
||||
else if sha1 != "" then
|
||||
"sha1"
|
||||
else if md5 != "" then
|
||||
"md5"
|
||||
else
|
||||
"sha256",
|
||||
|
||||
, executable ? false
|
||||
, unpack ? false
|
||||
, name ? baseNameOf (toString url)
|
||||
, impure ? false
|
||||
executable ? false,
|
||||
unpack ? false,
|
||||
name ? baseNameOf (toString url),
|
||||
impure ? false,
|
||||
}:
|
||||
|
||||
derivation ({
|
||||
builder = "builtin:fetchurl";
|
||||
derivation (
|
||||
{
|
||||
builder = "builtin:fetchurl";
|
||||
|
||||
# New-style output content requirements.
|
||||
outputHashMode = if unpack || executable then "recursive" else "flat";
|
||||
# New-style output content requirements.
|
||||
outputHashMode = if unpack || executable then "recursive" else "flat";
|
||||
|
||||
inherit name url executable unpack;
|
||||
inherit
|
||||
name
|
||||
url
|
||||
executable
|
||||
unpack
|
||||
;
|
||||
|
||||
system = "builtin";
|
||||
system = "builtin";
|
||||
|
||||
# No need to double the amount of network traffic
|
||||
preferLocalBuild = true;
|
||||
# No need to double the amount of network traffic
|
||||
preferLocalBuild = true;
|
||||
|
||||
# This attribute does nothing; it's here to avoid changing evaluation results.
|
||||
impureEnvVars = [
|
||||
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
|
||||
];
|
||||
# This attribute does nothing; it's here to avoid changing evaluation results.
|
||||
impureEnvVars = [
|
||||
"http_proxy"
|
||||
"https_proxy"
|
||||
"ftp_proxy"
|
||||
"all_proxy"
|
||||
"no_proxy"
|
||||
];
|
||||
|
||||
# To make "nix-prefetch-url" work.
|
||||
urls = [ url ];
|
||||
} // (if impure
|
||||
then { __impure = true; }
|
||||
else { inherit outputHashAlgo outputHash; }))
|
||||
# To make "nix-prefetch-url" work.
|
||||
urls = [ url ];
|
||||
}
|
||||
// (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
|
||||
)
|
||||
|
|
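The `fetchurl.nix` hunk above only re-indents the legacy hash fallbacks; the selection order stays `hash`, then `sha512`, `sha1`, `md5`, and finally `sha256`. The same default expressed as a standalone function, for a quick check:

```nix
let
  pickHash =
    {
      hash ? "",
      md5 ? "",
      sha1 ? "",
      sha256 ? "",
      sha512 ? "",
    }:
    if hash != "" then
      hash
    else if sha512 != "" then
      sha512
    else if sha1 != "" then
      sha1
    else if md5 != "" then
      md5
    else
      sha256;
in
pickHash { sha1 = "legacy"; sha256 = "also-set"; } # => "legacy"
```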
|
@ -1,21 +1,27 @@
|
|||
attrs @ { drvPath, outputs, name, ... }:
|
||||
attrs@{
|
||||
drvPath,
|
||||
outputs,
|
||||
name,
|
||||
...
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
commonAttrs = (builtins.listToAttrs outputsList) //
|
||||
{ all = map (x: x.value) outputsList;
|
||||
inherit drvPath name;
|
||||
type = "derivation";
|
||||
};
|
||||
commonAttrs = (builtins.listToAttrs outputsList) // {
|
||||
all = map (x: x.value) outputsList;
|
||||
inherit drvPath name;
|
||||
type = "derivation";
|
||||
};
|
||||
|
||||
outputToAttrListElement = outputName:
|
||||
{ name = outputName;
|
||||
value = commonAttrs // {
|
||||
outPath = builtins.getAttr outputName attrs;
|
||||
inherit outputName;
|
||||
};
|
||||
outputToAttrListElement = outputName: {
|
||||
name = outputName;
|
||||
value = commonAttrs // {
|
||||
outPath = builtins.getAttr outputName attrs;
|
||||
inherit outputName;
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
outputsList = map outputToAttrListElement outputs;
|
||||
|
||||
in (builtins.head outputsList).value
|
||||
|
||||
in
|
||||
(builtins.head outputsList).value
|
||||
|
|
|
@ -1,33 +1,34 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, bison
|
||||
, flex
|
||||
, cmake # for resolving toml11 dep
|
||||
bison,
|
||||
flex,
|
||||
cmake, # for resolving toml11 dep
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nix-fetchers
|
||||
, boost
|
||||
, boehmgc
|
||||
, nlohmann_json
|
||||
, toml11
|
||||
nix-util,
|
||||
nix-store,
|
||||
nix-fetchers,
|
||||
boost,
|
||||
boehmgc,
|
||||
nlohmann_json,
|
||||
toml11,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
|
||||
# Whether to use garbage collection for the Nix language evaluator.
|
||||
#
|
||||
# If it is disabled, we just leak memory, but this is not as bad as it
|
||||
# sounds so long as evaluation just takes places within short-lived
|
||||
# processes. (When the process exits, the memory is reclaimed; it is
|
||||
# only leaked *within* the process.)
|
||||
#
|
||||
# Temporarily disabled on Windows because the `GC_throw_bad_alloc`
|
||||
# symbol is missing during linking.
|
||||
, enableGC ? !stdenv.hostPlatform.isWindows
|
||||
# Whether to use garbage collection for the Nix language evaluator.
|
||||
#
|
||||
# If it is disabled, we just leak memory, but this is not as bad as it
|
||||
# sounds so long as evaluation just takes places within short-lived
|
||||
# processes. (When the process exits, the memory is reclaimed; it is
|
||||
# only leaked *within* the process.)
|
||||
#
|
||||
# Temporarily disabled on Windows because the `GC_throw_bad_alloc`
|
||||
# symbol is missing during linking.
|
||||
enableGC ? !stdenv.hostPlatform.isWindows,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -51,10 +52,7 @@ mkMesonLibrary (finalAttrs: {
|
|||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
./lexer.l
|
||||
./parser.y
|
||||
(fileset.difference
|
||||
(fileset.fileFilter (file: file.hasExt "nix") ./.)
|
||||
./package.nix
|
||||
)
|
||||
(fileset.difference (fileset.fileFilter (file: file.hasExt "nix") ./.) ./package.nix)
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
|
|
@ -26,27 +26,34 @@
|
|||
Note that `derivation` is very bare-bones, and provides almost no commands during the build.
|
||||
Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment.
|
||||
*/
|
||||
drvAttrs @ { outputs ? [ "out" ], ... }:
|
||||
drvAttrs@{
|
||||
outputs ? [ "out" ],
|
||||
...
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
strict = derivationStrict drvAttrs;
|
||||
|
||||
commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
|
||||
{ all = map (x: x.value) outputsList;
|
||||
commonAttrs =
|
||||
drvAttrs
|
||||
// (builtins.listToAttrs outputsList)
|
||||
// {
|
||||
all = map (x: x.value) outputsList;
|
||||
inherit drvAttrs;
|
||||
};
|
||||
|
||||
outputToAttrListElement = outputName:
|
||||
{ name = outputName;
|
||||
value = commonAttrs // {
|
||||
outPath = builtins.getAttr outputName strict;
|
||||
drvPath = strict.drvPath;
|
||||
type = "derivation";
|
||||
inherit outputName;
|
||||
};
|
||||
outputToAttrListElement = outputName: {
|
||||
name = outputName;
|
||||
value = commonAttrs // {
|
||||
outPath = builtins.getAttr outputName strict;
|
||||
drvPath = strict.drvPath;
|
||||
type = "derivation";
|
||||
inherit outputName;
|
||||
};
|
||||
};
|
||||
|
||||
outputsList = map outputToAttrListElement outputs;
|
||||
|
||||
in (builtins.head outputsList).value
|
||||
in
|
||||
(builtins.head outputsList).value
|
||||
|
|
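Both derivation helpers above (the imported-derivation one and `derivation.nix` itself) build their result from the same mutually recursive `commonAttrs`/`outputsList` pair. A toy version with fake paths, only to show how the per-output attribute sets reference each other (nothing here touches the store):

```nix
let
  attrs = {
    drvPath = "/fake/demo.drv";
    name = "demo";
    outputs = [ "out" "dev" ];
    out = "/fake/demo-out";
    dev = "/fake/demo-dev";
  };

  commonAttrs = (builtins.listToAttrs outputsList) // {
    all = map (x: x.value) outputsList;
    inherit (attrs) drvPath name;
    type = "derivation";
  };

  outputToAttrListElement = outputName: {
    name = outputName;
    value = commonAttrs // {
      outPath = builtins.getAttr outputName attrs;
      inherit outputName;
    };
  };

  outputsList = map outputToAttrListElement attrs.outputs;
in
# Each output's value can reach its siblings through commonAttrs:
(builtins.head outputsList).value.dev.outPath # => "/fake/demo-dev"
```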
|
@ -1,19 +1,20 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-fetchers
|
||||
, nix-store-test-support
|
||||
nix-fetchers,
|
||||
nix-store-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -56,16 +57,22 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -207,7 +207,8 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil
|
|||
|
||||
} // extern "C"
|
||||
|
||||
static void initRepoAtomically(std::filesystem::path &path, bool bare) {
|
||||
static void initRepoAtomically(std::filesystem::path &path, bool bare)
|
||||
{
|
||||
if (pathExists(path.string())) return;
|
||||
|
||||
Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() }));
|
||||
|
@ -545,13 +546,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
// then use code that was removed in this commit (see blame)
|
||||
|
||||
auto dir = this->path;
|
||||
Strings gitArgs;
|
||||
if (shallow) {
|
||||
gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
|
||||
}
|
||||
else {
|
||||
gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec };
|
||||
}
|
||||
Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"};
|
||||
if (shallow)
|
||||
append(gitArgs, {"--depth", "1"});
|
||||
append(gitArgs, {std::string("--"), url, refspec});
|
||||
|
||||
runProgram(RunOptions {
|
||||
.program = "git",
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nlohmann_json
|
||||
, libgit2
|
||||
, curl
|
||||
nix-util,
|
||||
nix-store,
|
||||
nlohmann_json,
|
||||
libgit2,
|
||||
curl,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-store-c
|
||||
, nix-expr-c
|
||||
, nix-flake
|
||||
nix-store-c,
|
||||
nix-expr-c,
|
||||
nix-flake,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,20 +1,21 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-flake
|
||||
, nix-flake-c
|
||||
, nix-expr-test-support
|
||||
nix-flake,
|
||||
nix-flake-c,
|
||||
nix-expr-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -58,17 +59,23 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
export NIX_CONFIG="extra-experimental-features = flakes"
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
export NIX_CONFIG="extra-experimental-features = flakes"
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir);
|
||||
|
||||
static FlakeInput parseFlakeInput(
|
||||
|
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(
|
|||
std::string_view inputName,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir)
|
||||
{
|
||||
expectType(state, nAttrs, *value, pos);
|
||||
|
@ -137,7 +137,7 @@ static FlakeInput parseFlakeInput(
|
|||
else if (attr.value->type() == nPath) {
|
||||
auto path = attr.value->path();
|
||||
if (path.accessor != flakeDir.accessor)
|
||||
throw Error("input path '%s' at %s must be in the same source tree as %s",
|
||||
throw Error("input attribute path '%s' at %s must be in the same source tree as %s",
|
||||
path, state.positions[attr.pos], flakeDir);
|
||||
url = "path:" + flakeDir.path.makeRelative(path.path);
|
||||
}
|
||||
|
@ -149,11 +149,11 @@ static FlakeInput parseFlakeInput(
|
|||
expectType(state, nBool, *attr.value, attr.pos);
|
||||
input.isFlake = attr.value->boolean();
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath, flakeDir);
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
auto follows(parseInputPath(attr.value->c_str()));
|
||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||
auto follows(parseInputAttrPath(attr.value->c_str()));
|
||||
follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end());
|
||||
input.follows = follows;
|
||||
} else {
|
||||
// Allow selecting a subset of enum values
|
||||
|
@ -220,7 +220,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir)
|
||||
{
|
||||
std::map<FlakeId, FlakeInput> inputs;
|
||||
|
@ -233,7 +233,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
state.symbols[inputAttr.name],
|
||||
inputAttr.value,
|
||||
inputAttr.pos,
|
||||
lockRootPath,
|
||||
lockRootAttrPath,
|
||||
flakeDir));
|
||||
}
|
||||
|
||||
|
@ -246,7 +246,7 @@ static Flake readFlake(
|
|||
const FlakeRef & resolvedRef,
|
||||
const FlakeRef & lockedRef,
|
||||
const SourcePath & rootDir,
|
||||
const InputPath & lockRootPath)
|
||||
const InputAttrPath & lockRootAttrPath)
|
||||
{
|
||||
auto flakeDir = rootDir / CanonPath(resolvedRef.subdir);
|
||||
auto flakePath = flakeDir / "flake.nix";
|
||||
|
@ -270,7 +270,7 @@ static Flake readFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs()->get(sInputs))
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath, flakeDir);
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir);
|
||||
|
||||
auto sOutputs = state.symbols.create("outputs");
|
||||
|
||||
|
@ -347,12 +347,12 @@ static Flake getFlake(
|
|||
const FlakeRef & originalRef,
|
||||
bool useRegistries,
|
||||
FlakeCache & flakeCache,
|
||||
const InputPath & lockRootPath)
|
||||
const InputAttrPath & lockRootAttrPath)
|
||||
{
|
||||
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, useRegistries, flakeCache);
|
||||
|
||||
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
|
||||
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootAttrPath);
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
|
||||
|
@ -407,12 +407,12 @@ LockedFlake lockFlake(
|
|||
{
|
||||
FlakeInput input;
|
||||
SourcePath sourcePath;
|
||||
std::optional<InputPath> parentInputPath; // FIXME: rename to inputPathPrefix?
|
||||
std::optional<InputAttrPath> parentInputAttrPath; // FIXME: rename to inputAttrPathPrefix?
|
||||
};
|
||||
|
||||
std::map<InputPath, OverrideTarget> overrides;
|
||||
std::set<InputPath> explicitCliOverrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
std::map<InputAttrPath, OverrideTarget> overrides;
|
||||
std::set<InputAttrPath> explicitCliOverrides;
|
||||
std::set<InputAttrPath> overridesUsed, updatesUsed;
|
||||
std::map<ref<Node>, SourcePath> nodePaths;
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides) {
|
||||
|
@ -436,9 +436,9 @@ LockedFlake lockFlake(
|
|||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
ref<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const InputPath & followsPrefix,
|
||||
const InputAttrPath & followsPrefix,
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)>
|
||||
computeLocks;
|
||||
|
@ -450,7 +450,7 @@ LockedFlake lockFlake(
|
|||
/* The node whose locks are to be updated.*/
|
||||
ref<Node> node,
|
||||
/* The path to this node in the lock file graph. */
|
||||
const InputPath & inputPathPrefix,
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
/* The old node, if any, from which locks can be
|
||||
copied. */
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
|
@ -458,59 +458,59 @@ LockedFlake lockFlake(
|
|||
interpreted. When a node is initially locked, it's
|
||||
relative to the node's flake; when it's already locked,
|
||||
it's relative to the root of the lock file. */
|
||||
const InputPath & followsPrefix,
|
||||
const InputAttrPath & followsPrefix,
|
||||
/* The source path of this node's flake. */
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix));
|
||||
|
||||
/* Get the overrides (i.e. attributes of the form
|
||||
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||
for (auto & [id, input] : flakeInputs) {
|
||||
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
inputPath.push_back(idOverride);
|
||||
overrides.emplace(inputPath,
|
||||
auto inputAttrPath(inputAttrPathPrefix);
|
||||
inputAttrPath.push_back(id);
|
||||
inputAttrPath.push_back(idOverride);
|
||||
overrides.emplace(inputAttrPath,
|
||||
OverrideTarget {
|
||||
.input = inputOverride,
|
||||
.sourcePath = sourcePath,
|
||||
.parentInputPath = inputPathPrefix
|
||||
.parentInputAttrPath = inputAttrPathPrefix
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/* Check whether this input has overrides for a
|
||||
non-existent input. */
|
||||
for (auto [inputPath, inputOverride] : overrides) {
|
||||
auto inputPath2(inputPath);
|
||||
auto follow = inputPath2.back();
|
||||
inputPath2.pop_back();
|
||||
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
|
||||
for (auto [inputAttrPath, inputOverride] : overrides) {
|
||||
auto inputAttrPath2(inputAttrPath);
|
||||
auto follow = inputAttrPath2.back();
|
||||
inputAttrPath2.pop_back();
|
||||
if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow))
|
||||
warn(
|
||||
"input '%s' has an override for a non-existent input '%s'",
|
||||
printInputPath(inputPathPrefix), follow);
|
||||
printInputAttrPath(inputAttrPathPrefix), follow);
|
||||
}
|
||||
|
||||
/* Go over the flake inputs, resolve/fetch them if
|
||||
necessary (i.e. if they're new or the flakeref changed
|
||||
from what's in the lock file). */
|
||||
for (auto & [id, input2] : flakeInputs) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
auto inputPathS = printInputPath(inputPath);
|
||||
debug("computing input '%s'", inputPathS);
|
||||
auto inputAttrPath(inputAttrPathPrefix);
|
||||
inputAttrPath.push_back(id);
|
||||
auto inputAttrPathS = printInputAttrPath(inputAttrPath);
|
||||
debug("computing input '%s'", inputAttrPathS);
|
||||
|
||||
try {
|
||||
|
||||
/* Do we have an override for this input from one of the
|
||||
ancestors? */
|
||||
auto i = overrides.find(inputPath);
|
||||
auto i = overrides.find(inputAttrPath);
|
||||
bool hasOverride = i != overrides.end();
|
||||
bool hasCliOverride = explicitCliOverrides.contains(inputPath);
|
||||
bool hasCliOverride = explicitCliOverrides.contains(inputAttrPath);
|
||||
if (hasOverride)
|
||||
overridesUsed.insert(inputPath);
|
||||
overridesUsed.insert(inputAttrPath);
|
||||
auto input = hasOverride ? i->second.input : input2;
|
||||
|
||||
/* Resolve relative 'path:' inputs relative to
|
||||
|
@ -525,11 +525,11 @@ LockedFlake lockFlake(
|
|||
/* Resolve 'follows' later (since it may refer to an input
|
||||
path we haven't processed yet. */
|
||||
if (input.follows) {
|
||||
InputPath target;
|
||||
InputAttrPath target;
|
||||
|
||||
target.insert(target.end(), input.follows->begin(), input.follows->end());
|
||||
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
debug("input '%s' follows '%s'", inputAttrPathS, printInputAttrPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
continue;
|
||||
}
|
||||
|
@ -538,7 +538,7 @@ LockedFlake lockFlake(
|
|||
|
||||
auto overridenParentPath =
|
||||
input.ref->input.isRelative()
|
||||
? std::optional<InputPath>(hasOverride ? i->second.parentInputPath : inputPathPrefix)
|
||||
? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
|
||||
: std::nullopt;
|
||||
|
||||
auto resolveRelativePath = [&]() -> std::optional<SourcePath>
|
||||
|
@ -557,9 +557,9 @@ LockedFlake lockFlake(
|
|||
auto getInputFlake = [&]()
|
||||
{
|
||||
if (auto resolvedPath = resolveRelativePath()) {
|
||||
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath);
|
||||
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputAttrPath);
|
||||
} else {
|
||||
return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath);
|
||||
return getFlake(state, *input.ref, useRegistries, flakeCache, inputAttrPath);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -567,19 +567,19 @@ LockedFlake lockFlake(
|
|||
And the input is not in updateInputs? */
|
||||
std::shared_ptr<LockedNode> oldLock;
|
||||
|
||||
updatesUsed.insert(inputPath);
|
||||
updatesUsed.insert(inputAttrPath);
|
||||
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputAttrPath))
|
||||
if (auto oldLock2 = get(oldNode->inputs, id))
|
||||
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||
oldLock = *oldLock3;
|
||||
|
||||
if (oldLock
|
||||
&& oldLock->originalRef == *input.ref
|
||||
&& oldLock->parentPath == overridenParentPath
|
||||
&& oldLock->parentInputAttrPath == overridenParentPath
|
||||
&& !hasCliOverride)
|
||||
{
|
||||
debug("keeping existing input '%s'", inputPathS);
|
||||
debug("keeping existing input '%s'", inputAttrPathS);
|
||||
|
||||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
|
@ -588,18 +588,18 @@ LockedFlake lockFlake(
|
|||
oldLock->lockedRef,
|
||||
oldLock->originalRef,
|
||||
oldLock->isFlake,
|
||||
oldLock->parentPath);
|
||||
oldLock->parentInputAttrPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
/* If we have this input in updateInputs, then we
|
||||
must fetch the flake to update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath);
|
||||
|
||||
auto mustRefetch =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputPath.size()
|
||||
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||
&& lb->size() > inputAttrPath.size()
|
||||
&& std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
|
||||
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
|
@ -618,7 +618,7 @@ LockedFlake lockFlake(
|
|||
if (!trustLock) {
|
||||
// It is possible that the flake has changed,
|
||||
// so we must confirm all the follows that are in the lock file are also in the flake.
|
||||
auto overridePath(inputPath);
|
||||
auto overridePath(inputAttrPath);
|
||||
overridePath.push_back(i.first);
|
||||
auto o = overrides.find(overridePath);
|
||||
// If the override disappeared, we have to refetch the flake,
|
||||
|
@ -642,21 +642,21 @@ LockedFlake lockFlake(
|
|||
if (mustRefetch) {
|
||||
auto inputFlake = getInputFlake();
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix,
|
||||
computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix,
|
||||
inputFlake.path, false);
|
||||
} else {
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, true);
|
||||
computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
debug("creating new input '%s'", inputAttrPathS);
|
||||
|
||||
if (!lockFlags.allowUnlocked
|
||||
&& !input.ref->input.isLocked()
|
||||
&& !input.ref->input.isRelative())
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
|
@ -665,7 +665,7 @@ LockedFlake lockFlake(
|
|||
nuked the next time we update the lock
|
||||
file. That is, overrides are sticky unless you
|
||||
use --no-write-lock-file. */
|
||||
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref;
|
||||
auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref;
|
||||
|
||||
if (input.isFlake) {
|
||||
auto inputFlake = getInputFlake();
|
||||
|
@ -691,11 +691,11 @@ LockedFlake lockFlake(
|
|||
own lock file. */
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
inputFlake.inputs, childNode, inputAttrPath,
|
||||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
|
||||
oldLock ? followsPrefix : inputPath,
|
||||
oldLock ? followsPrefix : inputAttrPath,
|
||||
inputFlake.path,
|
||||
false);
|
||||
}
|
||||
|
@ -722,7 +722,7 @@ LockedFlake lockFlake(
|
|||
}
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the flake input '%s'", inputPathS);
|
||||
e.addTrace({}, "while updating the flake input '%s'", inputAttrPathS);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -742,11 +742,11 @@ LockedFlake lockFlake(
|
|||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
warn("the flag '--override-input %s %s' does not match any input",
|
||||
printInputPath(i.first), i.second);
|
||||
printInputAttrPath(i.first), i.second);
|
||||
|
||||
for (auto & i : lockFlags.inputUpdates)
|
||||
if (!updatesUsed.count(i))
|
||||
warn("'%s' does not match any input of this flake", printInputPath(i));
|
||||
warn("'%s' does not match any input of this flake", printInputAttrPath(i));
|
||||
|
||||
/* Check 'follows' inputs. */
|
||||
newLockFile.check();
|
||||
|
|
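The rename from `InputPath` to `InputAttrPath` throughout `flake.cc` above is purely internal; the user-facing syntax for the attribute paths it describes does not change. For reference, a hedged `flake.nix` sketch of the `dwarffs/nixpkgs` case mentioned in the `--override-input` help text (the URLs are only illustrative):

```nix
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  inputs.dwarffs.url = "github:edolstra/dwarffs";

  # This is the input attribute path "dwarffs/nixpkgs": dwarffs' own nixpkgs
  # input is declared to follow the top-level one, and the same path is what
  # `--override-input dwarffs/nixpkgs <flake-url>` targets on the CLI.
  inputs.dwarffs.inputs.nixpkgs.follows = "nixpkgs";

  outputs = { self, nixpkgs, dwarffs }: { };
}
```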
|
@ -57,7 +57,7 @@ struct FlakeInput
|
|||
* false = (fetched) static source path
|
||||
*/
|
||||
bool isFlake = true;
|
||||
std::optional<InputPath> follows;
|
||||
std::optional<InputAttrPath> follows;
|
||||
FlakeInputs overrides;
|
||||
};
|
||||
|
||||
|
@ -201,13 +201,13 @@ struct LockFlags
|
|||
/**
|
||||
* Flake inputs to be overridden.
|
||||
*/
|
||||
std::map<InputPath, FlakeRef> inputOverrides;
|
||||
std::map<InputAttrPath, FlakeRef> inputOverrides;
|
||||
|
||||
/**
|
||||
* Flake inputs to be updated. This means that any existing lock
|
||||
* for those inputs will be ignored.
|
||||
*/
|
||||
std::set<InputPath> inputUpdates;
|
||||
std::set<InputAttrPath> inputUpdates;
|
||||
};
|
||||
|
||||
LockedFlake lockFlake(
|
||||
|
|
|
@ -43,7 +43,7 @@ LockedNode::LockedNode(
|
|||
: lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info"
|
||||
, originalRef(getFlakeRef(fetchSettings, json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
, parentPath(json.find("parent") != json.end() ? (std::optional<InputPath>) json["parent"] : std::nullopt)
|
||||
, parentInputAttrPath(json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
|
||||
{
|
||||
if (!lockedRef.input.isConsideredLocked(fetchSettings) && !lockedRef.input.isRelative())
|
||||
throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
|
||||
|
@ -59,7 +59,7 @@ StorePath LockedNode::computeStorePath(Store & store) const
|
|||
return lockedRef.input.computeStorePath(store);
|
||||
}
|
||||
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited)
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
|
||||
{
|
||||
auto pos = root;
|
||||
|
||||
|
@ -67,8 +67,8 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
|
|||
|
||||
if (found != visited.end()) {
|
||||
std::vector<std::string> cycle;
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
cycle.push_back(printInputPath(path));
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputAttrPath);
cycle.push_back(printInputAttrPath(path));
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
}
visited.push_back(path);

@@ -90,9 +90,9 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
return pos;
}

std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
std::shared_ptr<Node> LockFile::findInput(const InputAttrPath & path)
{
std::vector<InputPath> visited;
std::vector<InputAttrPath> visited;
return doFind(root, path, visited);
}

@@ -115,7 +115,7 @@ LockFile::LockFile(
if (jsonNode.find("inputs") == jsonNode.end()) return;
for (auto & i : jsonNode["inputs"].items()) {
if (i.value().is_array()) { // FIXME: remove, obsolete
InputPath path;
InputAttrPath path;
for (auto & j : i.value())
path.push_back(j);
node.inputs.insert_or_assign(i.key(), path);

@@ -203,8 +203,8 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
n["locked"].erase("__final");
if (!lockedNode->isFlake)
n["flake"] = false;
if (lockedNode->parentPath)
n["parent"] = *lockedNode->parentPath;
if (lockedNode->parentInputAttrPath)
n["parent"] = *lockedNode->parentInputAttrPath;
}

nodes[key] = std::move(n);

@@ -267,36 +267,36 @@ bool LockFile::operator ==(const LockFile & other) const
return toJSON().first == other.toJSON().first;
}

InputPath parseInputPath(std::string_view s)
InputAttrPath parseInputAttrPath(std::string_view s)
{
InputPath path;
InputAttrPath path;

for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
if (!std::regex_match(elem, flakeIdRegex))
throw UsageError("invalid flake input path element '%s'", elem);
throw UsageError("invalid flake input attribute path element '%s'", elem);
path.push_back(elem);
}

return path;
}

std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
{
std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res;
std::map<InputAttrPath, Node::Edge> res;

std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;

recurse = [&](const InputPath & prefix, ref<Node> node)
recurse = [&](const InputAttrPath & prefix, ref<Node> node)
{
if (!done.insert(node).second) return;

for (auto &[id, input] : node->inputs) {
auto inputPath(prefix);
inputPath.push_back(id);
res.emplace(inputPath, input);
auto inputAttrPath(prefix);
inputAttrPath.push_back(id);
res.emplace(inputAttrPath, input);
if (auto child = std::get_if<0>(&input))
recurse(inputPath, *child);
recurse(inputAttrPath, *child);
}
};

@@ -320,7 +320,7 @@ std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
if (auto node = std::get_if<0>(&edge))
stream << describe((*node)->lockedRef);
else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows));
stream << fmt("follows '%s'", printInputAttrPath(*follows));
return stream;
}

@@ -347,15 +347,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
printInputPath(j->first), j->second);
printInputAttrPath(j->first), j->second);
++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first));
res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first));
++i;
} else {
if (!equals(i->second, j->second)) {
res += fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
printInputPath(i->first),
printInputAttrPath(i->first),
i->second,
j->second);
}

@@ -371,19 +371,19 @@ void LockFile::check()
{
auto inputs = getAllInputs();

for (auto & [inputPath, input] : inputs) {
for (auto & [inputAttrPath, input] : inputs) {
if (auto follows = std::get_if<1>(&input)) {
if (!follows->empty() && !findInput(*follows))
throw Error("input '%s' follows a non-existent input '%s'",
printInputPath(inputPath),
printInputPath(*follows));
printInputAttrPath(inputAttrPath),
printInputAttrPath(*follows));
}
}
}

void check();

std::string printInputPath(const InputPath & path)
std::string printInputAttrPath(const InputAttrPath & path)
{
return concatStringsSep("/", path);
}
|
|
|
@@ -12,7 +12,7 @@ class StorePath;

namespace nix::flake {

typedef std::vector<FlakeId> InputPath;
typedef std::vector<FlakeId> InputAttrPath;

struct LockedNode;

@@ -23,7 +23,7 @@ struct LockedNode;
*/
struct Node : std::enable_shared_from_this<Node>
{
typedef std::variant<ref<LockedNode>, InputPath> Edge;
typedef std::variant<ref<LockedNode>, InputAttrPath> Edge;

std::map<FlakeId, Edge> inputs;

@@ -40,17 +40,17 @@ struct LockedNode : Node

/* The node relative to which relative source paths
(e.g. 'path:../foo') are interpreted. */
std::optional<InputPath> parentPath;
std::optional<InputAttrPath> parentInputAttrPath;

LockedNode(
const FlakeRef & lockedRef,
const FlakeRef & originalRef,
bool isFlake = true,
std::optional<InputPath> parentPath = {})
: lockedRef(lockedRef)
, originalRef(originalRef)
std::optional<InputAttrPath> parentInputAttrPath = {})
: lockedRef(std::move(lockedRef))
, originalRef(std::move(originalRef))
, isFlake(isFlake)
, parentPath(parentPath)
, parentInputAttrPath(std::move(parentInputAttrPath))
{ }

LockedNode(

@@ -83,9 +83,9 @@ struct LockFile

bool operator ==(const LockFile & other) const;

std::shared_ptr<Node> findInput(const InputPath & path);
std::shared_ptr<Node> findInput(const InputAttrPath & path);

std::map<InputPath, Node::Edge> getAllInputs() const;
std::map<InputAttrPath, Node::Edge> getAllInputs() const;

static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);

@@ -97,8 +97,8 @@ struct LockFile

std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);

InputPath parseInputPath(std::string_view s);
InputAttrPath parseInputAttrPath(std::string_view s);

std::string printInputPath(const InputPath & path);
std::string printInputAttrPath(const InputAttrPath & path);

}
|
|
|
@ -1,15 +1,16 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nix-fetchers
|
||||
, nix-expr
|
||||
, nlohmann_json
|
||||
nix-util,
|
||||
nix-store,
|
||||
nix-fetchers,
|
||||
nix-expr,
|
||||
nlohmann_json,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util-c
|
||||
, nix-store
|
||||
, nix-store-c
|
||||
, nix-main
|
||||
nix-util-c,
|
||||
nix-store,
|
||||
nix-store-c,
|
||||
nix-main,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, openssl
|
||||
openssl,
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
nix-util,
|
||||
nix-store,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util-c
|
||||
, nix-store
|
||||
nix-util-c,
|
||||
nix-store,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,15 +1,16 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util-test-support
|
||||
, nix-store
|
||||
, nix-store-c
|
||||
nix-util-test-support,
|
||||
nix-store,
|
||||
nix-store-c,
|
||||
|
||||
, rapidcheck
|
||||
rapidcheck,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,21 +1,22 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-store
|
||||
, nix-store-c
|
||||
, nix-store-test-support
|
||||
, sqlite
|
||||
nix-store,
|
||||
nix-store-c,
|
||||
nix-store-test-support,
|
||||
sqlite,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, filesetToSource
|
||||
version,
|
||||
filesetToSource,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -64,26 +65,33 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = let
|
||||
# Some data is shared with the functional tests: they create it,
|
||||
# we consume it.
|
||||
data = filesetToSource {
|
||||
root = ../..;
|
||||
fileset = lib.fileset.unions [
|
||||
./data
|
||||
../../tests/functional/derivation
|
||||
];
|
||||
};
|
||||
in runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
let
|
||||
# Some data is shared with the functional tests: they create it,
|
||||
# we consume it.
|
||||
data = filesetToSource {
|
||||
root = ../..;
|
||||
fileset = lib.fileset.unions [
|
||||
./data
|
||||
../../tests/functional/derivation
|
||||
];
|
||||
};
|
||||
in
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -1,25 +1,26 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, unixtools
|
||||
, darwin
|
||||
unixtools,
|
||||
darwin,
|
||||
|
||||
, nix-util
|
||||
, boost
|
||||
, curl
|
||||
, aws-sdk-cpp
|
||||
, libseccomp
|
||||
, nlohmann_json
|
||||
, sqlite
|
||||
nix-util,
|
||||
boost,
|
||||
curl,
|
||||
aws-sdk-cpp,
|
||||
libseccomp,
|
||||
nlohmann_json,
|
||||
sqlite,
|
||||
|
||||
, busybox-sandbox-shell ? null
|
||||
busybox-sandbox-shell ? null,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
|
||||
, embeddedSandboxShell ? stdenv.hostPlatform.isStatic
|
||||
embeddedSandboxShell ? stdenv.hostPlatform.isStatic,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -48,19 +49,20 @@ mkMesonLibrary (finalAttrs: {
|
|||
(fileset.fileFilter (file: file.hasExt "sql") ./.)
|
||||
];
|
||||
|
||||
nativeBuildInputs =
|
||||
lib.optional embeddedSandboxShell unixtools.hexdump;
|
||||
nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;
|
||||
|
||||
buildInputs = [
|
||||
boost
|
||||
curl
|
||||
sqlite
|
||||
] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
|
||||
buildInputs =
|
||||
[
|
||||
boost
|
||||
curl
|
||||
sqlite
|
||||
]
|
||||
++ lib.optional stdenv.hostPlatform.isLinux libseccomp
|
||||
# There have been issues building these dependencies
|
||||
++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
|
||||
++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
|
||||
aws-sdk-cpp
|
||||
;
|
||||
++ lib.optional (
|
||||
stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)
|
||||
) aws-sdk-cpp;
|
||||
|
||||
propagatedBuildInputs = [
|
||||
nix-util
|
||||
|
@ -75,12 +77,14 @@ mkMesonLibrary (finalAttrs: {
|
|||
echo ${version} > ../../.version
|
||||
'';
|
||||
|
||||
mesonFlags = [
|
||||
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
|
||||
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
|
||||
] ++ lib.optionals stdenv.hostPlatform.isLinux [
|
||||
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
|
||||
];
|
||||
mesonFlags =
|
||||
[
|
||||
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
|
||||
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
|
||||
]
|
||||
++ lib.optionals stdenv.hostPlatform.isLinux [
|
||||
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
|
||||
];
|
||||
|
||||
env = {
|
||||
# Needed for Meson to find Boost.
|
||||
|
|
|
@@ -539,11 +539,21 @@ void RemoteStore::addMultipleToStore(
RepairFlag repair,
CheckSigsFlag checkSigs)
{
// `addMultipleToStore` is single threaded
size_t bytesExpected = 0;
for (auto & [pathInfo, _] : pathsToCopy) {
bytesExpected += pathInfo.narSize;
}
act.setExpected(actCopyPath, bytesExpected);

auto source = sinkToSource([&](Sink & sink) {
sink << pathsToCopy.size();
size_t nrTotal = pathsToCopy.size();
sink << nrTotal;
// Reverse, so we can release memory at the original start
std::reverse(pathsToCopy.begin(), pathsToCopy.end());
while (!pathsToCopy.empty()) {
act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0));

auto & [pathInfo, pathSource] = pathsToCopy.back();
WorkerProto::Serialise<ValidPathInfo>::write(*this,
WorkerProto::WriteConn {
|
|
@@ -242,8 +242,8 @@ void Store::addMultipleToStore(
storePathsToAdd.insert(thingToAdd.first.path);
}

auto showProgress = [&]() {
act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed);
auto showProgress = [&, nrTotal = pathsToCopy.size()]() {
act.progress(nrDone, nrTotal, nrRunning, nrFailed);
};

processGraph<StorePath>(

@@ -1104,9 +1104,6 @@ std::map<StorePath, StorePath> copyPaths(
return storePathForDst;
};

// total is accessed by each copy, which are each handled in separate threads
std::atomic<uint64_t> total = 0;

for (auto & missingPath : sortedMissing) {
auto info = srcStore.queryPathInfo(missingPath);

@@ -1116,9 +1113,10 @@ std::map<StorePath, StorePath> copyPaths(
ValidPathInfo infoForDst = *info;
infoForDst.path = storePathForDst;

auto source = sinkToSource([&](Sink & sink) {
auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) {
// We can reasonably assume that the copy will happen whenever we
// read the path, so log something about that at that point
uint64_t total = 0;
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
auto storePathS = srcStore.printStorePath(missingPath);

@@ -1129,13 +1127,13 @@ std::map<StorePath, StorePath> copyPaths(

LambdaSink progressSink([&](std::string_view data) {
total += data.size();
act.progress(total, info->narSize);
act.progress(total, narSize);
});
TeeSink tee { sink, progressSink };

srcStore.narFromPath(missingPath, tee);
});
pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
pathsToCopy.emplace_back(std::move(infoForDst), std::move(source));
}

dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs);
|
|
@@ -2565,7 +2565,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
case FileIngestionMethod::Git: {
return git::dumpHash(
outputHash.hashAlgo,
{getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash;
{getFSSourceAccessor(), CanonPath(actualPath)}).hash;
}
}
assert(false);

@@ -2657,10 +2657,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
wanted.to_string(HashFormat::SRI, true),
got.to_string(HashFormat::SRI, true)));
}
if (!newInfo0.references.empty())
if (!newInfo0.references.empty()) {
auto numViolations = newInfo.references.size();
delayedException = std::make_exception_ptr(
BuildError("illegal path references in fixed-output derivation '%s'",
worker.store.printStorePath(drvPath)));
BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'",
worker.store.printStorePath(drvPath),
numViolations,
worker.store.printStorePath(*newInfo.references.begin())));
}

return newInfo0;
},
|
|
@ -1,11 +1,12 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
nix-util,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
{ lib
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
, nix-util-c
|
||||
nix-util,
|
||||
nix-util-c,
|
||||
|
||||
, rapidcheck
|
||||
rapidcheck,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@ -1,19 +1,20 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-util
|
||||
, nix-util-c
|
||||
, nix-util-test-support
|
||||
nix-util,
|
||||
nix-util-c,
|
||||
nix-util-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -57,16 +58,22 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -1,18 +1,19 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, boost
|
||||
, brotli
|
||||
, libarchive
|
||||
, libcpuid
|
||||
, libsodium
|
||||
, nlohmann_json
|
||||
, openssl
|
||||
boost,
|
||||
brotli,
|
||||
libarchive,
|
||||
libcpuid,
|
||||
libsodium,
|
||||
nlohmann_json,
|
||||
openssl,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -43,8 +44,7 @@ mkMesonLibrary (finalAttrs: {
|
|||
brotli
|
||||
libsodium
|
||||
openssl
|
||||
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
|
||||
;
|
||||
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
||||
|
||||
propagatedBuildInputs = [
|
||||
boost
|
||||
|
|
|
@@ -274,6 +274,17 @@ std::optional<typename T::value_type> pop(T & c)
}


/**
 * Append items to a container. TODO: remove this once we can use
 * C++23's `append_range()`.
 */
template<class C, typename T>
void append(C & c, std::initializer_list<T> l)
{
c.insert(c.end(), l.begin(), l.end());
}


template<typename T>
class Callback;

|
|
@@ -1,4 +1,8 @@
{ name, channelName, src }:
{
name,
channelName,
src,
}:

derivation {
builder = "builtin:unpack-channel";
|
|
|
@ -8,13 +8,15 @@ derivation {
|
|||
inherit manifest;
|
||||
|
||||
# !!! grmbl, need structured data for passing this in a clean way.
|
||||
derivations =
|
||||
map (d:
|
||||
[ (d.meta.active or "true")
|
||||
(d.meta.priority or 5)
|
||||
(builtins.length d.outputs)
|
||||
] ++ map (output: builtins.getAttr output d) d.outputs)
|
||||
derivations;
|
||||
derivations = map (
|
||||
d:
|
||||
[
|
||||
(d.meta.active or "true")
|
||||
(d.meta.priority or 5)
|
||||
(builtins.length d.outputs)
|
||||
]
|
||||
++ map (output: builtins.getAttr output d) d.outputs
|
||||
) derivations;
|
||||
|
||||
# Building user environments remotely just causes huge amounts of
|
||||
# network traffic, so don't do that.
|
||||
|
|
|
@@ -95,20 +95,20 @@ public:
.optional=true,
.handler={[&](std::vector<std::string> inputsToUpdate){
for (const auto & inputToUpdate : inputsToUpdate) {
InputPath inputPath;
InputAttrPath inputAttrPath;
try {
inputPath = flake::parseInputPath(inputToUpdate);
inputAttrPath = flake::parseInputAttrPath(inputToUpdate);
} catch (Error & e) {
warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
throw e;
}
if (lockFlags.inputUpdates.contains(inputPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.");
lockFlags.inputUpdates.insert(inputPath);
if (lockFlags.inputUpdates.contains(inputAttrPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.", printInputAttrPath(inputAttrPath));
lockFlags.inputUpdates.insert(inputAttrPath);
}
}},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}}
});

@@ -304,7 +304,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
} else if (auto follows = std::get_if<1>(&input.second)) {
logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'",
prefix + (last ? treeLast : treeConn), input.first,
printInputPath(*follows));
printInputAttrPath(*follows));
}
}
};
|
|
|
@ -1,14 +1,15 @@
|
|||
{ lib
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-store
|
||||
, nix-expr
|
||||
, nix-main
|
||||
, nix-cmd
|
||||
nix-store,
|
||||
nix-expr,
|
||||
nix-main,
|
||||
nix-cmd,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -20,64 +21,67 @@ mkMesonExecutable (finalAttrs: {
|
|||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = fileset.unions ([
|
||||
../../nix-meson-build-support
|
||||
./nix-meson-build-support
|
||||
../../.version
|
||||
./.version
|
||||
./meson.build
|
||||
./meson.options
|
||||
|
||||
# Symbolic links to other dirs
|
||||
## exes
|
||||
./build-remote
|
||||
./doc
|
||||
./nix-build
|
||||
./nix-channel
|
||||
./nix-collect-garbage
|
||||
./nix-copy-closure
|
||||
./nix-env
|
||||
./nix-instantiate
|
||||
./nix-store
|
||||
## dirs
|
||||
./scripts
|
||||
../../scripts
|
||||
./misc
|
||||
../../misc
|
||||
|
||||
# Doc nix files for --help
|
||||
../../doc/manual/generate-manpage.nix
|
||||
../../doc/manual/utils.nix
|
||||
../../doc/manual/generate-settings.nix
|
||||
../../doc/manual/generate-store-info.nix
|
||||
|
||||
# Other files to be included as string literals
|
||||
../nix-channel/unpack-channel.nix
|
||||
../nix-env/buildenv.nix
|
||||
./get-env.sh
|
||||
./help-stores.md
|
||||
../../doc/manual/source/store/types/index.md.in
|
||||
./profiles.md
|
||||
../../doc/manual/source/command-ref/files/profiles.md
|
||||
|
||||
# Files
|
||||
] ++ lib.concatMap
|
||||
(dir: [
|
||||
(fileset.fileFilter (file: file.hasExt "cc") dir)
|
||||
(fileset.fileFilter (file: file.hasExt "hh") dir)
|
||||
(fileset.fileFilter (file: file.hasExt "md") dir)
|
||||
])
|
||||
fileset = fileset.unions (
|
||||
[
|
||||
./.
|
||||
../build-remote
|
||||
../nix-build
|
||||
../nix-channel
|
||||
../nix-collect-garbage
|
||||
../nix-copy-closure
|
||||
../nix-env
|
||||
../nix-instantiate
|
||||
../nix-store
|
||||
../../nix-meson-build-support
|
||||
./nix-meson-build-support
|
||||
../../.version
|
||||
./.version
|
||||
./meson.build
|
||||
./meson.options
|
||||
|
||||
# Symbolic links to other dirs
|
||||
## exes
|
||||
./build-remote
|
||||
./doc
|
||||
./nix-build
|
||||
./nix-channel
|
||||
./nix-collect-garbage
|
||||
./nix-copy-closure
|
||||
./nix-env
|
||||
./nix-instantiate
|
||||
./nix-store
|
||||
## dirs
|
||||
./scripts
|
||||
../../scripts
|
||||
./misc
|
||||
../../misc
|
||||
|
||||
# Doc nix files for --help
|
||||
../../doc/manual/generate-manpage.nix
|
||||
../../doc/manual/utils.nix
|
||||
../../doc/manual/generate-settings.nix
|
||||
../../doc/manual/generate-store-info.nix
|
||||
|
||||
# Other files to be included as string literals
|
||||
../nix-channel/unpack-channel.nix
|
||||
../nix-env/buildenv.nix
|
||||
./get-env.sh
|
||||
./help-stores.md
|
||||
../../doc/manual/source/store/types/index.md.in
|
||||
./profiles.md
|
||||
../../doc/manual/source/command-ref/files/profiles.md
|
||||
|
||||
# Files
|
||||
]
|
||||
++
|
||||
lib.concatMap
|
||||
(dir: [
|
||||
(fileset.fileFilter (file: file.hasExt "cc") dir)
|
||||
(fileset.fileFilter (file: file.hasExt "hh") dir)
|
||||
(fileset.fileFilter (file: file.hasExt "md") dir)
|
||||
])
|
||||
[
|
||||
./.
|
||||
../build-remote
|
||||
../nix-build
|
||||
../nix-channel
|
||||
../nix-collect-garbage
|
||||
../nix-copy-closure
|
||||
../nix-env
|
||||
../nix-instantiate
|
||||
../nix-store
|
||||
]
|
||||
);
|
||||
|
||||
buildInputs = [
|
||||
|
|
|
@ -1,76 +1,82 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonDerivation
|
||||
, pkg-config
|
||||
, perl
|
||||
, perlPackages
|
||||
, nix-store
|
||||
, version
|
||||
, curl
|
||||
, bzip2
|
||||
, libsodium
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonDerivation,
|
||||
pkg-config,
|
||||
perl,
|
||||
perlPackages,
|
||||
nix-store,
|
||||
version,
|
||||
curl,
|
||||
bzip2,
|
||||
libsodium,
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (lib) fileset;
|
||||
in
|
||||
|
||||
perl.pkgs.toPerlModule (mkMesonDerivation (finalAttrs: {
|
||||
pname = "nix-perl";
|
||||
inherit version;
|
||||
perl.pkgs.toPerlModule (
|
||||
mkMesonDerivation (finalAttrs: {
|
||||
pname = "nix-perl";
|
||||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = fileset.unions ([
|
||||
./.version
|
||||
../../.version
|
||||
./MANIFEST
|
||||
./lib
|
||||
./meson.build
|
||||
./meson.options
|
||||
] ++ lib.optionals finalAttrs.doCheck [
|
||||
./.yath.rc.in
|
||||
./t
|
||||
]);
|
||||
workDir = ./.;
|
||||
fileset = fileset.unions (
|
||||
[
|
||||
./.version
|
||||
../../.version
|
||||
./MANIFEST
|
||||
./lib
|
||||
./meson.build
|
||||
./meson.options
|
||||
]
|
||||
++ lib.optionals finalAttrs.doCheck [
|
||||
./.yath.rc.in
|
||||
./t
|
||||
]
|
||||
);
|
||||
|
||||
nativeBuildInputs = [
|
||||
pkg-config
|
||||
perl
|
||||
curl
|
||||
];
|
||||
nativeBuildInputs = [
|
||||
pkg-config
|
||||
perl
|
||||
curl
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
nix-store
|
||||
] ++ finalAttrs.passthru.externalBuildInputs;
|
||||
buildInputs = [
|
||||
nix-store
|
||||
] ++ finalAttrs.passthru.externalBuildInputs;
|
||||
|
||||
# Hack for sake of the dev shell
|
||||
passthru.externalBuildInputs = [
|
||||
bzip2
|
||||
libsodium
|
||||
];
|
||||
# Hack for sake of the dev shell
|
||||
passthru.externalBuildInputs = [
|
||||
bzip2
|
||||
libsodium
|
||||
];
|
||||
|
||||
# `perlPackages.Test2Harness` is marked broken for Darwin
|
||||
doCheck = !stdenv.isDarwin;
|
||||
# `perlPackages.Test2Harness` is marked broken for Darwin
|
||||
doCheck = !stdenv.isDarwin;
|
||||
|
||||
nativeCheckInputs = [
|
||||
perlPackages.Test2Harness
|
||||
];
|
||||
nativeCheckInputs = [
|
||||
perlPackages.Test2Harness
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
# "Inline" .version so its not a symlink, and includes the suffix
|
||||
''
|
||||
chmod u+w .version
|
||||
echo ${finalAttrs.version} > .version
|
||||
'';
|
||||
preConfigure =
|
||||
# "Inline" .version so its not a symlink, and includes the suffix
|
||||
''
|
||||
chmod u+w .version
|
||||
echo ${finalAttrs.version} > .version
|
||||
'';
|
||||
|
||||
mesonFlags = [
|
||||
(lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}")
|
||||
(lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}")
|
||||
(lib.mesonEnable "tests" finalAttrs.doCheck)
|
||||
];
|
||||
mesonFlags = [
|
||||
(lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}")
|
||||
(lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}")
|
||||
(lib.mesonEnable "tests" finalAttrs.doCheck)
|
||||
];
|
||||
|
||||
mesonCheckFlags = [
|
||||
"--print-errorlogs"
|
||||
];
|
||||
mesonCheckFlags = [
|
||||
"--print-errorlogs"
|
||||
];
|
||||
|
||||
strictDeps = false;
|
||||
}))
|
||||
strictDeps = false;
|
||||
})
|
||||
)
|
||||
|
|
|
@ -1,6 +1,25 @@
|
|||
let
|
||||
sixteenBytes = "0123456789abcdef";
|
||||
times16 = s: builtins.concatStringsSep "" [s s s s s s s s s s s s s s s s];
|
||||
times16 =
|
||||
s:
|
||||
builtins.concatStringsSep "" [
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
s
|
||||
];
|
||||
exp = n: x: if n == 1 then x else times16 (exp (n - 1) x);
|
||||
sixteenMegabyte = exp 6 sixteenBytes;
|
||||
in
|
||||
|
|
|
@ -4,24 +4,39 @@ with import ./config.nix;
|
|||
|
||||
let
|
||||
|
||||
mkDerivation = args:
|
||||
derivation ({
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')];
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
} // removeAttrs args ["builder" "meta" "passthru"])
|
||||
// { meta = args.meta or {}; passthru = args.passthru or {}; };
|
||||
mkDerivation =
|
||||
args:
|
||||
derivation (
|
||||
{
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = [
|
||||
"sh"
|
||||
"-e"
|
||||
args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')
|
||||
];
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
}
|
||||
// removeAttrs args [
|
||||
"builder"
|
||||
"meta"
|
||||
"passthru"
|
||||
]
|
||||
)
|
||||
// {
|
||||
meta = args.meta or { };
|
||||
passthru = args.passthru or { };
|
||||
};
|
||||
|
||||
input1 = mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote-input-1";
|
||||
buildCommand = "echo hi-input1; echo FOO > $out";
|
||||
requiredSystemFeatures = ["foo"];
|
||||
requiredSystemFeatures = [ "foo" ];
|
||||
outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
|
||||
};
|
||||
|
||||
|
@ -29,7 +44,7 @@ let
|
|||
shell = busybox;
|
||||
name = "build-remote-input-2";
|
||||
buildCommand = "echo hi; echo BAR > $out";
|
||||
requiredSystemFeatures = ["bar"];
|
||||
requiredSystemFeatures = [ "bar" ];
|
||||
outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
|
||||
};
|
||||
|
||||
|
@ -41,21 +56,20 @@ let
|
|||
read x < ${input2}
|
||||
echo $x BAZ > $out
|
||||
'';
|
||||
requiredSystemFeatures = ["baz"];
|
||||
requiredSystemFeatures = [ "baz" ];
|
||||
outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s=";
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote";
|
||||
passthru = { inherit input1 input2 input3; };
|
||||
buildCommand =
|
||||
''
|
||||
read x < ${input1}
|
||||
read y < ${input3}
|
||||
echo "$x $y" > $out
|
||||
'';
|
||||
outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ=";
|
||||
}
|
||||
mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote";
|
||||
passthru = { inherit input1 input2 input3; };
|
||||
buildCommand = ''
|
||||
read x < ${input1}
|
||||
read y < ${input3}
|
||||
echo "$x $y" > $out
|
||||
'';
|
||||
outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ=";
|
||||
}
|
||||
|
|
|
@ -1,39 +1,61 @@
|
|||
{ busybox, contentAddressed ? false }:
|
||||
{
|
||||
busybox,
|
||||
contentAddressed ? false,
|
||||
}:
|
||||
|
||||
with import ./config.nix;
|
||||
|
||||
let
|
||||
|
||||
caArgs = if contentAddressed then {
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
__contentAddressed = true;
|
||||
} else {};
|
||||
caArgs =
|
||||
if contentAddressed then
|
||||
{
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
__contentAddressed = true;
|
||||
}
|
||||
else
|
||||
{ };
|
||||
|
||||
mkDerivation = args:
|
||||
derivation ({
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')];
|
||||
} // removeAttrs args ["builder" "meta" "passthru"]
|
||||
// caArgs)
|
||||
// { meta = args.meta or {}; passthru = args.passthru or {}; };
|
||||
mkDerivation =
|
||||
args:
|
||||
derivation (
|
||||
{
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = [
|
||||
"sh"
|
||||
"-e"
|
||||
args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')
|
||||
];
|
||||
}
|
||||
// removeAttrs args [
|
||||
"builder"
|
||||
"meta"
|
||||
"passthru"
|
||||
]
|
||||
// caArgs
|
||||
)
|
||||
// {
|
||||
meta = args.meta or { };
|
||||
passthru = args.passthru or { };
|
||||
};
|
||||
|
||||
input1 = mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote-input-1";
|
||||
buildCommand = "echo hi-input1; echo FOO > $out";
|
||||
requiredSystemFeatures = ["foo"];
|
||||
requiredSystemFeatures = [ "foo" ];
|
||||
};
|
||||
|
||||
input2 = mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote-input-2";
|
||||
buildCommand = "echo hi; echo BAR > $out";
|
||||
requiredSystemFeatures = ["bar"];
|
||||
requiredSystemFeatures = [ "bar" ];
|
||||
};
|
||||
|
||||
input3 = mkDerivation {
|
||||
|
@ -44,19 +66,18 @@ let
|
|||
read x < ${input2}
|
||||
echo $x BAZ > $out
|
||||
'';
|
||||
requiredSystemFeatures = ["baz"];
|
||||
requiredSystemFeatures = [ "baz" ];
|
||||
};
|
||||
|
||||
in
|
||||
|
||||
mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote";
|
||||
passthru = { inherit input1 input2 input3; };
|
||||
buildCommand =
|
||||
''
|
||||
read x < ${input1}
|
||||
read y < ${input3}
|
||||
echo "$x $y" > $out
|
||||
'';
|
||||
}
|
||||
mkDerivation {
|
||||
shell = busybox;
|
||||
name = "build-remote";
|
||||
passthru = { inherit input1 input2 input3; };
|
||||
buildCommand = ''
|
||||
read x < ${input1}
|
||||
read y < ${input3}
|
||||
echo "$x $y" > $out
|
||||
'';
|
||||
}
|
||||
|
|
|
@@ -1 +1,5 @@
{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; })
{
inNixShell ? false,
...
}@args:
import ./shell.nix (args // { contentAddressed = true; })
|
|
|
@ -1,13 +1,21 @@
|
|||
with import ./config.nix;
|
||||
|
||||
let mkCADerivation = args: mkDerivation ({
|
||||
__contentAddressed = true;
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
} // args);
|
||||
let
|
||||
mkCADerivation =
|
||||
args:
|
||||
mkDerivation (
|
||||
{
|
||||
__contentAddressed = true;
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
}
|
||||
// args
|
||||
);
|
||||
in
|
||||
|
||||
{ seed ? 0 }:
|
||||
{
|
||||
seed ? 0,
|
||||
}:
|
||||
# A simple content-addressed derivation.
|
||||
# The derivation can be arbitrarily modified by passing a different `seed`,
|
||||
# but the output will always be the same
|
||||
|
@ -23,7 +31,11 @@ rec {
|
|||
};
|
||||
rootCA = mkCADerivation {
|
||||
name = "rootCA";
|
||||
outputs = [ "out" "dev" "foo" ];
|
||||
outputs = [
|
||||
"out"
|
||||
"dev"
|
||||
"foo"
|
||||
];
|
||||
buildCommand = ''
|
||||
echo "building a CA derivation"
|
||||
echo "The seed is ${toString seed}"
|
||||
|
|
|
@@ -1,3 +1,3 @@
{
outputs = { self }: import ./content-addressed.nix {};
outputs = { self }: import ./content-addressed.nix { };
}
|
|
|
@ -1,10 +1,16 @@
|
|||
with import ./config.nix;
|
||||
|
||||
let mkCADerivation = args: mkDerivation ({
|
||||
__contentAddressed = true;
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
} // args);
|
||||
let
|
||||
mkCADerivation =
|
||||
args:
|
||||
mkDerivation (
|
||||
{
|
||||
__contentAddressed = true;
|
||||
outputHashMode = "recursive";
|
||||
outputHashAlgo = "sha256";
|
||||
}
|
||||
// args
|
||||
);
|
||||
in
|
||||
|
||||
rec {
|
||||
|
@ -15,13 +21,15 @@ rec {
|
|||
echo $(date) > $out/current-time
|
||||
'';
|
||||
};
|
||||
dep = seed: mkCADerivation {
|
||||
name = "dep";
|
||||
inherit seed;
|
||||
buildCommand = ''
|
||||
echo ${currentTime} > $out
|
||||
'';
|
||||
};
|
||||
dep =
|
||||
seed:
|
||||
mkCADerivation {
|
||||
name = "dep";
|
||||
inherit seed;
|
||||
buildCommand = ''
|
||||
echo ${currentTime} > $out
|
||||
'';
|
||||
};
|
||||
dep1 = dep 1;
|
||||
dep2 = dep 2;
|
||||
toplevel = mkCADerivation {
|
||||
|
@ -32,4 +40,3 @@ rec {
|
|||
'';
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
# A derivation that would certainly fail if several builders tried to
|
||||
# build it at once.
|
||||
|
||||
|
||||
with import ./config.nix;
|
||||
|
||||
mkDerivation {
|
||||
|
|
|
@ -2,11 +2,16 @@ with import ./config.nix;
|
|||
|
||||
rec {
|
||||
|
||||
dep = import ./dependencies.nix {};
|
||||
dep = import ./dependencies.nix { };
|
||||
|
||||
makeTest = nr: args: mkDerivation ({
|
||||
name = "check-refs-" + toString nr;
|
||||
} // args);
|
||||
makeTest =
|
||||
nr: args:
|
||||
mkDerivation (
|
||||
{
|
||||
name = "check-refs-" + toString nr;
|
||||
}
|
||||
// args
|
||||
);
|
||||
|
||||
src = builtins.toFile "aux-ref" "bla bla";
|
||||
|
||||
|
@ -22,31 +27,31 @@ rec {
|
|||
|
||||
test3 = makeTest 3 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
|
||||
allowedReferences = [];
|
||||
allowedReferences = [ ];
|
||||
inherit dep;
|
||||
};
|
||||
|
||||
test4 = makeTest 4 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
|
||||
allowedReferences = [dep];
|
||||
allowedReferences = [ dep ];
|
||||
inherit dep;
|
||||
};
|
||||
|
||||
test5 = makeTest 5 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out";
|
||||
allowedReferences = [];
|
||||
allowedReferences = [ ];
|
||||
inherit dep;
|
||||
};
|
||||
|
||||
test6 = makeTest 6 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link";
|
||||
allowedReferences = [];
|
||||
allowedReferences = [ ];
|
||||
inherit dep;
|
||||
};
|
||||
|
||||
test7 = makeTest 7 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link";
|
||||
allowedReferences = ["out"];
|
||||
allowedReferences = [ "out" ];
|
||||
inherit dep;
|
||||
};
|
||||
|
||||
|
@ -58,19 +63,19 @@ rec {
|
|||
test9 = makeTest 9 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
|
||||
inherit dep;
|
||||
disallowedReferences = [dep];
|
||||
disallowedReferences = [ dep ];
|
||||
};
|
||||
|
||||
test10 = makeTest 10 {
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; echo $test5; ln -s $dep $out/link";
|
||||
inherit dep test5;
|
||||
disallowedReferences = [test5];
|
||||
disallowedReferences = [ test5 ];
|
||||
};
|
||||
|
||||
test11 = makeTest 11 {
|
||||
__structuredAttrs = true;
|
||||
unsafeDiscardReferences.out = true;
|
||||
outputChecks.out.allowedReferences = [];
|
||||
outputChecks.out.allowedReferences = [ ];
|
||||
buildCommand = ''echo ${dep} > "''${outputs[out]}"'';
|
||||
};
|
||||
|
||||
|
|
|
@ -22,36 +22,48 @@ rec {
|
|||
'';
|
||||
};
|
||||
|
||||
makeTest = nr: allowreqs: mkDerivation {
|
||||
name = "check-reqs-" + toString nr;
|
||||
inherit deps;
|
||||
builder = builtins.toFile "builder.sh" ''
|
||||
mkdir $out
|
||||
ln -s $deps $out/depdir1
|
||||
'';
|
||||
allowedRequisites = allowreqs;
|
||||
};
|
||||
makeTest =
|
||||
nr: allowreqs:
|
||||
mkDerivation {
|
||||
name = "check-reqs-" + toString nr;
|
||||
inherit deps;
|
||||
builder = builtins.toFile "builder.sh" ''
|
||||
mkdir $out
|
||||
ln -s $deps $out/depdir1
|
||||
'';
|
||||
allowedRequisites = allowreqs;
|
||||
};
|
||||
|
||||
# When specifying all the requisites, the build succeeds.
|
||||
test1 = makeTest 1 [ dep1 dep2 deps ];
|
||||
test1 = makeTest 1 [
|
||||
dep1
|
||||
dep2
|
||||
deps
|
||||
];
|
||||
|
||||
# But missing anything it fails.
|
||||
test2 = makeTest 2 [ dep2 deps ];
|
||||
test3 = makeTest 3 [ dep1 deps ];
|
||||
test2 = makeTest 2 [
|
||||
dep2
|
||||
deps
|
||||
];
|
||||
test3 = makeTest 3 [
|
||||
dep1
|
||||
deps
|
||||
];
|
||||
test4 = makeTest 4 [ deps ];
|
||||
test5 = makeTest 5 [];
|
||||
test5 = makeTest 5 [ ];
|
||||
|
||||
test6 = mkDerivation {
|
||||
name = "check-reqs";
|
||||
inherit deps;
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1";
|
||||
disallowedRequisites = [dep1];
|
||||
disallowedRequisites = [ dep1 ];
|
||||
};
|
||||
|
||||
test7 = mkDerivation {
|
||||
name = "check-reqs";
|
||||
inherit deps;
|
||||
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1";
|
||||
disallowedRequisites = [test1];
|
||||
disallowedRequisites = [ test1 ];
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
{checkBuildId ? 0}:
|
||||
{
|
||||
checkBuildId ? 0,
|
||||
}:
|
||||
|
||||
with import ./config.nix;
|
||||
|
||||
|
@ -6,41 +8,38 @@ with import ./config.nix;
|
|||
nondeterministic = mkDerivation {
|
||||
inherit checkBuildId;
|
||||
name = "nondeterministic";
|
||||
buildCommand =
|
||||
''
|
||||
mkdir $out
|
||||
date +%s.%N > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
'';
|
||||
buildCommand = ''
|
||||
mkdir $out
|
||||
date +%s.%N > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
'';
|
||||
};
|
||||
|
||||
deterministic = mkDerivation {
|
||||
inherit checkBuildId;
|
||||
name = "deterministic";
|
||||
buildCommand =
|
||||
''
|
||||
mkdir $out
|
||||
echo date > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
'';
|
||||
buildCommand = ''
|
||||
mkdir $out
|
||||
echo date > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
'';
|
||||
};
|
||||
|
||||
failed = mkDerivation {
|
||||
inherit checkBuildId;
|
||||
name = "failed";
|
||||
buildCommand =
|
||||
''
|
||||
mkdir $out
|
||||
echo date > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
false
|
||||
'';
|
||||
buildCommand = ''
|
||||
mkdir $out
|
||||
echo date > $out/date
|
||||
echo "CHECK_TMPDIR=$TMPDIR"
|
||||
echo "checkBuildId=$checkBuildId"
|
||||
echo "$checkBuildId" > $TMPDIR/checkBuildId
|
||||
false
|
||||
'';
|
||||
};
|
||||
|
||||
hashmismatch = import <nix/fetchurl.nix> {
|
||||
|
|
|
@@ -60,6 +60,7 @@ unset XDG_DATA_HOME
unset XDG_CONFIG_HOME
unset XDG_CONFIG_DIRS
unset XDG_CACHE_HOME
unset GIT_DIR

export IMPURE_VAR1=foo
export IMPURE_VAR2=bar
|
|
|
@ -1,4 +1,6 @@
|
|||
{ hashInvalidator ? "" }:
|
||||
{
|
||||
hashInvalidator ? "",
|
||||
}:
|
||||
with import ./config.nix;
|
||||
|
||||
let
|
||||
|
|
|
@ -2,5 +2,8 @@ derivation {
|
|||
name = "advanced-attributes-defaults";
|
||||
system = "my-system";
|
||||
builder = "/bin/bash";
|
||||
args = [ "-c" "echo hello > $out" ];
|
||||
args = [
|
||||
"-c"
|
||||
"echo hello > $out"
|
||||
];
|
||||
}
|
||||
|
|
|
@ -2,7 +2,13 @@ derivation {
|
|||
name = "advanced-attributes-structured-attrs-defaults";
|
||||
system = "my-system";
|
||||
builder = "/bin/bash";
|
||||
args = [ "-c" "echo hello > $out" ];
|
||||
outputs = [ "out" "dev" ];
|
||||
args = [
|
||||
"-c"
|
||||
"echo hello > $out"
|
||||
];
|
||||
outputs = [
|
||||
"out"
|
||||
"dev"
|
||||
];
|
||||
__structuredAttrs = true;
|
||||
}
|
||||
|
|
|
@ -4,42 +4,58 @@ let
|
|||
inherit system;
|
||||
name = "foo";
|
||||
builder = "/bin/bash";
|
||||
args = ["-c" "echo foo > $out"];
|
||||
args = [
|
||||
"-c"
|
||||
"echo foo > $out"
|
||||
];
|
||||
};
|
||||
bar = derivation {
|
||||
inherit system;
|
||||
name = "bar";
|
||||
builder = "/bin/bash";
|
||||
args = ["-c" "echo bar > $out"];
|
||||
args = [
|
||||
"-c"
|
||||
"echo bar > $out"
|
||||
];
|
||||
};
|
||||
in
|
||||
derivation {
|
||||
inherit system;
|
||||
name = "advanced-attributes-structured-attrs";
|
||||
builder = "/bin/bash";
|
||||
args = [ "-c" "echo hello > $out" ];
|
||||
args = [
|
||||
"-c"
|
||||
"echo hello > $out"
|
||||
];
|
||||
__sandboxProfile = "sandcastle";
|
||||
__noChroot = true;
|
||||
__impureHostDeps = ["/usr/bin/ditto"];
|
||||
impureEnvVars = ["UNICORN"];
|
||||
__impureHostDeps = [ "/usr/bin/ditto" ];
|
||||
impureEnvVars = [ "UNICORN" ];
|
||||
__darwinAllowLocalNetworking = true;
|
||||
outputs = [ "out" "bin" "dev" ];
|
||||
outputs = [
|
||||
"out"
|
||||
"bin"
|
||||
"dev"
|
||||
];
|
||||
__structuredAttrs = true;
|
||||
outputChecks = {
|
||||
out = {
|
||||
allowedReferences = [foo];
|
||||
allowedRequisites = [foo];
|
||||
allowedReferences = [ foo ];
|
||||
allowedRequisites = [ foo ];
|
||||
};
|
||||
bin = {
|
||||
disallowedReferences = [bar];
|
||||
disallowedRequisites = [bar];
|
||||
disallowedReferences = [ bar ];
|
||||
disallowedRequisites = [ bar ];
|
||||
};
|
||||
dev = {
|
||||
maxSize = 789;
|
||||
maxClosureSize = 5909;
|
||||
};
|
||||
};
|
||||
requiredSystemFeatures = ["rainbow" "uid-range"];
|
||||
requiredSystemFeatures = [
|
||||
"rainbow"
|
||||
"uid-range"
|
||||
];
|
||||
preferLocalBuild = true;
|
||||
allowSubstitutes = false;
|
||||
}
|
||||
|
|
|
@ -4,30 +4,42 @@ let
|
|||
inherit system;
|
||||
name = "foo";
|
||||
builder = "/bin/bash";
|
||||
args = ["-c" "echo foo > $out"];
|
||||
args = [
|
||||
"-c"
|
||||
"echo foo > $out"
|
||||
];
|
||||
};
|
||||
bar = derivation {
|
||||
inherit system;
|
||||
name = "bar";
|
||||
builder = "/bin/bash";
|
||||
args = ["-c" "echo bar > $out"];
|
||||
args = [
|
||||
"-c"
|
||||
"echo bar > $out"
|
||||
];
|
||||
};
|
||||
in
|
||||
derivation {
|
||||
inherit system;
|
||||
name = "advanced-attributes";
|
||||
builder = "/bin/bash";
|
||||
args = [ "-c" "echo hello > $out" ];
|
||||
args = [
|
||||
"-c"
|
||||
"echo hello > $out"
|
||||
];
|
||||
__sandboxProfile = "sandcastle";
|
||||
__noChroot = true;
|
||||
__impureHostDeps = ["/usr/bin/ditto"];
|
||||
impureEnvVars = ["UNICORN"];
|
||||
__impureHostDeps = [ "/usr/bin/ditto" ];
|
||||
impureEnvVars = [ "UNICORN" ];
|
||||
__darwinAllowLocalNetworking = true;
|
||||
allowedReferences = [foo];
|
||||
allowedRequisites = [foo];
|
||||
disallowedReferences = [bar];
|
||||
disallowedRequisites = [bar];
|
||||
requiredSystemFeatures = ["rainbow" "uid-range"];
|
||||
allowedReferences = [ foo ];
|
||||
allowedRequisites = [ foo ];
|
||||
disallowedReferences = [ bar ];
|
||||
disallowedRequisites = [ bar ];
|
||||
requiredSystemFeatures = [
|
||||
"rainbow"
|
||||
"uid-range"
|
||||
];
|
||||
preferLocalBuild = true;
|
||||
allowSubstitutes = false;
|
||||
}
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
with import ./config.nix;
|
||||
|
||||
let innerName = "foo"; in
|
||||
let
|
||||
innerName = "foo";
|
||||
in
|
||||
|
||||
mkDerivation rec {
|
||||
name = "${innerName}.drv";
|
||||
|
|
|
@ -2,28 +2,33 @@ with import ./config.nix;
|
|||
|
||||
rec {
|
||||
|
||||
printRefs =
|
||||
''
|
||||
echo $exportReferencesGraph
|
||||
while read path; do
|
||||
read drv
|
||||
read nrRefs
|
||||
echo "$path has $nrRefs references"
|
||||
echo "$path" >> $out
|
||||
for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done
|
||||
done < refs
|
||||
'';
|
||||
printRefs = ''
|
||||
echo $exportReferencesGraph
|
||||
while read path; do
|
||||
read drv
|
||||
read nrRefs
|
||||
echo "$path has $nrRefs references"
|
||||
echo "$path" >> $out
|
||||
for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done
|
||||
done < refs
|
||||
'';
|
||||
|
||||
foo."bar.runtimeGraph" = mkDerivation {
|
||||
name = "dependencies";
|
||||
builder = builtins.toFile "build-graph-builder" "${printRefs}";
|
||||
exportReferencesGraph = ["refs" (import ./dependencies.nix {})];
|
||||
exportReferencesGraph = [
|
||||
"refs"
|
||||
(import ./dependencies.nix { })
|
||||
];
|
||||
};
|
||||
|
||||
foo."bar.buildGraph" = mkDerivation {
|
||||
name = "dependencies";
|
||||
builder = builtins.toFile "build-graph-builder" "${printRefs}";
|
||||
exportReferencesGraph = ["refs" (import ./dependencies.nix {}).drvPath];
|
||||
exportReferencesGraph = [
|
||||
"refs"
|
||||
(import ./dependencies.nix { }).drvPath
|
||||
];
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
@ -2,16 +2,29 @@
|
|||
with import ./config.nix;
|
||||
let
|
||||
|
||||
mkDerivation = args:
|
||||
derivation ({
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')];
|
||||
} // removeAttrs args ["builder" "meta"])
|
||||
// { meta = args.meta or {}; };
|
||||
mkDerivation =
|
||||
args:
|
||||
derivation (
|
||||
{
|
||||
inherit system;
|
||||
builder = busybox;
|
||||
args = [
|
||||
"sh"
|
||||
"-e"
|
||||
args.builder or (builtins.toFile "builder-${args.name}.sh" ''
|
||||
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
|
||||
eval "$buildCommand"
|
||||
'')
|
||||
];
|
||||
}
|
||||
// removeAttrs args [
|
||||
"builder"
|
||||
"meta"
|
||||
]
|
||||
)
|
||||
// {
|
||||
meta = args.meta or { };
|
||||
};
|
||||
in
|
||||
{
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff.