
Merge remote-tracking branch 'upstream/master' into lfs

Leandro Reina 2025-01-27 14:44:41 +01:00
commit 6a3b4afc0a
347 changed files with 8407 additions and 5795 deletions


@ -106,3 +106,14 @@ pull_request_rules:
        labels:
          - automatic backport
          - merge-queue
  - name: backport patches to 2.26
    conditions:
      - label=backport 2.26-maintenance
    actions:
      backport:
        branches:
          - "2.26-maintenance"
        labels:
          - automatic backport
          - merge-queue


@ -1 +1 @@
2.26.0 → 2.27.0


@ -1,10 +1,9 @@
(import (
  let
    lock = builtins.fromJSON (builtins.readFile ./flake.lock);
  in
  fetchTarball {
    url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz";
    sha256 = lock.nodes.flake-compat.locked.narHash;
  }
) { src = ./.; }).defaultNix


@ -5,7 +5,15 @@ in
builtinsInfo:
let
  showBuiltin =
    name:
    {
      doc,
      type ? null,
      args ? [ ],
      experimental-feature ? null,
      impure-only ? false,
    }:
    let
      type' = optionalString (type != null) " (${type})";


@ -32,7 +32,13 @@ let
commandInfo = fromJSON commandDump; commandInfo = fromJSON commandDump;
showCommand = { command, details, filename, toplevel }: showCommand =
{
command,
details,
filename,
toplevel,
}:
let let
result = '' result = ''
@ -56,26 +62,27 @@ let
${maybeOptions} ${maybeOptions}
''; '';
showSynopsis = command: args: showSynopsis =
command: args:
let let
showArgument = arg: "*${arg.label}*" + optionalString (! arg ? arity) "..."; showArgument = arg: "*${arg.label}*" + optionalString (!arg ? arity) "...";
arguments = concatStringsSep " " (map showArgument args); arguments = concatStringsSep " " (map showArgument args);
in '' in
''
`${command}` [*option*...] ${arguments} `${command}` [*option*...] ${arguments}
''; '';
maybeSubcommands = optionalString (details ? commands && details.commands != {}) maybeSubcommands = optionalString (details ? commands && details.commands != { }) ''
'' where *subcommand* is one of the following:
where *subcommand* is one of the following:
${subcommands} ${subcommands}
''; '';
subcommands = if length categories > 1 subcommands = if length categories > 1 then listCategories else listSubcommands details.commands;
then listCategories
else listSubcommands details.commands;
categories = sort (x: y: x.id < y.id) (unique (map (cmd: cmd.category) (attrValues details.commands))); categories = sort (x: y: x.id < y.id) (
unique (map (cmd: cmd.category) (attrValues details.commands))
);
listCategories = concatStrings (map showCategory categories); listCategories = concatStrings (map showCategory categories);
@ -99,38 +106,39 @@ let
${allStores} ${allStores}
''; '';
index = replaceStrings index =
[ "@store-types@" "./local-store.md" "./local-daemon-store.md" ] replaceStrings
[ storesOverview "#local-store" "#local-daemon-store" ] [ "@store-types@" "./local-store.md" "./local-daemon-store.md" ]
details.doc; [ storesOverview "#local-store" "#local-daemon-store" ]
details.doc;
storesOverview = storesOverview =
let let
showEntry = store: showEntry = store: "- [${store.name}](#${store.slug})";
"- [${store.name}](#${store.slug})";
in in
concatStringsSep "\n" (map showEntry storesList) + "\n"; concatStringsSep "\n" (map showEntry storesList) + "\n";
allStores = concatStringsSep "\n" (attrValues storePages); allStores = concatStringsSep "\n" (attrValues storePages);
storePages = listToAttrs storePages = listToAttrs (
(map (s: { name = s.filename; value = s.page; }) storesList); map (s: {
name = s.filename;
value = s.page;
}) storesList
);
storesList = showStoreDocs { storesList = showStoreDocs {
storeInfo = commandInfo.stores; storeInfo = commandInfo.stores;
inherit inlineHTML; inherit inlineHTML;
}; };
hasInfix = infix: content: hasInfix =
infix: content:
builtins.stringLength content != builtins.stringLength (replaceStrings [ infix ] [ "" ] content); builtins.stringLength content != builtins.stringLength (replaceStrings [ infix ] [ "" ] content);
in in
optionalString (details ? doc) ( optionalString (details ? doc) (
# An alternate implementation with builtins.match stack overflowed on some systems. # An alternate implementation with builtins.match stack overflowed on some systems.
if hasInfix "@store-types@" details.doc if hasInfix "@store-types@" details.doc then help-stores else details.doc
then help-stores
else details.doc
); );
maybeOptions = maybeOptions =
let let
allVisibleOptions = filterAttrs allVisibleOptions = filterAttrs (_: o: !o.hiddenCategory) (details.flags // toplevel.flags);
(_: o: ! o.hiddenCategory)
(details.flags // toplevel.flags);
in in
optionalString (allVisibleOptions != { }) '' optionalString (allVisibleOptions != { }) ''
# Options # Options
@ -142,55 +150,73 @@ let
> See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags. > See [`man nix.conf`](@docroot@/command-ref/conf-file.md#command-line-flags) for overriding configuration settings with command line flags.
''; '';
showOptions = inlineHTML: allOptions: showOptions =
inlineHTML: allOptions:
let let
showCategory = cat: opts: '' showCategory = cat: opts: ''
${optionalString (cat != "") "## ${cat}"} ${optionalString (cat != "") "## ${cat}"}
${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))} ${concatStringsSep "\n" (attrValues (mapAttrs showOption opts))}
''; '';
showOption = name: option: showOption =
name: option:
let let
result = trim '' result = trim ''
- ${item} - ${item}
${option.description} ${option.description}
''; '';
item = if inlineHTML item =
then ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}'' if inlineHTML then
else "`--${name}` ${shortName} ${labels}"; ''<span id="opt-${name}">[`--${name}`](#opt-${name})</span> ${shortName} ${labels}''
shortName = optionalString else
(option ? shortName) "`--${name}` ${shortName} ${labels}";
("/ `-${option.shortName}`"); shortName = optionalString (option ? shortName) ("/ `-${option.shortName}`");
labels = optionalString labels = optionalString (option ? labels) (concatStringsSep " " (map (s: "*${s}*") option.labels));
(option ? labels) in
(concatStringsSep " " (map (s: "*${s}*") option.labels)); result;
in result; categories =
categories = mapAttrs mapAttrs
# Convert each group from a list of key-value pairs back to an attrset # Convert each group from a list of key-value pairs back to an attrset
(_: listToAttrs) (_: listToAttrs)
(groupBy (groupBy (cmd: cmd.value.category) (attrsToList allOptions));
(cmd: cmd.value.category) in
(attrsToList allOptions)); concatStrings (attrValues (mapAttrs showCategory categories));
in concatStrings (attrValues (mapAttrs showCategory categories)); in
in squash result; squash result;
appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name; appendName = filename: name: (if filename == "nix" then "nix3" else filename) + "-" + name;
processCommand = { command, details, filename, toplevel }: processCommand =
{
command,
details,
filename,
toplevel,
}:
let let
cmd = { cmd = {
inherit command; inherit command;
name = filename + ".md"; name = filename + ".md";
value = showCommand { inherit command details filename toplevel; }; value = showCommand {
inherit
command
details
filename
toplevel
;
};
}; };
subcommand = subCmd: processCommand { subcommand =
command = command + " " + subCmd; subCmd:
details = details.commands.${subCmd}; processCommand {
filename = appendName filename subCmd; command = command + " " + subCmd;
inherit toplevel; details = details.commands.${subCmd};
}; filename = appendName filename subCmd;
in [ cmd ] ++ concatMap subcommand (attrNames details.commands or {}); inherit toplevel;
};
in
[ cmd ] ++ concatMap subcommand (attrNames details.commands or { });
manpages = processCommand { manpages = processCommand {
command = "nix"; command = "nix";
@ -199,9 +225,11 @@ let
toplevel = commandInfo.args; toplevel = commandInfo.args;
}; };
tableOfContents = let tableOfContents =
showEntry = page: let
" - [${page.command}](command-ref/new-cli/${page.name})"; showEntry = page: " - [${page.command}](command-ref/new-cli/${page.name})";
in concatStringsSep "\n" (map showEntry manpages) + "\n"; in
concatStringsSep "\n" (map showEntry manpages) + "\n";
in (listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; } in
(listToAttrs manpages) // { "SUMMARY.md" = tableOfContents; }


@ -1,67 +1,99 @@
let let
inherit (builtins) attrValues concatStringsSep isAttrs isBool mapAttrs; inherit (builtins)
inherit (import <nix/utils.nix>) concatStrings indent optionalString squash; attrValues
concatStringsSep
isAttrs
isBool
mapAttrs
;
inherit (import <nix/utils.nix>)
concatStrings
indent
optionalString
squash
;
in in
# `inlineHTML` is a hack to accommodate inconsistent output from `lowdown` # `inlineHTML` is a hack to accommodate inconsistent output from `lowdown`
{ prefix, inlineHTML ? true }: settingsInfo: {
prefix,
inlineHTML ? true,
}:
settingsInfo:
let let
showSetting = prefix: setting: { description, documentDefault, defaultValue, aliases, value, experimentalFeature }: showSetting =
prefix: setting:
{
description,
documentDefault,
defaultValue,
aliases,
value,
experimentalFeature,
}:
let let
result = squash '' result = squash ''
- ${item} - ${item}
${indent " " body} ${indent " " body}
''; '';
item = if inlineHTML item =
then ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>'' if inlineHTML then
else "`${setting}`"; ''<span id="${prefix}-${setting}">[`${setting}`](#${prefix}-${setting})</span>''
else
"`${setting}`";
# separate body to cleanly handle indentation # separate body to cleanly handle indentation
body = '' body = ''
${experimentalFeatureNote} ${experimentalFeatureNote}
${description} ${description}
**Default:** ${showDefault documentDefault defaultValue} **Default:** ${showDefault documentDefault defaultValue}
${showAliases aliases} ${showAliases aliases}
''; '';
experimentalFeatureNote = optionalString (experimentalFeature != null) '' experimentalFeatureNote = optionalString (experimentalFeature != null) ''
> **Warning** > **Warning**
> >
> This setting is part of an > This setting is part of an
> [experimental feature](@docroot@/development/experimental-features.md). > [experimental feature](@docroot@/development/experimental-features.md).
> >
> To change this setting, make sure the > To change this setting, make sure the
> [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature}) > [`${experimentalFeature}` experimental feature](@docroot@/development/experimental-features.md#xp-feature-${experimentalFeature})
> is enabled. > is enabled.
> For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md): > For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
> >
> ``` > ```
> extra-experimental-features = ${experimentalFeature} > extra-experimental-features = ${experimentalFeature}
> ${setting} = ... > ${setting} = ...
> ``` > ```
''; '';
showDefault = documentDefault: defaultValue: showDefault =
documentDefault: defaultValue:
if documentDefault then if documentDefault then
# a StringMap value type is specified as a string, but # a StringMap value type is specified as a string, but
# this shows the value type. The empty stringmap is `null` in # this shows the value type. The empty stringmap is `null` in
# JSON, but that converts to `{ }` here. # JSON, but that converts to `{ }` here.
if defaultValue == "" || defaultValue == [] || isAttrs defaultValue if defaultValue == "" || defaultValue == [ ] || isAttrs defaultValue then
then "*empty*" "*empty*"
else if isBool defaultValue then else if isBool defaultValue then
if defaultValue then "`true`" else "`false`" if defaultValue then "`true`" else "`false`"
else "`${toString defaultValue}`" else
else "*machine-specific*"; "`${toString defaultValue}`"
else
"*machine-specific*";
showAliases = aliases: showAliases =
optionalString (aliases != []) aliases:
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}"; optionalString (aliases != [ ])
"**Deprecated alias:** ${(concatStringsSep ", " (map (s: "`${s}`") aliases))}";
in result; in
result;
in concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo)) in
concatStrings (attrValues (mapAttrs (showSetting prefix) settingsInfo))


@ -1,6 +1,20 @@
let
  inherit (builtins)
    attrNames
    listToAttrs
    concatStringsSep
    readFile
    replaceStrings
    ;
  inherit (import <nix/utils.nix>)
    optionalString
    filterAttrs
    trim
    squash
    toLower
    unique
    indent
    ;

  showSettings = import <nix/generate-settings.nix>;
in
@ -14,7 +28,13 @@ in
let

  showStore =
    { name, slug }:
    {
      settings,
      doc,
      experimentalFeature,
    }:
    let
      result = squash ''
        # ${name}
@ -25,7 +45,10 @@ let
        ## Settings

        ${showSettings {
          prefix = "store-${slug}";
          inherit inlineHTML;
        } settings}
      '';

      experimentalFeatureNote = optionalString (experimentalFeature != null) ''
@ -43,15 +66,15 @@ let
        > extra-experimental-features = ${experimentalFeature}
        > ```
      '';
    in
    result;

  storesList = map (name: rec {
    inherit name;
    slug = replaceStrings [ " " ] [ "-" ] (toLower name);
    filename = "${slug}.md";
    page = showStore { inherit name slug; } storeInfo.${name};
  }) (attrNames storeInfo);

in
storesList


@ -1,5 +1,11 @@
let
  inherit (builtins)
    attrNames
    listToAttrs
    concatStringsSep
    readFile
    replaceStrings
    ;
  showSettings = import <nix/generate-settings.nix>;
  showStoreDocs = import <nix/generate-store-info.nix>;
in
@ -14,26 +20,28 @@ let
  index =
    let
      showEntry = store: "- [${store.name}](./${store.filename})";
    in
    concatStringsSep "\n" (map showEntry storesList);

  "index.md" =
    replaceStrings [ "@store-types@" ] [ index ]
      (readFile ./source/store/types/index.md.in);

  tableOfContents =
    let
      showEntry = store: " - [${store.name}](store/types/${store.filename})";
    in
    concatStringsSep "\n" (map showEntry storesList) + "\n";

  "SUMMARY.md" = tableOfContents;

  storePages = listToAttrs (
    map (s: {
      name = s.filename;
      value = s.page;
    }) storesList
  );

in
storePages // { inherit "index.md" "SUMMARY.md"; }


@ -2,8 +2,8 @@ with builtins;
with import <nix/utils.nix>;
let
  showExperimentalFeature = name: doc: ''
    - [`${name}`](@docroot@/development/experimental-features.md#xp-feature-${name})
  '';
in
xps: indent "  " (concatStrings (attrValues (mapAttrs showExperimentalFeature xps)))


@ -2,7 +2,8 @@ with builtins;
with import <nix/utils.nix>;
let
  showExperimentalFeature =
    name: doc:
    squash ''
      ## [`${name}`]{#xp-feature-${name}}


@ -1,19 +1,20 @@
{
  lib,
  mkMesonDerivation,

  meson,
  ninja,
  lowdown-unsandboxed,
  mdbook,
  mdbook-linkcheck,
  jq,
  python3,
  rsync,
  nix-cli,

  # Configuration Options

  version,
}:

let
@ -25,18 +26,22 @@ mkMesonDerivation (finalAttrs: {
  inherit version;

  workDir = ./.;
  fileset =
    fileset.difference
      (fileset.unions [
        ../../.version
        # Too many different types of files to filter for now
        ../../doc/manual
        ./.
      ])
      # Do a blacklist instead
      ../../doc/manual/package.nix;

  # TODO the man pages should probably be separate
  outputs = [
    "out"
    "man"
  ];

  # Hack for sake of the dev shell
  passthru.externalNativeBuildInputs = [
@ -54,11 +59,10 @@ mkMesonDerivation (finalAttrs: {
    nix-cli
  ];

  preConfigure = ''
    chmod u+w ./.version
    echo ${finalAttrs.version} > ./.version
  '';

  postInstall = ''
    mkdir -p ''$out/nix-support


@ -1,22 +0,0 @@
---
synopsis: "Flake lock file generation now ignores local registries"
prs: [12019]
---
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
```nix
{
outputs = { self, nixpkgs }: { ... };
}
```
write
```nix
{
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
outputs = { self, nixpkgs }: { ... };
}
```


@ -1,18 +0,0 @@
---
synopsis: "`nix copy` supports `--profile` and `--out-link`"
prs: [11657]
---
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the
top-level store path, while `--out-link` creates symlinks to the top-level store paths.
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
```
# nix copy --from ssh://server $path
# nix build --profile /nix/var/nix/profiles/system $path
```
you can now do
```
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
```
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.


@ -1,8 +0,0 @@
---
synopsis: "`nix-instantiate --eval` now supports `--raw`"
prs: [12119]
---
The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
the evaluation result must be a string, which is printed verbatim, without
quotation marks or escaping.


@ -1,21 +0,0 @@
---
synopsis: "Improved `NIX_SSHOPTS` parsing for better SSH option handling"
issues: [5181]
prs: [12020]
---
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
Previously, incorrectly split SSH options could cause failures in CLIs like `nix-copy-closure`,
especially when using complex ssh invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
This change introduces a `shellSplitString` function to ensure
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
behavior, addressing common parsing errors.
For example, the following now works as expected:
```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
```
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.


@ -1,12 +0,0 @@
---
synopsis: "Support for relative path inputs"
prs: [10089]
---
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
```nix
inputs.foo.url = "path:./foo";
```
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.


@ -130,6 +130,7 @@
- [Contributing](development/contributing.md)
- [Releases](release-notes/index.md)
{{#include ./SUMMARY-rl-next.md}}
  - [Release 2.26 (2025-01-22)](release-notes/rl-2.26.md)
  - [Release 2.25 (2024-11-07)](release-notes/rl-2.25.md)
  - [Release 2.24 (2024-07-31)](release-notes/rl-2.24.md)
  - [Release 2.23 (2024-06-03)](release-notes/rl-2.23.md)


@ -79,7 +79,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im
To get a shell with one of the other [supported compilation environments](#compilation-environments):

```console
$ nix develop .#native-clangStdenv
```

> **Note**
@ -261,7 +261,8 @@ See [supported compilation environments](#compilation-environments) and instruct
To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code.
Meson's configure always produces this inside the build directory.

Configure your editor to use the `clangd` from the `.#native-clangStdenv` shell.
You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
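
As a rough sketch of that workflow (the build directory name `build` and the explicit `meson setup` call are assumptions; adjust them to however you configure your build):

```console
$ nix develop .#native-clangStdenv
$ meson setup build                       # Meson writes build/compile_commands.json
$ clangd --compile-commands-dir=build     # or point your editor's clangd at that directory
```
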
> **Note**
>
@ -277,6 +278,8 @@ You may run the formatters as a one-off using:
./maintainers/format.sh
```

### Pre-commit hooks

If you'd like to run the formatters before every commit, install the hooks:

```
@ -291,3 +294,30 @@ If it fails, run `git add --patch` to approve the suggestions _and commit again_
To refresh the pre-commit hook's config file, do the following:

1. Exit the development shell and start it again by running `nix develop`.
2. If you also use the pre-commit hook, also run `pre-commit-hooks-install` again.
### VSCode
Insert the following json into your `.vscode/settings.json` file to configure `nixfmt`.
This will be picked up by the _Format Document_ command, `"editor.formatOnSave"`, etc.
```json
{
"nix.formatterPath": "nixfmt",
"nix.serverSettings": {
"nixd": {
"formatting": {
"command": [
"nixfmt"
],
},
},
"nil": {
"formatting": {
"command": [
"nixfmt"
],
},
},
},
}
```


@ -0,0 +1,128 @@
# Release 2.26.0 (2025-01-22)
- Support for relative path inputs [#10089](https://github.com/NixOS/nix/pull/10089)
Flakes can now refer to other flakes in the same repository using relative paths, e.g.
```nix
inputs.foo.url = "path:./foo";
```
uses the flake in the `foo` subdirectory of the referring flake. For more information, see the documentation on [the `path` flake input type](@docroot@/command-ref/new-cli/nix3-flake.md#path-fetcher).
This feature required a change to the lock file format. Previous Nix versions will not be able to use lock files that have locks for relative path inputs in them.
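
For illustration only, a minimal layout for the example above might look like this (`foo` is the subflake from the snippet; `nix flake lock` is just the ordinary locking command):

```console
$ ls
flake.lock  flake.nix  foo
$ ls foo
flake.nix
$ nix flake lock   # records foo as a relative path input in flake.lock
```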
- Flake lock file generation now ignores local registries [#12019](https://github.com/NixOS/nix/pull/12019)
When resolving indirect flake references like `nixpkgs` in `flake.nix` files, Nix will no longer use the system and user flake registries. It will only use the global flake registry and overrides given on the command line via `--override-flake`.
This avoids accidents where users have local registry overrides that map `nixpkgs` to a `path:` flake in the local file system, which then end up in committed lock files pushed to other users.
In the future, we may remove the use of the registry during lock file generation altogether. It's better to explicitly specify the URL of a flake input. For example, instead of
```nix
{
outputs = { self, nixpkgs }: { ... };
}
```
write
```nix
{
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
outputs = { self, nixpkgs }: { ... };
}
```
- `nix copy` supports `--profile` and `--out-link` [#11657](https://github.com/NixOS/nix/pull/11657)
The `nix copy` command now has flags `--profile` and `--out-link`, similar to `nix build`. `--profile` makes a profile point to the
top-level store path, while `--out-link` creates symlinks to the top-level store paths.
For example, when updating the local NixOS system profile from a NixOS system closure on a remote machine, instead of
```
# nix copy --from ssh://server $path
# nix build --profile /nix/var/nix/profiles/system $path
```
you can now do
```
# nix copy --from ssh://server --profile /nix/var/nix/profiles/system $path
```
The advantage is that this avoids a time window where *path* is not a garbage collector root, and so could be deleted by a concurrent `nix store gc` process.
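
A hedged sketch of the `--out-link` counterpart (assuming it takes a symlink path, as `nix build --out-link` does):

```
# nix copy --from ssh://server --out-link ./result $path
# ls -l ./result   # symlink to the copied top-level store path
```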
- `nix-instantiate --eval` now supports `--raw` [#12119](https://github.com/NixOS/nix/pull/12119)
The `nix-instantiate --eval` command now supports a `--raw` flag. When it is used,
the evaluation result must be a string, which is printed verbatim, without
quotation marks or escaping.
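
A brief illustration (using `--expr` to supply the expression; the unquoted output is the point of `--raw`):

```console
$ nix-instantiate --eval --expr '"a\nb"'
"a\nb"
$ nix-instantiate --eval --raw --expr '"a\nb"'
a
b
```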
- Improved `NIX_SSHOPTS` parsing for better SSH option handling [#5181](https://github.com/NixOS/nix/issues/5181) [#12020](https://github.com/NixOS/nix/pull/12020)
The parsing of the `NIX_SSHOPTS` environment variable has been improved to handle spaces and quotes correctly.
Previously, incorrectly split SSH options could cause failures in commands like `nix-copy-closure`,
especially when using complex SSH invocations such as `-o ProxyCommand="ssh -W %h:%p ..."`.
This change introduces a `shellSplitString` function to ensure
that `NIX_SSHOPTS` is parsed in a manner consistent with shell
behavior, addressing common parsing errors.
For example, the following now works as expected:
```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p ..."'
```
This update improves the reliability of SSH-related operations using `NIX_SSHOPTS` across Nix CLIs.
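
For instance, a sketch of passing such a value to `nix-copy-closure` (`jumphost`, `builder`, and `$path` are placeholders):

```bash
export NIX_SSHOPTS='-o ProxyCommand="ssh -W %h:%p jumphost"'
nix-copy-closure --to builder $path
```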
- Nix is now built using Meson
As proposed in [RFC 132](https://github.com/NixOS/rfcs/pull/132), Nix's build system now uses Meson/Ninja. The old Make-based build system has been removed.
- Evaluation caching now works for dirty Git workdirs [#11992](https://github.com/NixOS/nix/pull/11992)
# Contributors
This release was made possible by the following 45 contributors:
- Anatoli Babenia [**(@abitrolly)**](https://github.com/abitrolly)
- Domagoj Mišković [**(@allrealmsoflife)**](https://github.com/allrealmsoflife)
- Yaroslav Bolyukin [**(@CertainLach)**](https://github.com/CertainLach)
- bryango [**(@bryango)**](https://github.com/bryango)
- tomberek [**(@tomberek)**](https://github.com/tomberek)
- Matej Urbas [**(@mupdt)**](https://github.com/mupdt)
- elikoga [**(@elikoga)**](https://github.com/elikoga)
- wh0 [**(@wh0)**](https://github.com/wh0)
- Félix [**(@picnoir)**](https://github.com/picnoir)
- Valentin Gagarin [**(@fricklerhandwerk)**](https://github.com/fricklerhandwerk)
- Gavin John [**(@Pandapip1)**](https://github.com/Pandapip1)
- Travis A. Everett [**(@abathur)**](https://github.com/abathur)
- Vladimir Panteleev [**(@CyberShadow)**](https://github.com/CyberShadow)
- Ilja [**(@suruaku)**](https://github.com/suruaku)
- Jason Yundt [**(@Jayman2000)**](https://github.com/Jayman2000)
- Mike Kusold [**(@kusold)**](https://github.com/kusold)
- Andy Hamon [**(@andrewhamon)**](https://github.com/andrewhamon)
- Brian McKenna [**(@puffnfresh)**](https://github.com/puffnfresh)
- Greg Curtis [**(@gcurtis)**](https://github.com/gcurtis)
- Andrew Poelstra [**(@apoelstra)**](https://github.com/apoelstra)
- Linus Heckemann [**(@lheckemann)**](https://github.com/lheckemann)
- Tristan Ross [**(@RossComputerGuy)**](https://github.com/RossComputerGuy)
- Dominique Martinet [**(@martinetd)**](https://github.com/martinetd)
- h0nIg [**(@h0nIg)**](https://github.com/h0nIg)
- Eelco Dolstra [**(@edolstra)**](https://github.com/edolstra)
- Shahar "Dawn" Or [**(@mightyiam)**](https://github.com/mightyiam)
- NAHO [**(@trueNAHO)**](https://github.com/trueNAHO)
- Ryan Hendrickson [**(@rhendric)**](https://github.com/rhendric)
- the-sun-will-rise-tomorrow [**(@the-sun-will-rise-tomorrow)**](https://github.com/the-sun-will-rise-tomorrow)
- Connor Baker [**(@ConnorBaker)**](https://github.com/ConnorBaker)
- Cole Helbling [**(@cole-h)**](https://github.com/cole-h)
- Jack Wilsdon [**(@jackwilsdon)**](https://github.com/jackwilsdon)
- rekcäH nitraM [**(@dwt)**](https://github.com/dwt)
- Martin Fischer [**(@not-my-profile)**](https://github.com/not-my-profile)
- John Ericson [**(@Ericson2314)**](https://github.com/Ericson2314)
- Graham Christensen [**(@grahamc)**](https://github.com/grahamc)
- Sergei Zimmerman [**(@xokdvium)**](https://github.com/xokdvium)
- Siddarth Kumar [**(@siddarthkay)**](https://github.com/siddarthkay)
- Sergei Trofimovich [**(@trofi)**](https://github.com/trofi)
- Robert Hensing [**(@roberth)**](https://github.com/roberth)
- Mutsuha Asada [**(@momeemt)**](https://github.com/momeemt)
- Parker Jones [**(@knotapun)**](https://github.com/knotapun)
- Jörg Thalheim [**(@Mic92)**](https://github.com/Mic92)
- dbdr [**(@dbdr)**](https://github.com/dbdr)
- myclevorname [**(@myclevorname)**](https://github.com/myclevorname)
- Philipp Otterbein


@ -11,10 +11,15 @@ rec {
  concatStrings = concatStringsSep "";

  attrsToList =
    a:
    map (name: {
      inherit name;
      value = a.${name};
    }) (builtins.attrNames a);

  replaceStringsRec =
    from: to: string:
    # recursively replace occurrences of `from` with `to` within `string`
    # example:
    #     replaceStringRec "--" "-" "hello-----world"
@ -22,16 +27,18 @@ rec {
    let
      replaced = replaceStrings [ from ] [ to ] string;
    in
    if replaced == string then string else replaceStringsRec from to replaced;

  toLower = replaceStrings upperChars lowerChars;

  squash = replaceStringsRec "\n\n\n" "\n\n";

  trim =
    string:
    # trim trailing spaces and squash non-leading spaces
    let
      trimLine =
        line:
        let
          # separate leading spaces from the rest
          parts = split "(^ *)" line;
@ -39,19 +46,30 @@ rec {
          rest = elemAt parts 2;
          # drop trailing spaces
          body = head (split " *$" rest);
        in
        spaces + replaceStringsRec "  " " " body;
    in
    concatStringsSep "\n" (map trimLine (splitLines string));

  # FIXME: O(n^2)
  unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [ ];

  nameValuePair = name: value: { inherit name value; };

  filterAttrs =
    pred: set:
    listToAttrs (
      concatMap (
        name:
        let
          v = set.${name};
        in
        if pred name v then [ (nameValuePair name v) ] else [ ]
      ) (attrNames set)
    );

  optionalString = cond: string: if cond then string else "";

  indent =
    prefix: s: concatStringsSep "\n" (map (x: if x == "" then x else "${prefix}${x}") (splitLines s));
}


@ -1,112 +1,113 @@
{ pkgs ? import <nixpkgs> { } {
, lib ? pkgs.lib pkgs ? import <nixpkgs> { },
, name ? "nix" lib ? pkgs.lib,
, tag ? "latest" name ? "nix",
, bundleNixpkgs ? true tag ? "latest",
, channelName ? "nixpkgs" bundleNixpkgs ? true,
, channelURL ? "https://nixos.org/channels/nixpkgs-unstable" channelName ? "nixpkgs",
, extraPkgs ? [] channelURL ? "https://nixos.org/channels/nixpkgs-unstable",
, maxLayers ? 100 extraPkgs ? [ ],
, nixConf ? {} maxLayers ? 100,
, flake-registry ? null nixConf ? { },
, uid ? 0 flake-registry ? null,
, gid ? 0 uid ? 0,
, uname ? "root" gid ? 0,
, gname ? "root" uname ? "root",
gname ? "root",
}: }:
let let
defaultPkgs = with pkgs; [ defaultPkgs =
nix with pkgs;
bashInteractive [
coreutils-full nix
gnutar bashInteractive
gzip coreutils-full
gnugrep gnutar
which gzip
curl gnugrep
less which
wget curl
man less
cacert.out wget
findutils man
iana-etc cacert.out
git findutils
openssh iana-etc
] ++ extraPkgs; git
openssh
]
++ extraPkgs;
users = { users =
{
root = { root = {
uid = 0; uid = 0;
shell = "${pkgs.bashInteractive}/bin/bash"; shell = "${pkgs.bashInteractive}/bin/bash";
home = "/root"; home = "/root";
gid = 0; gid = 0;
groups = [ "root" ]; groups = [ "root" ];
description = "System administrator"; description = "System administrator";
};
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
};
}
// lib.optionalAttrs (uid != 0) {
"${uname}" = {
uid = uid;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/home/${uname}";
gid = gid;
groups = [ "${gname}" ];
description = "Nix user";
};
}
// lib.listToAttrs (
map (n: {
name = "nixbld${toString n}";
value = {
uid = 30000 + n;
gid = 30000;
groups = [ "nixbld" ];
description = "Nix build user ${toString n}";
};
}) (lib.lists.range 1 32)
);
groups =
{
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
}
// lib.optionalAttrs (gid != 0) {
"${gname}".gid = gid;
}; };
nobody = {
uid = 65534;
shell = "${pkgs.shadow}/bin/nologin";
home = "/var/empty";
gid = 65534;
groups = [ "nobody" ];
description = "Unprivileged account (don't use!)";
};
} // lib.optionalAttrs (uid != 0) {
"${uname}" = {
uid = uid;
shell = "${pkgs.bashInteractive}/bin/bash";
home = "/home/${uname}";
gid = gid;
groups = [ "${gname}" ];
description = "Nix user";
};
} // lib.listToAttrs (
map
(
n: {
name = "nixbld${toString n}";
value = {
uid = 30000 + n;
gid = 30000;
groups = [ "nixbld" ];
description = "Nix build user ${toString n}";
};
}
)
(lib.lists.range 1 32)
);
groups = {
root.gid = 0;
nixbld.gid = 30000;
nobody.gid = 65534;
} // lib.optionalAttrs (gid != 0) {
"${gname}".gid = gid;
};
userToPasswd = ( userToPasswd = (
k: k:
{ uid {
, gid ? 65534 uid,
, home ? "/var/empty" gid ? 65534,
, description ? "" home ? "/var/empty",
, shell ? "/bin/false" description ? "",
, groups ? [ ] shell ? "/bin/false",
}: "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}" groups ? [ ],
); }:
passwdContents = ( "${k}:x:${toString uid}:${toString gid}:${description}:${home}:${shell}"
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs userToPasswd users))
); );
passwdContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToPasswd users)));
userToShadow = k: { ... }: "${k}:!:1::::::"; userToShadow = k: { ... }: "${k}:!:1::::::";
shadowContents = ( shadowContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs userToShadow users)));
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs userToShadow users))
);
# Map groups to members # Map groups to members
# { # {
@ -116,42 +117,35 @@ let
let let
# Create a flat list of user/group mappings # Create a flat list of user/group mappings
mappings = ( mappings = (
builtins.foldl' builtins.foldl' (
( acc: user:
acc: user: let
let groups = users.${user}.groups or [ ];
groups = users.${user}.groups or [ ]; in
in acc
acc ++ map ++ map (group: {
(group: { inherit user group;
inherit user group; }) groups
}) ) [ ] (lib.attrNames users)
groups
)
[ ]
(lib.attrNames users)
); );
in in
( (builtins.foldl' (
builtins.foldl' acc: v:
( acc
acc: v: acc // { // {
${v.group} = acc.${v.group} or [ ] ++ [ v.user ]; ${v.group} = acc.${v.group} or [ ] ++ [ v.user ];
} }
) ) { } mappings)
{ }
mappings)
); );
groupToGroup = k: { gid }: groupToGroup =
k:
{ gid }:
let let
members = groupMemberMap.${k} or [ ]; members = groupMemberMap.${k} or [ ];
in in
"${k}:x:${toString gid}:${lib.concatStringsSep "," members}"; "${k}:x:${toString gid}:${lib.concatStringsSep "," members}";
groupContents = ( groupContents = (lib.concatStringsSep "\n" (lib.attrValues (lib.mapAttrs groupToGroup groups)));
lib.concatStringsSep "\n"
(lib.attrValues (lib.mapAttrs groupToGroup groups))
);
defaultNixConf = { defaultNixConf = {
sandbox = "false"; sandbox = "false";
@ -159,11 +153,17 @@ let
trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ]; trusted-public-keys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
}; };
nixConfContents = (lib.concatStringsSep "\n" (lib.mapAttrsFlatten (n: v: nixConfContents =
let (lib.concatStringsSep "\n" (
vStr = if builtins.isList v then lib.concatStringsSep " " v else v; lib.mapAttrsFlatten (
in n: v:
"${n} = ${vStr}") (defaultNixConf // nixConf))) + "\n"; let
vStr = if builtins.isList v then lib.concatStringsSep " " v else v;
in
"${n} = ${vStr}"
) (defaultNixConf // nixConf)
))
+ "\n";
userHome = if uid == 0 then "/root" else "/home/${uname}"; userHome = if uid == 0 then "/root" else "/home/${uname}";
@ -184,21 +184,29 @@ let
manifest = pkgs.buildPackages.runCommand "manifest.nix" { } '' manifest = pkgs.buildPackages.runCommand "manifest.nix" { } ''
cat > $out <<EOF cat > $out <<EOF
[ [
${lib.concatStringsSep "\n" (builtins.map (drv: let ${lib.concatStringsSep "\n" (
outputs = drv.outputsToInstall or [ "out" ]; builtins.map (
in '' drv:
{ let
${lib.concatStringsSep "\n" (builtins.map (output: '' outputs = drv.outputsToInstall or [ "out" ];
${output} = { outPath = "${lib.getOutput output drv}"; }; in
'') outputs)} ''
outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ]; {
name = "${drv.name}"; ${lib.concatStringsSep "\n" (
outPath = "${drv}"; builtins.map (output: ''
system = "${drv.system}"; ${output} = { outPath = "${lib.getOutput output drv}"; };
type = "derivation"; '') outputs
meta = { }; )}
} outputs = [ ${lib.concatStringsSep " " (builtins.map (x: "\"${x}\"") outputs)} ];
'') defaultPkgs)} name = "${drv.name}";
outPath = "${drv}";
system = "${drv.system}";
type = "derivation";
meta = { };
}
''
) defaultPkgs
)}
] ]
EOF EOF
''; '';
@ -207,16 +215,22 @@ let
cp -a ${rootEnv}/* $out/ cp -a ${rootEnv}/* $out/
ln -s ${manifest} $out/manifest.nix ln -s ${manifest} $out/manifest.nix
''; '';
flake-registry-path = if (flake-registry == null) then flake-registry-path =
null if (flake-registry == null) then
else if (builtins.readFileType (toString flake-registry)) == "directory" then null
"${flake-registry}/flake-registry.json" else if (builtins.readFileType (toString flake-registry)) == "directory" then
else "${flake-registry}/flake-registry.json"
flake-registry; else
flake-registry;
in in
pkgs.runCommand "base-system" pkgs.runCommand "base-system"
{ {
inherit passwdContents groupContents shadowContents nixConfContents; inherit
passwdContents
groupContents
shadowContents
nixConfContents
;
passAsFile = [ passAsFile = [
"passwdContents" "passwdContents"
"groupContents" "groupContents"
@ -225,67 +239,79 @@ let
]; ];
allowSubstitutes = false; allowSubstitutes = false;
preferLocalBuild = true; preferLocalBuild = true;
} ('' }
env (
set -x ''
mkdir -p $out/etc env
set -x
mkdir -p $out/etc
mkdir -p $out/etc/ssl/certs mkdir -p $out/etc/ssl/certs
ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs ln -s /nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt $out/etc/ssl/certs
cat $passwdContentsPath > $out/etc/passwd cat $passwdContentsPath > $out/etc/passwd
echo "" >> $out/etc/passwd echo "" >> $out/etc/passwd
cat $groupContentsPath > $out/etc/group cat $groupContentsPath > $out/etc/group
echo "" >> $out/etc/group echo "" >> $out/etc/group
cat $shadowContentsPath > $out/etc/shadow cat $shadowContentsPath > $out/etc/shadow
echo "" >> $out/etc/shadow echo "" >> $out/etc/shadow
mkdir -p $out/usr mkdir -p $out/usr
ln -s /nix/var/nix/profiles/share $out/usr/ ln -s /nix/var/nix/profiles/share $out/usr/
mkdir -p $out/nix/var/nix/gcroots mkdir -p $out/nix/var/nix/gcroots
mkdir $out/tmp mkdir $out/tmp
mkdir -p $out/var/tmp mkdir -p $out/var/tmp
mkdir -p $out/etc/nix mkdir -p $out/etc/nix
cat $nixConfContentsPath > $out/etc/nix/nix.conf cat $nixConfContentsPath > $out/etc/nix/nix.conf
mkdir -p $out${userHome} mkdir -p $out${userHome}
mkdir -p $out/nix/var/nix/profiles/per-user/${uname} mkdir -p $out/nix/var/nix/profiles/per-user/${uname}
ln -s ${profile} $out/nix/var/nix/profiles/default-1-link ln -s ${profile} $out/nix/var/nix/profiles/default-1-link
ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default ln -s /nix/var/nix/profiles/default-1-link $out/nix/var/nix/profiles/default
ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile ln -s /nix/var/nix/profiles/default $out${userHome}/.nix-profile
ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link ln -s ${channel} $out/nix/var/nix/profiles/per-user/${uname}/channels-1-link
ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels ln -s /nix/var/nix/profiles/per-user/${uname}/channels-1-link $out/nix/var/nix/profiles/per-user/${uname}/channels
mkdir -p $out${userHome}/.nix-defexpr mkdir -p $out${userHome}/.nix-defexpr
ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels ln -s /nix/var/nix/profiles/per-user/${uname}/channels $out${userHome}/.nix-defexpr/channels
echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels echo "${channelURL} ${channelName}" > $out${userHome}/.nix-channels
mkdir -p $out/bin $out/usr/bin mkdir -p $out/bin $out/usr/bin
ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env ln -s ${pkgs.coreutils}/bin/env $out/usr/bin/env
ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh ln -s ${pkgs.bashInteractive}/bin/bash $out/bin/sh
'' + (lib.optionalString (flake-registry-path != null) '' ''
nixCacheDir="${userHome}/.cache/nix" + (lib.optionalString (flake-registry-path != null) ''
mkdir -p $out$nixCacheDir nixCacheDir="${userHome}/.cache/nix"
globalFlakeRegistryPath="$nixCacheDir/flake-registry.json" mkdir -p $out$nixCacheDir
ln -s ${flake-registry-path} $out$globalFlakeRegistryPath globalFlakeRegistryPath="$nixCacheDir/flake-registry.json"
mkdir -p $out/nix/var/nix/gcroots/auto ln -s ${flake-registry-path} $out$globalFlakeRegistryPath
rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath)) mkdir -p $out/nix/var/nix/gcroots/auto
ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName rootName=$(${pkgs.nix}/bin/nix --extra-experimental-features nix-command hash file --type sha1 --base32 <(echo -n $globalFlakeRegistryPath))
'')); ln -s $globalFlakeRegistryPath $out/nix/var/nix/gcroots/auto/$rootName
'')
);
in in
pkgs.dockerTools.buildLayeredImageWithNixDb { pkgs.dockerTools.buildLayeredImageWithNixDb {
inherit name tag maxLayers uid gid uname gname; inherit
name
tag
maxLayers
uid
gid
uname
gname
;
contents = [ baseSystem ]; contents = [ baseSystem ];
@ -305,15 +331,19 @@ pkgs.dockerTools.buildLayeredImageWithNixDb {
User = "${toString uid}:${toString gid}"; User = "${toString uid}:${toString gid}";
Env = [ Env = [
"USER=${uname}" "USER=${uname}"
"PATH=${lib.concatStringsSep ":" [ "PATH=${
"${userHome}/.nix-profile/bin" lib.concatStringsSep ":" [
"/nix/var/nix/profiles/default/bin" "${userHome}/.nix-profile/bin"
"/nix/var/nix/profiles/default/sbin" "/nix/var/nix/profiles/default/bin"
]}" "/nix/var/nix/profiles/default/sbin"
"MANPATH=${lib.concatStringsSep ":" [ ]
"${userHome}/.nix-profile/share/man" }"
"/nix/var/nix/profiles/default/share/man" "MANPATH=${
]}" lib.concatStringsSep ":" [
"${userHome}/.nix-profile/share/man"
"/nix/var/nix/profiles/default/share/man"
]
}"
"SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" "SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" "GIT_SSL_CAINFO=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt" "NIX_SSL_CERT_FILE=/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt"

flake.lock (generated)

@ -36,6 +36,24 @@
"type": "github" "type": "github"
} }
}, },
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"git-hooks-nix": { "git-hooks-nix": {
"inputs": { "inputs": {
"flake-compat": [], "flake-compat": [],
@ -61,6 +79,24 @@
"type": "github" "type": "github"
} }
}, },
"nixfmt": {
"inputs": {
"flake-utils": "flake-utils"
},
"locked": {
"lastModified": 1736283758,
"narHash": "sha256-hrKhUp2V2fk/dvzTTHFqvtOg000G1e+jyIam+D4XqhA=",
"owner": "NixOS",
"repo": "nixfmt",
"rev": "8d4bd690c247004d90d8554f0b746b1231fe2436",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixfmt",
"type": "github"
}
},
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1734359947, "lastModified": 1734359947,
@ -114,10 +150,26 @@
"flake-compat": "flake-compat", "flake-compat": "flake-compat",
"flake-parts": "flake-parts", "flake-parts": "flake-parts",
"git-hooks-nix": "git-hooks-nix", "git-hooks-nix": "git-hooks-nix",
"nixfmt": "nixfmt",
"nixpkgs": "nixpkgs", "nixpkgs": "nixpkgs",
"nixpkgs-23-11": "nixpkgs-23-11", "nixpkgs-23-11": "nixpkgs-23-11",
"nixpkgs-regression": "nixpkgs-regression" "nixpkgs-regression": "nixpkgs-regression"
} }
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
} }
}, },
"root": "root", "root": "root",

flake.nix

@ -5,7 +5,10 @@
  inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
  inputs.nixpkgs-23-11.url = "github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446";
  inputs.flake-compat = {
    url = "github:edolstra/flake-compat";
    flake = false;
  };

  # dev tooling
  inputs.flake-parts.url = "github:hercules-ci/flake-parts";
@ -17,9 +20,15 @@
  # work around 7730 and https://github.com/NixOS/nix/issues/7807
  inputs.git-hooks-nix.inputs.flake-compat.follows = "";
  inputs.git-hooks-nix.inputs.gitignore.follows = "";

  inputs.nixfmt.url = "github:NixOS/nixfmt";

  outputs =
    inputs@{
      self,
      nixpkgs,
      nixpkgs-regression,
      ...
    }:
    let
      inherit (nixpkgs) lib;
@ -27,9 +36,15 @@
      officialRelease = false;

      linux32BitSystems = [ "i686-linux" ];
      linux64BitSystems = [
        "x86_64-linux"
        "aarch64-linux"
      ];
      linuxSystems = linux32BitSystems ++ linux64BitSystems;
      darwinSystems = [
        "x86_64-darwin"
        "aarch64-darwin"
      ];
      systems = linuxSystems ++ darwinSystems;

      crossSystems = [
@ -59,63 +74,66 @@
(Provided that the names are unique.) (Provided that the names are unique.)
See https://nixos.org/manual/nixpkgs/stable/index.html#function-library-lib.attrsets.concatMapAttrs See https://nixos.org/manual/nixpkgs/stable/index.html#function-library-lib.attrsets.concatMapAttrs
*/ */
flatMapAttrs = attrs: f: lib.concatMapAttrs f attrs; flatMapAttrs = attrs: f: lib.concatMapAttrs f attrs;
forAllSystems = lib.genAttrs systems; forAllSystems = lib.genAttrs systems;
forAllCrossSystems = lib.genAttrs crossSystems; forAllCrossSystems = lib.genAttrs crossSystems;
forAllStdenvs = f: forAllStdenvs = lib.genAttrs stdenvs;
lib.listToAttrs
(map
(stdenvName: {
name = "${stdenvName}Packages";
value = f stdenvName;
})
stdenvs);
# We don't apply flake-parts to the whole flake so that non-development attributes # We don't apply flake-parts to the whole flake so that non-development attributes
# load without fetching any development inputs. # load without fetching any development inputs.
devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } { devFlake = inputs.flake-parts.lib.mkFlake { inherit inputs; } {
imports = [ ./maintainers/flake-module.nix ]; imports = [ ./maintainers/flake-module.nix ];
systems = lib.subtractLists crossSystems systems; systems = lib.subtractLists crossSystems systems;
perSystem = { system, ... }: { perSystem =
_module.args.pkgs = nixpkgsFor.${system}.native; { system, ... }:
}; {
_module.args.pkgs = nixpkgsFor.${system}.native;
};
}; };
# Memoize nixpkgs for different platforms for efficiency. # Memoize nixpkgs for different platforms for efficiency.
nixpkgsFor = forAllSystems nixpkgsFor = forAllSystems (
(system: let system:
make-pkgs = crossSystem: stdenv: import nixpkgs { let
localSystem = { make-pkgs =
inherit system; crossSystem:
}; forAllStdenvs (
crossSystem = if crossSystem == null then null else { stdenv:
config = crossSystem; import nixpkgs {
} // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") { localSystem = {
useLLVM = true; inherit system;
}; };
overlays = [ crossSystem =
(overlayFor (p: p.${stdenv})) if crossSystem == null then
]; null
}; else
stdenvs = forAllStdenvs (make-pkgs null); {
native = stdenvs.stdenvPackages; config = crossSystem;
in { }
inherit stdenvs native; // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
static = native.pkgsStatic; useLLVM = true;
llvm = native.pkgsLLVM; };
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv"); overlays = [
}); (overlayFor (pkgs: pkgs.${stdenv}))
];
}
);
in
rec {
nativeForStdenv = make-pkgs null;
crossForStdenv = forAllCrossSystems make-pkgs;
# Alias for convenience
native = nativeForStdenv.stdenv;
cross = forAllCrossSystems (crossSystem: crossForStdenv.${crossSystem}.stdenv);
}
);
binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix { overlayFor =
inherit nix; getStdenv: final: prev:
};
overlayFor = getStdenv: final: prev:
let let
stdenv = getStdenv final; stdenv = getStdenv final;
in in
@ -162,12 +180,19 @@
# See https://github.com/NixOS/nixpkgs/pull/214409 # See https://github.com/NixOS/nixpkgs/pull/214409
# Remove when fixed in this flake's nixpkgs # Remove when fixed in this flake's nixpkgs
pre-commit = pre-commit =
if prev.stdenv.hostPlatform.system == "i686-linux" if prev.stdenv.hostPlatform.system == "i686-linux" then
then (prev.pre-commit.override (o: { dotnet-sdk = ""; })).overridePythonAttrs (o: { doCheck = false; }) (prev.pre-commit.override (o: {
else prev.pre-commit; dotnet-sdk = "";
})).overridePythonAttrs
(o: {
doCheck = false;
})
else
prev.pre-commit;
}; };
in { in
{
# A Nixpkgs overlay that overrides the 'nix' and # A Nixpkgs overlay that overrides the 'nix' and
# 'nix-perl-bindings' packages. # 'nix-perl-bindings' packages.
overlays.default = overlayFor (p: p.stdenv); overlays.default = overlayFor (p: p.stdenv);
@ -175,7 +200,6 @@
hydraJobs = import ./packaging/hydra.nix { hydraJobs = import ./packaging/hydra.nix {
inherit inherit
inputs inputs
binaryTarball
forAllCrossSystems forAllCrossSystems
forAllSystems forAllSystems
lib lib
@ -186,53 +210,69 @@
; ;
}; };
checks = forAllSystems (system: { checks = forAllSystems (
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; system:
installTests = self.hydraJobs.installTests.${system}; {
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system}; installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
rl-next = installTests = self.hydraJobs.installTests.${system};
let pkgs = nixpkgsFor.${system}.native; nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } '' rl-next =
LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out let
''; pkgs = nixpkgsFor.${system}.native;
repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { }; in
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) { pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
dockerImage = self.hydraJobs.dockerImage.${system}; LANG=C.UTF-8 ${pkgs.changelog-d}/bin/changelog-d ${./doc/manual/rl-next} >$out
} // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) { '';
# Some perl dependencies are broken on i686-linux. repl-completion = nixpkgsFor.${system}.native.callPackage ./tests/repl-completion.nix { };
# Since the support is only best-effort there, disable the perl }
# bindings // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
perlBindings = self.hydraJobs.perlBindings.${system}; dockerImage = self.hydraJobs.dockerImage.${system};
} }
# Add "passthru" tests // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
// flatMapAttrs ({ # Some perl dependencies are broken on i686-linux.
"" = nixpkgsFor.${system}.native; # Since the support is only best-effort there, disable the perl
} // lib.optionalAttrs (! nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) { # bindings
# TODO: enable static builds for darwin, blocked on: perlBindings = self.hydraJobs.perlBindings.${system};
# https://github.com/NixOS/nixpkgs/issues/320448 }
# TODO: disabled to speed up GHA CI. # Add "passthru" tests
#"static-" = nixpkgsFor.${system}.static; //
}) flatMapAttrs
(nixpkgsPrefix: nixpkgs: (
flatMapAttrs nixpkgs.nixComponents {
(pkgName: pkg: "" = nixpkgsFor.${system}.native;
flatMapAttrs pkg.tests or {} }
(testName: test: { // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.hostPlatform.isDarwin) {
"${nixpkgsPrefix}${pkgName}-${testName}" = test; # TODO: enable static builds for darwin, blocked on:
}) # https://github.com/NixOS/nixpkgs/issues/320448
# TODO: disabled to speed up GHA CI.
#"static-" = nixpkgsFor.${system}.native.pkgsStatic;
}
) )
// lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) { (
"${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests; nixpkgsPrefix: nixpkgs:
} flatMapAttrs nixpkgs.nixComponents (
) pkgName: pkg:
// devFlake.checks.${system} or {} flatMapAttrs pkg.tests or { } (
testName: test: {
"${nixpkgsPrefix}${pkgName}-${testName}" = test;
}
)
)
// lib.optionalAttrs (nixpkgs.stdenv.hostPlatform == nixpkgs.stdenv.buildPlatform) {
"${nixpkgsPrefix}nix-functional-tests" = nixpkgs.nixComponents.nix-functional-tests;
}
)
// devFlake.checks.${system} or { }
); );
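
The `checks` block above leans on a `flatMapAttrs` helper (defined earlier in flake.nix) to flatten the per-component test sets into flat check names of the form `<prefix><component>-<test>`. A rough stand-in for that flattening, written here purely for illustration and not taken from the repository, assuming only nixpkgs' `lib`:

```nix
let
  lib = (import <nixpkgs> { }).lib; # assumption: <nixpkgs> is on NIX_PATH

  # Illustrative stand-in: apply f to every attribute and merge the results.
  flatMapAttrsExample = attrs: f: lib.foldl' lib.mergeAttrs { } (lib.mapAttrsToList f attrs);

  # Hypothetical components, each carrying a "run" test like pkg.tests above.
  components = {
    nix-util = { tests.run = "<drv>"; };
    nix-store = { tests.run = "<drv>"; };
  };
in
flatMapAttrsExample components (
  pkgName: pkg:
  flatMapAttrsExample (pkg.tests or { }) (testName: test: { "${pkgName}-${testName}" = test; })
)
# => { nix-util-run = "<drv>"; nix-store-run = "<drv>"; }
```
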
packages = forAllSystems (system: packages = forAllSystems (
{ # Here we put attributes that map 1:1 into packages.<system>, ie system:
{
# Here we put attributes that map 1:1 into packages.<system>, ie
# for which we don't apply the full build matrix such as cross or static. # for which we don't apply the full build matrix such as cross or static.
inherit (nixpkgsFor.${system}.native) inherit (nixpkgsFor.${system}.native)
changelog-d; changelog-d
;
default = self.packages.${system}.nix; default = self.packages.${system}.nix;
installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system}; installerScriptForGHA = self.hydraJobs.installerScriptForGHA.${system};
binaryTarball = self.hydraJobs.binaryTarball.${system}; binaryTarball = self.hydraJobs.binaryTarball.${system};
@ -243,96 +283,144 @@
nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs; nix-external-api-docs = nixpkgsFor.${system}.native.nixComponents.nix-external-api-docs;
} }
# We need to flatten recursive attribute sets of derivations to pass `flake check`. # We need to flatten recursive attribute sets of derivations to pass `flake check`.
// flatMapAttrs //
{ # Components we'll iterate over in the upcoming lambda flatMapAttrs
"nix-util" = { }; {
"nix-util-c" = { }; # Components we'll iterate over in the upcoming lambda
"nix-util-test-support" = { }; "nix-util" = { };
"nix-util-tests" = { }; "nix-util-c" = { };
"nix-util-test-support" = { };
"nix-util-tests" = { };
"nix-store" = { }; "nix-store" = { };
"nix-store-c" = { }; "nix-store-c" = { };
"nix-store-test-support" = { }; "nix-store-test-support" = { };
"nix-store-tests" = { }; "nix-store-tests" = { };
"nix-fetchers" = { }; "nix-fetchers" = { };
"nix-fetchers-tests" = { }; "nix-fetchers-tests" = { };
"nix-expr" = { }; "nix-expr" = { };
"nix-expr-c" = { }; "nix-expr-c" = { };
"nix-expr-test-support" = { }; "nix-expr-test-support" = { };
"nix-expr-tests" = { }; "nix-expr-tests" = { };
"nix-flake" = { }; "nix-flake" = { };
"nix-flake-tests" = { }; "nix-flake-tests" = { };
"nix-main" = { }; "nix-main" = { };
"nix-main-c" = { }; "nix-main-c" = { };
"nix-cmd" = { }; "nix-cmd" = { };
"nix-cli" = { }; "nix-cli" = { };
"nix-everything" = { }; "nix-everything" = { };
"nix-functional-tests" = { supportsCross = false; }; "nix-functional-tests" = {
supportsCross = false;
};
"nix-perl-bindings" = { supportsCross = false; }; "nix-perl-bindings" = {
} supportsCross = false;
(pkgName: { supportsCross ? true }: { };
# These attributes go right into `packages.<system>`.
"${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
"${pkgName}-static" = nixpkgsFor.${system}.static.nixComponents.${pkgName};
"${pkgName}-llvm" = nixpkgsFor.${system}.llvm.nixComponents.${pkgName};
} }
// lib.optionalAttrs supportsCross (flatMapAttrs (lib.genAttrs crossSystems (_: { })) (crossSystem: {}: { (
# These attributes go right into `packages.<system>`. pkgName:
"${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}; {
})) supportsCross ? true,
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (stdenvName: {}: { }:
# These attributes go right into `packages.<system>`. {
"${pkgName}-${stdenvName}" = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".nixComponents.${pkgName}; # These attributes go right into `packages.<system>`.
}) "${pkgName}" = nixpkgsFor.${system}.native.nixComponents.${pkgName};
) "${pkgName}-static" = nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName};
"${pkgName}-llvm" = nixpkgsFor.${system}.native.pkgsLLVM.nixComponents.${pkgName};
}
// lib.optionalAttrs supportsCross (
flatMapAttrs (lib.genAttrs crossSystems (_: { })) (
crossSystem:
{ }:
{
# These attributes go right into `packages.<system>`.
"${pkgName}-${crossSystem}" = nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName};
}
)
)
// flatMapAttrs (lib.genAttrs stdenvs (_: { })) (
stdenvName:
{ }:
{
# These attributes go right into `packages.<system>`.
"${pkgName}-${stdenvName}" =
nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.nixComponents.${pkgName};
}
)
)
// lib.optionalAttrs (builtins.elem system linux64BitSystems) { // lib.optionalAttrs (builtins.elem system linux64BitSystems) {
dockerImage = dockerImage =
let let
pkgs = nixpkgsFor.${system}.native; pkgs = nixpkgsFor.${system}.native;
image = import ./docker.nix { inherit pkgs; tag = pkgs.nix.version; }; image = import ./docker.nix {
in inherit pkgs;
pkgs.runCommand tag = pkgs.nix.version;
"docker-image-tarball-${pkgs.nix.version}" };
{ meta.description = "Docker image with Nix for ${system}"; } in
'' pkgs.runCommand "docker-image-tarball-${pkgs.nix.version}"
mkdir -p $out/nix-support { meta.description = "Docker image with Nix for ${system}"; }
image=$out/image.tar.gz ''
ln -s ${image} $image mkdir -p $out/nix-support
echo "file binary-dist $image" >> $out/nix-support/hydra-build-products image=$out/image.tar.gz
''; ln -s ${image} $image
}); echo "file binary-dist $image" >> $out/nix-support/hydra-build-products
'';
}
);
devShells = let devShells =
makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; let
prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; }); makeShell = import ./packaging/dev-shell.nix { inherit inputs lib devFlake; };
in prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; });
forAllSystems (system: in
prefixAttrs "native" (forAllStdenvs (stdenvName: makeShell { forAllSystems (
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages"; system:
})) // prefixAttrs "native" (
lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) ( forAllStdenvs (
prefixAttrs "static" (forAllStdenvs (stdenvName: makeShell { stdenvName:
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsStatic; makeShell {
})) // pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName};
prefixAttrs "llvm" (forAllStdenvs (stdenvName: makeShell { }
pkgs = nixpkgsFor.${system}.stdenvs."${stdenvName}Packages".pkgsLLVM; )
})) // )
prefixAttrs "cross" (forAllCrossSystems (crossSystem: makeShell { // lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin) (
pkgs = nixpkgsFor.${system}.cross.${crossSystem}; prefixAttrs "static" (
})) forAllStdenvs (
) // stdenvName:
{ makeShell {
default = self.devShells.${system}.native-stdenvPackages; pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsStatic;
}
)
)
// prefixAttrs "llvm" (
forAllStdenvs (
stdenvName:
makeShell {
pkgs = nixpkgsFor.${system}.nativeForStdenv.${stdenvName}.pkgsLLVM;
}
)
)
// prefixAttrs "cross" (
forAllCrossSystems (
crossSystem:
makeShell {
pkgs = nixpkgsFor.${system}.cross.${crossSystem};
}
)
)
)
// {
native = self.devShells.${system}.native-stdenv;
default = self.devShells.${system}.native;
} }
); );
}; };
} }
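
For reference, the `native-*`, `static-*`, `llvm-*` and `cross-*` dev shell names produced above come from the small `prefixAttrs` helper in the new `devShells` code. A minimal sketch of its behaviour, assuming nixpkgs' `lib` and placeholder shell values:

```nix
let
  lib = (import <nixpkgs> { }).lib; # assumption: <nixpkgs> is on NIX_PATH

  # Same one-liner as in flake.nix: prepend "<prefix>-" to every attribute name.
  prefixAttrs = prefix: lib.concatMapAttrs (k: v: { "${prefix}-${k}" = v; });
in
prefixAttrs "native" {
  stdenv = "<shell drv>"; # placeholder value
  clangStdenv = "<shell drv>"; # hypothetical stdenv name, placeholder value
}
# => { native-stdenv = "<shell drv>"; native-clangStdenv = "<shell drv>"; }
```

This flat naming is what the new `native = self.devShells.${system}.native-stdenv;` alias resolves through.
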
View file
@ -98,5 +98,39 @@
"aks.kenji@protonmail.com": "a-kenji", "aks.kenji@protonmail.com": "a-kenji",
"54070204+0x5a4@users.noreply.github.com": "0x5a4", "54070204+0x5a4@users.noreply.github.com": "0x5a4",
"brian@bmcgee.ie": "brianmcgee", "brian@bmcgee.ie": "brianmcgee",
"squalus@squalus.net": "squalus" "squalus@squalus.net": "squalus",
"kusold@users.noreply.github.com": "kusold",
"37929162+mergify[bot]@users.noreply.github.com": "mergify[bot]",
"ilja@mailbox.org": "suruaku",
"and.ham95@gmail.com": "andrewhamon",
"andy.hamon@discordapp.com": "andrewhamon",
"siddarthkay@gmail.com": "siddarthkay",
"apoelstra@wpsoftware.net": "apoelstra",
"asmadeus@codewreck.org": "martinetd",
"tristan.ross@midstall.com": "RossComputerGuy",
"bryanlais@gmail.com": "bryango",
"157494086+allrealmsoflife@users.noreply.github.com": "allrealmsoflife",
"ConnorBaker01@gmail.com": "ConnorBaker",
"me@momee.mt": "momeemt",
"martin@push-f.com": "not-my-profile",
"90870942+trueNAHO@users.noreply.github.com": "trueNAHO",
"49885263+knotapun@users.noreply.github.com": "knotapun",
"iam@lach.pw": "CertainLach",
"elikowa@gmail.com": "elikoga",
"greg.curtis@jetpack.io": "gcurtis",
"git@sphalerite.org": "lheckemann",
"mightyiampresence@gmail.com": "mightyiam",
"spamfaenger@gmx.de": "dwt",
"graham@grahamc.com": "grahamc",
"wh0@users.noreply.github.com": "wh0",
"25388474+mupdt@users.noreply.github.com": "mupdt",
"anatoli@rainforce.org": "abitrolly",
"h0nIg@users.noreply.github.com": "h0nIg",
"CyberShadow@users.noreply.github.com": "CyberShadow",
"gavinnjohn@gmail.com": "Pandapip1",
"picnoir@alternativebit.fr": "picnoir",
"140354451+myclevorname@users.noreply.github.com": "myclevorname",
"bonniot@gmail.com": "dbdr",
"jack@wilsdon.me": "jackwilsdon",
"143541718+WxNzEMof@users.noreply.github.com": "the-sun-will-rise-tomorrow"
} }
View file
@ -86,5 +86,37 @@
"Aleksanaa": "Aleksana", "Aleksanaa": "Aleksana",
"YorikSar": "Yuriy Taraday", "YorikSar": "Yuriy Taraday",
"kjeremy": "Jeremy Kolb", "kjeremy": "Jeremy Kolb",
"artemist": "Artemis Tosini" "artemist": "Artemis Tosini",
"the-sun-will-rise-tomorrow": null,
"gcurtis": "Greg Curtis",
"ConnorBaker": "Connor Baker",
"abitrolly": "Anatoli Babenia",
"allrealmsoflife": "Domagoj Mi\u0161kovi\u0107",
"andrewhamon": "Andy Hamon",
"picnoir": "F\u00e9lix",
"dbdr": null,
"suruaku": "Ilja",
"jackwilsdon": "Jack Wilsdon",
"mergify[bot]": null,
"kusold": "Mike Kusold",
"lheckemann": "Linus Heckemann",
"h0nIg": null,
"grahamc": "Graham Christensen",
"not-my-profile": "Martin Fischer",
"CyberShadow": "Vladimir Panteleev",
"Pandapip1": "Gavin John",
"RossComputerGuy": "Tristan Ross",
"elikoga": null,
"martinetd": "Dominique Martinet",
"knotapun": "Parker Jones",
"mightyiam": "Shahar \"Dawn\" Or",
"siddarthkay": "Siddarth Kumar",
"apoelstra": "Andrew Poelstra",
"myclevorname": null,
"CertainLach": "Yaroslav Bolyukin",
"trueNAHO": "NAHO",
"wh0": null,
"mupdt": "Matej Urbas",
"momeemt": "Mutsuha Asada",
"dwt": "\u202erekc\u00e4H nitraM\u202e"
} }
File diff suppressed because it is too large

View file
@ -144,12 +144,10 @@ release:
Make a pull request and auto-merge it. Make a pull request and auto-merge it.
* Create a milestone for the next release, move all unresolved issues
from the previous milestone, and close the previous milestone. Set
the date for the next milestone 6 weeks from now.
* Create a backport label. * Create a backport label.
* Add the new backport label to `.mergify.yml`.
* Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of * Post an [announcement on Discourse](https://discourse.nixos.org/c/announcements/8), including the contents of
`rl-$VERSION.md`. `rl-$VERSION.md`.
View file
@ -42,7 +42,7 @@ my $flakeUrl = $evalInfo->{flake};
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl; my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die; my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix.x86_64-linux", 'application/json')); my $buildInfo = decode_json(fetch("$evalUrl/job/build.nix-everything.x86_64-linux", 'application/json'));
#print Dumper($buildInfo); #print Dumper($buildInfo);
my $releaseName = $buildInfo->{nixname}; my $releaseName = $buildInfo->{nixname};
@ -91,7 +91,7 @@ sub getStorePath {
sub copyManual { sub copyManual {
my $manual; my $manual;
eval { eval {
$manual = getStorePath("build.nix.x86_64-linux", "doc"); $manual = getStorePath("manual");
}; };
if ($@) { if ($@) {
warn "$@"; warn "$@";
@ -240,12 +240,12 @@ if ($haveDocker) {
# Upload nix-fallback-paths.nix. # Upload nix-fallback-paths.nix.
write_file("$tmpDir/fallback-paths.nix", write_file("$tmpDir/fallback-paths.nix",
"{\n" . "{\n" .
" x86_64-linux = \"" . getStorePath("build.nix.x86_64-linux") . "\";\n" . " x86_64-linux = \"" . getStorePath("build.nix-everything.x86_64-linux") . "\";\n" .
" i686-linux = \"" . getStorePath("build.nix.i686-linux") . "\";\n" . " i686-linux = \"" . getStorePath("build.nix-everything.i686-linux") . "\";\n" .
" aarch64-linux = \"" . getStorePath("build.nix.aarch64-linux") . "\";\n" . " aarch64-linux = \"" . getStorePath("build.nix-everything.aarch64-linux") . "\";\n" .
" riscv64-linux = \"" . getStorePath("buildCross.nix.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" . " riscv64-linux = \"" . getStorePath("buildCross.nix-everything.riscv64-unknown-linux-gnu.x86_64-linux") . "\";\n" .
" x86_64-darwin = \"" . getStorePath("build.nix.x86_64-darwin") . "\";\n" . " x86_64-darwin = \"" . getStorePath("build.nix-everything.x86_64-darwin") . "\";\n" .
" aarch64-darwin = \"" . getStorePath("build.nix.aarch64-darwin") . "\";\n" . " aarch64-darwin = \"" . getStorePath("build.nix-everything.aarch64-darwin") . "\";\n" .
"}\n"); "}\n");
# Upload release files to S3. # Upload release files to S3.
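
The snippet above only changes which Hydra jobs the fallback store paths are read from (`build.nix-everything.*` rather than `build.nix.*`); the generated `fallback-paths.nix` itself remains a flat attribute set of one store path per platform. A sketch of its shape, with hypothetical placeholder paths:

```nix
# Hypothetical placeholder store paths -- illustrative only.
{
  x86_64-linux = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-nix-2.x.y";
  i686-linux = "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-nix-2.x.y";
  aarch64-linux = "/nix/store/cccccccccccccccccccccccccccccccc-nix-2.x.y";
  riscv64-linux = "/nix/store/dddddddddddddddddddddddddddddddd-nix-2.x.y";
  x86_64-darwin = "/nix/store/eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee-nix-2.x.y";
  aarch64-darwin = "/nix/store/ffffffffffffffffffffffffffffffff-nix-2.x.y";
}
```
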
View file
@ -1,14 +1,18 @@
{ runCommand {
, system runCommand,
, buildPackages system,
, cacert buildPackages,
, nix cacert,
nix,
}: }:
let let
installerClosureInfo = buildPackages.closureInfo { installerClosureInfo = buildPackages.closureInfo {
rootPaths = [ nix cacert ]; rootPaths = [
nix
cacert
];
}; };
inherit (nix) version; inherit (nix) version;
@ -22,18 +26,18 @@ in
runCommand "nix-binary-tarball-${version}" env '' runCommand "nix-binary-tarball-${version}" env ''
cp ${installerClosureInfo}/registration $TMPDIR/reginfo cp ${installerClosureInfo}/registration $TMPDIR/reginfo
cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh cp ${../scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
substitute ${./install-nix-from-tarball.sh} $TMPDIR/install \ substitute ${../scripts/install-nix-from-tarball.sh} $TMPDIR/install \
--subst-var-by nix ${nix} \ --subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert} --subst-var-by cacert ${cacert}
substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \ substitute ${../scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
--subst-var-by nix ${nix} \ --subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert} --subst-var-by cacert ${cacert}
substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \ substitute ${../scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
--subst-var-by nix ${nix} \ --subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert} --subst-var-by cacert ${cacert}
substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \ substitute ${../scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
--subst-var-by nix ${nix} \ --subst-var-by nix ${nix} \
--subst-var-by cacert ${cacert} --subst-var-by cacert ${cacert}
View file
@ -13,9 +13,11 @@ let
versionSuffix = lib.optionalString (!officialRelease) "pre"; versionSuffix = lib.optionalString (!officialRelease) "pre";
fineVersionSuffix = lib.optionalString fineVersionSuffix =
(!officialRelease) lib.optionalString (!officialRelease)
"pre${builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")}_${src.shortRev or "dirty"}"; "pre${
builtins.substring 0 8 (src.lastModifiedDate or src.lastModified or "19700101")
}_${src.shortRev or "dirty"}";
fineVersion = baseVersion + fineVersionSuffix; fineVersion = baseVersion + fineVersionSuffix;
in in
@ -54,7 +56,9 @@ in
nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; }; nix-cli = callPackage ../src/nix/package.nix { version = fineVersion; };
nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix { version = fineVersion; }; nix-functional-tests = callPackage ../src/nix-functional-tests/package.nix {
version = fineVersion;
};
nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; }; nix-manual = callPackage ../doc/manual/package.nix { version = fineVersion; };
nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; }; nix-internal-api-docs = callPackage ../src/internal-api-docs/package.nix { version = fineVersion; };
View file
@ -19,9 +19,7 @@ let
root = ../.; root = ../.;
stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 stdenv = if prevStdenv.isDarwin && prevStdenv.isx86_64 then darwinStdenv else prevStdenv;
then darwinStdenv
else prevStdenv;
# Fix the following error with the default x86_64-darwin SDK: # Fix the following error with the default x86_64-darwin SDK:
# #
@ -38,11 +36,14 @@ let
# Indirection for Nixpkgs to override when package.nix files are vendored # Indirection for Nixpkgs to override when package.nix files are vendored
filesetToSource = lib.fileset.toSource; filesetToSource = lib.fileset.toSource;
/** Given a set of layers, create a mkDerivation-like function */ /**
mkPackageBuilder = exts: userFn: Given a set of layers, create a mkDerivation-like function
stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn); */
mkPackageBuilder =
exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
localSourceLayer = finalAttrs: prevAttrs: localSourceLayer =
finalAttrs: prevAttrs:
let let
workDirPath = workDirPath =
# Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has # Ideally we'd pick finalAttrs.workDir, but for now `mkDerivation` has
@ -51,8 +52,13 @@ let
prevAttrs.workDir; prevAttrs.workDir;
workDirSubpath = lib.path.removePrefix root workDirPath; workDirSubpath = lib.path.removePrefix root workDirPath;
sources = assert prevAttrs.fileset._type == "fileset"; prevAttrs.fileset; sources =
src = lib.fileset.toSource { fileset = sources; inherit root; }; assert prevAttrs.fileset._type == "fileset";
prevAttrs.fileset;
src = lib.fileset.toSource {
fileset = sources;
inherit root;
};
in in
{ {
@ -64,115 +70,129 @@ let
workDir = null; workDir = null;
}; };
mesonLayer = finalAttrs: prevAttrs: mesonLayer = finalAttrs: prevAttrs: {
{ # NOTE:
# NOTE: # As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26,
# As of https://github.com/NixOS/nixpkgs/blob/8baf8241cea0c7b30e0b8ae73474cb3de83c1a30/pkgs/by-name/me/meson/setup-hook.sh#L26, # `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default.
# `mesonBuildType` defaults to `plain` if not specified. We want our Nix-built binaries to be optimized by default. # More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype.
# More on build types here: https://mesonbuild.com/Builtin-options.html#details-for-buildtype. mesonBuildType = "release";
mesonBuildType = "release"; # NOTE:
# NOTE: # Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the
# Users who are debugging Nix builds are expected to set the environment variable `mesonBuildType`, per the # guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10.
# guidance in https://github.com/NixOS/nix/blob/8a3fc27f1b63a08ac983ee46435a56cf49ebaf4a/doc/manual/source/development/debugging.md?plain=1#L10. # For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable.
# For this reason, we don't want to refer to `finalAttrs.mesonBuildType` here, but rather use the environment variable. preConfigure =
preConfigure = prevAttrs.preConfigure or "" + lib.optionalString ( prevAttrs.preConfigure or ""
!stdenv.hostPlatform.isWindows +
# build failure lib.optionalString
&& !stdenv.hostPlatform.isStatic (
) '' !stdenv.hostPlatform.isWindows
case "$mesonBuildType" in # build failure
release|minsize) appendToVar mesonFlags "-Db_lto=true" ;; && !stdenv.hostPlatform.isStatic
*) appendToVar mesonFlags "-Db_lto=false" ;; # LTO breaks exception handling on x86-64-darwin.
esac && stdenv.system != "x86_64-darwin"
''; )
nativeBuildInputs = [ ''
pkgs.buildPackages.meson case "$mesonBuildType" in
pkgs.buildPackages.ninja release|minsize) appendToVar mesonFlags "-Db_lto=true" ;;
] ++ prevAttrs.nativeBuildInputs or []; *) appendToVar mesonFlags "-Db_lto=false" ;;
mesonCheckFlags = prevAttrs.mesonCheckFlags or [] ++ [ esac
"--print-errorlogs" '';
]; nativeBuildInputs = [
}; pkgs.buildPackages.meson
pkgs.buildPackages.ninja
] ++ prevAttrs.nativeBuildInputs or [ ];
mesonCheckFlags = prevAttrs.mesonCheckFlags or [ ] ++ [
"--print-errorlogs"
];
};
mesonBuildLayer = finalAttrs: prevAttrs: mesonBuildLayer = finalAttrs: prevAttrs: {
{ nativeBuildInputs = prevAttrs.nativeBuildInputs or [ ] ++ [
nativeBuildInputs = prevAttrs.nativeBuildInputs or [] ++ [ pkgs.buildPackages.pkg-config
pkgs.buildPackages.pkg-config ];
]; separateDebugInfo = !stdenv.hostPlatform.isStatic;
separateDebugInfo = !stdenv.hostPlatform.isStatic; hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie"; env =
env = prevAttrs.env or {} prevAttrs.env or { }
// lib.optionalAttrs // lib.optionalAttrs (
(stdenv.isLinux stdenv.isLinux
&& !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux") && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")
&& !(stdenv.hostPlatform.useLLVM or false)) && !(stdenv.hostPlatform.useLLVM or false)
{ LDFLAGS = "-fuse-ld=gold"; }; ) { LDFLAGS = "-fuse-ld=gold"; };
}; };
mesonLibraryLayer = finalAttrs: prevAttrs: mesonLibraryLayer = finalAttrs: prevAttrs: {
{ outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ];
outputs = prevAttrs.outputs or [ "out" ] ++ [ "dev" ]; };
};
# Work around weird `--as-needed` linker behavior with BSD, see # Work around weird `--as-needed` linker behavior with BSD, see
# https://github.com/mesonbuild/meson/issues/3593 # https://github.com/mesonbuild/meson/issues/3593
bsdNoLinkAsNeeded = finalAttrs: prevAttrs: bsdNoLinkAsNeeded =
finalAttrs: prevAttrs:
lib.optionalAttrs stdenv.hostPlatform.isBSD { lib.optionalAttrs stdenv.hostPlatform.isBSD {
mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or []; mesonFlags = [ (lib.mesonBool "b_asneeded" false) ] ++ prevAttrs.mesonFlags or [ ];
}; };
miscGoodPractice = finalAttrs: prevAttrs: miscGoodPractice = finalAttrs: prevAttrs: {
{ strictDeps = prevAttrs.strictDeps or true;
strictDeps = prevAttrs.strictDeps or true; enableParallelBuilding = true;
enableParallelBuilding = true; };
};
in in
scope: { scope: {
inherit stdenv; inherit stdenv;
aws-sdk-cpp = (pkgs.aws-sdk-cpp.override { aws-sdk-cpp =
apis = [ "s3" "transfer" ]; (pkgs.aws-sdk-cpp.override {
customMemoryManagement = false; apis = [
}).overrideAttrs { "s3"
# only a stripped down version is built, which takes a lot less resources "transfer"
# to build, so we don't need a "big-parallel" machine. ];
requiredSystemFeatures = [ ]; customMemoryManagement = false;
}; }).overrideAttrs
{
# only a stripped down version is built, which takes a lot less resources
# to build, so we don't need a "big-parallel" machine.
requiredSystemFeatures = [ ];
};
boehmgc = pkgs.boehmgc.override { boehmgc = pkgs.boehmgc.override {
enableLargeConfig = true; enableLargeConfig = true;
}; };
# TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed. # TODO Hack until https://github.com/NixOS/nixpkgs/issues/45462 is fixed.
boost = (pkgs.boost.override { boost =
extraB2Args = [ (pkgs.boost.override {
"--with-container" extraB2Args = [
"--with-context" "--with-container"
"--with-coroutine" "--with-context"
]; "--with-coroutine"
}).overrideAttrs (old: { ];
# Need to remove `--with-*` to use `--with-libraries=...` }).overrideAttrs
buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase; (old: {
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase; # Need to remove `--with-*` to use `--with-libraries=...`
}); buildPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.buildPhase;
installPhase = lib.replaceStrings [ "--without-python" ] [ "" ] old.installPhase;
});
libgit2 = pkgs.libgit2.overrideAttrs (attrs: { libgit2 = pkgs.libgit2.overrideAttrs (attrs: {
cmakeFlags = attrs.cmakeFlags or [] cmakeFlags = attrs.cmakeFlags or [ ] ++ [ "-DUSE_SSH=exec" ];
++ [ "-DUSE_SSH=exec" ]; nativeBuildInputs =
nativeBuildInputs = attrs.nativeBuildInputs or [] attrs.nativeBuildInputs or [ ]
# gitMinimal does not build on Windows. See packbuilder patch. # gitMinimal does not build on Windows. See packbuilder patch.
++ lib.optionals (!stdenv.hostPlatform.isWindows) [ ++ lib.optionals (!stdenv.hostPlatform.isWindows) [
# Needed for `git apply`; see `prePatch` # Needed for `git apply`; see `prePatch`
pkgs.buildPackages.gitMinimal pkgs.buildPackages.gitMinimal
]; ];
# Only `git apply` can handle git binary patches # Only `git apply` can handle git binary patches
prePatch = attrs.prePatch or "" prePatch =
attrs.prePatch or ""
+ lib.optionalString (!stdenv.hostPlatform.isWindows) '' + lib.optionalString (!stdenv.hostPlatform.isWindows) ''
patch() { patch() {
git apply git apply
} }
''; '';
patches = attrs.patches or [] patches =
attrs.patches or [ ]
++ [ ++ [
./patches/libgit2-mempack-thin-packfile.patch ./patches/libgit2-mempack-thin-packfile.patch
] ]
@ -186,27 +206,24 @@ scope: {
inherit resolvePath filesetToSource; inherit resolvePath filesetToSource;
mkMesonDerivation = mkMesonDerivation = mkPackageBuilder [
mkPackageBuilder [ miscGoodPractice
miscGoodPractice localSourceLayer
localSourceLayer mesonLayer
mesonLayer ];
]; mkMesonExecutable = mkPackageBuilder [
mkMesonExecutable = miscGoodPractice
mkPackageBuilder [ bsdNoLinkAsNeeded
miscGoodPractice localSourceLayer
bsdNoLinkAsNeeded mesonLayer
localSourceLayer mesonBuildLayer
mesonLayer ];
mesonBuildLayer mkMesonLibrary = mkPackageBuilder [
]; miscGoodPractice
mkMesonLibrary = bsdNoLinkAsNeeded
mkPackageBuilder [ localSourceLayer
miscGoodPractice mesonLayer
bsdNoLinkAsNeeded mesonBuildLayer
localSourceLayer mesonLibraryLayer
mesonLayer ];
mesonBuildLayer
mesonLibraryLayer
];
} }
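
The `mkPackageBuilder` helper above builds `mkDerivation`-style functions by stacking fixed-point layers (`finalAttrs: prevAttrs: { ... }`) on top of a user function via `lib.composeManyExtensions` and `lib.extends`. A self-contained sketch of the same pattern with a made-up layer and package (the names and attributes here are illustrative, not part of the Nix sources):

```nix
let
  pkgs = import <nixpkgs> { }; # assumption: <nixpkgs> is on NIX_PATH
  inherit (pkgs) lib stdenv;

  # Illustrative layer: appends to whatever the user function produced.
  exampleLayer = finalAttrs: prevAttrs: {
    postInstall = (prevAttrs.postInstall or "") + ''
      echo "built ${finalAttrs.pname}" > $out/built-by-layer
    '';
  };

  # Same shape as mkPackageBuilder above.
  mkExampleBuilder =
    exts: userFn: stdenv.mkDerivation (lib.extends (lib.composeManyExtensions exts) userFn);
in
mkExampleBuilder [ exampleLayer ] (finalAttrs: {
  pname = "layer-demo";
  version = "0.0.1";
  dontUnpack = true;
  installPhase = "mkdir -p $out";
})
```

Each layer sees both the user-supplied attributes (`prevAttrs`) and the final fixed point (`finalAttrs`), which is why the meson layers above can append to `nativeBuildInputs` without clobbering what the per-component package files set.
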
View file
@ -1,128 +1,141 @@
{ lib, devFlake }: {
lib,
inputs,
devFlake,
}:
{ pkgs }: { pkgs }:
pkgs.nixComponents.nix-util.overrideAttrs (attrs: pkgs.nixComponents.nix-util.overrideAttrs (
attrs:
let let
stdenv = pkgs.nixDependencies.stdenv; stdenv = pkgs.nixDependencies.stdenv;
buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; buildCanExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
modular = devFlake.getSystem stdenv.buildPlatform.system; modular = devFlake.getSystem stdenv.buildPlatform.system;
transformFlag = prefix: flag: transformFlag =
assert builtins.isString flag; prefix: flag:
let assert builtins.isString flag;
rest = builtins.substring 2 (builtins.stringLength flag) flag; let
in rest = builtins.substring 2 (builtins.stringLength flag) flag;
in
"-D${prefix}:${rest}"; "-D${prefix}:${rest}";
havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix;
ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags;
in { in
pname = "shell-for-" + attrs.pname; {
pname = "shell-for-" + attrs.pname;
# Remove the version suffix to avoid unnecessary attempts to substitute in nix develop # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop
version = lib.fileContents ../.version; version = lib.fileContents ../.version;
name = attrs.pname; name = attrs.pname;
installFlags = "sysconfdir=$(out)/etc"; installFlags = "sysconfdir=$(out)/etc";
shellHook = '' shellHook = ''
PATH=$prefix/bin:$PATH PATH=$prefix/bin:$PATH
unset PYTHONPATH unset PYTHONPATH
export MANPATH=$out/share/man:$MANPATH export MANPATH=$out/share/man:$MANPATH
# Make bash completion work. # Make bash completion work.
XDG_DATA_DIRS+=:$out/share XDG_DATA_DIRS+=:$out/share
# Make the default phases do the right thing. # Make the default phases do the right thing.
# FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase. # FIXME: this wouldn't be needed if the ninja package set buildPhase() instead of $buildPhase.
# FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C <dir>' to ninja. # FIXME: mesonConfigurePhase shouldn't cd to the build directory. It would be better to pass '-C <dir>' to ninja.
cdToBuildDir() { cdToBuildDir() {
if [[ ! -e build.ninja ]]; then if [[ ! -e build.ninja ]]; then
cd build cd build
fi fi
} }
configurePhase() { configurePhase() {
mesonConfigurePhase mesonConfigurePhase
} }
buildPhase() { buildPhase() {
cdToBuildDir cdToBuildDir
ninjaBuildPhase ninjaBuildPhase
} }
checkPhase() { checkPhase() {
cdToBuildDir cdToBuildDir
mesonCheckPhase mesonCheckPhase
} }
installPhase() { installPhase() {
cdToBuildDir cdToBuildDir
ninjaInstallPhase ninjaInstallPhase
} }
''; '';
# We use this shell with the local checkout, not unpackPhase. # We use this shell with the local checkout, not unpackPhase.
src = null; src = null;
env = { env = {
# Needed for Meson to find Boost. # Needed for Meson to find Boost.
# https://github.com/NixOS/nixpkgs/issues/86131. # https://github.com/NixOS/nixpkgs/issues/86131.
BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include"; BOOST_INCLUDEDIR = "${lib.getDev pkgs.nixDependencies.boost}/include";
BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib"; BOOST_LIBRARYDIR = "${lib.getLib pkgs.nixDependencies.boost}/lib";
# For `make format`, to work without installing pre-commit # For `make format`, to work without installing pre-commit
_NIX_PRE_COMMIT_HOOKS_CONFIG = _NIX_PRE_COMMIT_HOOKS_CONFIG = "${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml"
"${(pkgs.formats.yaml { }).generate "pre-commit-config.yaml" modular.pre-commit.settings.rawConfig}"; modular.pre-commit.settings.rawConfig
}; }";
};
mesonFlags = mesonFlags =
map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags) map (transformFlag "libutil") (ignoreCrossFile pkgs.nixComponents.nix-util.mesonFlags)
++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags) ++ map (transformFlag "libstore") (ignoreCrossFile pkgs.nixComponents.nix-store.mesonFlags)
++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags) ++ map (transformFlag "libfetchers") (ignoreCrossFile pkgs.nixComponents.nix-fetchers.mesonFlags)
++ lib.optionals havePerl (map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)) ++ lib.optionals havePerl (
++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags) map (transformFlag "perl") (ignoreCrossFile pkgs.nixComponents.nix-perl-bindings.mesonFlags)
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags) )
; ++ map (transformFlag "libexpr") (ignoreCrossFile pkgs.nixComponents.nix-expr.mesonFlags)
++ map (transformFlag "libcmd") (ignoreCrossFile pkgs.nixComponents.nix-cmd.mesonFlags);
nativeBuildInputs = attrs.nativeBuildInputs or [] nativeBuildInputs =
++ pkgs.nixComponents.nix-util.nativeBuildInputs attrs.nativeBuildInputs or [ ]
++ pkgs.nixComponents.nix-store.nativeBuildInputs ++ pkgs.nixComponents.nix-util.nativeBuildInputs
++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs ++ pkgs.nixComponents.nix-store.nativeBuildInputs
++ pkgs.nixComponents.nix-expr.nativeBuildInputs ++ pkgs.nixComponents.nix-fetchers.nativeBuildInputs
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs ++ pkgs.nixComponents.nix-expr.nativeBuildInputs
++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.nativeBuildInputs
++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs ++ lib.optionals buildCanExecuteHost pkgs.nixComponents.nix-manual.externalNativeBuildInputs
++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs ++ pkgs.nixComponents.nix-internal-api-docs.nativeBuildInputs
++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs ++ pkgs.nixComponents.nix-external-api-docs.nativeBuildInputs
++ lib.optional ++ pkgs.nixComponents.nix-functional-tests.externalNativeBuildInputs
(!buildCanExecuteHost ++ lib.optional (
# Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 !buildCanExecuteHost
&& !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin) # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479
&& stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages && !(stdenv.hostPlatform.isWindows && stdenv.buildPlatform.isDarwin)
&& lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)) && stdenv.hostPlatform.emulatorAvailable pkgs.buildPackages
pkgs.buildPackages.mesonEmulatorHook && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages)
++ [ ) pkgs.buildPackages.mesonEmulatorHook
pkgs.buildPackages.cmake ++ [
pkgs.buildPackages.shellcheck pkgs.buildPackages.cmake
pkgs.buildPackages.changelog-d pkgs.buildPackages.shellcheck
modular.pre-commit.settings.package pkgs.buildPackages.changelog-d
(pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.package
modular.pre-commit.settings.installationScript) (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript)
] inputs.nixfmt.packages.${pkgs.hostPlatform.system}.default
# TODO: Remove the darwin check once ]
# https://github.com/NixOS/nixpkgs/pull/291814 is available # TODO: Remove the darwin check once
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear # https://github.com/NixOS/nixpkgs/pull/291814 is available
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (lib.hiPrio pkgs.buildPackages.clang-tools); ++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) (
lib.hiPrio pkgs.buildPackages.clang-tools
);
buildInputs = attrs.buildInputs or [] buildInputs =
++ pkgs.nixComponents.nix-util.buildInputs attrs.buildInputs or [ ]
++ pkgs.nixComponents.nix-store.buildInputs ++ pkgs.nixComponents.nix-util.buildInputs
++ pkgs.nixComponents.nix-store-tests.externalBuildInputs ++ pkgs.nixComponents.nix-store.buildInputs
++ pkgs.nixComponents.nix-fetchers.buildInputs ++ pkgs.nixComponents.nix-store-tests.externalBuildInputs
++ pkgs.nixComponents.nix-expr.buildInputs ++ pkgs.nixComponents.nix-fetchers.buildInputs
++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs ++ pkgs.nixComponents.nix-expr.buildInputs
++ pkgs.nixComponents.nix-cmd.buildInputs ++ pkgs.nixComponents.nix-expr.externalPropagatedBuildInputs
++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs ++ pkgs.nixComponents.nix-cmd.buildInputs
++ lib.optional havePerl pkgs.perl ++ lib.optionals havePerl pkgs.nixComponents.nix-perl-bindings.externalBuildInputs
; ++ lib.optional havePerl pkgs.perl;
}) }
)
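
The dev shell above reuses the per-component meson flags by rewriting them with `transformFlag`, which turns a `-D<option>` flag into a subproject-scoped `-D<prefix>:<option>` flag. A small sketch of that rewrite in isolation (the flag value is a made-up example):

```nix
let
  # Same logic as transformFlag in dev-shell.nix: drop the leading "-D" and
  # re-prefix the option with the subproject name.
  transformFlag =
    prefix: flag:
    assert builtins.isString flag;
    let
      rest = builtins.substring 2 (builtins.stringLength flag) flag;
    in
    "-D${prefix}:${rest}";
in
transformFlag "libutil" "-Dunit-tests=true"
# => "-Dlibutil:unit-tests=true"
```
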
View file
@ -42,27 +42,31 @@
}: }:
let let
libs = { libs =
inherit {
nix-util inherit
nix-util-c nix-util
nix-store nix-util-c
nix-store-c nix-store
nix-fetchers nix-store-c
nix-expr nix-fetchers
nix-expr-c nix-expr
nix-flake nix-expr-c
nix-flake-c nix-flake
nix-main nix-flake-c
nix-main-c nix-main
nix-cmd nix-main-c
; nix-cmd
} // lib.optionalAttrs (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) { ;
# Currently fails in static build }
inherit // lib.optionalAttrs
nix-perl-bindings (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
; {
}; # Currently fails in static build
inherit
nix-perl-bindings
;
};
dev = stdenv.mkDerivation (finalAttrs: { dev = stdenv.mkDerivation (finalAttrs: {
name = "nix-${nix-cli.version}-dev"; name = "nix-${nix-cli.version}-dev";
@ -77,10 +81,9 @@ let
''; '';
passthru = { passthru = {
tests = { tests = {
pkg-config = pkg-config = testers.hasPkgConfigModules {
testers.hasPkgConfigModules { package = finalAttrs.finalPackage;
package = finalAttrs.finalPackage; };
};
}; };
# If we were to fully emulate output selection here, we'd confuse the Nix CLIs, # If we were to fully emulate output selection here, we'd confuse the Nix CLIs,
@ -123,70 +126,84 @@ in
]; ];
meta.mainProgram = "nix"; meta.mainProgram = "nix";
}).overrideAttrs (finalAttrs: prevAttrs: { }).overrideAttrs
doCheck = true; (
doInstallCheck = true; finalAttrs: prevAttrs: {
doCheck = true;
doInstallCheck = true;
checkInputs = [ checkInputs =
# Make sure the unit tests have passed [
nix-util-tests.tests.run # Make sure the unit tests have passed
nix-store-tests.tests.run nix-util-tests.tests.run
nix-expr-tests.tests.run nix-store-tests.tests.run
nix-fetchers-tests.tests.run nix-expr-tests.tests.run
nix-flake-tests.tests.run nix-fetchers-tests.tests.run
nix-flake-tests.tests.run
# Make sure the functional tests have passed # Make sure the functional tests have passed
nix-functional-tests nix-functional-tests
# dev bundle is ok # dev bundle is ok
# (checkInputs must be empty paths??) # (checkInputs must be empty paths??)
(runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out") (runCommand "check-pkg-config" { checked = dev.tests.pkg-config; } "mkdir $out")
] ++ lib.optionals (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform) [ ]
# Perl currently fails in static build ++ lib.optionals
# TODO: Split out tests into a separate derivation? (!stdenv.hostPlatform.isStatic && stdenv.buildPlatform.canExecute stdenv.hostPlatform)
nix-perl-bindings [
]; # Perl currently fails in static build
passthru = prevAttrs.passthru // { # TODO: Split out tests into a separate derivation?
inherit (nix-cli) version; nix-perl-bindings
];
passthru = prevAttrs.passthru // {
inherit (nix-cli) version;
/** /**
These are the libraries that are part of the Nix project. They are used These are the libraries that are part of the Nix project. They are used
by the Nix CLI and other tools. by the Nix CLI and other tools.
If you need to use these libraries in your project, we recommend using If you need to use these libraries in your project, we recommend using
the `-c` C API libraries exclusively, if possible. the `-c` C API libraries exclusively, if possible.
We also recommend that you build the complete package to ensure that the unit tests pass. We also recommend that you build the complete package to ensure that the unit tests pass.
You could do this in CI, or by passing it in an unused environment variable. e.g. in a `mkDerivation` call: You could do this in CI, or by passing it in an unused environment variable. e.g. in a `mkDerivation` call:
```nix ```nix
buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ]; buildInputs = [ nix.libs.nix-util-c nix.libs.nix-store-c ];
# Make sure the nix libs we use are ok # Make sure the nix libs we use are ok
unusedInputsForTests = [ nix ]; unusedInputsForTests = [ nix ];
disallowedReferences = nix.all; disallowedReferences = nix.all;
``` ```
*/ */
inherit libs; inherit libs;
tests = prevAttrs.passthru.tests or {} // { tests = prevAttrs.passthru.tests or { } // {
# TODO: create a proper fixpoint and: # TODO: create a proper fixpoint and:
# pkg-config = # pkg-config =
# testers.hasPkgConfigModules { # testers.hasPkgConfigModules {
# package = finalPackage; # package = finalPackage;
# }; # };
}; };
/** /**
A derivation referencing the `dev` outputs of the Nix libraries. A derivation referencing the `dev` outputs of the Nix libraries.
*/ */
inherit dev; inherit dev;
inherit devdoc; inherit devdoc;
doc = nix-manual; doc = nix-manual;
outputs = [ "out" "dev" "devdoc" "doc" ]; outputs = [
all = lib.attrValues (lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName})); "out"
}; "dev"
meta = prevAttrs.meta // { "devdoc"
description = "The Nix package manager"; "doc"
pkgConfigModules = dev.meta.pkgConfigModules; ];
}; all = lib.attrValues (
}) lib.genAttrs finalAttrs.passthru.outputs (outName: finalAttrs.finalPackage.${outName})
);
};
meta = prevAttrs.meta // {
description = "The Nix package manager";
pkgConfigModules = dev.meta.pkgConfigModules;
};
}
)
View file
@ -1,22 +1,24 @@
{ inputs {
, binaryTarball inputs,
, forAllCrossSystems forAllCrossSystems,
, forAllSystems forAllSystems,
, lib lib,
, linux64BitSystems linux64BitSystems,
, nixpkgsFor nixpkgsFor,
, self self,
, officialRelease officialRelease,
}: }:
let let
inherit (inputs) nixpkgs nixpkgs-regression; inherit (inputs) nixpkgs nixpkgs-regression;
installScriptFor = tarballs: installScriptFor =
nixpkgsFor.x86_64-linux.native.callPackage ../scripts/installer.nix { tarballs:
nixpkgsFor.x86_64-linux.native.callPackage ./installer {
inherit tarballs; inherit tarballs;
}; };
testNixVersions = pkgs: daemon: testNixVersions =
pkgs: daemon:
pkgs.nixComponents.nix-functional-tests.override { pkgs.nixComponents.nix-functional-tests.override {
pname = "nix-daemon-compat-tests"; pname = "nix-daemon-compat-tests";
version = "${pkgs.nix.version}-with-daemon-${daemon.version}"; version = "${pkgs.nix.version}-with-daemon-${daemon.version}";
@ -54,44 +56,72 @@ let
in in
{ {
# Binary package for various platforms. # Binary package for various platforms.
build = forAllPackages (pkgName: build = forAllPackages (
forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})); pkgName: forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.${pkgName})
);
shellInputs = removeAttrs shellInputs = removeAttrs (forAllSystems (
(forAllSystems (system: self.devShells.${system}.default.inputDerivation)) system: self.devShells.${system}.default.inputDerivation
[ "i686-linux" ]; )) [ "i686-linux" ];
buildStatic = forAllPackages (pkgName: buildStatic = forAllPackages (
lib.genAttrs linux64BitSystems (system: nixpkgsFor.${system}.static.nixComponents.${pkgName})); pkgName:
lib.genAttrs linux64BitSystems (
system: nixpkgsFor.${system}.native.pkgsStatic.nixComponents.${pkgName}
)
);
buildCross = forAllPackages (pkgName: buildCross = forAllPackages (
pkgName:
# Hack to avoid non-evaling package # Hack to avoid non-evaling package
(if pkgName == "nix-functional-tests" then lib.flip builtins.removeAttrs ["x86_64-w64-mingw32"] else lib.id) (
(forAllCrossSystems (crossSystem: if pkgName == "nix-functional-tests" then
lib.genAttrs [ "x86_64-linux" ] (system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName})))); lib.flip builtins.removeAttrs [ "x86_64-w64-mingw32" ]
else
lib.id
)
(
forAllCrossSystems (
crossSystem:
lib.genAttrs [ "x86_64-linux" ] (
system: nixpkgsFor.${system}.cross.${crossSystem}.nixComponents.${pkgName}
)
)
)
);
buildNoGc = let buildNoGc =
components = forAllSystems (system: let
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { components = forAllSystems (
nix-expr = super.nix-expr.override { enableGC = false; }; system:
}) nixpkgsFor.${system}.native.nixComponents.overrideScope (
); self: super: {
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); nix-expr = super.nix-expr.override { enableGC = false; };
}
)
);
in
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli); buildNoTests = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-cli);
# Toggles some settings for better coverage. Windows needs these # Toggles some settings for better coverage. Windows needs these
# library combinations, and Debian build Nix with GNU readline too. # library combinations, and Debian build Nix with GNU readline too.
buildReadlineNoMarkdown = let buildReadlineNoMarkdown =
components = forAllSystems (system: let
nixpkgsFor.${system}.native.nixComponents.overrideScope (self: super: { components = forAllSystems (
nix-cmd = super.nix-cmd.override { system:
enableMarkdown = false; nixpkgsFor.${system}.native.nixComponents.overrideScope (
readlineFlavor = "readline"; self: super: {
}; nix-cmd = super.nix-cmd.override {
}) enableMarkdown = false;
); readlineFlavor = "readline";
in forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName})); };
}
)
);
in
forAllPackages (pkgName: forAllSystems (system: components.${system}.${pkgName}));
# Perl bindings for various platforms. # Perl bindings for various platforms.
perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings); perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nixComponents.nix-perl-bindings);
@ -99,13 +129,16 @@ in
# Binary tarball for various platforms, containing a Nix store # Binary tarball for various platforms, containing a Nix store
# with the closure of 'nix' package, and the second half of # with the closure of 'nix' package, and the second half of
# the installation script. # the installation script.
binaryTarball = forAllSystems (system: binaryTarball nixpkgsFor.${system}.native.nix nixpkgsFor.${system}.native); binaryTarball = forAllSystems (
system: nixpkgsFor.${system}.native.callPackage ./binary-tarball.nix { }
);
binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (system: binaryTarballCross = lib.genAttrs [ "x86_64-linux" ] (
forAllCrossSystems (crossSystem: system:
binaryTarball forAllCrossSystems (
nixpkgsFor.${system}.cross.${crossSystem}.nix crossSystem: nixpkgsFor.${system}.cross.${crossSystem}.callPackage ./binary-tarball.nix { }
nixpkgsFor.${system}.cross.${crossSystem})); )
);
# The first half of the installation script. This is uploaded # The first half of the installation script. This is uploaded
# to https://nixos.org/nix/install. It downloads the binary # to https://nixos.org/nix/install. It downloads the binary
@ -124,9 +157,12 @@ in
self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu" self.hydraJobs.binaryTarballCross."x86_64-linux"."riscv64-unknown-linux-gnu"
]; ];
installerScriptForGHA = forAllSystems (system: nixpkgsFor.${system}.native.callPackage ../scripts/installer.nix { installerScriptForGHA = forAllSystems (
tarballs = [ self.hydraJobs.binaryTarball.${system} ]; system:
}); nixpkgsFor.${system}.native.callPackage ./installer {
tarballs = [ self.hydraJobs.binaryTarball.${system} ];
}
);
# docker image with Nix inside # docker image with Nix inside
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage); dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
@ -147,16 +183,20 @@ in
external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs; external-api-docs = nixpkgsFor.x86_64-linux.native.nixComponents.nix-external-api-docs;
# System tests. # System tests.
tests = import ../tests/nixos { inherit lib nixpkgs nixpkgsFor self; } // { tests =
import ../tests/nixos {
inherit lib nixpkgs nixpkgsFor;
inherit (self.inputs) nixpkgs-23-11;
}
// {
# Make sure that nix-env still produces the exact same result # Make sure that nix-env still produces the exact same result
# on a particular version of Nixpkgs. # on a particular version of Nixpkgs.
evalNixpkgs = evalNixpkgs =
let let
inherit (nixpkgsFor.x86_64-linux.native) runCommand nix; inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
in in
runCommand "eval-nixos" { buildInputs = [ nix ]; } runCommand "eval-nixos" { buildInputs = [ nix ]; } ''
''
type -p nix-env type -p nix-env
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593. # Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
( (
@ -167,36 +207,36 @@ in
mkdir $out mkdir $out
''; '';
nixpkgsLibTests = nixpkgsLibTests = forAllSystems (
forAllSystems (system: system:
import (nixpkgs + "/lib/tests/test-with-nix.nix") import (nixpkgs + "/lib/tests/test-with-nix.nix") {
{ lib = nixpkgsFor.${system}.native.lib;
lib = nixpkgsFor.${system}.native.lib; nix = self.packages.${system}.nix-cli;
nix = self.packages.${system}.nix-cli; pkgs = nixpkgsFor.${system}.native;
pkgs = nixpkgsFor.${system}.native; }
}
); );
}; };
metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" { metrics.nixpkgs = import "${nixpkgs-regression}/pkgs/top-level/metrics.nix" {
pkgs = nixpkgsFor.x86_64-linux.native; pkgs = nixpkgsFor.x86_64-linux.native;
nixpkgs = nixpkgs-regression; nixpkgs = nixpkgs-regression;
}; };
installTests = forAllSystems (system: installTests = forAllSystems (
let pkgs = nixpkgsFor.${system}.native; in system:
pkgs.runCommand "install-tests" let
{ pkgs = nixpkgsFor.${system}.native;
againstSelf = testNixVersions pkgs pkgs.nix; in
againstCurrentLatest = pkgs.runCommand "install-tests" {
# FIXME: temporarily disable this on macOS because of #3605. againstSelf = testNixVersions pkgs pkgs.nix;
if system == "x86_64-linux" againstCurrentLatest =
then testNixVersions pkgs pkgs.nixVersions.latest # FIXME: temporarily disable this on macOS because of #3605.
else null; if system == "x86_64-linux" then testNixVersions pkgs pkgs.nixVersions.latest else null;
# Disabled because the latest stable version doesn't handle # Disabled because the latest stable version doesn't handle
# `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work # `NIX_DAEMON_SOCKET_PATH` which is required for the tests to work
# againstLatestStable = testNixVersions pkgs pkgs.nixStable; # againstLatestStable = testNixVersions pkgs pkgs.nixStable;
} "touch $out"); } "touch $out"
);
installerTests = import ../tests/installer { installerTests = import ../tests/installer {
binaryTarballs = self.hydraJobs.binaryTarball; binaryTarballs = self.hydraJobs.binaryTarball;
View file
@ -0,0 +1,42 @@
{
lib,
runCommand,
nix,
tarballs,
}:
runCommand "installer-script"
{
buildInputs = [ nix ];
}
''
mkdir -p $out/nix-support
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
tarballPath() {
# Remove the store prefix
local path=''${1#${builtins.storeDir}/}
# Get the path relative to the derivation root
local rest=''${path#*/}
# Get the derivation hash
local drvHash=''${path%%-*}
echo "$drvHash/$rest"
}
substitute ${./install.in} $out/install \
${
lib.concatMapStrings (
tarball:
let
inherit (tarball.stdenv.hostPlatform) system;
in
''
\
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
''
) tarballs
} --replace '@nixVersion@' ${nix.version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
''
View file
@ -1,36 +0,0 @@
{ lib
, runCommand
, nix
, tarballs
}:
runCommand "installer-script" {
buildInputs = [ nix ];
} ''
mkdir -p $out/nix-support
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
tarballPath() {
# Remove the store prefix
local path=''${1#${builtins.storeDir}/}
# Get the path relative to the derivation root
local rest=''${path#*/}
# Get the derivation hash
local drvHash=''${path%%-*}
echo "$drvHash/$rest"
}
substitute ${./install.in} $out/install \
${lib.concatMapStrings
(tarball: let
inherit (tarball.stdenv.hostPlatform) system;
in '' \
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
''
)
tarballs
} --replace '@nixVersion@' ${nix.version}
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
View file

@ -1,11 +1,12 @@
{ lib {
, mkMesonDerivation lib,
mkMesonDerivation,
, doxygen doxygen,
# Configuration Options # Configuration Options
, version version,
}: }:
let let
@ -39,11 +40,10 @@ mkMesonDerivation (finalAttrs: {
doxygen doxygen
]; ];
preConfigure = preConfigure = ''
'' chmod u+w ./.version
chmod u+w ./.version echo ${finalAttrs.version} > ./.version
echo ${finalAttrs.version} > ./.version '';
'';
postInstall = '' postInstall = ''
mkdir -p ''${!outputDoc}/nix-support mkdir -p ''${!outputDoc}/nix-support
View file
@ -1,11 +1,12 @@
{ lib {
, mkMesonDerivation lib,
mkMesonDerivation,
, doxygen doxygen,
# Configuration Options # Configuration Options
, version version,
}: }:
let let
@ -17,27 +18,28 @@ mkMesonDerivation (finalAttrs: {
inherit version; inherit version;
workDir = ./.; workDir = ./.;
fileset = let fileset =
cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh"); let
in fileset.unions [ cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
./.version in
../../.version fileset.unions [
./meson.build ./.version
./doxygen.cfg.in ../../.version
# Source is not compiled, but still must be available for Doxygen ./meson.build
# to gather comments. ./doxygen.cfg.in
(cpp ../.) # Source is not compiled, but still must be available for Doxygen
]; # to gather comments.
(cpp ../.)
];
nativeBuildInputs = [ nativeBuildInputs = [
doxygen doxygen
]; ];
preConfigure = preConfigure = ''
'' chmod u+w ./.version
chmod u+w ./.version echo ${finalAttrs.version} > ./.version
echo ${finalAttrs.version} > ./.version '';
'';
postInstall = '' postInstall = ''
mkdir -p ''${!outputDoc}/nix-support mkdir -p ''${!outputDoc}/nix-support
View file
@ -347,7 +347,7 @@ struct MixEnvironment : virtual Args
void setEnviron(); void setEnviron();
}; };
void completeFlakeInputPath( void completeFlakeInputAttrPath(
AddCompletions & completions, AddCompletions & completions,
ref<EvalState> evalState, ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs, const std::vector<FlakeRef> & flakeRefs,
View file
@ -33,7 +33,7 @@ namespace nix {
namespace fs { using namespace std::filesystem; } namespace fs { using namespace std::filesystem; }
void completeFlakeInputPath( void completeFlakeInputAttrPath(
AddCompletions & completions, AddCompletions & completions,
ref<EvalState> evalState, ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs, const std::vector<FlakeRef> & flakeRefs,
@ -117,10 +117,10 @@ MixFlakeOptions::MixFlakeOptions()
.labels = {"input-path"}, .labels = {"input-path"},
.handler = {[&](std::string s) { .handler = {[&](std::string s) {
warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version."); warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
lockFlags.inputUpdates.insert(flake::parseInputPath(s)); lockFlags.inputUpdates.insert(flake::parseInputAttrPath(s));
}}, }},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}} }}
}); });
@ -129,15 +129,15 @@ MixFlakeOptions::MixFlakeOptions()
.description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.", .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
.category = category, .category = category,
.labels = {"input-path", "flake-url"}, .labels = {"input-path", "flake-url"},
.handler = {[&](std::string inputPath, std::string flakeRef) { .handler = {[&](std::string inputAttrPath, std::string flakeRef) {
lockFlags.writeLockFile = false; lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign( lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath), flake::parseInputAttrPath(inputAttrPath),
parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true)); parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true));
}}, }},
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) { .completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
if (n == 0) { if (n == 0) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
} else if (n == 1) { } else if (n == 1) {
completeFlakeRef(completions, getEvalState()->store, prefix); completeFlakeRef(completions, getEvalState()->store, prefix);
} }


@ -1,32 +1,33 @@
{ lib {
, stdenv lib,
, mkMesonLibrary stdenv,
mkMesonLibrary,
, nix-util nix-util,
, nix-store nix-store,
, nix-fetchers nix-fetchers,
, nix-expr nix-expr,
, nix-flake nix-flake,
, nix-main nix-main,
, editline editline,
, readline readline,
, lowdown lowdown,
, nlohmann_json nlohmann_json,
# Configuration Options # Configuration Options
, version version,
# Whether to enable Markdown rendering in the Nix binary. # Whether to enable Markdown rendering in the Nix binary.
, enableMarkdown ? !stdenv.hostPlatform.isWindows enableMarkdown ? !stdenv.hostPlatform.isWindows,
# Which interactive line editor library to use for Nix's repl. # Which interactive line editor library to use for Nix's repl.
# #
# Currently supported choices are: # Currently supported choices are:
# #
# - editline (default) # - editline (default)
# - readline # - readline
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline",
}: }:
let let


@ -1,12 +1,13 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-store-c nix-store-c,
, nix-expr nix-expr,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,15 +1,16 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-store-test-support nix-store-test-support,
, nix-expr nix-expr,
, nix-expr-c nix-expr-c,
, rapidcheck rapidcheck,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,20 +1,21 @@
{ lib {
, buildPackages lib,
, stdenv buildPackages,
, mkMesonExecutable stdenv,
mkMesonExecutable,
, nix-expr nix-expr,
, nix-expr-c nix-expr-c,
, nix-expr-test-support nix-expr-test-support,
, rapidcheck rapidcheck,
, gtest gtest,
, runCommand runCommand,
# Configuration Options # Configuration Options
, version version,
, resolvePath resolvePath,
}: }:
let let
@ -58,16 +59,22 @@ mkMesonExecutable (finalAttrs: {
passthru = { passthru = {
tests = { tests = {
run = runCommand "${finalAttrs.pname}-run" { run =
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; runCommand "${finalAttrs.pname}-run"
} (lib.optionalString stdenv.hostPlatform.isWindows '' {
export HOME="$PWD/home-dir" meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
mkdir -p "$HOME" }
'' + '' (
export _NIX_TEST_UNIT_DATA=${resolvePath ./data} lib.optionalString stdenv.hostPlatform.isWindows ''
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} export HOME="$PWD/home-dir"
touch $out mkdir -p "$HOME"
''); ''
+ ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
''
);
}; };
}; };


@ -20,77 +20,77 @@ let
# Resolve a input spec into a node name. An input spec is # Resolve a input spec into a node name. An input spec is
# either a node name, or a 'follows' path from the root # either a node name, or a 'follows' path from the root
# node. # node.
resolveInput = inputSpec: resolveInput =
if builtins.isList inputSpec inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;
then getInputByPath lockFile.root inputSpec
else inputSpec;
# Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the # Follow an input attrpath (e.g. ["dwarffs" "nixpkgs"]) from the
# root node, returning the final node. # root node, returning the final node.
getInputByPath = nodeName: path: getInputByPath =
if path == [] nodeName: path:
then nodeName if path == [ ] then
nodeName
else else
getInputByPath getInputByPath
# Since this could be a 'follows' input, call resolveInput. # Since this could be a 'follows' input, call resolveInput.
(resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path}) (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
(builtins.tail path); (builtins.tail path);
allNodes = allNodes = builtins.mapAttrs (
builtins.mapAttrs key: node:
(key: node: let
let
parentNode = allNodes.${getInputByPath lockFile.root node.parent}; parentNode = allNodes.${getInputByPath lockFile.root node.parent};
sourceInfo = sourceInfo =
if overrides ? ${key} if overrides ? ${key} then
then overrides.${key}.sourceInfo
overrides.${key}.sourceInfo else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then
else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" parentNode.sourceInfo
then // {
parentNode.sourceInfo // { outPath = parentNode.outPath + ("/" + node.locked.path);
outPath = parentNode.outPath + ("/" + node.locked.path); }
} else
else # FIXME: remove obsolete node.info.
# FIXME: remove obsolete node.info. # Note: lock file entries are always final.
# Note: lock file entries are always final. fetchTreeFinal (node.info or { } // removeAttrs node.locked [ "dir" ]);
fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]);
subdir = overrides.${key}.dir or node.locked.dir or ""; subdir = overrides.${key}.dir or node.locked.dir or "";
outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir); outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
flake = import (outPath + "/flake.nix"); flake = import (outPath + "/flake.nix");
inputs = builtins.mapAttrs inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (
(inputName: inputSpec: allNodes.${resolveInput inputSpec}) node.inputs or { }
(node.inputs or {}); );
outputs = flake.outputs (inputs // { self = result; }); outputs = flake.outputs (inputs // { self = result; });
result = result =
outputs outputs
# We add the sourceInfo attribute for its metadata, as they are # We add the sourceInfo attribute for its metadata, as they are
# relevant metadata for the flake. However, the outPath of the # relevant metadata for the flake. However, the outPath of the
# sourceInfo does not necessarily match the outPath of the flake, # sourceInfo does not necessarily match the outPath of the flake,
# as the flake may be in a subdirectory of a source. # as the flake may be in a subdirectory of a source.
# This is shadowed in the next // # This is shadowed in the next //
// sourceInfo // sourceInfo
// { // {
# This shadows the sourceInfo.outPath # This shadows the sourceInfo.outPath
inherit outPath; inherit outPath;
inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake"; inherit inputs;
}; inherit outputs;
inherit sourceInfo;
_type = "flake";
};
in in
if node.flake or true then if node.flake or true then
assert builtins.isFunction flake.outputs; assert builtins.isFunction flake.outputs;
result result
else else
sourceInfo sourceInfo
) ) lockFile.nodes;
lockFile.nodes;
in allNodes.${lockFile.root} in
allNodes.${lockFile.root}
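As a reading aid (not part of this commit), here is a minimal, self-contained sketch of how the resolveInput/getInputByPath pair above walks a 'follows' path; the lock-file graph and node names below are made up.

let
  lockFile = {
    root = "root";
    nodes = {
      root = { inputs = { dwarffs = "dwarffs"; nixpkgs = [ "dwarffs" "nixpkgs" ]; }; };
      dwarffs = { inputs = { nixpkgs = "nixpkgs-node"; }; };
      nixpkgs-node = { };
    };
  };
  resolveInput =
    inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;
  getInputByPath =
    nodeName: path:
    if path == [ ] then
      nodeName
    else
      getInputByPath (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path}) (builtins.tail path);
in
# The root's "nixpkgs" input is a follows spec; it resolves to the node that
# "dwarffs" itself uses, i.e. "nixpkgs-node".
resolveInput lockFile.nodes.root.inputs.nixpkgs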


@ -3114,7 +3114,7 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
         }
     }

-    if (path.pathExists())
+    if (path.resolveSymlinks().pathExists())
         return finish(std::move(path));
     else {
         logWarning({


@ -1,40 +1,72 @@
{ system ? "" # obsolete {
, url system ? "", # obsolete
, hash ? "" # an SRI hash url,
hash ? "", # an SRI hash
# Legacy hash specification # Legacy hash specification
, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? "" md5 ? "",
, outputHash ? sha1 ? "",
if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256 sha256 ? "",
, outputHashAlgo ? sha512 ? "",
if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256" outputHash ?
if hash != "" then
hash
else if sha512 != "" then
sha512
else if sha1 != "" then
sha1
else if md5 != "" then
md5
else
sha256,
outputHashAlgo ?
if hash != "" then
""
else if sha512 != "" then
"sha512"
else if sha1 != "" then
"sha1"
else if md5 != "" then
"md5"
else
"sha256",
, executable ? false executable ? false,
, unpack ? false unpack ? false,
, name ? baseNameOf (toString url) name ? baseNameOf (toString url),
, impure ? false impure ? false,
}: }:
derivation ({ derivation (
builder = "builtin:fetchurl"; {
builder = "builtin:fetchurl";
# New-style output content requirements. # New-style output content requirements.
outputHashMode = if unpack || executable then "recursive" else "flat"; outputHashMode = if unpack || executable then "recursive" else "flat";
inherit name url executable unpack; inherit
name
url
executable
unpack
;
system = "builtin"; system = "builtin";
# No need to double the amount of network traffic # No need to double the amount of network traffic
preferLocalBuild = true; preferLocalBuild = true;
# This attribute does nothing; it's here to avoid changing evaluation results. # This attribute does nothing; it's here to avoid changing evaluation results.
impureEnvVars = [ impureEnvVars = [
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" "http_proxy"
]; "https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
];
# To make "nix-prefetch-url" work. # To make "nix-prefetch-url" work.
urls = [ url ]; urls = [ url ];
} // (if impure }
then { __impure = true; } // (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
else { inherit outputHashAlgo outputHash; })) )
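A hedged usage sketch of the file above (assuming it is the expression behind <nix/fetchurl.nix>): the legacy hash arguments are folded into outputHash/outputHashAlgo with the precedence hash > sha512 > sha1 > md5 > sha256, while an SRI hash is passed through with an empty outputHashAlgo. The URL and hashes below are placeholders.

let
  fetchurl = import <nix/fetchurl.nix>;
in
{
  # Legacy style: becomes outputHashAlgo = "sha256", outputHash = the given sha256.
  legacy = fetchurl {
    url = "https://example.org/source.tar.xz";                          # placeholder
    sha256 = "0000000000000000000000000000000000000000000000000000";    # placeholder
  };
  # SRI style: the hash is used as-is and outputHashAlgo stays "".
  sri = fetchurl {
    url = "https://example.org/source.tar.xz";                          # placeholder
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";       # placeholder
  };
}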


@ -1,21 +1,27 @@
attrs @ { drvPath, outputs, name, ... }: attrs@{
drvPath,
outputs,
name,
...
}:
let let
commonAttrs = (builtins.listToAttrs outputsList) // commonAttrs = (builtins.listToAttrs outputsList) // {
{ all = map (x: x.value) outputsList; all = map (x: x.value) outputsList;
inherit drvPath name; inherit drvPath name;
type = "derivation"; type = "derivation";
}; };
outputToAttrListElement = outputName: outputToAttrListElement = outputName: {
{ name = outputName; name = outputName;
value = commonAttrs // { value = commonAttrs // {
outPath = builtins.getAttr outputName attrs; outPath = builtins.getAttr outputName attrs;
inherit outputName; inherit outputName;
};
}; };
};
outputsList = map outputToAttrListElement outputs; outputsList = map outputToAttrListElement outputs;
in (builtins.head outputsList).value in
(builtins.head outputsList).value
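For illustration only: what the helper above yields for a hypothetical imported derivation with two outputs. The store paths are placeholders and the file name in the import is an assumption, not something stated in this commit.

let
  importedDrvToDerivation = import ./imported-drv-to-derivation.nix;            # assumed name of the file above
  result = importedDrvToDerivation {
    name = "example";
    drvPath = "/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-example.drv";        # placeholder
    outputs = [ "out" "dev" ];
    out = "/nix/store/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb-example";                # placeholder
    dev = "/nix/store/cccccccccccccccccccccccccccccccc-example-dev";            # placeholder
  };
in
{
  # 'result' is the attrset for the first output ("out"): it carries
  # type = "derivation", its own outPath/outputName, and the shared
  # drvPath, name, all, out and dev attributes.
  inherit (result) type outputName outPath;
  allOutputNames = map (o: o.outputName) result.all;   # [ "out" "dev" ]
}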


@ -1,33 +1,34 @@
{ lib {
, stdenv lib,
, mkMesonLibrary stdenv,
mkMesonLibrary,
, bison bison,
, flex flex,
, cmake # for resolving toml11 dep cmake, # for resolving toml11 dep
, nix-util nix-util,
, nix-store nix-store,
, nix-fetchers nix-fetchers,
, boost boost,
, boehmgc boehmgc,
, nlohmann_json nlohmann_json,
, toml11 toml11,
# Configuration Options # Configuration Options
, version version,
# Whether to use garbage collection for the Nix language evaluator. # Whether to use garbage collection for the Nix language evaluator.
# #
# If it is disabled, we just leak memory, but this is not as bad as it # If it is disabled, we just leak memory, but this is not as bad as it
# sounds so long as evaluation just takes places within short-lived # sounds so long as evaluation just takes places within short-lived
# processes. (When the process exits, the memory is reclaimed; it is # processes. (When the process exits, the memory is reclaimed; it is
# only leaked *within* the process.) # only leaked *within* the process.)
# #
# Temporarily disabled on Windows because the `GC_throw_bad_alloc` # Temporarily disabled on Windows because the `GC_throw_bad_alloc`
# symbol is missing during linking. # symbol is missing during linking.
, enableGC ? !stdenv.hostPlatform.isWindows enableGC ? !stdenv.hostPlatform.isWindows,
}: }:
let let
@ -51,10 +52,7 @@ mkMesonLibrary (finalAttrs: {
(fileset.fileFilter (file: file.hasExt "hh") ./.) (fileset.fileFilter (file: file.hasExt "hh") ./.)
./lexer.l ./lexer.l
./parser.y ./parser.y
(fileset.difference (fileset.difference (fileset.fileFilter (file: file.hasExt "nix") ./.) ./package.nix)
(fileset.fileFilter (file: file.hasExt "nix") ./.)
./package.nix
)
]; ];
nativeBuildInputs = [ nativeBuildInputs = [


@ -26,27 +26,34 @@
Note that `derivation` is very bare-bones, and provides almost no commands during the build. Note that `derivation` is very bare-bones, and provides almost no commands during the build.
Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment. Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment.
*/ */
drvAttrs @ { outputs ? [ "out" ], ... }: drvAttrs@{
outputs ? [ "out" ],
...
}:
let let
strict = derivationStrict drvAttrs; strict = derivationStrict drvAttrs;
commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) // commonAttrs =
{ all = map (x: x.value) outputsList; drvAttrs
// (builtins.listToAttrs outputsList)
// {
all = map (x: x.value) outputsList;
inherit drvAttrs; inherit drvAttrs;
}; };
outputToAttrListElement = outputName: outputToAttrListElement = outputName: {
{ name = outputName; name = outputName;
value = commonAttrs // { value = commonAttrs // {
outPath = builtins.getAttr outputName strict; outPath = builtins.getAttr outputName strict;
drvPath = strict.drvPath; drvPath = strict.drvPath;
type = "derivation"; type = "derivation";
inherit outputName; inherit outputName;
};
}; };
};
outputsList = map outputToAttrListElement outputs; outputsList = map outputToAttrListElement outputs;
in (builtins.head outputsList).value in
(builtins.head outputsList).value
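A hedged usage sketch of the wrapper above (system and builder values are placeholders): every output attrset exposes the same drvPath, plus its own outPath and outputName, and 'all' lists every output.

let
  pkg = derivation {
    name = "example";
    system = "x86_64-linux";                         # placeholder
    builder = "/bin/sh";                             # placeholder
    args = [ "-c" "echo hello > $out; touch $dev" ];
    outputs = [ "out" "dev" ];
  };
in
{
  inherit (pkg) type outputName;                 # "derivation", "out"
  outputNames = map (o: o.outputName) pkg.all;   # [ "out" "dev" ]
  oneDrv = pkg.drvPath == pkg.dev.drvPath;       # true: both outputs come from one .drv
}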


@ -1,19 +1,20 @@
{ lib {
, buildPackages lib,
, stdenv buildPackages,
, mkMesonExecutable stdenv,
mkMesonExecutable,
, nix-fetchers nix-fetchers,
, nix-store-test-support nix-store-test-support,
, rapidcheck rapidcheck,
, gtest gtest,
, runCommand runCommand,
# Configuration Options # Configuration Options
, version version,
, resolvePath resolvePath,
}: }:
let let
@ -56,16 +57,22 @@ mkMesonExecutable (finalAttrs: {
passthru = { passthru = {
tests = { tests = {
run = runCommand "${finalAttrs.pname}-run" { run =
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; runCommand "${finalAttrs.pname}-run"
} (lib.optionalString stdenv.hostPlatform.isWindows '' {
export HOME="$PWD/home-dir" meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
mkdir -p "$HOME" }
'' + '' (
export _NIX_TEST_UNIT_DATA=${resolvePath ./data} lib.optionalString stdenv.hostPlatform.isWindows ''
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} export HOME="$PWD/home-dir"
touch $out mkdir -p "$HOME"
''); ''
+ ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
''
);
}; };
}; };


@ -207,7 +207,8 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil
 } // extern "C"

-static void initRepoAtomically(std::filesystem::path &path, bool bare) {
+static void initRepoAtomically(std::filesystem::path &path, bool bare)
+{
     if (pathExists(path.string())) return;

     Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() }));
@ -545,13 +546,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
         // then use code that was removed in this commit (see blame)

         auto dir = this->path;
-        Strings gitArgs;
-        if (shallow) {
-            gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
-        }
-        else {
-            gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec };
-        }
+        Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"};
+        if (shallow)
+            append(gitArgs, {"--depth", "1"});
+        append(gitArgs, {std::string("--"), url, refspec});

         runProgram(RunOptions {
             .program = "git",


@ -1,15 +1,16 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util nix-util,
, nix-store nix-store,
, nlohmann_json nlohmann_json,
, libgit2 libgit2,
, curl curl,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,13 +1,14 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-store-c nix-store-c,
, nix-expr-c nix-expr-c,
, nix-flake nix-flake,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,20 +1,21 @@
{ lib {
, buildPackages lib,
, stdenv buildPackages,
, mkMesonExecutable stdenv,
mkMesonExecutable,
, nix-flake nix-flake,
, nix-flake-c nix-flake-c,
, nix-expr-test-support nix-expr-test-support,
, rapidcheck rapidcheck,
, gtest gtest,
, runCommand runCommand,
# Configuration Options # Configuration Options
, version version,
, resolvePath resolvePath,
}: }:
let let
@ -58,17 +59,23 @@ mkMesonExecutable (finalAttrs: {
passthru = { passthru = {
tests = { tests = {
run = runCommand "${finalAttrs.pname}-run" { run =
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; runCommand "${finalAttrs.pname}-run"
} (lib.optionalString stdenv.hostPlatform.isWindows '' {
export HOME="$PWD/home-dir" meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
mkdir -p "$HOME" }
'' + '' (
export _NIX_TEST_UNIT_DATA=${resolvePath ./data} lib.optionalString stdenv.hostPlatform.isWindows ''
export NIX_CONFIG="extra-experimental-features = flakes" export HOME="$PWD/home-dir"
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} mkdir -p "$HOME"
touch $out ''
''); + ''
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
export NIX_CONFIG="extra-experimental-features = flakes"
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
''
);
}; };
}; };


@ -105,7 +105,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, EvalState & state,
Value * value, Value * value,
const PosIdx pos, const PosIdx pos,
const InputPath & lockRootPath, const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir); const SourcePath & flakeDir);
static FlakeInput parseFlakeInput( static FlakeInput parseFlakeInput(
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(
std::string_view inputName, std::string_view inputName,
Value * value, Value * value,
const PosIdx pos, const PosIdx pos,
const InputPath & lockRootPath, const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir) const SourcePath & flakeDir)
{ {
expectType(state, nAttrs, *value, pos); expectType(state, nAttrs, *value, pos);
@ -137,7 +137,7 @@ static FlakeInput parseFlakeInput(
else if (attr.value->type() == nPath) { else if (attr.value->type() == nPath) {
auto path = attr.value->path(); auto path = attr.value->path();
if (path.accessor != flakeDir.accessor) if (path.accessor != flakeDir.accessor)
throw Error("input path '%s' at %s must be in the same source tree as %s", throw Error("input attribute path '%s' at %s must be in the same source tree as %s",
path, state.positions[attr.pos], flakeDir); path, state.positions[attr.pos], flakeDir);
url = "path:" + flakeDir.path.makeRelative(path.path); url = "path:" + flakeDir.path.makeRelative(path.path);
} }
@ -149,11 +149,11 @@ static FlakeInput parseFlakeInput(
expectType(state, nBool, *attr.value, attr.pos); expectType(state, nBool, *attr.value, attr.pos);
input.isFlake = attr.value->boolean(); input.isFlake = attr.value->boolean();
} else if (attr.name == sInputs) { } else if (attr.name == sInputs) {
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath, flakeDir); input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir);
} else if (attr.name == sFollows) { } else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, attr.pos); expectType(state, nString, *attr.value, attr.pos);
auto follows(parseInputPath(attr.value->c_str())); auto follows(parseInputAttrPath(attr.value->c_str()));
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end()); follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end());
input.follows = follows; input.follows = follows;
} else { } else {
// Allow selecting a subset of enum values // Allow selecting a subset of enum values
@ -220,7 +220,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
EvalState & state, EvalState & state,
Value * value, Value * value,
const PosIdx pos, const PosIdx pos,
const InputPath & lockRootPath, const InputAttrPath & lockRootAttrPath,
const SourcePath & flakeDir) const SourcePath & flakeDir)
{ {
std::map<FlakeId, FlakeInput> inputs; std::map<FlakeId, FlakeInput> inputs;
@ -233,7 +233,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
state.symbols[inputAttr.name], state.symbols[inputAttr.name],
inputAttr.value, inputAttr.value,
inputAttr.pos, inputAttr.pos,
lockRootPath, lockRootAttrPath,
flakeDir)); flakeDir));
} }
@ -246,7 +246,7 @@ static Flake readFlake(
const FlakeRef & resolvedRef, const FlakeRef & resolvedRef,
const FlakeRef & lockedRef, const FlakeRef & lockedRef,
const SourcePath & rootDir, const SourcePath & rootDir,
const InputPath & lockRootPath) const InputAttrPath & lockRootAttrPath)
{ {
auto flakeDir = rootDir / CanonPath(resolvedRef.subdir); auto flakeDir = rootDir / CanonPath(resolvedRef.subdir);
auto flakePath = flakeDir / "flake.nix"; auto flakePath = flakeDir / "flake.nix";
@ -270,7 +270,7 @@ static Flake readFlake(
auto sInputs = state.symbols.create("inputs"); auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs()->get(sInputs)) if (auto inputs = vInfo.attrs()->get(sInputs))
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath, flakeDir); flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir);
auto sOutputs = state.symbols.create("outputs"); auto sOutputs = state.symbols.create("outputs");
@ -347,12 +347,12 @@ static Flake getFlake(
const FlakeRef & originalRef, const FlakeRef & originalRef,
bool useRegistries, bool useRegistries,
FlakeCache & flakeCache, FlakeCache & flakeCache,
const InputPath & lockRootPath) const InputAttrPath & lockRootAttrPath)
{ {
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree( auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, useRegistries, flakeCache); state, originalRef, useRegistries, flakeCache);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath); return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootAttrPath);
} }
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries) Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
@ -407,12 +407,12 @@ LockedFlake lockFlake(
{ {
FlakeInput input; FlakeInput input;
SourcePath sourcePath; SourcePath sourcePath;
std::optional<InputPath> parentInputPath; // FIXME: rename to inputPathPrefix? std::optional<InputAttrPath> parentInputAttrPath; // FIXME: rename to inputAttrPathPrefix?
}; };
std::map<InputPath, OverrideTarget> overrides; std::map<InputAttrPath, OverrideTarget> overrides;
std::set<InputPath> explicitCliOverrides; std::set<InputAttrPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed; std::set<InputAttrPath> overridesUsed, updatesUsed;
std::map<ref<Node>, SourcePath> nodePaths; std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) { for (auto & i : lockFlags.inputOverrides) {
@ -436,9 +436,9 @@ LockedFlake lockFlake(
std::function<void( std::function<void(
const FlakeInputs & flakeInputs, const FlakeInputs & flakeInputs,
ref<Node> node, ref<Node> node,
const InputPath & inputPathPrefix, const InputAttrPath & inputAttrPathPrefix,
std::shared_ptr<const Node> oldNode, std::shared_ptr<const Node> oldNode,
const InputPath & followsPrefix, const InputAttrPath & followsPrefix,
const SourcePath & sourcePath, const SourcePath & sourcePath,
bool trustLock)> bool trustLock)>
computeLocks; computeLocks;
@ -450,7 +450,7 @@ LockedFlake lockFlake(
/* The node whose locks are to be updated.*/ /* The node whose locks are to be updated.*/
ref<Node> node, ref<Node> node,
/* The path to this node in the lock file graph. */ /* The path to this node in the lock file graph. */
const InputPath & inputPathPrefix, const InputAttrPath & inputAttrPathPrefix,
/* The old node, if any, from which locks can be /* The old node, if any, from which locks can be
copied. */ copied. */
std::shared_ptr<const Node> oldNode, std::shared_ptr<const Node> oldNode,
@ -458,59 +458,59 @@ LockedFlake lockFlake(
interpreted. When a node is initially locked, it's interpreted. When a node is initially locked, it's
relative to the node's flake; when it's already locked, relative to the node's flake; when it's already locked,
it's relative to the root of the lock file. */ it's relative to the root of the lock file. */
const InputPath & followsPrefix, const InputAttrPath & followsPrefix,
/* The source path of this node's flake. */ /* The source path of this node's flake. */
const SourcePath & sourcePath, const SourcePath & sourcePath,
bool trustLock) bool trustLock)
{ {
debug("computing lock file node '%s'", printInputPath(inputPathPrefix)); debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix));
/* Get the overrides (i.e. attributes of the form /* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */ 'inputs.nixops.inputs.nixpkgs.url = ...'). */
for (auto & [id, input] : flakeInputs) { for (auto & [id, input] : flakeInputs) {
for (auto & [idOverride, inputOverride] : input.overrides) { for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix); auto inputAttrPath(inputAttrPathPrefix);
inputPath.push_back(id); inputAttrPath.push_back(id);
inputPath.push_back(idOverride); inputAttrPath.push_back(idOverride);
overrides.emplace(inputPath, overrides.emplace(inputAttrPath,
OverrideTarget { OverrideTarget {
.input = inputOverride, .input = inputOverride,
.sourcePath = sourcePath, .sourcePath = sourcePath,
.parentInputPath = inputPathPrefix .parentInputAttrPath = inputAttrPathPrefix
}); });
} }
} }
/* Check whether this input has overrides for a /* Check whether this input has overrides for a
non-existent input. */ non-existent input. */
for (auto [inputPath, inputOverride] : overrides) { for (auto [inputAttrPath, inputOverride] : overrides) {
auto inputPath2(inputPath); auto inputAttrPath2(inputAttrPath);
auto follow = inputPath2.back(); auto follow = inputAttrPath2.back();
inputPath2.pop_back(); inputAttrPath2.pop_back();
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow)) if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow))
warn( warn(
"input '%s' has an override for a non-existent input '%s'", "input '%s' has an override for a non-existent input '%s'",
printInputPath(inputPathPrefix), follow); printInputAttrPath(inputAttrPathPrefix), follow);
} }
/* Go over the flake inputs, resolve/fetch them if /* Go over the flake inputs, resolve/fetch them if
necessary (i.e. if they're new or the flakeref changed necessary (i.e. if they're new or the flakeref changed
from what's in the lock file). */ from what's in the lock file). */
for (auto & [id, input2] : flakeInputs) { for (auto & [id, input2] : flakeInputs) {
auto inputPath(inputPathPrefix); auto inputAttrPath(inputAttrPathPrefix);
inputPath.push_back(id); inputAttrPath.push_back(id);
auto inputPathS = printInputPath(inputPath); auto inputAttrPathS = printInputAttrPath(inputAttrPath);
debug("computing input '%s'", inputPathS); debug("computing input '%s'", inputAttrPathS);
try { try {
/* Do we have an override for this input from one of the /* Do we have an override for this input from one of the
ancestors? */ ancestors? */
auto i = overrides.find(inputPath); auto i = overrides.find(inputAttrPath);
bool hasOverride = i != overrides.end(); bool hasOverride = i != overrides.end();
bool hasCliOverride = explicitCliOverrides.contains(inputPath); bool hasCliOverride = explicitCliOverrides.contains(inputAttrPath);
if (hasOverride) if (hasOverride)
overridesUsed.insert(inputPath); overridesUsed.insert(inputAttrPath);
auto input = hasOverride ? i->second.input : input2; auto input = hasOverride ? i->second.input : input2;
/* Resolve relative 'path:' inputs relative to /* Resolve relative 'path:' inputs relative to
@ -525,11 +525,11 @@ LockedFlake lockFlake(
/* Resolve 'follows' later (since it may refer to an input /* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet. */ path we haven't processed yet. */
if (input.follows) { if (input.follows) {
InputPath target; InputAttrPath target;
target.insert(target.end(), input.follows->begin(), input.follows->end()); target.insert(target.end(), input.follows->begin(), input.follows->end());
debug("input '%s' follows '%s'", inputPathS, printInputPath(target)); debug("input '%s' follows '%s'", inputAttrPathS, printInputAttrPath(target));
node->inputs.insert_or_assign(id, target); node->inputs.insert_or_assign(id, target);
continue; continue;
} }
@ -538,7 +538,7 @@ LockedFlake lockFlake(
auto overridenParentPath = auto overridenParentPath =
input.ref->input.isRelative() input.ref->input.isRelative()
? std::optional<InputPath>(hasOverride ? i->second.parentInputPath : inputPathPrefix) ? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
: std::nullopt; : std::nullopt;
auto resolveRelativePath = [&]() -> std::optional<SourcePath> auto resolveRelativePath = [&]() -> std::optional<SourcePath>
@ -557,9 +557,9 @@ LockedFlake lockFlake(
auto getInputFlake = [&]() auto getInputFlake = [&]()
{ {
if (auto resolvedPath = resolveRelativePath()) { if (auto resolvedPath = resolveRelativePath()) {
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath); return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputAttrPath);
} else { } else {
return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath); return getFlake(state, *input.ref, useRegistries, flakeCache, inputAttrPath);
} }
}; };
@ -567,19 +567,19 @@ LockedFlake lockFlake(
And the input is not in updateInputs? */ And the input is not in updateInputs? */
std::shared_ptr<LockedNode> oldLock; std::shared_ptr<LockedNode> oldLock;
updatesUsed.insert(inputPath); updatesUsed.insert(inputAttrPath);
if (oldNode && !lockFlags.inputUpdates.count(inputPath)) if (oldNode && !lockFlags.inputUpdates.count(inputAttrPath))
if (auto oldLock2 = get(oldNode->inputs, id)) if (auto oldLock2 = get(oldNode->inputs, id))
if (auto oldLock3 = std::get_if<0>(&*oldLock2)) if (auto oldLock3 = std::get_if<0>(&*oldLock2))
oldLock = *oldLock3; oldLock = *oldLock3;
if (oldLock if (oldLock
&& oldLock->originalRef == *input.ref && oldLock->originalRef == *input.ref
&& oldLock->parentPath == overridenParentPath && oldLock->parentInputAttrPath == overridenParentPath
&& !hasCliOverride) && !hasCliOverride)
{ {
debug("keeping existing input '%s'", inputPathS); debug("keeping existing input '%s'", inputAttrPathS);
/* Copy the input from the old lock since its flakeref /* Copy the input from the old lock since its flakeref
didn't change and there is no override from a didn't change and there is no override from a
@ -588,18 +588,18 @@ LockedFlake lockFlake(
oldLock->lockedRef, oldLock->lockedRef,
oldLock->originalRef, oldLock->originalRef,
oldLock->isFlake, oldLock->isFlake,
oldLock->parentPath); oldLock->parentInputAttrPath);
node->inputs.insert_or_assign(id, childNode); node->inputs.insert_or_assign(id, childNode);
/* If we have this input in updateInputs, then we /* If we have this input in updateInputs, then we
must fetch the flake to update it. */ must fetch the flake to update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath); auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath);
auto mustRefetch = auto mustRefetch =
lb != lockFlags.inputUpdates.end() lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size() && lb->size() > inputAttrPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin()); && std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
FlakeInputs fakeInputs; FlakeInputs fakeInputs;
@ -618,7 +618,7 @@ LockedFlake lockFlake(
if (!trustLock) { if (!trustLock) {
// It is possible that the flake has changed, // It is possible that the flake has changed,
// so we must confirm all the follows that are in the lock file are also in the flake. // so we must confirm all the follows that are in the lock file are also in the flake.
auto overridePath(inputPath); auto overridePath(inputAttrPath);
overridePath.push_back(i.first); overridePath.push_back(i.first);
auto o = overrides.find(overridePath); auto o = overrides.find(overridePath);
// If the override disappeared, we have to refetch the flake, // If the override disappeared, we have to refetch the flake,
@ -642,21 +642,21 @@ LockedFlake lockFlake(
if (mustRefetch) { if (mustRefetch) {
auto inputFlake = getInputFlake(); auto inputFlake = getInputFlake();
nodePaths.emplace(childNode, inputFlake.path.parent()); nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix, computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix,
inputFlake.path, false); inputFlake.path, false);
} else { } else {
computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, true); computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
} }
} else { } else {
/* We need to create a new lock file entry. So fetch /* We need to create a new lock file entry. So fetch
this input. */ this input. */
debug("creating new input '%s'", inputPathS); debug("creating new input '%s'", inputAttrPathS);
if (!lockFlags.allowUnlocked if (!lockFlags.allowUnlocked
&& !input.ref->input.isLocked() && !input.ref->input.isLocked()
&& !input.ref->input.isRelative()) && !input.ref->input.isRelative())
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS); throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS);
/* Note: in case of an --override-input, we use /* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the the *original* ref (input2.ref) for the
@ -665,7 +665,7 @@ LockedFlake lockFlake(
nuked the next time we update the lock nuked the next time we update the lock
file. That is, overrides are sticky unless you file. That is, overrides are sticky unless you
use --no-write-lock-file. */ use --no-write-lock-file. */
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref; auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref;
if (input.isFlake) { if (input.isFlake) {
auto inputFlake = getInputFlake(); auto inputFlake = getInputFlake();
@ -691,11 +691,11 @@ LockedFlake lockFlake(
own lock file. */ own lock file. */
nodePaths.emplace(childNode, inputFlake.path.parent()); nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks( computeLocks(
inputFlake.inputs, childNode, inputPath, inputFlake.inputs, childNode, inputAttrPath,
oldLock oldLock
? std::dynamic_pointer_cast<const Node>(oldLock) ? std::dynamic_pointer_cast<const Node>(oldLock)
: readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(), : readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
oldLock ? followsPrefix : inputPath, oldLock ? followsPrefix : inputAttrPath,
inputFlake.path, inputFlake.path,
false); false);
} }
@ -722,7 +722,7 @@ LockedFlake lockFlake(
} }
} catch (Error & e) { } catch (Error & e) {
e.addTrace({}, "while updating the flake input '%s'", inputPathS); e.addTrace({}, "while updating the flake input '%s'", inputAttrPathS);
throw; throw;
} }
} }
@ -742,11 +742,11 @@ LockedFlake lockFlake(
for (auto & i : lockFlags.inputOverrides) for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first)) if (!overridesUsed.count(i.first))
warn("the flag '--override-input %s %s' does not match any input", warn("the flag '--override-input %s %s' does not match any input",
printInputPath(i.first), i.second); printInputAttrPath(i.first), i.second);
for (auto & i : lockFlags.inputUpdates) for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i)) if (!updatesUsed.count(i))
warn("'%s' does not match any input of this flake", printInputPath(i)); warn("'%s' does not match any input of this flake", printInputAttrPath(i));
/* Check 'follows' inputs. */ /* Check 'follows' inputs. */
newLockFile.check(); newLockFile.check();
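For context, the override forms handled above correspond to these flake.nix declarations; the input names are the same made-up examples ("dwarffs", "nixpkgs") already used in the messages and comments, not anything this commit adds.

{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    dwarffs.url = "github:edolstra/dwarffs";
    # Input attribute path "dwarffs/nixpkgs": make dwarffs reuse our nixpkgs
    # instead of locking its own copy.
    dwarffs.inputs.nixpkgs.follows = "nixpkgs";
    # The other override form sets a URL directly, e.g.:
    #   dwarffs.inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
  };
  outputs = { self, nixpkgs, dwarffs }: { };
}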


@ -57,7 +57,7 @@ struct FlakeInput
* false = (fetched) static source path * false = (fetched) static source path
*/ */
bool isFlake = true; bool isFlake = true;
std::optional<InputPath> follows; std::optional<InputAttrPath> follows;
FlakeInputs overrides; FlakeInputs overrides;
}; };
@ -201,13 +201,13 @@ struct LockFlags
/** /**
* Flake inputs to be overridden. * Flake inputs to be overridden.
*/ */
std::map<InputPath, FlakeRef> inputOverrides; std::map<InputAttrPath, FlakeRef> inputOverrides;
/** /**
* Flake inputs to be updated. This means that any existing lock * Flake inputs to be updated. This means that any existing lock
* for those inputs will be ignored. * for those inputs will be ignored.
*/ */
std::set<InputPath> inputUpdates; std::set<InputAttrPath> inputUpdates;
}; };
LockedFlake lockFlake( LockedFlake lockFlake(


@ -43,7 +43,7 @@ LockedNode::LockedNode(
: lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info" : lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info"
, originalRef(getFlakeRef(fetchSettings, json, "original", nullptr)) , originalRef(getFlakeRef(fetchSettings, json, "original", nullptr))
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true) , isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
, parentPath(json.find("parent") != json.end() ? (std::optional<InputPath>) json["parent"] : std::nullopt) , parentInputAttrPath(json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
{ {
if (!lockedRef.input.isConsideredLocked(fetchSettings) && !lockedRef.input.isRelative()) if (!lockedRef.input.isConsideredLocked(fetchSettings) && !lockedRef.input.isRelative())
throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.", throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
@ -59,7 +59,7 @@ StorePath LockedNode::computeStorePath(Store & store) const
return lockedRef.input.computeStorePath(store); return lockedRef.input.computeStorePath(store);
} }
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited) static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
{ {
auto pos = root; auto pos = root;
@ -67,8 +67,8 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
if (found != visited.end()) { if (found != visited.end()) {
std::vector<std::string> cycle; std::vector<std::string> cycle;
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath); std::transform(found, visited.cend(), std::back_inserter(cycle), printInputAttrPath);
cycle.push_back(printInputPath(path)); cycle.push_back(printInputAttrPath(path));
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle)); throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
} }
visited.push_back(path); visited.push_back(path);
@ -90,9 +90,9 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
return pos; return pos;
} }
std::shared_ptr<Node> LockFile::findInput(const InputPath & path) std::shared_ptr<Node> LockFile::findInput(const InputAttrPath & path)
{ {
std::vector<InputPath> visited; std::vector<InputAttrPath> visited;
return doFind(root, path, visited); return doFind(root, path, visited);
} }
@ -115,7 +115,7 @@ LockFile::LockFile(
if (jsonNode.find("inputs") == jsonNode.end()) return; if (jsonNode.find("inputs") == jsonNode.end()) return;
for (auto & i : jsonNode["inputs"].items()) { for (auto & i : jsonNode["inputs"].items()) {
if (i.value().is_array()) { // FIXME: remove, obsolete if (i.value().is_array()) { // FIXME: remove, obsolete
InputPath path; InputAttrPath path;
for (auto & j : i.value()) for (auto & j : i.value())
path.push_back(j); path.push_back(j);
node.inputs.insert_or_assign(i.key(), path); node.inputs.insert_or_assign(i.key(), path);
@ -203,8 +203,8 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
n["locked"].erase("__final"); n["locked"].erase("__final");
if (!lockedNode->isFlake) if (!lockedNode->isFlake)
n["flake"] = false; n["flake"] = false;
if (lockedNode->parentPath) if (lockedNode->parentInputAttrPath)
n["parent"] = *lockedNode->parentPath; n["parent"] = *lockedNode->parentInputAttrPath;
} }
nodes[key] = std::move(n); nodes[key] = std::move(n);
@ -267,36 +267,36 @@ bool LockFile::operator ==(const LockFile & other) const
return toJSON().first == other.toJSON().first; return toJSON().first == other.toJSON().first;
} }
-InputPath parseInputPath(std::string_view s)
+InputAttrPath parseInputAttrPath(std::string_view s)
 {
-    InputPath path;
+    InputAttrPath path;
     for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
         if (!std::regex_match(elem, flakeIdRegex))
-            throw UsageError("invalid flake input path element '%s'", elem);
+            throw UsageError("invalid flake input attribute path element '%s'", elem);
         path.push_back(elem);
     }
     return path;
 }
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
{ {
std::set<ref<Node>> done; std::set<ref<Node>> done;
std::map<InputPath, Node::Edge> res; std::map<InputAttrPath, Node::Edge> res;
std::function<void(const InputPath & prefix, ref<Node> node)> recurse; std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;
recurse = [&](const InputPath & prefix, ref<Node> node) recurse = [&](const InputAttrPath & prefix, ref<Node> node)
{ {
if (!done.insert(node).second) return; if (!done.insert(node).second) return;
for (auto &[id, input] : node->inputs) { for (auto &[id, input] : node->inputs) {
auto inputPath(prefix); auto inputAttrPath(prefix);
inputPath.push_back(id); inputAttrPath.push_back(id);
res.emplace(inputPath, input); res.emplace(inputAttrPath, input);
if (auto child = std::get_if<0>(&input)) if (auto child = std::get_if<0>(&input))
recurse(inputPath, *child); recurse(inputAttrPath, *child);
} }
}; };
@ -320,7 +320,7 @@ std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
if (auto node = std::get_if<0>(&edge)) if (auto node = std::get_if<0>(&edge))
stream << describe((*node)->lockedRef); stream << describe((*node)->lockedRef);
else if (auto follows = std::get_if<1>(&edge)) else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows)); stream << fmt("follows '%s'", printInputAttrPath(*follows));
return stream; return stream;
} }
@ -347,15 +347,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
while (i != oldFlat.end() || j != newFlat.end()) { while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) { if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("" ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n", res += fmt("" ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
printInputPath(j->first), j->second); printInputAttrPath(j->first), j->second);
++j; ++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) { } else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first)); res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first));
++i; ++i;
} else { } else {
if (!equals(i->second, j->second)) { if (!equals(i->second, j->second)) {
res += fmt("" ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n", res += fmt("" ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
printInputPath(i->first), printInputAttrPath(i->first),
i->second, i->second,
j->second); j->second);
} }
@ -371,19 +371,19 @@ void LockFile::check()
{ {
auto inputs = getAllInputs(); auto inputs = getAllInputs();
for (auto & [inputPath, input] : inputs) { for (auto & [inputAttrPath, input] : inputs) {
if (auto follows = std::get_if<1>(&input)) { if (auto follows = std::get_if<1>(&input)) {
if (!follows->empty() && !findInput(*follows)) if (!follows->empty() && !findInput(*follows))
throw Error("input '%s' follows a non-existent input '%s'", throw Error("input '%s' follows a non-existent input '%s'",
printInputPath(inputPath), printInputAttrPath(inputAttrPath),
printInputPath(*follows)); printInputAttrPath(*follows));
} }
} }
} }
void check(); void check();
-std::string printInputPath(const InputPath & path)
+std::string printInputAttrPath(const InputAttrPath & path)
 {
     return concatStringsSep("/", path);
 }
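The two helpers above are each other's inverse: an input attribute path string is just its elements joined with '/'. A throwaway Nix sketch of the same round-trip (it omits the flakeIdRegex validation the C++ performs):

let
  parse = s: builtins.filter builtins.isString (builtins.split "/" s);
  print = path: builtins.concatStringsSep "/" path;
in
{
  parsed = parse "dwarffs/nixpkgs";              # [ "dwarffs" "nixpkgs" ]
  roundTrip = print (parse "dwarffs/nixpkgs");   # "dwarffs/nixpkgs"
}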


@ -12,7 +12,7 @@ class StorePath;
namespace nix::flake { namespace nix::flake {
typedef std::vector<FlakeId> InputPath; typedef std::vector<FlakeId> InputAttrPath;
struct LockedNode; struct LockedNode;
@ -23,7 +23,7 @@ struct LockedNode;
*/ */
struct Node : std::enable_shared_from_this<Node> struct Node : std::enable_shared_from_this<Node>
{ {
typedef std::variant<ref<LockedNode>, InputPath> Edge; typedef std::variant<ref<LockedNode>, InputAttrPath> Edge;
std::map<FlakeId, Edge> inputs; std::map<FlakeId, Edge> inputs;
@ -40,17 +40,17 @@ struct LockedNode : Node
/* The node relative to which relative source paths /* The node relative to which relative source paths
(e.g. 'path:../foo') are interpreted. */ (e.g. 'path:../foo') are interpreted. */
std::optional<InputPath> parentPath; std::optional<InputAttrPath> parentInputAttrPath;
LockedNode( LockedNode(
const FlakeRef & lockedRef, const FlakeRef & lockedRef,
const FlakeRef & originalRef, const FlakeRef & originalRef,
bool isFlake = true, bool isFlake = true,
std::optional<InputPath> parentPath = {}) std::optional<InputAttrPath> parentInputAttrPath = {})
: lockedRef(lockedRef) : lockedRef(std::move(lockedRef))
, originalRef(originalRef) , originalRef(std::move(originalRef))
, isFlake(isFlake) , isFlake(isFlake)
, parentPath(parentPath) , parentInputAttrPath(std::move(parentInputAttrPath))
{ } { }
LockedNode( LockedNode(
@ -83,9 +83,9 @@ struct LockFile
bool operator ==(const LockFile & other) const; bool operator ==(const LockFile & other) const;
std::shared_ptr<Node> findInput(const InputPath & path); std::shared_ptr<Node> findInput(const InputAttrPath & path);
std::map<InputPath, Node::Edge> getAllInputs() const; std::map<InputAttrPath, Node::Edge> getAllInputs() const;
static std::string diff(const LockFile & oldLocks, const LockFile & newLocks); static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);
@ -97,8 +97,8 @@ struct LockFile
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile); std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);
InputPath parseInputPath(std::string_view s); InputAttrPath parseInputAttrPath(std::string_view s);
std::string printInputPath(const InputPath & path); std::string printInputAttrPath(const InputAttrPath & path);
} }


@ -1,15 +1,16 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util nix-util,
, nix-store nix-store,
, nix-fetchers nix-fetchers,
, nix-expr nix-expr,
, nlohmann_json nlohmann_json,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,14 +1,15 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util-c nix-util-c,
, nix-store nix-store,
, nix-store-c nix-store-c,
, nix-main nix-main,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,14 +1,15 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, openssl openssl,
, nix-util nix-util,
, nix-store nix-store,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,12 +1,13 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util-c nix-util-c,
, nix-store nix-store,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,15 +1,16 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util-test-support nix-util-test-support,
, nix-store nix-store,
, nix-store-c nix-store-c,
, rapidcheck rapidcheck,
# Configuration Options # Configuration Options
, version version,
}: }:
let let


@ -1,21 +1,22 @@
{ lib {
, buildPackages lib,
, stdenv buildPackages,
, mkMesonExecutable stdenv,
mkMesonExecutable,
, nix-store nix-store,
, nix-store-c nix-store-c,
, nix-store-test-support nix-store-test-support,
, sqlite sqlite,
, rapidcheck rapidcheck,
, gtest gtest,
, runCommand runCommand,
# Configuration Options # Configuration Options
, version version,
, filesetToSource filesetToSource,
}: }:
let let
@ -64,26 +65,33 @@ mkMesonExecutable (finalAttrs: {
passthru = { passthru = {
tests = { tests = {
run = let run =
# Some data is shared with the functional tests: they create it, let
# we consume it. # Some data is shared with the functional tests: they create it,
data = filesetToSource { # we consume it.
root = ../..; data = filesetToSource {
fileset = lib.fileset.unions [ root = ../..;
./data fileset = lib.fileset.unions [
../../tests/functional/derivation ./data
]; ../../tests/functional/derivation
}; ];
in runCommand "${finalAttrs.pname}-run" { };
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; in
} (lib.optionalString stdenv.hostPlatform.isWindows '' runCommand "${finalAttrs.pname}-run"
export HOME="$PWD/home-dir" {
mkdir -p "$HOME" meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
'' + '' }
export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"} (
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} lib.optionalString stdenv.hostPlatform.isWindows ''
touch $out export HOME="$PWD/home-dir"
''); mkdir -p "$HOME"
''
+ ''
export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
''
);
}; };
}; };


@ -1,25 +1,26 @@
{ lib {
, stdenv lib,
, mkMesonLibrary stdenv,
mkMesonLibrary,
, unixtools unixtools,
, darwin darwin,
, nix-util nix-util,
, boost boost,
, curl curl,
, aws-sdk-cpp aws-sdk-cpp,
, libseccomp libseccomp,
, nlohmann_json nlohmann_json,
, sqlite sqlite,
, busybox-sandbox-shell ? null busybox-sandbox-shell ? null,
# Configuration Options # Configuration Options
, version version,
, embeddedSandboxShell ? stdenv.hostPlatform.isStatic embeddedSandboxShell ? stdenv.hostPlatform.isStatic,
}: }:
let let
@ -48,19 +49,20 @@ mkMesonLibrary (finalAttrs: {
(fileset.fileFilter (file: file.hasExt "sql") ./.) (fileset.fileFilter (file: file.hasExt "sql") ./.)
]; ];
nativeBuildInputs = nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;
lib.optional embeddedSandboxShell unixtools.hexdump;
buildInputs = [ buildInputs =
boost [
curl boost
sqlite curl
] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp sqlite
]
++ lib.optional stdenv.hostPlatform.isLinux libseccomp
# There have been issues building these dependencies # There have been issues building these dependencies
++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)) ++ lib.optional (
aws-sdk-cpp stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)
; ) aws-sdk-cpp;
propagatedBuildInputs = [ propagatedBuildInputs = [
nix-util nix-util
@ -75,12 +77,14 @@ mkMesonLibrary (finalAttrs: {
echo ${version} > ../../.version echo ${version} > ../../.version
''; '';
mesonFlags = [ mesonFlags =
(lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux) [
(lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell) (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
] ++ lib.optionals stdenv.hostPlatform.isLinux [ (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox") ]
]; ++ lib.optionals stdenv.hostPlatform.isLinux [
(lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
];
env = { env = {
# Needed for Meson to find Boost. # Needed for Meson to find Boost.


@ -539,11 +539,21 @@ void RemoteStore::addMultipleToStore(
     RepairFlag repair,
     CheckSigsFlag checkSigs)
 {
+    // `addMultipleToStore` is single threaded
+    size_t bytesExpected = 0;
+    for (auto & [pathInfo, _] : pathsToCopy) {
+        bytesExpected += pathInfo.narSize;
+    }
+    act.setExpected(actCopyPath, bytesExpected);
     auto source = sinkToSource([&](Sink & sink) {
-        sink << pathsToCopy.size();
+        size_t nrTotal = pathsToCopy.size();
+        sink << nrTotal;
         // Reverse, so we can release memory at the original start
         std::reverse(pathsToCopy.begin(), pathsToCopy.end());
         while (!pathsToCopy.empty()) {
+            act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0));
             auto & [pathInfo, pathSource] = pathsToCopy.back();
             WorkerProto::Serialise<ValidPathInfo>::write(*this,
                 WorkerProto::WriteConn {


@ -242,8 +242,8 @@ void Store::addMultipleToStore(
storePathsToAdd.insert(thingToAdd.first.path); storePathsToAdd.insert(thingToAdd.first.path);
} }
auto showProgress = [&]() { auto showProgress = [&, nrTotal = pathsToCopy.size()]() {
act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed); act.progress(nrDone, nrTotal, nrRunning, nrFailed);
}; };
processGraph<StorePath>( processGraph<StorePath>(
@@ -1104,9 +1104,6 @@ std::map<StorePath, StorePath> copyPaths(
return storePathForDst; return storePathForDst;
}; };
// total is accessed by each copy, which are each handled in separate threads
std::atomic<uint64_t> total = 0;
for (auto & missingPath : sortedMissing) { for (auto & missingPath : sortedMissing) {
auto info = srcStore.queryPathInfo(missingPath); auto info = srcStore.queryPathInfo(missingPath);
@@ -1116,9 +1113,10 @@ std::map<StorePath, StorePath> copyPaths(
ValidPathInfo infoForDst = *info; ValidPathInfo infoForDst = *info;
infoForDst.path = storePathForDst; infoForDst.path = storePathForDst;
auto source = sinkToSource([&](Sink & sink) { auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) {
// We can reasonably assume that the copy will happen whenever we // We can reasonably assume that the copy will happen whenever we
// read the path, so log something about that at that point // read the path, so log something about that at that point
uint64_t total = 0;
auto srcUri = srcStore.getUri(); auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri(); auto dstUri = dstStore.getUri();
auto storePathS = srcStore.printStorePath(missingPath); auto storePathS = srcStore.printStorePath(missingPath);
@@ -1129,13 +1127,13 @@ std::map<StorePath, StorePath> copyPaths(
LambdaSink progressSink([&](std::string_view data) { LambdaSink progressSink([&](std::string_view data) {
total += data.size(); total += data.size();
act.progress(total, info->narSize); act.progress(total, narSize);
}); });
TeeSink tee { sink, progressSink }; TeeSink tee { sink, progressSink };
srcStore.narFromPath(missingPath, tee); srcStore.narFromPath(missingPath, tee);
}); });
pathsToCopy.push_back(std::pair{infoForDst, std::move(source)}); pathsToCopy.emplace_back(std::move(infoForDst), std::move(source));
} }
dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs); dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs);
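
In the copyPaths hunks above, the shared `std::atomic<uint64_t> total` is replaced by a plain counter local to each path's sink lambda, and the NAR size is captured by value; each copy then counts only its own bytes through a tee of the real sink and a progress sink. A stand-alone sketch of that tee-and-count pattern, using toy stand-ins for the `Sink`/`LambdaSink`/`TeeSink` machinery (all names below are illustrative):

#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>
#include <string_view>

// Toy stand-ins for the sink machinery; the real code uses Sink, LambdaSink, TeeSink.
using Sink = std::function<void(std::string_view)>;

Sink makeTee(Sink a, Sink b)
{
    return [a, b](std::string_view data) { a(data); b(data); };
}

int main()
{
    std::string received;
    Sink realSink = [&](std::string_view data) { received += data; };

    uint64_t narSize = 11;  // known up front; captured by value in the real code
    uint64_t total = 0;     // per-copy counter, no atomics needed
    Sink progressSink = [&](std::string_view data) {
        total += data.size();
        std::printf("copied %llu/%llu bytes\n",
                    (unsigned long long) total, (unsigned long long) narSize);
    };

    Sink tee = makeTee(realSink, progressSink);
    tee("hello ");
    tee("world");  // progress ends at 11/11
    return received == "hello world" ? 0 : 1;
}

The real code reports via `act.progress(total, narSize)` instead of printing, but the shape is the same: the data stream is observed on its way through, so no second pass over the NAR is needed.
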

View file

@@ -2565,7 +2565,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
case FileIngestionMethod::Git: { case FileIngestionMethod::Git: {
return git::dumpHash( return git::dumpHash(
outputHash.hashAlgo, outputHash.hashAlgo,
{getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash; {getFSSourceAccessor(), CanonPath(actualPath)}).hash;
} }
} }
assert(false); assert(false);
@@ -2657,10 +2657,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
wanted.to_string(HashFormat::SRI, true), wanted.to_string(HashFormat::SRI, true),
got.to_string(HashFormat::SRI, true))); got.to_string(HashFormat::SRI, true)));
} }
if (!newInfo0.references.empty()) if (!newInfo0.references.empty()) {
auto numViolations = newInfo.references.size();
delayedException = std::make_exception_ptr( delayedException = std::make_exception_ptr(
BuildError("illegal path references in fixed-output derivation '%s'", BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'",
worker.store.printStorePath(drvPath))); worker.store.printStorePath(drvPath),
numViolations,
worker.store.printStorePath(*newInfo.references.begin())));
}
return newInfo0; return newInfo0;
}, },

View file

@@ -1,11 +1,12 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util nix-util,
# Configuration Options # Configuration Options
, version version,
}: }:
let let

View file

@@ -1,14 +1,15 @@
{ lib {
, mkMesonLibrary lib,
mkMesonLibrary,
, nix-util nix-util,
, nix-util-c nix-util-c,
, rapidcheck rapidcheck,
# Configuration Options # Configuration Options
, version version,
}: }:
let let

View file

@@ -1,19 +1,20 @@
{ lib {
, buildPackages lib,
, stdenv buildPackages,
, mkMesonExecutable stdenv,
mkMesonExecutable,
, nix-util nix-util,
, nix-util-c nix-util-c,
, nix-util-test-support nix-util-test-support,
, rapidcheck rapidcheck,
, gtest gtest,
, runCommand runCommand,
# Configuration Options # Configuration Options
, version version,
}: }:
let let
@@ -57,16 +58,22 @@ mkMesonExecutable (finalAttrs: {
passthru = { passthru = {
tests = { tests = {
run = runCommand "${finalAttrs.pname}-run" { run =
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages; runCommand "${finalAttrs.pname}-run"
} (lib.optionalString stdenv.hostPlatform.isWindows '' {
export HOME="$PWD/home-dir" meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
mkdir -p "$HOME" }
'' + '' (
export _NIX_TEST_UNIT_DATA=${./data} lib.optionalString stdenv.hostPlatform.isWindows ''
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage} export HOME="$PWD/home-dir"
touch $out mkdir -p "$HOME"
''); ''
+ ''
export _NIX_TEST_UNIT_DATA=${./data}
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
touch $out
''
);
}; };
}; };

View file

@@ -1,18 +1,19 @@
{ lib {
, stdenv lib,
, mkMesonLibrary stdenv,
mkMesonLibrary,
, boost boost,
, brotli brotli,
, libarchive libarchive,
, libcpuid libcpuid,
, libsodium libsodium,
, nlohmann_json nlohmann_json,
, openssl openssl,
# Configuration Options # Configuration Options
, version version,
}: }:
let let
@@ -43,8 +44,7 @@ mkMesonLibrary (finalAttrs: {
brotli brotli
libsodium libsodium
openssl openssl
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
;
propagatedBuildInputs = [ propagatedBuildInputs = [
boost boost

View file

@@ -274,6 +274,17 @@ std::optional<typename T::value_type> pop(T & c)
} }
/**
* Append items to a container. TODO: remove this once we can use
* C++23's `append_range()`.
*/
template<class C, typename T>
void append(C & c, std::initializer_list<T> l)
{
c.insert(c.end(), l.begin(), l.end());
}
template<typename T> template<typename T>
class Callback; class Callback;
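
The `append` helper added above is a thin shim over `insert` until C++23's `append_range()` is available. A self-contained usage sketch, with the helper re-declared locally so the snippet compiles on its own (the call site is hypothetical, not taken from this patch):

#include <cassert>
#include <initializer_list>
#include <string>
#include <vector>

// Same shape as the helper added above.
template<class C, typename T>
void append(C & c, std::initializer_list<T> l)
{
    c.insert(c.end(), l.begin(), l.end());
}

int main()
{
    std::vector<std::string> args = {"sh", "-e"};
    append(args, {std::string("builder.sh"), std::string("--verbose")});
    assert(args.size() == 4 && args.back() == "--verbose");
    return 0;
}
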

View file

@@ -1,4 +1,8 @@
{ name, channelName, src }: {
name,
channelName,
src,
}:
derivation { derivation {
builder = "builtin:unpack-channel"; builder = "builtin:unpack-channel";

View file

@@ -8,13 +8,15 @@ derivation {
inherit manifest; inherit manifest;
# !!! grmbl, need structured data for passing this in a clean way. # !!! grmbl, need structured data for passing this in a clean way.
derivations = derivations = map (
map (d: d:
[ (d.meta.active or "true") [
(d.meta.priority or 5) (d.meta.active or "true")
(builtins.length d.outputs) (d.meta.priority or 5)
] ++ map (output: builtins.getAttr output d) d.outputs) (builtins.length d.outputs)
derivations; ]
++ map (output: builtins.getAttr output d) d.outputs
) derivations;
# Building user environments remotely just causes huge amounts of # Building user environments remotely just causes huge amounts of
# network traffic, so don't do that. # network traffic, so don't do that.

View file

@@ -95,20 +95,20 @@ public:
.optional=true, .optional=true,
.handler={[&](std::vector<std::string> inputsToUpdate){ .handler={[&](std::vector<std::string> inputsToUpdate){
for (const auto & inputToUpdate : inputsToUpdate) { for (const auto & inputToUpdate : inputsToUpdate) {
InputPath inputPath; InputAttrPath inputAttrPath;
try { try {
inputPath = flake::parseInputPath(inputToUpdate); inputAttrPath = flake::parseInputAttrPath(inputToUpdate);
} catch (Error & e) { } catch (Error & e) {
warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate); warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
throw e; throw e;
} }
if (lockFlags.inputUpdates.contains(inputPath)) if (lockFlags.inputUpdates.contains(inputAttrPath))
warn("Input '%s' was specified multiple times. You may have done this by accident."); warn("Input '%s' was specified multiple times. You may have done this by accident.", printInputAttrPath(inputAttrPath));
lockFlags.inputUpdates.insert(inputPath); lockFlags.inputUpdates.insert(inputAttrPath);
} }
}}, }},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix); completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}} }}
}); });
@@ -304,7 +304,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
} else if (auto follows = std::get_if<1>(&input.second)) { } else if (auto follows = std::get_if<1>(&input.second)) {
logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'", logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'",
prefix + (last ? treeLast : treeConn), input.first, prefix + (last ? treeLast : treeConn), input.first,
printInputPath(*follows)); printInputAttrPath(*follows));
} }
} }
}; };

View file

@@ -1,14 +1,15 @@
{ lib {
, mkMesonExecutable lib,
mkMesonExecutable,
, nix-store nix-store,
, nix-expr nix-expr,
, nix-main nix-main,
, nix-cmd nix-cmd,
# Configuration Options # Configuration Options
, version version,
}: }:
let let
@@ -20,64 +21,67 @@ mkMesonExecutable (finalAttrs: {
inherit version; inherit version;
workDir = ./.; workDir = ./.;
fileset = fileset.unions ([ fileset = fileset.unions (
../../nix-meson-build-support
./nix-meson-build-support
../../.version
./.version
./meson.build
./meson.options
# Symbolic links to other dirs
## exes
./build-remote
./doc
./nix-build
./nix-channel
./nix-collect-garbage
./nix-copy-closure
./nix-env
./nix-instantiate
./nix-store
## dirs
./scripts
../../scripts
./misc
../../misc
# Doc nix files for --help
../../doc/manual/generate-manpage.nix
../../doc/manual/utils.nix
../../doc/manual/generate-settings.nix
../../doc/manual/generate-store-info.nix
# Other files to be included as string literals
../nix-channel/unpack-channel.nix
../nix-env/buildenv.nix
./get-env.sh
./help-stores.md
../../doc/manual/source/store/types/index.md.in
./profiles.md
../../doc/manual/source/command-ref/files/profiles.md
# Files
] ++ lib.concatMap
(dir: [
(fileset.fileFilter (file: file.hasExt "cc") dir)
(fileset.fileFilter (file: file.hasExt "hh") dir)
(fileset.fileFilter (file: file.hasExt "md") dir)
])
[ [
./. ../../nix-meson-build-support
../build-remote ./nix-meson-build-support
../nix-build ../../.version
../nix-channel ./.version
../nix-collect-garbage ./meson.build
../nix-copy-closure ./meson.options
../nix-env
../nix-instantiate # Symbolic links to other dirs
../nix-store ## exes
./build-remote
./doc
./nix-build
./nix-channel
./nix-collect-garbage
./nix-copy-closure
./nix-env
./nix-instantiate
./nix-store
## dirs
./scripts
../../scripts
./misc
../../misc
# Doc nix files for --help
../../doc/manual/generate-manpage.nix
../../doc/manual/utils.nix
../../doc/manual/generate-settings.nix
../../doc/manual/generate-store-info.nix
# Other files to be included as string literals
../nix-channel/unpack-channel.nix
../nix-env/buildenv.nix
./get-env.sh
./help-stores.md
../../doc/manual/source/store/types/index.md.in
./profiles.md
../../doc/manual/source/command-ref/files/profiles.md
# Files
] ]
++
lib.concatMap
(dir: [
(fileset.fileFilter (file: file.hasExt "cc") dir)
(fileset.fileFilter (file: file.hasExt "hh") dir)
(fileset.fileFilter (file: file.hasExt "md") dir)
])
[
./.
../build-remote
../nix-build
../nix-channel
../nix-collect-garbage
../nix-copy-closure
../nix-env
../nix-instantiate
../nix-store
]
); );
buildInputs = [ buildInputs = [

View file

@@ -1,76 +1,82 @@
{ lib {
, stdenv lib,
, mkMesonDerivation stdenv,
, pkg-config mkMesonDerivation,
, perl pkg-config,
, perlPackages perl,
, nix-store perlPackages,
, version nix-store,
, curl version,
, bzip2 curl,
, libsodium bzip2,
libsodium,
}: }:
let let
inherit (lib) fileset; inherit (lib) fileset;
in in
perl.pkgs.toPerlModule (mkMesonDerivation (finalAttrs: { perl.pkgs.toPerlModule (
pname = "nix-perl"; mkMesonDerivation (finalAttrs: {
inherit version; pname = "nix-perl";
inherit version;
workDir = ./.; workDir = ./.;
fileset = fileset.unions ([ fileset = fileset.unions (
./.version [
../../.version ./.version
./MANIFEST ../../.version
./lib ./MANIFEST
./meson.build ./lib
./meson.options ./meson.build
] ++ lib.optionals finalAttrs.doCheck [ ./meson.options
./.yath.rc.in ]
./t ++ lib.optionals finalAttrs.doCheck [
]); ./.yath.rc.in
./t
]
);
nativeBuildInputs = [ nativeBuildInputs = [
pkg-config pkg-config
perl perl
curl curl
]; ];
buildInputs = [ buildInputs = [
nix-store nix-store
] ++ finalAttrs.passthru.externalBuildInputs; ] ++ finalAttrs.passthru.externalBuildInputs;
# Hack for sake of the dev shell # Hack for sake of the dev shell
passthru.externalBuildInputs = [ passthru.externalBuildInputs = [
bzip2 bzip2
libsodium libsodium
]; ];
# `perlPackages.Test2Harness` is marked broken for Darwin # `perlPackages.Test2Harness` is marked broken for Darwin
doCheck = !stdenv.isDarwin; doCheck = !stdenv.isDarwin;
nativeCheckInputs = [ nativeCheckInputs = [
perlPackages.Test2Harness perlPackages.Test2Harness
]; ];
preConfigure = preConfigure =
# "Inline" .version so its not a symlink, and includes the suffix # "Inline" .version so its not a symlink, and includes the suffix
'' ''
chmod u+w .version chmod u+w .version
echo ${finalAttrs.version} > .version echo ${finalAttrs.version} > .version
''; '';
mesonFlags = [ mesonFlags = [
(lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}") (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}")
(lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}") (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}")
(lib.mesonEnable "tests" finalAttrs.doCheck) (lib.mesonEnable "tests" finalAttrs.doCheck)
]; ];
mesonCheckFlags = [ mesonCheckFlags = [
"--print-errorlogs" "--print-errorlogs"
]; ];
strictDeps = false; strictDeps = false;
})) })
)

View file

@@ -1,6 +1,25 @@
let let
sixteenBytes = "0123456789abcdef"; sixteenBytes = "0123456789abcdef";
times16 = s: builtins.concatStringsSep "" [s s s s s s s s s s s s s s s s]; times16 =
s:
builtins.concatStringsSep "" [
s
s
s
s
s
s
s
s
s
s
s
s
s
s
s
s
];
exp = n: x: if n == 1 then x else times16 (exp (n - 1) x); exp = n: x: if n == 1 then x else times16 (exp (n - 1) x);
sixteenMegabyte = exp 6 sixteenBytes; sixteenMegabyte = exp 6 sixteenBytes;
in in

View file

@@ -4,24 +4,39 @@ with import ./config.nix;
let let
mkDerivation = args: mkDerivation =
derivation ({ args:
inherit system; derivation (
builder = busybox; {
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' inherit system;
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; builder = busybox;
eval "$buildCommand" args = [
'')]; "sh"
outputHashMode = "recursive"; "-e"
outputHashAlgo = "sha256"; args.builder or (builtins.toFile "builder-${args.name}.sh" ''
} // removeAttrs args ["builder" "meta" "passthru"]) if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
// { meta = args.meta or {}; passthru = args.passthru or {}; }; eval "$buildCommand"
'')
];
outputHashMode = "recursive";
outputHashAlgo = "sha256";
}
// removeAttrs args [
"builder"
"meta"
"passthru"
]
)
// {
meta = args.meta or { };
passthru = args.passthru or { };
};
input1 = mkDerivation { input1 = mkDerivation {
shell = busybox; shell = busybox;
name = "build-remote-input-1"; name = "build-remote-input-1";
buildCommand = "echo hi-input1; echo FOO > $out"; buildCommand = "echo hi-input1; echo FOO > $out";
requiredSystemFeatures = ["foo"]; requiredSystemFeatures = [ "foo" ];
outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="; outputHash = "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=";
}; };
@@ -29,7 +44,7 @@ let
shell = busybox; shell = busybox;
name = "build-remote-input-2"; name = "build-remote-input-2";
buildCommand = "echo hi; echo BAR > $out"; buildCommand = "echo hi; echo BAR > $out";
requiredSystemFeatures = ["bar"]; requiredSystemFeatures = [ "bar" ];
outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q="; outputHash = "sha256-XArauVH91AVwP9hBBQNlkX9ccuPpSYx9o0zeIHb6e+Q=";
}; };
@@ -41,21 +56,20 @@ let
read x < ${input2} read x < ${input2}
echo $x BAZ > $out echo $x BAZ > $out
''; '';
requiredSystemFeatures = ["baz"]; requiredSystemFeatures = [ "baz" ];
outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s="; outputHash = "sha256-daKAcPp/+BYMQsVi/YYMlCKoNAxCNDsaivwSHgQqD2s=";
}; };
in in
mkDerivation { mkDerivation {
shell = busybox; shell = busybox;
name = "build-remote"; name = "build-remote";
passthru = { inherit input1 input2 input3; }; passthru = { inherit input1 input2 input3; };
buildCommand = buildCommand = ''
'' read x < ${input1}
read x < ${input1} read y < ${input3}
read y < ${input3} echo "$x $y" > $out
echo "$x $y" > $out '';
''; outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ=";
outputHash = "sha256-5SxbkUw6xe2l9TE1uwCvTtTDysD1vhRor38OtDF0LqQ="; }
}

View file

@@ -1,39 +1,61 @@
{ busybox, contentAddressed ? false }: {
busybox,
contentAddressed ? false,
}:
with import ./config.nix; with import ./config.nix;
let let
caArgs = if contentAddressed then { caArgs =
outputHashMode = "recursive"; if contentAddressed then
outputHashAlgo = "sha256"; {
__contentAddressed = true; outputHashMode = "recursive";
} else {}; outputHashAlgo = "sha256";
__contentAddressed = true;
}
else
{ };
mkDerivation = args: mkDerivation =
derivation ({ args:
inherit system; derivation (
builder = busybox; {
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' inherit system;
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; builder = busybox;
eval "$buildCommand" args = [
'')]; "sh"
} // removeAttrs args ["builder" "meta" "passthru"] "-e"
// caArgs) args.builder or (builtins.toFile "builder-${args.name}.sh" ''
// { meta = args.meta or {}; passthru = args.passthru or {}; }; if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
eval "$buildCommand"
'')
];
}
// removeAttrs args [
"builder"
"meta"
"passthru"
]
// caArgs
)
// {
meta = args.meta or { };
passthru = args.passthru or { };
};
input1 = mkDerivation { input1 = mkDerivation {
shell = busybox; shell = busybox;
name = "build-remote-input-1"; name = "build-remote-input-1";
buildCommand = "echo hi-input1; echo FOO > $out"; buildCommand = "echo hi-input1; echo FOO > $out";
requiredSystemFeatures = ["foo"]; requiredSystemFeatures = [ "foo" ];
}; };
input2 = mkDerivation { input2 = mkDerivation {
shell = busybox; shell = busybox;
name = "build-remote-input-2"; name = "build-remote-input-2";
buildCommand = "echo hi; echo BAR > $out"; buildCommand = "echo hi; echo BAR > $out";
requiredSystemFeatures = ["bar"]; requiredSystemFeatures = [ "bar" ];
}; };
input3 = mkDerivation { input3 = mkDerivation {
@@ -44,19 +66,18 @@ let
read x < ${input2} read x < ${input2}
echo $x BAZ > $out echo $x BAZ > $out
''; '';
requiredSystemFeatures = ["baz"]; requiredSystemFeatures = [ "baz" ];
}; };
in in
mkDerivation { mkDerivation {
shell = busybox; shell = busybox;
name = "build-remote"; name = "build-remote";
passthru = { inherit input1 input2 input3; }; passthru = { inherit input1 input2 input3; };
buildCommand = buildCommand = ''
'' read x < ${input1}
read x < ${input1} read y < ${input3}
read y < ${input3} echo "$x $y" > $out
echo "$x $y" > $out '';
''; }
}

View file

@@ -1 +1,5 @@
{ inNixShell ? false, ... }@args: import ./shell.nix (args // { contentAddressed = true; }) {
inNixShell ? false,
...
}@args:
import ./shell.nix (args // { contentAddressed = true; })

View file

@@ -1,13 +1,21 @@
with import ./config.nix; with import ./config.nix;
let mkCADerivation = args: mkDerivation ({ let
__contentAddressed = true; mkCADerivation =
outputHashMode = "recursive"; args:
outputHashAlgo = "sha256"; mkDerivation (
} // args); {
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
}
// args
);
in in
{ seed ? 0 }: {
seed ? 0,
}:
# A simple content-addressed derivation. # A simple content-addressed derivation.
# The derivation can be arbitrarily modified by passing a different `seed`, # The derivation can be arbitrarily modified by passing a different `seed`,
# but the output will always be the same # but the output will always be the same
@@ -23,7 +31,11 @@ rec {
}; };
rootCA = mkCADerivation { rootCA = mkCADerivation {
name = "rootCA"; name = "rootCA";
outputs = [ "out" "dev" "foo" ]; outputs = [
"out"
"dev"
"foo"
];
buildCommand = '' buildCommand = ''
echo "building a CA derivation" echo "building a CA derivation"
echo "The seed is ${toString seed}" echo "The seed is ${toString seed}"

View file

@@ -1,3 +1,3 @@
{ {
outputs = { self }: import ./content-addressed.nix {}; outputs = { self }: import ./content-addressed.nix { };
} }

View file

@@ -1,10 +1,16 @@
with import ./config.nix; with import ./config.nix;
let mkCADerivation = args: mkDerivation ({ let
__contentAddressed = true; mkCADerivation =
outputHashMode = "recursive"; args:
outputHashAlgo = "sha256"; mkDerivation (
} // args); {
__contentAddressed = true;
outputHashMode = "recursive";
outputHashAlgo = "sha256";
}
// args
);
in in
rec { rec {
@@ -15,13 +21,15 @@ rec {
echo $(date) > $out/current-time echo $(date) > $out/current-time
''; '';
}; };
dep = seed: mkCADerivation { dep =
name = "dep"; seed:
inherit seed; mkCADerivation {
buildCommand = '' name = "dep";
echo ${currentTime} > $out inherit seed;
''; buildCommand = ''
}; echo ${currentTime} > $out
'';
};
dep1 = dep 1; dep1 = dep 1;
dep2 = dep 2; dep2 = dep 2;
toplevel = mkCADerivation { toplevel = mkCADerivation {
@@ -32,4 +40,3 @@ rec {
''; '';
}; };
} }

View file

@@ -1,7 +1,6 @@
# A derivation that would certainly fail if several builders tried to # A derivation that would certainly fail if several builders tried to
# build it at once. # build it at once.
with import ./config.nix; with import ./config.nix;
mkDerivation { mkDerivation {

View file

@@ -2,11 +2,16 @@ with import ./config.nix;
rec { rec {
dep = import ./dependencies.nix {}; dep = import ./dependencies.nix { };
makeTest = nr: args: mkDerivation ({ makeTest =
name = "check-refs-" + toString nr; nr: args:
} // args); mkDerivation (
{
name = "check-refs-" + toString nr;
}
// args
);
src = builtins.toFile "aux-ref" "bla bla"; src = builtins.toFile "aux-ref" "bla bla";
@@ -22,31 +27,31 @@ rec {
test3 = makeTest 3 { test3 = makeTest 3 {
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
allowedReferences = []; allowedReferences = [ ];
inherit dep; inherit dep;
}; };
test4 = makeTest 4 { test4 = makeTest 4 {
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
allowedReferences = [dep]; allowedReferences = [ dep ];
inherit dep; inherit dep;
}; };
test5 = makeTest 5 { test5 = makeTest 5 {
builder = builtins.toFile "builder.sh" "mkdir $out"; builder = builtins.toFile "builder.sh" "mkdir $out";
allowedReferences = []; allowedReferences = [ ];
inherit dep; inherit dep;
}; };
test6 = makeTest 6 { test6 = makeTest 6 {
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link";
allowedReferences = []; allowedReferences = [ ];
inherit dep; inherit dep;
}; };
test7 = makeTest 7 { test7 = makeTest 7 {
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $out $out/link";
allowedReferences = ["out"]; allowedReferences = [ "out" ];
inherit dep; inherit dep;
}; };
@@ -58,19 +63,19 @@ rec {
test9 = makeTest 9 { test9 = makeTest 9 {
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $dep $out/link";
inherit dep; inherit dep;
disallowedReferences = [dep]; disallowedReferences = [ dep ];
}; };
test10 = makeTest 10 { test10 = makeTest 10 {
builder = builtins.toFile "builder.sh" "mkdir $out; echo $test5; ln -s $dep $out/link"; builder = builtins.toFile "builder.sh" "mkdir $out; echo $test5; ln -s $dep $out/link";
inherit dep test5; inherit dep test5;
disallowedReferences = [test5]; disallowedReferences = [ test5 ];
}; };
test11 = makeTest 11 { test11 = makeTest 11 {
__structuredAttrs = true; __structuredAttrs = true;
unsafeDiscardReferences.out = true; unsafeDiscardReferences.out = true;
outputChecks.out.allowedReferences = []; outputChecks.out.allowedReferences = [ ];
buildCommand = ''echo ${dep} > "''${outputs[out]}"''; buildCommand = ''echo ${dep} > "''${outputs[out]}"'';
}; };

View file

@@ -22,36 +22,48 @@ rec {
''; '';
}; };
makeTest = nr: allowreqs: mkDerivation { makeTest =
name = "check-reqs-" + toString nr; nr: allowreqs:
inherit deps; mkDerivation {
builder = builtins.toFile "builder.sh" '' name = "check-reqs-" + toString nr;
mkdir $out inherit deps;
ln -s $deps $out/depdir1 builder = builtins.toFile "builder.sh" ''
''; mkdir $out
allowedRequisites = allowreqs; ln -s $deps $out/depdir1
}; '';
allowedRequisites = allowreqs;
};
# When specifying all the requisites, the build succeeds. # When specifying all the requisites, the build succeeds.
test1 = makeTest 1 [ dep1 dep2 deps ]; test1 = makeTest 1 [
dep1
dep2
deps
];
# But missing anything it fails. # But missing anything it fails.
test2 = makeTest 2 [ dep2 deps ]; test2 = makeTest 2 [
test3 = makeTest 3 [ dep1 deps ]; dep2
deps
];
test3 = makeTest 3 [
dep1
deps
];
test4 = makeTest 4 [ deps ]; test4 = makeTest 4 [ deps ];
test5 = makeTest 5 []; test5 = makeTest 5 [ ];
test6 = mkDerivation { test6 = mkDerivation {
name = "check-reqs"; name = "check-reqs";
inherit deps; inherit deps;
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1";
disallowedRequisites = [dep1]; disallowedRequisites = [ dep1 ];
}; };
test7 = mkDerivation { test7 = mkDerivation {
name = "check-reqs"; name = "check-reqs";
inherit deps; inherit deps;
builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1"; builder = builtins.toFile "builder.sh" "mkdir $out; ln -s $deps $out/depdir1";
disallowedRequisites = [test1]; disallowedRequisites = [ test1 ];
}; };
} }

View file

@@ -1,4 +1,6 @@
{checkBuildId ? 0}: {
checkBuildId ? 0,
}:
with import ./config.nix; with import ./config.nix;
@@ -6,41 +8,38 @@ with import ./config.nix;
nondeterministic = mkDerivation { nondeterministic = mkDerivation {
inherit checkBuildId; inherit checkBuildId;
name = "nondeterministic"; name = "nondeterministic";
buildCommand = buildCommand = ''
'' mkdir $out
mkdir $out date +%s.%N > $out/date
date +%s.%N > $out/date echo "CHECK_TMPDIR=$TMPDIR"
echo "CHECK_TMPDIR=$TMPDIR" echo "checkBuildId=$checkBuildId"
echo "checkBuildId=$checkBuildId" echo "$checkBuildId" > $TMPDIR/checkBuildId
echo "$checkBuildId" > $TMPDIR/checkBuildId '';
'';
}; };
deterministic = mkDerivation { deterministic = mkDerivation {
inherit checkBuildId; inherit checkBuildId;
name = "deterministic"; name = "deterministic";
buildCommand = buildCommand = ''
'' mkdir $out
mkdir $out echo date > $out/date
echo date > $out/date echo "CHECK_TMPDIR=$TMPDIR"
echo "CHECK_TMPDIR=$TMPDIR" echo "checkBuildId=$checkBuildId"
echo "checkBuildId=$checkBuildId" echo "$checkBuildId" > $TMPDIR/checkBuildId
echo "$checkBuildId" > $TMPDIR/checkBuildId '';
'';
}; };
failed = mkDerivation { failed = mkDerivation {
inherit checkBuildId; inherit checkBuildId;
name = "failed"; name = "failed";
buildCommand = buildCommand = ''
'' mkdir $out
mkdir $out echo date > $out/date
echo date > $out/date echo "CHECK_TMPDIR=$TMPDIR"
echo "CHECK_TMPDIR=$TMPDIR" echo "checkBuildId=$checkBuildId"
echo "checkBuildId=$checkBuildId" echo "$checkBuildId" > $TMPDIR/checkBuildId
echo "$checkBuildId" > $TMPDIR/checkBuildId false
false '';
'';
}; };
hashmismatch = import <nix/fetchurl.nix> { hashmismatch = import <nix/fetchurl.nix> {

View file

@@ -60,6 +60,7 @@ unset XDG_DATA_HOME
unset XDG_CONFIG_HOME unset XDG_CONFIG_HOME
unset XDG_CONFIG_DIRS unset XDG_CONFIG_DIRS
unset XDG_CACHE_HOME unset XDG_CACHE_HOME
unset GIT_DIR
export IMPURE_VAR1=foo export IMPURE_VAR1=foo
export IMPURE_VAR2=bar export IMPURE_VAR2=bar

View file

@@ -1,4 +1,6 @@
{ hashInvalidator ? "" }: {
hashInvalidator ? "",
}:
with import ./config.nix; with import ./config.nix;
let let

View file

@@ -2,5 +2,8 @@ derivation {
name = "advanced-attributes-defaults"; name = "advanced-attributes-defaults";
system = "my-system"; system = "my-system";
builder = "/bin/bash"; builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ]; args = [
"-c"
"echo hello > $out"
];
} }

View file

@@ -2,7 +2,13 @@ derivation {
name = "advanced-attributes-structured-attrs-defaults"; name = "advanced-attributes-structured-attrs-defaults";
system = "my-system"; system = "my-system";
builder = "/bin/bash"; builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ]; args = [
outputs = [ "out" "dev" ]; "-c"
"echo hello > $out"
];
outputs = [
"out"
"dev"
];
__structuredAttrs = true; __structuredAttrs = true;
} }

View file

@@ -4,42 +4,58 @@ let
inherit system; inherit system;
name = "foo"; name = "foo";
builder = "/bin/bash"; builder = "/bin/bash";
args = ["-c" "echo foo > $out"]; args = [
"-c"
"echo foo > $out"
];
}; };
bar = derivation { bar = derivation {
inherit system; inherit system;
name = "bar"; name = "bar";
builder = "/bin/bash"; builder = "/bin/bash";
args = ["-c" "echo bar > $out"]; args = [
"-c"
"echo bar > $out"
];
}; };
in in
derivation { derivation {
inherit system; inherit system;
name = "advanced-attributes-structured-attrs"; name = "advanced-attributes-structured-attrs";
builder = "/bin/bash"; builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ]; args = [
"-c"
"echo hello > $out"
];
__sandboxProfile = "sandcastle"; __sandboxProfile = "sandcastle";
__noChroot = true; __noChroot = true;
__impureHostDeps = ["/usr/bin/ditto"]; __impureHostDeps = [ "/usr/bin/ditto" ];
impureEnvVars = ["UNICORN"]; impureEnvVars = [ "UNICORN" ];
__darwinAllowLocalNetworking = true; __darwinAllowLocalNetworking = true;
outputs = [ "out" "bin" "dev" ]; outputs = [
"out"
"bin"
"dev"
];
__structuredAttrs = true; __structuredAttrs = true;
outputChecks = { outputChecks = {
out = { out = {
allowedReferences = [foo]; allowedReferences = [ foo ];
allowedRequisites = [foo]; allowedRequisites = [ foo ];
}; };
bin = { bin = {
disallowedReferences = [bar]; disallowedReferences = [ bar ];
disallowedRequisites = [bar]; disallowedRequisites = [ bar ];
}; };
dev = { dev = {
maxSize = 789; maxSize = 789;
maxClosureSize = 5909; maxClosureSize = 5909;
}; };
}; };
requiredSystemFeatures = ["rainbow" "uid-range"]; requiredSystemFeatures = [
"rainbow"
"uid-range"
];
preferLocalBuild = true; preferLocalBuild = true;
allowSubstitutes = false; allowSubstitutes = false;
} }

View file

@@ -4,30 +4,42 @@ let
inherit system; inherit system;
name = "foo"; name = "foo";
builder = "/bin/bash"; builder = "/bin/bash";
args = ["-c" "echo foo > $out"]; args = [
"-c"
"echo foo > $out"
];
}; };
bar = derivation { bar = derivation {
inherit system; inherit system;
name = "bar"; name = "bar";
builder = "/bin/bash"; builder = "/bin/bash";
args = ["-c" "echo bar > $out"]; args = [
"-c"
"echo bar > $out"
];
}; };
in in
derivation { derivation {
inherit system; inherit system;
name = "advanced-attributes"; name = "advanced-attributes";
builder = "/bin/bash"; builder = "/bin/bash";
args = [ "-c" "echo hello > $out" ]; args = [
"-c"
"echo hello > $out"
];
__sandboxProfile = "sandcastle"; __sandboxProfile = "sandcastle";
__noChroot = true; __noChroot = true;
__impureHostDeps = ["/usr/bin/ditto"]; __impureHostDeps = [ "/usr/bin/ditto" ];
impureEnvVars = ["UNICORN"]; impureEnvVars = [ "UNICORN" ];
__darwinAllowLocalNetworking = true; __darwinAllowLocalNetworking = true;
allowedReferences = [foo]; allowedReferences = [ foo ];
allowedRequisites = [foo]; allowedRequisites = [ foo ];
disallowedReferences = [bar]; disallowedReferences = [ bar ];
disallowedRequisites = [bar]; disallowedRequisites = [ bar ];
requiredSystemFeatures = ["rainbow" "uid-range"]; requiredSystemFeatures = [
"rainbow"
"uid-range"
];
preferLocalBuild = true; preferLocalBuild = true;
allowSubstitutes = false; allowSubstitutes = false;
} }

View file

@@ -1,6 +1,8 @@
with import ./config.nix; with import ./config.nix;
let innerName = "foo"; in let
innerName = "foo";
in
mkDerivation rec { mkDerivation rec {
name = "${innerName}.drv"; name = "${innerName}.drv";

View file

@@ -2,28 +2,33 @@ with import ./config.nix;
rec { rec {
printRefs = printRefs = ''
'' echo $exportReferencesGraph
echo $exportReferencesGraph while read path; do
while read path; do read drv
read drv read nrRefs
read nrRefs echo "$path has $nrRefs references"
echo "$path has $nrRefs references" echo "$path" >> $out
echo "$path" >> $out for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done
for ((n = 0; n < $nrRefs; n++)); do read ref; echo "ref $ref"; test -e "$ref"; done done < refs
done < refs '';
'';
foo."bar.runtimeGraph" = mkDerivation { foo."bar.runtimeGraph" = mkDerivation {
name = "dependencies"; name = "dependencies";
builder = builtins.toFile "build-graph-builder" "${printRefs}"; builder = builtins.toFile "build-graph-builder" "${printRefs}";
exportReferencesGraph = ["refs" (import ./dependencies.nix {})]; exportReferencesGraph = [
"refs"
(import ./dependencies.nix { })
];
}; };
foo."bar.buildGraph" = mkDerivation { foo."bar.buildGraph" = mkDerivation {
name = "dependencies"; name = "dependencies";
builder = builtins.toFile "build-graph-builder" "${printRefs}"; builder = builtins.toFile "build-graph-builder" "${printRefs}";
exportReferencesGraph = ["refs" (import ./dependencies.nix {}).drvPath]; exportReferencesGraph = [
"refs"
(import ./dependencies.nix { }).drvPath
];
}; };
} }

View file

@@ -2,16 +2,29 @@
with import ./config.nix; with import ./config.nix;
let let
mkDerivation = args: mkDerivation =
derivation ({ args:
inherit system; derivation (
builder = busybox; {
args = ["sh" "-e" args.builder or (builtins.toFile "builder-${args.name}.sh" '' inherit system;
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi; builder = busybox;
eval "$buildCommand" args = [
'')]; "sh"
} // removeAttrs args ["builder" "meta"]) "-e"
// { meta = args.meta or {}; }; args.builder or (builtins.toFile "builder-${args.name}.sh" ''
if [ -e "$NIX_ATTRS_SH_FILE" ]; then source $NIX_ATTRS_SH_FILE; fi;
eval "$buildCommand"
'')
];
}
// removeAttrs args [
"builder"
"meta"
]
)
// {
meta = args.meta or { };
};
in in
{ {

Some files were not shown because too many files have changed in this diff