From c5fdbdae321903740e0e735aa89fab5647992687 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 19 Jun 2023 16:54:05 +0200 Subject: [PATCH 001/307] LocalStore::addTempRoot(): Handle ENOENT If the garbage collector has acquired the global GC lock, but hasn't created the GC socket yet, then a client attempting to connect would get ENOENT. Note that this only happens when the GC runs for the first time on a machine. Subsequently clients will get ECONNREFUSED which was already handled. Fixes #7370. --- src/libstore/gc.cc | 13 +++++++++---- tests/gc-non-blocking.sh | 7 ++++++- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 0038ec802..b5b9e2049 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -138,9 +138,9 @@ void LocalStore::addTempRoot(const StorePath & path) try { nix::connect(fdRootsSocket->get(), socketPath); } catch (SysError & e) { - /* The garbage collector may have exited, so we need to - restart. */ - if (e.errNo == ECONNREFUSED) { + /* The garbage collector may have exited or not + created the socket yet, so we need to restart. */ + if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) { debug("GC socket connection refused"); fdRootsSocket->close(); goto restart; @@ -503,6 +503,11 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) auto fdGCLock = openGCLock(); FdLock gcLock(fdGCLock.get(), ltWrite, true, "waiting for the big garbage collector lock..."); + /* Synchronisation point to test ENOENT handling in + addTempRoot(), see tests/gc-non-blocking.sh. */ + if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + readFile(*p); + /* Start the server for receiving new roots. */ auto socketPath = stateDir.get() + gcSocketPath; createDirs(dirOf(socketPath)); @@ -772,7 +777,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - /* Synchronisation point for testing, see tests/gc-concurrent.sh. */ + /* Synchronisation point for testing, see tests/gc-non-blocking.sh. */ if (auto p = getEnv("_NIX_TEST_GC_SYNC")) readFile(*p); diff --git a/tests/gc-non-blocking.sh b/tests/gc-non-blocking.sh index 0d781485d..da6dbdf5d 100644 --- a/tests/gc-non-blocking.sh +++ b/tests/gc-non-blocking.sh @@ -9,16 +9,21 @@ clearStore fifo=$TEST_ROOT/test.fifo mkfifo "$fifo" +fifo2=$TEST_ROOT/test2.fifo +mkfifo "$fifo2" + dummy=$(nix store add-path ./simple.nix) running=$TEST_ROOT/running touch $running -(_NIX_TEST_GC_SYNC=$fifo nix-store --gc -vvvvv; rm $running) & +(_NIX_TEST_GC_SYNC=$fifo _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & pid=$! sleep 2 +(sleep 1; echo > $fifo2) & + outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; mkDerivation { From 3859b425975d0347e724b6abb513662667b3e8c7 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 21 Jun 2023 16:17:21 +0200 Subject: [PATCH 002/307] Wait for pid --- tests/gc-non-blocking.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/gc-non-blocking.sh b/tests/gc-non-blocking.sh index da6dbdf5d..7f2aebb8b 100644 --- a/tests/gc-non-blocking.sh +++ b/tests/gc-non-blocking.sh @@ -23,6 +23,7 @@ pid=$! sleep 2 (sleep 1; echo > $fifo2) & +pid2=$! outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; @@ -32,6 +33,7 @@ outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " }") wait $pid +wait $pid2 (! test -e $running) (! 
test -e $dummy) From 8c54a01df5ee59e4acf151dba8077a9842e8bdc5 Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Mon, 13 Mar 2023 21:14:19 +0100 Subject: [PATCH 003/307] nix: develop: always force SHELL to chosen shell SHELL was inherited from the system environment. This resulted in a new shell being started, but with SHELL still referring to the system shell and not the one used by nix-develop. Applications like make, use SHELL to run commands, which meant that top-level commands are run inside the nix-develop-shell, but sub-commands are ran inside the system shell. This setenv forces SHELL to always be set to the shell used by nix-develop. --- src/nix/develop.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 38482ed42..4a561e52b 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -293,7 +293,6 @@ struct Common : InstallableCommand, MixProfile "NIX_LOG_FD", "NIX_REMOTE", "PPID", - "SHELL", "SHELLOPTS", "SSL_CERT_FILE", // FIXME: only want to ignore /no-cert-file.crt "TEMP", @@ -643,6 +642,10 @@ struct CmdDevelop : Common, MixEnvironment ignoreException(); } + // Override SHELL with the one chosen for this environment. + // This is to make sure the system shell doesn't leak into the build environment. + setenv("SHELL", shell.data(), 1); + // If running a phase or single command, don't want an interactive shell running after // Ctrl-C, so don't pass --rcfile auto args = phase || !command.empty() ? Strings{std::string(baseNameOf(shell)), rcFilePath} From ceab20d056a119317fb29eb0e06dfd0eb0b9d8ad Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Mon, 13 Nov 2023 22:04:34 +0100 Subject: [PATCH 004/307] nix: develop: add tests for interactive shell --- tests/functional/flakes/develop.sh | 75 ++++++++++++++++++++++++++++++ tests/functional/local.mk | 1 + 2 files changed, 76 insertions(+) create mode 100644 tests/functional/flakes/develop.sh diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh new file mode 100644 index 000000000..59f731239 --- /dev/null +++ b/tests/functional/flakes/develop.sh @@ -0,0 +1,75 @@ +source ../common.sh + +clearStore +rm -rf $TEST_HOME/.cache $TEST_HOME/.config $TEST_HOME/.local + +# Create flake under test. +cp ../shell-hello.nix ../config.nix $TEST_HOME/ +cat <$TEST_HOME/flake.nix +{ + inputs.nixpkgs.url = "$TEST_HOME/nixpkgs"; + outputs = {self, nixpkgs}: { + packages.$system.hello = (import ./config.nix).mkDerivation { + name = "hello"; + outputs = [ "out" "dev" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = ""; + }; + }; +} +EOF + +# Create fake nixpkgs flake. +mkdir -p $TEST_HOME/nixpkgs +cp ../config.nix ../shell.nix $TEST_HOME/nixpkgs +cat <$TEST_HOME/nixpkgs/flake.nix +{ + outputs = {self}: { + legacyPackages.$system.bashInteractive = (import ./shell.nix {}).bashInteractive; + }; +} +EOF + +cd $TEST_HOME + +# Test whether `nix develop` passes through environment variables. +[[ "$( + ENVVAR=a nix develop --no-write-lock-file .#hello < Date: Thu, 16 Nov 2023 15:12:31 +0100 Subject: [PATCH 005/307] fixup! 
nix: develop: add tests for interactive shell --- tests/functional/common/vars-and-functions.sh.in | 1 + tests/functional/flakes/develop.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in index 848988af9..02773bf60 100644 --- a/tests/functional/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -45,6 +45,7 @@ if [[ -n "${NIX_DAEMON_PACKAGE:-}" ]]; then DAEMON_PATH="${NIX_DAEMON_PACKAGE}/bin:$DAEMON_PATH" fi coreutils=@coreutils@ +lsof=@lsof@ export dot=@dot@ export SHELL="@bash@" diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index 59f731239..db23ca0c0 100644 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -54,7 +54,7 @@ BASH_INTERACTIVE_EXECUTABLE="$PWD/bash-interactive/bin/bash" [[ "$( nix develop --no-write-lock-file .#hello <&1 | grep -o '/.*/bash' EOF )" -ef "$BASH_INTERACTIVE_EXECUTABLE" ]] From 06a745120bc8fe7625954e970c61028f8a42c31e Mon Sep 17 00:00:00 2001 From: Bob van der Linden Date: Sun, 26 Nov 2023 21:27:46 +0100 Subject: [PATCH 006/307] nix: develop: remove test for interactive shell executable --- tests/functional/flakes/develop.sh | 8 -------- 1 file changed, 8 deletions(-) diff --git a/tests/functional/flakes/develop.sh b/tests/functional/flakes/develop.sh index db23ca0c0..e1e53d364 100644 --- a/tests/functional/flakes/develop.sh +++ b/tests/functional/flakes/develop.sh @@ -50,14 +50,6 @@ EOF nix build --no-write-lock-file './nixpkgs#bashInteractive' --out-link ./bash-interactive BASH_INTERACTIVE_EXECUTABLE="$PWD/bash-interactive/bin/bash" -# Test whether `nix develop` uses nixpkgs#bashInteractive shell. -[[ "$( - nix develop --no-write-lock-file .#hello <&1 | grep -o '/.*/bash' -EOF -)" -ef "$BASH_INTERACTIVE_EXECUTABLE" ]] - # Test whether `nix develop` sets `SHELL` to nixpkgs#bashInteractive shell. [[ "$( SHELL=custom nix develop --no-write-lock-file .#hello < Date: Wed, 8 Nov 2023 17:52:22 -0800 Subject: [PATCH 007/307] Don't attempt to `git add` ignored files This uses `git check-ignore` to determine if files are ignored before attempting to add them in `putFile`. We also add a condition to the `fetchFromWorkdir` filter to always add the `flake.lock` file, even if it's not tracked. This is necessary to resolve inputs. This fixes #8854 without `git add --force`. --- src/libfetchers/git.cc | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 8cd74057c..734c29258 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -313,15 +313,26 @@ struct GitInputScheme : InputScheme writeFile((CanonPath(repoInfo.url) + path).abs(), contents); - runProgram("git", true, - { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); + auto result = runProgram(RunOptions { + .program = "git", + .args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())}, + }); + auto exitCode = WEXITSTATUS(result.first); - // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` - logger->pause(); - Finally restoreLogger([]() { logger->resume(); }); - if (commitMsg) + if (exitCode != 0) { + // The path is not `.gitignore`d, we can add the file. 
runProgram("git", true, - { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) }); + + + if (commitMsg) { + // Pause the logger to allow for user input (such as a gpg passphrase) in `git commit` + logger->pause(); + Finally restoreLogger([]() { logger->resume(); }); + runProgram("git", true, + { "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg }); + } + } } struct RepoInfo From 5b281ddf50775ff37577f80cd3f1f7dbf76c9762 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 2 Dec 2023 02:13:11 +0100 Subject: [PATCH 008/307] reword description of the `max-jobs` setting - remove prose for the default value, which is shown programmatically - add note on how this relates to `cores` - add link to mentioned derivation attribute --- src/libstore/globals.hh | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 38b0d516c..7a30c5ae2 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -151,13 +151,18 @@ public: MaxBuildJobsSetting maxBuildJobs{ this, 1, "max-jobs", R"( - This option defines the maximum number of jobs that Nix will try to - build in parallel. The default is `1`. The special value `auto` - causes Nix to use the number of CPUs in your system. `0` is useful - when using remote builders to prevent any local builds (except for - `preferLocalBuild` derivation attribute which executes locally - regardless). It can be overridden using the `--max-jobs` (`-j`) - command line switch. + Maximum number of jobs that Nix will try to build locally in parallel. + + The special value `auto` causes Nix to use the number of CPUs in your system. + Use `0` to disable local builds and directly use the remote machines specified in [`builders`](#conf-builders). + This will not affect derivations that have [`preferLocalBuild = true`](@docroot@/language/advanced-attributes.md#adv-attr-preferLocalBuild), which are always built locally. + + > **Note** + > + > The number of CPU cores to use for each build job is independently determined by the [`cores`](#conf-cores) setting. + + + The setting can be overridden using the `--max-jobs` (`-j`) command line switch. )", {"build-max-jobs"}}; From 005eaa1bd6c6090d5a55a062f429e6464345c6df Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 1 Dec 2023 16:40:54 +0100 Subject: [PATCH 009/307] doc/prerequisites-source: Add bdwgc-traceable-allocator patch --- doc/manual/src/installation/prerequisites-source.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md index d4babf1ea..907d7f63f 100644 --- a/doc/manual/src/installation/prerequisites-source.md +++ b/doc/manual/src/installation/prerequisites-source.md @@ -32,11 +32,15 @@ your distribution does not provide it, please install it from . - - The [Boehm garbage collector](http://www.hboehm.info/gc/) to reduce - the evaluator’s memory consumption (optional). To enable it, install + - The [Boehm garbage collector (`bdw-gc`)](http://www.hboehm.info/gc/) to reduce + the evaluator’s memory consumption (optional). + + To enable it, install `pkgconfig` and the Boehm garbage collector, and pass the flag `--enable-gc` to `configure`. 
+ For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied. + - The `boost` library of version 1.66.0 or higher. It can be obtained from the official web site . From 06bed2eacdeaa3b92d6e35c5d2133c31baa9e56f Mon Sep 17 00:00:00 2001 From: Julia Evans Date: Sun, 17 Dec 2023 12:00:50 -0500 Subject: [PATCH 010/307] Make fetchTree locked input error message clearer --- src/libexpr/primops/fetchTree.cc | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index eb2df8626..fa503665e 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -167,7 +167,10 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + if (type == "git") + state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); + else + state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); state.checkURI(input.toURLString()); From a47fabff0dbcd63e2645db7336dde5865a1995c4 Mon Sep 17 00:00:00 2001 From: Julia Evans Date: Sun, 17 Dec 2023 12:14:55 -0500 Subject: [PATCH 011/307] use params.isFetchGit instead to check if it came from fetchGit --- src/libexpr/primops/fetchTree.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index fa503665e..505900b30 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -167,7 +167,7 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) - if (type == "git") + if (params.isFetchGit) state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); else state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); From 1f7b62f123fde15b89746b6b1f73c40a8e927499 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Mon, 18 Dec 2023 10:36:18 -0800 Subject: [PATCH 012/307] Use `nix daemon` in the test suite As part of the CLI stabilization effort, the last remaining checkbox (at the moment) for `nix daemon` is that it "needs testing". This implements the proposal of using `nix daemon` in place of `nix-daemon` in the test suite. 
--- tests/functional/build-remote-trustless-should-pass-1.sh | 2 +- tests/functional/common/vars-and-functions.sh.in | 4 ++-- tests/functional/nix-daemon-untrusting.sh | 2 +- tests/functional/store-info.sh | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/functional/build-remote-trustless-should-pass-1.sh b/tests/functional/build-remote-trustless-should-pass-1.sh index 516bdf092..736e280e4 100644 --- a/tests/functional/build-remote-trustless-should-pass-1.sh +++ b/tests/functional/build-remote-trustless-should-pass-1.sh @@ -2,7 +2,7 @@ source common.sh # Remote trusts us file=build-hook.nix -prog=nix-daemon +prog='nix%20daemon' proto=ssh-ng source build-remote-trustless.sh diff --git a/tests/functional/common/vars-and-functions.sh.in b/tests/functional/common/vars-and-functions.sh.in index 848988af9..c25366481 100644 --- a/tests/functional/common/vars-and-functions.sh.in +++ b/tests/functional/common/vars-and-functions.sh.in @@ -95,7 +95,7 @@ startDaemon() { fi # Start the daemon, wait for the socket to appear. rm -f $NIX_DAEMON_SOCKET_PATH - PATH=$DAEMON_PATH nix-daemon & + PATH=$DAEMON_PATH nix --extra-experimental-features 'nix-command' daemon & _NIX_TEST_DAEMON_PID=$! export _NIX_TEST_DAEMON_PID for ((i = 0; i < 300; i++)); do @@ -148,7 +148,7 @@ fi isDaemonNewer () { [[ -n "${NIX_DAEMON_PACKAGE:-}" ]] || return 0 local requiredVersion="$1" - local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix-daemon --version | cut -d' ' -f3) + local daemonVersion=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) [[ $(nix eval --expr "builtins.compareVersions ''$daemonVersion'' ''$requiredVersion''") -ge 0 ]] } diff --git a/tests/functional/nix-daemon-untrusting.sh b/tests/functional/nix-daemon-untrusting.sh index bcdb70989..c339b5833 100755 --- a/tests/functional/nix-daemon-untrusting.sh +++ b/tests/functional/nix-daemon-untrusting.sh @@ -1,3 +1,3 @@ #!/bin/sh -exec nix-daemon --force-untrusted "$@" +exec nix daemon --force-untrusted "$@" diff --git a/tests/functional/store-info.sh b/tests/functional/store-info.sh index c002e50be..18a8131a9 100644 --- a/tests/functional/store-info.sh +++ b/tests/functional/store-info.sh @@ -6,7 +6,7 @@ STORE_INFO_JSON=$(nix store info --json) echo "$STORE_INFO" | grep "Store URL: ${NIX_REMOTE}" if [[ -v NIX_DAEMON_PACKAGE ]] && isDaemonNewer "2.7.0pre20220126"; then - DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix-daemon --version | cut -d' ' -f3) + DAEMON_VERSION=$($NIX_DAEMON_PACKAGE/bin/nix daemon --version | cut -d' ' -f3) echo "$STORE_INFO" | grep "Version: $DAEMON_VERSION" [[ "$(echo "$STORE_INFO_JSON" | jq -r ".version")" == "$DAEMON_VERSION" ]] fi From 0218e4e6c386e4c432520506568420c3cc384e47 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 04:15:51 +0100 Subject: [PATCH 013/307] memset less in addToStoreFromDump MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit resizing a std::string clears the newly added bytes, which is not necessary here and comes with a ~1.4% slowdown on our test nixos config. 
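Roughly, the difference is this (helper names and error handling below are only a sketch, not code from this patch):

    #include <cstdlib>
    #include <new>
    #include <string>
    #include <unistd.h>

    // before: every grow step zero-fills the `want` new bytes...
    void growViaString(std::string & dump, int fd, size_t want)
    {
        auto oldSize = dump.size();
        dump.resize(oldSize + want);                  // value-initializes the new tail
        (void) read(fd, dump.data() + oldSize, want); // ...which read() then overwrites anyway
    }

    // after: the new tail stays uninitialized until read() writes into it
    void growViaRealloc(char * & buf, size_t oldSize, int fd, size_t want)
    {
        if (auto tmp = (char *) realloc(buf, oldSize + want))
            buf = tmp;
        else
            throw std::bad_alloc();
        (void) read(fd, buf + oldSize, want);
    }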
〉 nix eval --raw --impure --expr 'with import {}; system' before: Time (mean ± σ): 4.486 s ± 0.003 s [User: 3.978 s, System: 0.507 s] Range (min … max): 4.482 s … 4.492 s 10 runs after: Time (mean ± σ): 4.429 s ± 0.002 s [User: 3.929 s, System: 0.500 s] Range (min … max): 4.427 s … 4.433 s 10 runs --- src/libstore/local-store.cc | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 7e82bae28..d903bb061 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -18,6 +18,8 @@ #include #include +#include +#include #include #include #include @@ -1130,7 +1132,11 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name path. */ bool inMemory = false; - std::string dump; + struct Free { + void operator()(void* v) { free(v); } + }; + std::unique_ptr dumpBuffer(nullptr); + std::string_view dump; /* Fill out buffer, and decide whether we are working strictly in memory based on whether we break out because the buffer is full @@ -1139,13 +1145,18 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name auto oldSize = dump.size(); constexpr size_t chunkSize = 65536; auto want = std::min(chunkSize, settings.narBufferSize - oldSize); - dump.resize(oldSize + want); + if (auto tmp = realloc(dumpBuffer.get(), oldSize + want)) { + dumpBuffer.release(); + dumpBuffer.reset((char*) tmp); + } else { + throw std::bad_alloc(); + } auto got = 0; Finally cleanup([&]() { - dump.resize(oldSize + got); + dump = {dumpBuffer.get(), dump.size() + got}; }); try { - got = source.read(dump.data() + oldSize, want); + got = source.read(dumpBuffer.get() + oldSize, want); } catch (EndOfFile &) { inMemory = true; break; @@ -1171,7 +1182,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name else writeFile(tempPath, bothSource); - dump.clear(); + dumpBuffer.reset(); + dump = {}; } auto [hash, size] = hashSink->finish(); From 78353deb028fcc700776db9d92dcae45d68fb85f Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 08:24:45 +0100 Subject: [PATCH 014/307] encode black holes as tApp values MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit checking for isBlackhole in the forceValue hot path is rather more expensive than necessary, and with a little bit of trickery we can move such handling into the isApp case. small performance benefit, but under some circumstances we've seen 2% improvement as well. 
〉 nix eval --raw --impure --expr 'with import {}; system' before: Time (mean ± σ): 4.429 s ± 0.002 s [User: 3.929 s, System: 0.500 s] Range (min … max): 4.427 s … 4.433 s 10 runs after: Time (mean ± σ): 4.396 s ± 0.002 s [User: 3.894 s, System: 0.501 s] Range (min … max): 4.393 s … 4.399 s 10 runs --- src/libexpr/eval-inline.hh | 13 +++++++---- src/libexpr/eval.cc | 44 +++++++++++++++++++++----------------- src/libexpr/eval.hh | 8 +++++++ src/libexpr/nixexpr.hh | 7 ++++++ src/libexpr/primops.cc | 23 ++++++++++++++++++++ src/libexpr/primops.hh | 6 ++++++ src/libexpr/value.hh | 24 ++++++++++++++------- 7 files changed, 93 insertions(+), 32 deletions(-) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index c37b1d62b..9d08f1938 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -104,11 +104,16 @@ void EvalState::forceValue(Value & v, Callable getPos) } } else if (v.isApp()) { - PosIdx pos = getPos(); - callFunction(*v.app.left, *v.app.right, v, pos); + try { + callFunction(*v.app.left, *v.app.right, v, noPos); + } catch (InfiniteRecursionError & e) { + // only one black hole can *throw* in any given eval stack so we need not + // check whether the position is set already. + if (v.isBlackhole()) + e.err.errPos = positions[getPos()]; + throw; + } } - else if (v.isBlackhole()) - error("infinite recursion encountered").atPos(getPos()).template debugThrow(); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 9e494148e..71c151f96 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -162,7 +162,17 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, break; case tThunk: case tApp: - str << ""; + if (!isBlackhole()) { + str << ""; + } else { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + str << "«potential infinite recursion»"; + } break; case tLambda: str << ""; @@ -179,15 +189,6 @@ void Value::print(const SymbolTable &symbols, std::ostream &str, case tFloat: str << fpoint; break; - case tBlackhole: - // Although we know for sure that it's going to be an infinite recursion - // when this value is accessed _in the current context_, it's likely - // that the user will misinterpret a simpler «infinite recursion» output - // as a definitive statement about the value, while in fact it may be - // a valid value after `builtins.trace` and perhaps some other steps - // have completed. - str << "«potential infinite recursion»"; - break; default: printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType); abort(); @@ -256,8 +257,7 @@ std::string showType(const Value & v) return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name)); case tExternal: return v.external->showType(); case tThunk: return "a thunk"; - case tApp: return "a function application"; - case tBlackhole: return "a black hole"; + case tApp: return v.isBlackhole() ? "a black hole" : "a function application"; default: return std::string(showType(v.type())); } @@ -1621,15 +1621,17 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & return; } else { /* We have all the arguments, so call the primop. 
*/ - auto name = vCur.primOp->name; + auto * fn = vCur.primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + // This will count black holes, but that's ok, because unrecoverable errors are rare. + if (countCalls) primOpCalls[fn->name]++; try { - vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur); + fn->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + if (!fn->hideInDiagnostics) + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1666,18 +1668,20 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (size_t i = 0; i < argsLeft; ++i) vArgs[argsDone + i] = args[i]; - auto name = primOp->primOp->name; + auto fn = primOp->primOp; nrPrimOpCalls++; - if (countCalls) primOpCalls[name]++; + // This will count black holes, but that's ok, because unrecoverable errors are rare. + if (countCalls) primOpCalls[fn->name]++; try { // TODO: // 1. Unify this and above code. Heavily redundant. // 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc) // so the debugger allows to inspect the wrong parameters passed to the builtin. - primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur); + fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { - addErrorTrace(e, pos, "while calling the '%1%' builtin", name); + if (!fn->hideInDiagnostics) + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f3f6d35b9..e5e401ab6 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -77,6 +77,14 @@ struct PrimOp */ std::optional experimentalFeature; + /** + * Whether to hide this primop in diagnostics. + * + * Used to hide the fact that black holes are primop applications from + * stack traces. + */ + bool hideInDiagnostics; + /** * Validity check to be performed by functions that introduce primops, * such as RegisterPrimOp() and Value::mkPrimOp(). diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 020286815..cf6fd1a8d 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -21,6 +21,13 @@ MakeError(TypeError, EvalError); MakeError(UndefinedVarError, Error); MakeError(MissingArgumentError, EvalError); +class InfiniteRecursionError : public EvalError +{ + friend class EvalState; +public: + using EvalError::EvalError; +}; + /** * Position objects. 
*/ diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 89d5492da..d46eccd34 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4263,6 +4263,29 @@ static RegisterPrimOp primop_splitVersion({ }); +static void prim_blackHoleFn(EvalState & state, const PosIdx pos, Value * * args, Value & v) +{ + state.error("infinite recursion encountered") + .debugThrow(); +} + +static PrimOp primop_blackHole = { + .name = "«blackHole»", + .args = {}, + .fun = prim_blackHoleFn, + .hideInDiagnostics = true, +}; + +static Value makeBlackHole() +{ + Value v; + v.mkPrimOp(&primop_blackHole); + return v; +} + +Value prim_blackHole = makeBlackHole(); + + /************************************************************* * Primop registration *************************************************************/ diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 45486608f..244eada86 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -51,4 +51,10 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu */ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); +/** + * Placeholder value for black holes, used to represent black holes as + * applications of this value to the evaluated thunks. + */ +extern Value prim_blackHole; + } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 30b3d4934..52cd0f901 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -32,7 +32,6 @@ typedef enum { tThunk, tApp, tLambda, - tBlackhole, tPrimOp, tPrimOpApp, tExternal, @@ -151,7 +150,7 @@ public: // type() == nThunk inline bool isThunk() const { return internalType == tThunk; }; inline bool isApp() const { return internalType == tApp; }; - inline bool isBlackhole() const { return internalType == tBlackhole; }; + inline bool isBlackhole() const; // type() == nFunction inline bool isLambda() const { return internalType == tLambda; }; @@ -248,7 +247,7 @@ public: case tLambda: case tPrimOp: case tPrimOpApp: return nFunction; case tExternal: return nExternal; case tFloat: return nFloat; - case tThunk: case tApp: case tBlackhole: return nThunk; + case tThunk: case tApp: return nThunk; } if (invalidIsThunk) return nThunk; @@ -356,11 +355,7 @@ public: lambda.fun = f; } - inline void mkBlackhole() - { - internalType = tBlackhole; - // Value will be overridden anyways - } + inline void mkBlackhole(); void mkPrimOp(PrimOp * p); @@ -447,6 +442,19 @@ public: }; +extern Value prim_blackHole; + +inline bool Value::isBlackhole() const +{ + return internalType == tApp && app.left == &prim_blackHole; +} + +inline void Value::mkBlackhole() +{ + mkApp(&prim_blackHole, &prim_blackHole); +} + + #if HAVE_BOEHMGC typedef std::vector> ValueVector; typedef std::map, traceable_allocator>> ValueMap; From 74c134914c747b1df6385cab5d2298f66a87b61f Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 09:25:20 +0100 Subject: [PATCH 015/307] compare string values with strcmp string_view()ification calls strlen() first, which we don't need here. 
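As a sketch (free functions purely for illustration): constructing a std::string_view from a NUL-terminated C string has to run strlen() over the whole string before the comparison even starts, while strcmp() makes a single pass and stops at the first differing byte.

    #include <cstring>
    #include <string_view>

    bool eqViaView(const char * a, const char * b)
    {
        // two strlen() passes just to build the views, then the actual compare
        return std::string_view(a).compare(std::string_view(b)) == 0;
    }

    bool eqViaStrcmp(const char * a, const char * b)
    {
        // one pass over the data, bails out at the first difference
        return strcmp(a, b) == 0;
    }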
--- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71c151f96..8e89ddcf1 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2436,7 +2436,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean == v2.boolean; case nString: - return v1.string_view().compare(v2.string_view()) == 0; + return strcmp(v1.c_str(), v2.c_str()) == 0; case nPath: return diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index d46eccd34..b7e903667 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -586,7 +586,7 @@ struct CompareValues case nFloat: return v1->fpoint < v2->fpoint; case nString: - return v1->string_view().compare(v2->string_view()) < 0; + return strcmp(v1->c_str(), v2->c_str()) < 0; case nPath: // Note: we don't take the accessor into account // since it's not obvious how to compare them in a @@ -2401,7 +2401,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args, (v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]); std::sort(v.listElems(), v.listElems() + n, - [](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; }); + [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); } static RegisterPrimOp primop_attrNames({ From cc4038d54177c944340607c7d141680e66ff92a7 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 09:49:38 +0100 Subject: [PATCH 016/307] use std::tie() for macro-generated operators MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit as written the comparisons generate copies, even though it looks as though they shouldn't. 
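Roughly (the struct below is made up for illustration): std::make_tuple deduces value types, so each comparison first copies every field into a temporary tuple, whereas std::tie builds a tuple of references and compares the fields in place.

    #include <string>
    #include <tuple>

    struct Built { std::string drvPath; std::string output; };

    // copies both strings of both operands before comparing
    bool lessCopying(const Built & a, const Built & b)
    {
        return std::make_tuple(a.drvPath, a.output) < std::make_tuple(b.drvPath, b.output);
    }

    // tuples of const references: the fields are compared without any copies
    bool lessByReference(const Built & a, const Built & b)
    {
        return std::tie(a.drvPath, a.output) < std::tie(b.drvPath, b.output);
    }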
before: Time (mean ± σ): 4.396 s ± 0.002 s [User: 3.894 s, System: 0.501 s] Range (min … max): 4.393 s … 4.399 s 10 runs after: Time (mean ± σ): 4.260 s ± 0.003 s [User: 3.754 s, System: 0.505 s] Range (min … max): 4.257 s … 4.266 s 10 runs --- src/libcmd/built-path.cc | 4 ++-- src/libstore/derived-path.cc | 8 ++------ src/libutil/comparator.hh | 4 ++-- 3 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/libcmd/built-path.cc b/src/libcmd/built-path.cc index 8e2efc7c3..c5eb93c5d 100644 --- a/src/libcmd/built-path.cc +++ b/src/libcmd/built-path.cc @@ -12,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ diff --git a/src/libstore/derived-path.cc b/src/libstore/derived-path.cc index 3105dbc93..a7b404321 100644 --- a/src/libstore/derived-path.cc +++ b/src/libstore/derived-path.cc @@ -12,9 +12,9 @@ namespace nix { bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \ { \ const MY_TYPE* me = this; \ - auto fields1 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields1 = std::tie(*me->drvPath, me->FIELD); \ me = &other; \ - auto fields2 = std::make_tuple(*me->drvPath, me->FIELD); \ + auto fields2 = std::tie(*me->drvPath, me->FIELD); \ return fields1 COMPARATOR fields2; \ } #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \ @@ -22,13 +22,9 @@ namespace nix { CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \ CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <) -#define FIELD_TYPE std::string CMP(SingleDerivedPath, SingleDerivedPathBuilt, output) -#undef FIELD_TYPE -#define FIELD_TYPE OutputsSpec CMP(SingleDerivedPath, DerivedPathBuilt, outputs) -#undef FIELD_TYPE #undef CMP #undef CMP_ONE diff --git a/src/libutil/comparator.hh b/src/libutil/comparator.hh index a4d20a675..cbc2bb4fd 100644 --- a/src/libutil/comparator.hh +++ b/src/libutil/comparator.hh @@ -13,9 +13,9 @@ #define GENERATE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE, ...) \ PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const { \ __VA_OPT__(const MY_TYPE * me = this;) \ - auto fields1 = std::make_tuple( __VA_ARGS__ ); \ + auto fields1 = std::tie( __VA_ARGS__ ); \ __VA_OPT__(me = &other;) \ - auto fields2 = std::make_tuple( __VA_ARGS__ ); \ + auto fields2 = std::tie( __VA_ARGS__ ); \ return fields1 COMPARATOR fields2; \ } #define GENERATE_EQUAL(prefix, qualification, my_type, args...) \ From 2e0321912a9efa352160eb1e57e6b7b88e517d0d Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 12:59:51 +0100 Subject: [PATCH 017/307] use aligned flex tables MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ~2% speedup on parsing without eval, less (but still significant) on system eval. having flex generate faster parsers leads to very strange misparses. maybe re2c is worth investigating. 
before: Time (mean ± σ): 4.260 s ± 0.003 s [User: 3.754 s, System: 0.505 s] Range (min … max): 4.257 s … 4.266 s 10 runs after: Time (mean ± σ): 4.231 s ± 0.004 s [User: 3.725 s, System: 0.504 s] Range (min … max): 4.226 s … 4.240 s 10 runs --- src/libexpr/lexer.l | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index a3a8608d9..9a35dd594 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -1,4 +1,5 @@ %option reentrant bison-bridge bison-locations +%option align %option noyywrap %option never-interactive %option stack From b78e77b34c14b0f127b22e252309527e84967dcc Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 13:00:18 +0100 Subject: [PATCH 018/307] use custom location type in the parser MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ~1% parser speedup from not using TLS indirections, less on system eval. this could have also gone in flex yyextra data, but that's significantly slower for some reason (albeit still faster than thread locals). before: Time (mean ± σ): 4.231 s ± 0.004 s [User: 3.725 s, System: 0.504 s] Range (min … max): 4.226 s … 4.240 s 10 runs after: Time (mean ± σ): 4.224 s ± 0.005 s [User: 3.711 s, System: 0.512 s] Range (min … max): 4.218 s … 4.234 s 10 runs --- src/libexpr/lexer.l | 9 +++------ src/libexpr/parser.y | 25 +++++++++++++++++++++++++ 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 9a35dd594..df2cbd06f 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -36,9 +36,6 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) #define CUR_POS makeCurPos(*yylloc, data) -// backup to recover from yyless(0) -thread_local YYLTYPE prev_yylloc; - static void initLoc(YYLTYPE * loc) { loc->first_line = loc->last_line = 1; @@ -47,7 +44,7 @@ static void initLoc(YYLTYPE * loc) static void adjustLoc(YYLTYPE * loc, const char * s, size_t len) { - prev_yylloc = *loc; + loc->stash(); loc->first_line = loc->last_line; loc->first_column = loc->last_column; @@ -231,7 +228,7 @@ or { return OR_KW; } {HPATH_START}\$\{ { PUSH_STATE(PATH_START); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); } {PATH_SEG} { @@ -287,7 +284,7 @@ or { return OR_KW; } context (it may be ')', ';', or something of that sort) */ POP_STATE(); yyless(0); - *yylloc = prev_yylloc; + yylloc->unstash(); return PATH_END; } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 16ad8af2e..b331776f0 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -28,6 +28,31 @@ namespace nix { +#define YYLTYPE ::nix::ParserLocation + struct ParserLocation + { + int first_line, first_column; + int last_line, last_column; + + // backup to recover from yyless(0) + int stashed_first_line, stashed_first_column; + int stashed_last_line, stashed_last_column; + + void stash() { + stashed_first_line = first_line; + stashed_first_column = first_column; + stashed_last_line = last_line; + stashed_last_column = last_column; + } + + void unstash() { + first_line = stashed_first_line; + first_column = stashed_first_column; + last_line = stashed_last_line; + last_column = stashed_last_column; + } + }; + struct ParseData { EvalState & state; From f9aee2f2c41652b3b76d16a874fdded4e6d28d92 Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 10 Dec 2023 10:34:55 +0100 Subject: [PATCH 019/307] don't malloc/memset posix accessor buffer it's relatively small and fits on the stack nicely, and we don't need it initialized either. 
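For illustration (element type written out here; only a sketch of the before/after shapes): a heap-allocated vector of 64 KiB both allocates and zero-initializes its elements on every call, while a stack-allocated std::array does neither.

    #include <array>
    #include <vector>

    void readWithHeapBuffer()
    {
        std::vector<unsigned char> buf(64 * 1024); // heap allocation, elements zeroed
        (void) buf.size();
    }

    void readWithStackBuffer()
    {
        std::array<unsigned char, 64 * 1024> buf;  // on the stack, contents left indeterminate
        (void) buf.size();
    }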
--- src/libutil/posix-source-accessor.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 15ff76e59..5f26fa67b 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -25,7 +25,7 @@ void PosixSourceAccessor::readFile( off_t left = st.st_size; - std::vector buf(64 * 1024); + std::array buf; while (left) { checkInterrupt(); ssize_t rd = read(fd.get(), buf.data(), (size_t) std::min(left, (off_t) buf.size())); From 69ed4aee612e247f2d6ebbb44aba743c4282e00e Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 15:48:24 +0100 Subject: [PATCH 020/307] remove lazy-pos forceValue almost all uses of this are interactive, except for deepSeq. deepSeq is going to be expensive and rare enough to not care much about, and Value::determinePos should usually be cheap enough to not be too much of a burden in any case. --- src/libcmd/installable-flake.cc | 2 +- src/libcmd/repl.cc | 4 ++-- src/libexpr/eval-inline.hh | 10 +--------- src/libexpr/eval.cc | 2 +- src/libexpr/eval.hh | 3 --- src/libexpr/get-drvs.cc | 4 ++-- src/nix-build/nix-build.cc | 2 +- src/nix-env/user-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 2 +- 9 files changed, 10 insertions(+), 21 deletions(-) diff --git a/src/libcmd/installable-flake.cc b/src/libcmd/installable-flake.cc index 2f428cb7e..ddec7537b 100644 --- a/src/libcmd/installable-flake.cc +++ b/src/libcmd/installable-flake.cc @@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs")); assert(aOutputs); - state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); }); + state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos)); return aOutputs->value; } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 0986296ad..97d709ff4 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -888,7 +888,7 @@ void NixRepl::evalString(std::string s, Value & v) { Expr * e = parseString(s); e->eval(*state, *env, v); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); } @@ -907,7 +907,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m str.flush(); checkInterrupt(); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); switch (v.type()) { diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 9d08f1938..8a9ebb77a 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -81,15 +81,7 @@ Env & EvalState::allocEnv(size_t size) } -[[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) -{ - forceValue(v, [&]() { return pos; }); -} - - -template -void EvalState::forceValue(Value & v, Callable getPos) { if (v.isThunk()) { Env * env = v.thunk.env; @@ -110,7 +102,7 @@ void EvalState::forceValue(Value & v, Callable getPos) // only one black hole can *throw* in any given eval stack so we need not // check whether the position is set already. 
if (v.isBlackhole()) - e.err.errPos = positions[getPos()]; + e.err.errPos = positions[pos]; throw; } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 8e89ddcf1..4dc5af97a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2044,7 +2044,7 @@ void EvalState::forceValueDeep(Value & v) recurse = [&](Value & v) { if (!seen.insert(&v).second) return; - forceValue(v, [&]() { return v.determinePos(noPos); }); + forceValue(v, v.determinePos(noPos)); if (v.type() == nAttrs) { for (auto & i : *v.attrs) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index e5e401ab6..4c7ea1d98 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -473,9 +473,6 @@ public: */ inline void forceValue(Value & v, const PosIdx pos); - template - inline void forceValue(Value & v, Callable getPos); - /** * Force a value, then recursively force list elements and * attributes. diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index d4e946d81..a6441871c 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -198,7 +198,7 @@ StringSet DrvInfo::queryMetaNames() bool DrvInfo::checkMeta(Value & v) { - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { for (auto elem : v.listItems()) if (!checkMeta(*elem)) return false; @@ -304,7 +304,7 @@ static bool getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { try { - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); if (!state.isDerivation(v)) return true; /* Remove spurious duplicates (e.g., a set like `rec { x = diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 01da028d8..4465e2f90 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -350,7 +350,7 @@ static void main_nix_build(int argc, char * * argv) takesNixShellAttr(vRoot) ? *autoArgsWithInNixShell : *autoArgs, vRoot ).first); - state->forceValue(v, [&]() { return v.determinePos(noPos); }); + state->forceValue(v, v.determinePos(noPos)); getDerivations( *state, v, diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 34f6bd005..fe5b89b3f 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -128,7 +128,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, /* Evaluate it. 
*/ debug("evaluating user environment builder"); - state.forceValue(topLevel, [&]() { return topLevel.determinePos(noPos); }); + state.forceValue(topLevel, topLevel.determinePos(noPos)); NixStringContext context; Attr & aDrvPath(*topLevel.attrs->find(state.sDrvPath)); auto topLevelDrv = state.coerceToStorePath(aDrvPath.pos, *aDrvPath.value, context, ""); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 86b9be17d..ab590b3a6 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -40,7 +40,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, for (auto & i : attrPaths) { Value & v(*findAlongAttrPath(state, i, autoArgs, vRoot).first); - state.forceValue(v, [&]() { return v.determinePos(noPos); }); + state.forceValue(v, v.determinePos(noPos)); NixStringContext context; if (evalOnly) { From f9db4de0f3758e0f730a5d98348e7cc40082104a Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 15:54:16 +0100 Subject: [PATCH 021/307] force-inline forceValue MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit forceValue is extremely hot. interestingly adding likeliness annotations to the branches does not seem to make a difference. before: Time (mean ± σ): 4.224 s ± 0.005 s [User: 3.711 s, System: 0.512 s] Range (min … max): 4.218 s … 4.234 s 10 runs after: Time (mean ± σ): 4.140 s ± 0.009 s [User: 3.647 s, System: 0.492 s] Range (min … max): 4.130 s … 4.152 s 10 runs --- src/libexpr/eval-inline.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 8a9ebb77a..d48871628 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -81,6 +81,7 @@ Env & EvalState::allocEnv(size_t size) } +[[gnu::always_inline]] void EvalState::forceValue(Value & v, const PosIdx pos) { if (v.isThunk()) { From 2b0e95e7aabd075f95cbfb1607330b2284b01918 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 11 Dec 2023 16:23:08 +0100 Subject: [PATCH 022/307] use singleton expr to generate black hole errors this also reduces forceValue code size and removes the need for hideInDiagnostics. coopting thunk forcing like this has the additional benefit of clarifying how these errors can happen in the first place. --- src/libexpr/eval-inline.hh | 14 +++----------- src/libexpr/eval.cc | 35 +++++++++++++++++++++++++++-------- src/libexpr/eval.hh | 10 ++-------- src/libexpr/nixexpr.cc | 2 ++ src/libexpr/nixexpr.hh | 10 ++++++++++ src/libexpr/primops.cc | 23 ----------------------- src/libexpr/primops.hh | 6 ------ src/libexpr/value.hh | 12 +++++++----- 8 files changed, 51 insertions(+), 61 deletions(-) diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index d48871628..52aa75b5f 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -93,20 +93,12 @@ void EvalState::forceValue(Value & v, const PosIdx pos) expr->eval(*this, *env, v); } catch (...) { v.mkThunk(env, expr); + tryFixupBlackHolePos(v, pos); throw; } } - else if (v.isApp()) { - try { - callFunction(*v.app.left, *v.app.right, v, noPos); - } catch (InfiniteRecursionError & e) { - // only one black hole can *throw* in any given eval stack so we need not - // check whether the position is set already. 
- if (v.isBlackhole()) - e.err.errPos = positions[pos]; - throw; - } - } + else if (v.isApp()) + callFunction(*v.app.left, *v.app.right, v, pos); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 4dc5af97a..0c35b3713 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -256,8 +256,8 @@ std::string showType(const Value & v) case tPrimOpApp: return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name)); case tExternal: return v.external->showType(); - case tThunk: return "a thunk"; - case tApp: return v.isBlackhole() ? "a black hole" : "a function application"; + case tThunk: return v.isBlackhole() ? "a black hole" : "a thunk"; + case tApp: return "a function application"; default: return std::string(showType(v.type())); } @@ -1624,14 +1624,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto * fn = vCur.primOp; nrPrimOpCalls++; - // This will count black holes, but that's ok, because unrecoverable errors are rare. if (countCalls) primOpCalls[fn->name]++; try { fn->fun(*this, vCur.determinePos(noPos), args, vCur); } catch (Error & e) { - if (!fn->hideInDiagnostics) - addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -1670,7 +1668,6 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto fn = primOp->primOp; nrPrimOpCalls++; - // This will count black holes, but that's ok, because unrecoverable errors are rare. if (countCalls) primOpCalls[fn->name]++; try { @@ -1680,8 +1677,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & // so the debugger allows to inspect the wrong parameters passed to the builtin. fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur); } catch (Error & e) { - if (!fn->hideInDiagnostics) - addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); + addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name); throw; } @@ -2035,6 +2031,29 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) } +void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) +{ + state.error("infinite recursion encountered") + .debugThrow(); +} + +// always force this to be separate, otherwise forceValue may inline it and take +// a massive perf hit +[[gnu::noinline]] +void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) +{ + if (!v.isBlackhole()) + return; + auto e = std::current_exception(); + try { + std::rethrow_exception(e); + } catch (InfiniteRecursionError & e) { + e.err.errPos = positions[pos]; + } catch (...) { + } +} + + void EvalState::forceValueDeep(Value & v) { std::set seen; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 4c7ea1d98..56bc5e48f 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -77,14 +77,6 @@ struct PrimOp */ std::optional experimentalFeature; - /** - * Whether to hide this primop in diagnostics. - * - * Used to hide the fact that black holes are primop applications from - * stack traces. - */ - bool hideInDiagnostics; - /** * Validity check to be performed by functions that introduce primops, * such as RegisterPrimOp() and Value::mkPrimOp(). @@ -473,6 +465,8 @@ public: */ inline void forceValue(Value & v, const PosIdx pos); + void tryFixupBlackHolePos(Value & v, PosIdx pos); + /** * Force a value, then recursively force list elements and * attributes. 
diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 22be8e68c..84860b30f 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -9,6 +9,8 @@ namespace nix { +ExprBlackHole eBlackHole; + struct PosAdapter : AbstractPos { Pos::Origin origin; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index cf6fd1a8d..1e57fec7a 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -462,6 +462,16 @@ struct ExprPos : Expr COMMON_METHODS }; +/* only used to mark thunks as black holes. */ +struct ExprBlackHole : Expr +{ + void show(const SymbolTable & symbols, std::ostream & str) const override {} + void eval(EvalState & state, Env & env, Value & v) override; + void bindVars(EvalState & es, const std::shared_ptr & env) override {} +}; + +extern ExprBlackHole eBlackHole; + /* Static environments are used to map variable names onto (level, displacement) pairs used to obtain the value of the variable at diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index b7e903667..2a71747a0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4263,29 +4263,6 @@ static RegisterPrimOp primop_splitVersion({ }); -static void prim_blackHoleFn(EvalState & state, const PosIdx pos, Value * * args, Value & v) -{ - state.error("infinite recursion encountered") - .debugThrow(); -} - -static PrimOp primop_blackHole = { - .name = "«blackHole»", - .args = {}, - .fun = prim_blackHoleFn, - .hideInDiagnostics = true, -}; - -static Value makeBlackHole() -{ - Value v; - v.mkPrimOp(&primop_blackHole); - return v; -} - -Value prim_blackHole = makeBlackHole(); - - /************************************************************* * Primop registration *************************************************************/ diff --git a/src/libexpr/primops.hh b/src/libexpr/primops.hh index 244eada86..45486608f 100644 --- a/src/libexpr/primops.hh +++ b/src/libexpr/primops.hh @@ -51,10 +51,4 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu */ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v); -/** - * Placeholder value for black holes, used to represent black holes as - * applications of this value to the evaluated thunks. 
- */ -extern Value prim_blackHole; - } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 52cd0f901..d9860e921 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -61,6 +61,7 @@ class Bindings; struct Env; struct Expr; struct ExprLambda; +struct ExprBlackHole; struct PrimOp; class Symbol; class PosIdx; @@ -442,16 +443,17 @@ public: }; -extern Value prim_blackHole; +extern ExprBlackHole eBlackHole; -inline bool Value::isBlackhole() const +bool Value::isBlackhole() const { - return internalType == tApp && app.left == &prim_blackHole; + return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole; } -inline void Value::mkBlackhole() +void Value::mkBlackhole() { - mkApp(&prim_blackHole, &prim_blackHole); + internalType = tThunk; + thunk.expr = (Expr*) &eBlackHole; } From e94a96893f074a949ba263d66d47e665040fed41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 21 Dec 2023 10:00:14 +0100 Subject: [PATCH 023/307] =?UTF-8?q?maintainers:=20Mention=20the=20monthly?= =?UTF-8?q?=20=E2=80=9CAssigned=E2=80=9D=20column=20review?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As decided during [the last team meeting](https://discourse.nixos.org/t/2023-12-18-nix-team-meeting-minutes-113/37050#improving-internal-and-external-communication-3), we want to regularly review the `Assigned` column in the team's board because it tends to turn into a graveyard of forgotten stuff. So encode that in the handbook --- maintainers/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/maintainers/README.md b/maintainers/README.md index ee97c1195..585e2b50a 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -43,7 +43,8 @@ The team meets twice a week: - Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) - 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min) + 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min). + Once a month, this slot is used to check the [Assigned](#assigned) column to make sure that nothing bitrots in it. 
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn) From 5ed1884875cc6a6e9330b6c5a2f24c35e685f5a0 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Thu, 21 Dec 2023 10:14:54 -0800 Subject: [PATCH 024/307] libcmd: Installable::toStorePaths -> Installable::toStorePathSet --- src/libcmd/installables.cc | 4 ++-- src/libcmd/installables.hh | 2 +- src/nix/develop.cc | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 6b3c82374..be9ebe9ca 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -715,7 +715,7 @@ BuiltPaths Installable::toBuiltPaths( } } -StorePathSet Installable::toStorePaths( +StorePathSet Installable::toStorePathSet( ref evalStore, ref store, Realise mode, OperateOn operateOn, @@ -735,7 +735,7 @@ StorePath Installable::toStorePath( Realise mode, OperateOn operateOn, ref installable) { - auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable}); + auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable}); if (paths.size() != 1) throw Error("argument '%s' should evaluate to one store path", installable->what()); diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index e087f935c..c8ad41388 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -165,7 +165,7 @@ struct Installable const Installables & installables, BuildMode bMode = bmNormal); - static std::set toStorePaths( + static std::set toStorePathSet( ref evalStore, ref store, Realise mode, diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 8db2de491..974020951 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -376,7 +376,7 @@ struct Common : InstallableCommand, MixProfile for (auto & [installable_, dir_] : redirects) { auto dir = absPath(dir_); auto installable = parseInstallable(store, installable_); - auto builtPaths = Installable::toStorePaths( + auto builtPaths = Installable::toStorePathSet( getEvalStore(), store, Realise::Nothing, OperateOn::Output, {installable}); for (auto & path: builtPaths) { auto from = store->printStorePath(path); @@ -631,7 +631,7 @@ struct CmdDevelop : Common, MixEnvironment bool found = false; - for (auto & path : Installable::toStorePaths(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { + for (auto & path : Installable::toStorePathSet(getEvalStore(), store, Realise::Outputs, OperateOn::Output, {bashInstallable})) { auto s = store->printStorePath(path) + "/bin/bash"; if (pathExists(s)) { shell = s; From 1fb43d1eee6f398686523c0bb80adb987c584c61 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Wed, 20 Dec 2023 10:25:22 -0800 Subject: [PATCH 025/307] tests: add a test for command line ordering --- tests/functional/shell-hello.nix | 16 ++++++++++++++++ tests/functional/shell.sh | 8 ++++++++ 2 files changed, 24 insertions(+) diff --git a/tests/functional/shell-hello.nix b/tests/functional/shell-hello.nix index 3fdd3501d..dfe66ef93 100644 --- a/tests/functional/shell-hello.nix +++ b/tests/functional/shell-hello.nix @@ -23,4 +23,20 @@ with import ./config.nix; chmod +x $dev/bin/hello2 ''; }; + + salve-mundi = mkDerivation { + name = "salve-mundi"; + outputs = [ "out" ]; + meta.outputsToInstall = [ "out" ]; + buildCommand = + '' + mkdir -p $out/bin + + cat > $out/bin/hello < Date: Mon, 18 Dec 2023 15:22:09 -0800 Subject: [PATCH 026/307] nix 
shell: reflect command line order in PATH order Prior to this change, Nix would prepend every installable to the PATH list in order to ensure that installables appeared before the current PATH from the ambient environment. With this change, all the installables are still prepended to the PATH, but in the same order as they appear on the command line. This means that the first of two packages that expose an executable `hello` would appear in the PATH first, and thus be executed first. See the test in the prior commit for a more concrete example. --- src/libcmd/installables.cc | 14 ++++++++++++++ src/libcmd/installables.hh | 7 +++++++ src/nix/run.cc | 9 ++++++--- 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index be9ebe9ca..736c41a1e 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -729,6 +729,20 @@ StorePathSet Installable::toStorePathSet( return outPaths; } +StorePaths Installable::toStorePaths( + ref evalStore, + ref store, + Realise mode, OperateOn operateOn, + const Installables & installables) +{ + StorePaths outPaths; + for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) { + auto thisOutPaths = path.outPaths(); + outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end()); + } + return outPaths; +} + StorePath Installable::toStorePath( ref evalStore, ref store, diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index c8ad41388..95e8841ca 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -172,6 +172,13 @@ struct Installable OperateOn operateOn, const Installables & installables); + static std::vector toStorePaths( + ref evalStore, + ref store, + Realise mode, + OperateOn operateOn, + const Installables & installables); + static StorePath toStorePath( ref evalStore, ref store, diff --git a/src/nix/run.cc b/src/nix/run.cc index efc0c56a1..9bca5b9d0 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -114,7 +114,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment setEnviron(); - auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + std::vector pathAdditions; while (!todo.empty()) { auto path = todo.front(); @@ -122,7 +122,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (!done.insert(path).second) continue; if (true) - unixPath.push_front(store->printStorePath(path) + "/bin"); + pathAdditions.push_back(store->printStorePath(path) + "/bin"); auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { @@ -131,7 +131,10 @@ struct CmdShell : InstallablesCommand, MixEnvironment } } - setenv("PATH", concatStringsSep(":", unixPath).c_str(), 1); + auto unixPath = tokenizeString(getEnv("PATH").value_or(""), ":"); + unixPath.insert(unixPath.begin(), pathAdditions.begin(), pathAdditions.end()); + auto unixPathString = concatStringsSep(":", unixPath); + setenv("PATH", unixPathString.c_str(), 1); Strings args; for (auto & arg : command) args.push_back(arg); From 397cf4e2859d5723f1e36aeb4b26ecae673515a8 Mon Sep 17 00:00:00 2001 From: Felix Uhl Date: Mon, 27 Nov 2023 23:09:32 +0100 Subject: [PATCH 027/307] nix search: Disallow empty regex Fixes #4739 Fixes #3553 in spirit IMO --- doc/manual/rl-next/empty-search-regex.md | 8 ++++++++ src/nix/search.cc | 6 ++---- src/nix/search.md | 15 ++++++++++----- tests/functional/search.sh | 17 ++++++++++------- 4 
files changed, 30 insertions(+), 16 deletions(-) create mode 100644 doc/manual/rl-next/empty-search-regex.md diff --git a/doc/manual/rl-next/empty-search-regex.md b/doc/manual/rl-next/empty-search-regex.md new file mode 100644 index 000000000..b193f9456 --- /dev/null +++ b/doc/manual/rl-next/empty-search-regex.md @@ -0,0 +1,8 @@ +synopsis: Disallow empty search regex in `nix search` +prs: #9481 +description: { + +[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. + +} + diff --git a/src/nix/search.cc b/src/nix/search.cc index ef0139e09..97ef1375e 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -67,11 +67,9 @@ struct CmdSearch : InstallableValueCommand, MixJSON settings.readOnlyMode = true; evalSettings.enableImportFromDerivation.setDefault(false); - // Empty search string should match all packages - // Use "^" here instead of ".*" due to differences in resulting highlighting - // (see #1893 -- libc++ claims empty search string is not in POSIX grammar) + // Recommend "^" here instead of ".*" due to differences in resulting highlighting if (res.empty()) - res.push_back("^"); + throw UsageError("Must provide at least one regex! To match all packages, use '%s'.", "nix search ^"); std::vector regexes; std::vector excludeRegexes; diff --git a/src/nix/search.md b/src/nix/search.md index 0c5d22549..f65ac9b17 100644 --- a/src/nix/search.md +++ b/src/nix/search.md @@ -5,7 +5,7 @@ R""( * Show all packages in the `nixpkgs` flake: ```console - # nix search nixpkgs + # nix search nixpkgs ^ * legacyPackages.x86_64-linux.AMB-plugins (0.8.1) A set of ambisonics ladspa plugins @@ -34,7 +34,7 @@ R""( * Show all packages in the flake in the current directory: ```console - # nix search + # nix search . ^ ``` * Search for Firefox or Chromium: @@ -64,11 +64,16 @@ R""( `nix search` searches [*installable*](./nix.md#installables) (which can be evaluated, that is, a flake or Nix expression, but not a store path or store derivation path) for packages whose name or description matches all of the -regular expressions *regex*. For each matching package, It prints the +regular expressions *regex*. For each matching package, It prints the full attribute name (from the root of the [installable](./nix.md#installables)), the version and the `meta.description` field, highlighting the substrings that -were matched by the regular expressions. If no regular expressions are -specified, all packages are shown. +were matched by the regular expressions. + +To show all packages, use the regular expression `^`. In contrast to `.*`, +it avoids highlighting the entire name and description of every package. + +> Note that in this context, `^` is the regex character to match the beginning of a string, *not* the delimiter for +> [selecting a derivation output](@docroot@/command-ref/new-cli/nix.md#derivation-output-selection). 
# Flake output attributes diff --git a/tests/functional/search.sh b/tests/functional/search.sh index 8742f8736..d9c7a75da 100644 --- a/tests/functional/search.sh +++ b/tests/functional/search.sh @@ -17,12 +17,15 @@ clearCache # Multiple arguments will not exist (( $(nix search -f search.nix '' hello broken | wc -l) == 0 )) +# No regex should return an error +(( $(nix search -f search.nix '' | wc -l) == 0 )) + ## Search expressions # Check that empty search string matches all -nix search -f search.nix '' |grepQuiet foo -nix search -f search.nix '' |grepQuiet bar -nix search -f search.nix '' |grepQuiet hello +nix search -f search.nix '' ^ | grepQuiet foo +nix search -f search.nix '' ^ | grepQuiet bar +nix search -f search.nix '' ^ | grepQuiet hello ## Tests for multiple regex/match highlighting @@ -39,8 +42,8 @@ e=$'\x1b' # grep doesn't support \e, \033 or even \x1b (( $(nix search -f search.nix '' 'b' | grep -Eo "$e\[32;1mb$e\[(0|0;1)m" | wc -l) == 3 )) ## Tests for --exclude -(( $(nix search -f search.nix -e hello | grep -c hello) == 0 )) +(( $(nix search -f search.nix ^ -e hello | grep -c hello) == 0 )) -(( $(nix search -f search.nix foo --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 )) -(( $(nix search -f search.nix foo -e foo --exclude bar | grep -Ec 'foo|bar') == 0 )) -[[ $(nix search -f search.nix -e bar --json | jq -c 'keys') == '["foo","hello"]' ]] +(( $(nix search -f search.nix foo ^ --exclude 'foo|bar' | grep -Ec 'foo|bar') == 0 )) +(( $(nix search -f search.nix foo ^ -e foo --exclude bar | grep -Ec 'foo|bar') == 0 )) +[[ $(nix search -f search.nix '' ^ -e bar --json | jq -c 'keys') == '["foo","hello"]' ]] From 3187bc9ac3dd193b9329ef68c73ac3cca794ed78 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 21 Dec 2023 16:48:29 +0100 Subject: [PATCH 028/307] nix profile: Remove indices --- src/nix/profile-list.md | 2 -- src/nix/profile-remove.md | 7 ------ src/nix/profile-upgrade.md | 7 ------ src/nix/profile.cc | 39 ++++++++++----------------------- tests/functional/nix-profile.sh | 11 +++++----- 5 files changed, 16 insertions(+), 50 deletions(-) diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md index facfdf0d6..9811b9ec9 100644 --- a/src/nix/profile-list.md +++ b/src/nix/profile-list.md @@ -7,14 +7,12 @@ R""( ```console # nix profile list Name: gdb - Index: 0 Flake attribute: legacyPackages.x86_64-linux.gdb Original flake URL: flake:nixpkgs Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1 Name: blender-bin - Index: 1 Flake attribute: packages.x86_64-linux.default Original flake URL: flake:blender-bin Locked flake URL: github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md index c994b79bd..1f6532250 100644 --- a/src/nix/profile-remove.md +++ b/src/nix/profile-remove.md @@ -8,13 +8,6 @@ R""( # nix profile remove hello ``` -* Remove a package by index - *(deprecated, will be removed in a future version)*: - - ```console - # nix profile remove 3 - ``` - * Remove all packages: ```console diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md index 47103edfc..432b8fa94 100644 --- a/src/nix/profile-upgrade.md +++ b/src/nix/profile-upgrade.md @@ -15,13 +15,6 @@ R""( # nix profile upgrade hello ``` -* Upgrade a specific package by index - *(deprecated, will be removed in a future version)*: - - ```console - # nix profile upgrade 0 - ``` - # Description This command upgrades a 
previously installed package in a Nix profile, diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 1d89815e2..517693cd4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -470,40 +470,28 @@ public: std::string pattern; std::regex reg; }; - typedef std::variant Matcher; + typedef std::variant Matcher; std::vector getMatchers(ref store) { std::vector res; - auto anyIndexMatchers = false; - for (auto & s : _matchers) { - if (auto n = string2Int(s)) { - res.push_back(*n); - anyIndexMatchers = true; - } + if (auto n = string2Int(s)) + throw Error("'nix profile' no longer supports indices ('%d')", *n); else if (store->isStorePath(s)) res.push_back(s); else res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)}); } - if (anyIndexMatchers) { - warn("Indices are deprecated and will be removed in a future version!\n" - " Refer to packages by their `Name` as printed by `nix profile list`.\n" - " See https://github.com/NixOS/nix/issues/9171 for more information."); - } - return res; } - bool matches(const Store & store, const ProfileElement & element, size_t pos, const std::vector & matchers) + bool matches(const Store & store, const ProfileElement & element, const std::vector & matchers) { for (auto & matcher : matchers) { - if (auto n = std::get_if(&matcher)) { - if (*n == pos) return true; - } else if (auto path = std::get_if(&matcher)) { + if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { if (std::regex_match(element.name, regex->reg)) @@ -539,7 +527,7 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem for (size_t i = 0; i < oldManifest.elements.size(); ++i) { auto & element(oldManifest.elements[i]); - if (!matches(*store, element, i, matchers)) { + if (!matches(*store, element, matchers)) { newManifest.elements.push_back(std::move(element)); } else { notice("removing '%s'", element.identifier()); @@ -553,11 +541,9 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem if (removedCount == 0) { for (auto matcher: matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ + if (const Path * path = std::get_if(&matcher)) { warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ + } else if (const RegexPattern * regex = std::get_if(&matcher)) { warn("'%s' does not match any packages", regex->pattern); } } @@ -595,7 +581,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf for (size_t i = 0; i < manifest.elements.size(); ++i) { auto & element(manifest.elements[i]); - if (!matches(*store, element, i, matchers)) { + if (!matches(*store, element, matchers)) { continue; } @@ -657,11 +643,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf if (upgradedCount == 0) { if (matchedCount == 0) { for (auto & matcher : matchers) { - if (const size_t * index = std::get_if(&matcher)){ - warn("'%d' is not a valid index", *index); - } else if (const Path * path = std::get_if(&matcher)){ + if (const Path * path = std::get_if(&matcher)) { warn("'%s' does not match any paths", *path); - } else if (const RegexPattern * regex = std::get_if(&matcher)){ + } else if (const RegexPattern * regex = std::get_if(&matcher)) { warn("'%s' does not match any packages", regex->pattern); } } @@ 
-715,7 +699,6 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", element.name, element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL); - logger->cout("Index: %s", i); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); logger->cout("Original flake URL: %s", element.source->originalRef.to_string()); diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index eced4d3f1..618b6241d 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -49,7 +49,7 @@ cp ./config.nix $flake1Dir/ nix-env -f ./user-envs.nix -i foo-1.0 nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0' nix profile install $flake1Dir -L -nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash' +nix profile list | grep -A4 'Name:.*flake1' | grep 'Locked flake URL:.*narHash' [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]] [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) @@ -58,9 +58,8 @@ nix profile history | grep "packages.$system.default: ∅ -> 1.0" nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' # Test XDG Base Directories support - export NIX_CONFIG="use-xdg-base-directories = true" -nix profile remove 1 +nix profile remove flake1 nix profile install $flake1Dir [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]] unset NIX_CONFIG @@ -68,7 +67,7 @@ unset NIX_CONFIG # Test upgrading a package. printf NixOS > $flake1Dir/who printf 2.0 > $flake1Dir/version -nix profile upgrade 1 +nix profile upgrade flake1 [[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello NixOS" ]] nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 2.0, 2.0-man" @@ -89,7 +88,7 @@ nix profile diff-closures | grep 'Version 3 -> 4' # Test installing a non-flake package. nix profile install --file ./simple.nix '' [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] -nix profile remove 1 +nix profile remove simple nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] @@ -107,7 +106,7 @@ nix profile wipe-history # Test upgrade to CA package. printf true > $flake1Dir/ca.nix printf 3.0 > $flake1Dir/version -nix profile upgrade 0 +nix profile upgrade flake1 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. From 6268a45b650f563bae2360e0540920a2959bdd40 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:11:25 +0100 Subject: [PATCH 029/307] nix profile: Make profile element names stable The profile manifest is now an object keyed on the name returned by getNameFromURL() at installation time, instead of an array. This ensures that the names of profile elements don't change when other elements are added/removed. 
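For illustration, a version 3 manifest now looks roughly like this (the element name and store path are made-up examples, and the exact per-element fields are whatever ProfileManifest::toJSON() emits):

    {
      "version": 3,
      "elements": {
        "hello": {
          "active": true,
          "priority": 5,
          "storePaths": [ "/nix/store/…-hello" ],
          "originalUrl": "flake:nixpkgs",
          "url": "github:NixOS/nixpkgs/…",
          "attrPath": "legacyPackages.x86_64-linux.hello",
          "outputs": null
        }
      }
    }

The parser still accepts the older array form; for version 2 manifests the key is derived on load from the source flake URL (or, failing that, from the element identifier).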
--- src/nix/profile.cc | 140 ++++++++++++++++---------------- tests/functional/nix-profile.sh | 17 ++-- 2 files changed, 80 insertions(+), 77 deletions(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 517693cd4..8b3918b80 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -45,7 +45,6 @@ const int defaultPriority = 5; struct ProfileElement { StorePathSet storePaths; - std::string name; std::optional source; bool active = true; int priority = defaultPriority; @@ -82,11 +81,6 @@ struct ProfileElement return showVersions(versions); } - bool operator < (const ProfileElement & other) const - { - return std::tuple(identifier(), storePaths) < std::tuple(other.identifier(), other.storePaths); - } - void updateStorePaths( ref evalStore, ref store, @@ -109,7 +103,9 @@ struct ProfileElement struct ProfileManifest { - std::vector elements; + using ProfileElementName = std::string; + + std::map elements; ProfileManifest() { } @@ -119,8 +115,6 @@ struct ProfileManifest if (pathExists(manifestPath)) { auto json = nlohmann::json::parse(readFile(manifestPath)); - /* Keep track of already found names to allow preventing duplicates. */ - std::set foundNames; auto version = json.value("version", 0); std::string sUrl; @@ -131,6 +125,7 @@ struct ProfileManifest sOriginalUrl = "originalUri"; break; case 2: + case 3: sUrl = "url"; sOriginalUrl = "originalUrl"; break; @@ -138,7 +133,9 @@ struct ProfileManifest throw Error("profile manifest '%s' has unsupported version %d", manifestPath, version); } - for (auto & e : json["elements"]) { + auto elems = json["elements"]; + for (auto & elem : elems.items()) { + auto & e = elem.value(); ProfileElement element; for (auto & p : e["storePaths"]) element.storePaths.insert(state.store->parseStorePath((std::string) p)); @@ -155,25 +152,16 @@ struct ProfileManifest }; } - std::string nameCandidate = element.identifier(); - if (e.contains("name")) { - nameCandidate = e["name"]; - } - else if (element.source) { - auto url = parseURL(element.source->to_string()); - auto name = getNameFromURL(url); - if (name) - nameCandidate = *name; - } + std::string name = + elems.is_object() + ? elem.key() + : e.contains("name") + ? (std::string) e["name"] + : element.source + ? getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) + : element.identifier(); - auto finalName = nameCandidate; - for (int i = 1; foundNames.contains(finalName); ++i) { - finalName = nameCandidate + std::to_string(i); - } - element.name = finalName; - foundNames.insert(element.name); - - elements.emplace_back(std::move(element)); + addElement(name, std::move(element)); } } @@ -187,16 +175,34 @@ struct ProfileManifest for (auto & drvInfo : drvInfos) { ProfileElement element; element.storePaths = {drvInfo.queryOutPath()}; - element.name = element.identifier(); - elements.emplace_back(std::move(element)); + addElement(std::move(element)); } } } + void addElement(std::string_view nameCandidate, ProfileElement element) + { + std::string finalName(nameCandidate); + for (int i = 1; elements.contains(finalName); ++i) + finalName = nameCandidate + "-" + std::to_string(i); + + elements.insert_or_assign(finalName, std::move(element)); + } + + void addElement(ProfileElement element) + { + auto name = + element.source + ? getNameFromURL(parseURL(element.source->to_string())) + : std::nullopt; + auto name2 = name ? 
*name : element.identifier(); + addElement(name2, std::move(element)); + } + nlohmann::json toJSON(Store & store) const { - auto array = nlohmann::json::array(); - for (auto & element : elements) { + auto es = nlohmann::json::object(); + for (auto & [name, element] : elements) { auto paths = nlohmann::json::array(); for (auto & path : element.storePaths) paths.push_back(store.printStorePath(path)); @@ -210,11 +216,11 @@ struct ProfileManifest obj["attrPath"] = element.source->attrPath; obj["outputs"] = element.source->outputs; } - array.push_back(obj); + es[name] = obj; } nlohmann::json json; - json["version"] = 2; - json["elements"] = array; + json["version"] = 3; + json["elements"] = es; return json; } @@ -225,7 +231,7 @@ struct ProfileManifest StorePathSet references; Packages pkgs; - for (auto & element : elements) { + for (auto & [name, element] : elements) { for (auto & path : element.storePaths) { if (element.active) pkgs.emplace_back(store->printStorePath(path), true, element.priority); @@ -267,33 +273,27 @@ struct ProfileManifest static void printDiff(const ProfileManifest & prev, const ProfileManifest & cur, std::string_view indent) { - auto prevElems = prev.elements; - std::sort(prevElems.begin(), prevElems.end()); - - auto curElems = cur.elements; - std::sort(curElems.begin(), curElems.end()); - - auto i = prevElems.begin(); - auto j = curElems.begin(); + auto i = prev.elements.begin(); + auto j = cur.elements.begin(); bool changes = false; - while (i != prevElems.end() || j != curElems.end()) { - if (j != curElems.end() && (i == prevElems.end() || i->identifier() > j->identifier())) { - logger->cout("%s%s: ∅ -> %s", indent, j->identifier(), j->versions()); + while (i != prev.elements.end() || j != cur.elements.end()) { + if (j != cur.elements.end() && (i == prev.elements.end() || i->first > j->first)) { + logger->cout("%s%s: ∅ -> %s", indent, j->second.identifier(), j->second.versions()); changes = true; ++j; } - else if (i != prevElems.end() && (j == curElems.end() || i->identifier() < j->identifier())) { - logger->cout("%s%s: %s -> ∅", indent, i->identifier(), i->versions()); + else if (i != prev.elements.end() && (j == cur.elements.end() || i->first < j->first)) { + logger->cout("%s%s: %s -> ∅", indent, i->second.identifier(), i->second.versions()); changes = true; ++i; } else { - auto v1 = i->versions(); - auto v2 = j->versions(); + auto v1 = i->second.versions(); + auto v2 = j->second.versions(); if (v1 != v2) { - logger->cout("%s%s: %s -> %s", indent, i->identifier(), v1, v2); + logger->cout("%s%s: %s -> %s", indent, i->second.identifier(), v1, v2); changes = true; } ++i; @@ -392,7 +392,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile element.updateStorePaths(getEvalStore(), store, res); - manifest.elements.push_back(std::move(element)); + manifest.addElement(std::move(element)); } try { @@ -402,7 +402,7 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile // See https://github.com/NixOS/nix/compare/3efa476c5439f8f6c1968a6ba20a31d1239c2f04..1fe5d172ece51a619e879c4b86f603d9495cc102 auto findRefByFilePath = [&](Iterator begin, Iterator end) { for (auto it = begin; it != end; it++) { - auto profileElement = *it; + auto & profileElement = it->second; for (auto & storePath : profileElement.storePaths) { if (conflictError.fileA.starts_with(store->printStorePath(storePath))) { return std::pair(conflictError.fileA, profileElement.toInstallables(*store)); @@ -488,13 +488,17 @@ public: return res; } - bool matches(const Store & store, const 
ProfileElement & element, const std::vector & matchers) + bool matches( + const Store & store, + const std::string & name, + const ProfileElement & element, + const std::vector & matchers) { for (auto & matcher : matchers) { if (auto path = std::get_if(&matcher)) { if (element.storePaths.count(store.parseStorePath(*path))) return true; } else if (auto regex = std::get_if(&matcher)) { - if (std::regex_match(element.name, regex->reg)) + if (std::regex_match(name, regex->reg)) return true; } } @@ -525,10 +529,9 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem ProfileManifest newManifest; - for (size_t i = 0; i < oldManifest.elements.size(); ++i) { - auto & element(oldManifest.elements[i]); - if (!matches(*store, element, matchers)) { - newManifest.elements.push_back(std::move(element)); + for (auto & [name, element] : oldManifest.elements) { + if (!matches(*store, name, element, matchers)) { + newManifest.elements.insert_or_assign(name, std::move(element)); } else { notice("removing '%s'", element.identifier()); } @@ -574,14 +577,13 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf auto matchers = getMatchers(store); Installables installables; - std::vector indices; + std::vector elems; auto matchedCount = 0; auto upgradedCount = 0; - for (size_t i = 0; i < manifest.elements.size(); ++i) { - auto & element(manifest.elements[i]); - if (!matches(*store, element, matchers)) { + for (auto & [name, element] : manifest.elements) { + if (!matches(*store, name, element, matchers)) { continue; } @@ -637,7 +639,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf }; installables.push_back(installable); - indices.push_back(i); + elems.push_back(&element); } if (upgradedCount == 0) { @@ -661,7 +663,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf for (size_t i = 0; i < installables.size(); ++i) { auto & installable = installables.at(i); - auto & element = manifest.elements[indices.at(i)]; + auto & element = *elems.at(i); element.updateStorePaths( getEvalStore(), store, @@ -693,11 +695,11 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultPro if (json) { std::cout << manifest.toJSON(*store).dump() << "\n"; } else { - for (size_t i = 0; i < manifest.elements.size(); ++i) { - auto & element(manifest.elements[i]); + for (const auto & [i, e] : enumerate(manifest.elements)) { + auto & [name, element] = e; if (i) logger->cout(""); logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s", - element.name, + name, element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL); if (element.source) { logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string()); diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 618b6241d..003af5174 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -59,7 +59,7 @@ nix profile diff-closures | grep 'env-manifest.nix: ε → ∅' # Test XDG Base Directories support export NIX_CONFIG="use-xdg-base-directories = true" -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' nix profile install $flake1Dir [[ $($TEST_HOME/.local/state/nix/profile/bin/hello) = "Hello World" ]] unset NIX_CONFIG @@ -80,7 +80,7 @@ nix profile rollback # Test uninstall. [ -e $TEST_HOME/.nix-profile/bin/foo ] -nix profile remove foo +nix profile remove foo 2>&1 | grep 'removed 1 packages' (! 
[ -e $TEST_HOME/.nix-profile/bin/foo ]) nix profile history | grep 'foo: 1.0 -> ∅' nix profile diff-closures | grep 'Version 3 -> 4' @@ -88,7 +88,7 @@ nix profile diff-closures | grep 'Version 3 -> 4' # Test installing a non-flake package. nix profile install --file ./simple.nix '' [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] -nix profile remove simple +nix profile remove simple 2>&1 | grep 'removed 1 packages' nix profile install $(nix-build --no-out-link ./simple.nix) [[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]] @@ -96,8 +96,9 @@ nix profile install $(nix-build --no-out-link ./simple.nix) mkdir $TEST_ROOT/simple-too cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too nix profile install --file $TEST_ROOT/simple-too/simple.nix '' -nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple1' -nix profile remove simple1 +nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple-1' +nix profile remove simple 2>&1 | grep 'removed 1 packages' +nix profile remove simple-1 2>&1 | grep 'removed 1 packages' # Test wipe-history. nix profile wipe-history @@ -110,7 +111,7 @@ nix profile upgrade flake1 nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man" # Test new install of CA package. -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' printf 4.0 > $flake1Dir/version printf Utrecht > $flake1Dir/who nix profile install $flake1Dir @@ -131,14 +132,14 @@ nix profile upgrade flake1 [ -e $TEST_HOME/.nix-profile/share/man ] [ -e $TEST_HOME/.nix-profile/include ] -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' nix profile install "$flake1Dir^man" (! [ -e $TEST_HOME/.nix-profile/bin/hello ]) [ -e $TEST_HOME/.nix-profile/share/man ] (! [ -e $TEST_HOME/.nix-profile/include ]) # test priority -nix profile remove flake1 +nix profile remove flake1 2>&1 | grep 'removed 1 packages' # Make another flake. flake2Dir=$TEST_ROOT/flake2 From a748e88bf4cca0fdc6ce75188e88017a7899d16b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:27:31 +0100 Subject: [PATCH 030/307] nix profile: Remove check for "name" attribute in manifests AFAIK, we've never emitted this attribute. --- src/nix/profile.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 8b3918b80..1b0c333bd 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -155,8 +155,6 @@ struct ProfileManifest std::string name = elems.is_object() ? elem.key() - : e.contains("name") - ? (std::string) e["name"] : element.source ? getNameFromURL(parseURL(element.source->to_string())).value_or(element.identifier()) : element.identifier(); From 936a3642264ac159f3f9093710be3465b70e0e89 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 22 Dec 2023 16:35:58 +0100 Subject: [PATCH 031/307] getNameFromURL(): Support uppercase characters in attribute names In particular, this makes it handle 'legacyPackages' correctly. 
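For example (mirroring the unit tests added below), with this change

    getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello"))

now returns "hello". Previously the attribute-name pattern ([a-z0-9_-]+) had no uppercase range, so a fragment containing 'legacyPackages' never matched the last-attribute regex and the name fell back to a cruder default (e.g. the whole fragment), as the old test expectations show.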
--- src/libexpr/flake/url-name.cc | 2 +- tests/unit/libexpr/flake/url-name.cc | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc index 7e51aa2e1..753f197d5 100644 --- a/src/libexpr/flake/url-name.cc +++ b/src/libexpr/flake/url-name.cc @@ -4,7 +4,7 @@ namespace nix { -static const std::string attributeNamePattern("[a-z0-9_-]+"); +static const std::string attributeNamePattern("[a-zA-Z0-9_-]+"); static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?"); static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+"); static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")"); diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc index 84d32837c..85387b323 100644 --- a/tests/unit/libexpr/flake/url-name.cc +++ b/tests/unit/libexpr/flake/url-name.cc @@ -5,11 +5,13 @@ namespace nix { /* ----------- tests for url-name.hh --------------------------------------------------*/ - TEST(getNameFromURL, getsNameFromURL) { + TEST(getNameFromURL, getNameFromURL) { ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project"); ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello"); - ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); - ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#legacyPackages.x86_64-linux.hello")), "hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.Hello")), "Hello"); + ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "mylaptop"); + ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "mylaptop"); ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex"); ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj"); From 9cb287657bec5a969d8bb1678d598d9fa820e60b Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sat, 23 Dec 2023 17:15:09 -0500 Subject: [PATCH 032/307] remote-store test: Break out IFD expression into a separate file --- tests/functional/ifd.nix | 10 ++++++++++ tests/functional/remote-store.sh | 13 +------------ 2 files changed, 11 insertions(+), 12 deletions(-) create mode 100644 tests/functional/ifd.nix diff --git a/tests/functional/ifd.nix b/tests/functional/ifd.nix new file mode 100644 index 000000000..d0b9b54ad --- /dev/null +++ b/tests/functional/ifd.nix @@ -0,0 +1,10 @@ +with import ./config.nix; +import ( + mkDerivation { + name = "foo"; + bla = import ./dependencies.nix {}; + buildCommand = " + echo \\\"hi\\\" > $out + "; + } +) diff --git a/tests/functional/remote-store.sh b/tests/functional/remote-store.sh index 5c7bfde46..dc80f8b55 100644 --- a/tests/functional/remote-store.sh +++ b/tests/functional/remote-store.sh @@ -19,18 +19,7 @@ else fi # Test import-from-derivation through the daemon. 
-[[ $(nix eval --impure --raw --expr ' - with import ./config.nix; - import ( - mkDerivation { - name = "foo"; - bla = import ./dependencies.nix {}; - buildCommand = " - echo \\\"hi\\\" > $out - "; - } - ) -') = hi ]] +[[ $(nix eval --impure --raw --file ./ifd.nix) = hi ]] storeCleared=1 NIX_REMOTE_=$NIX_REMOTE $SHELL ./user-envs.sh From c3942ef85ffbd83391410fbf012f1de366d2463c Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sat, 23 Dec 2023 21:26:12 -0500 Subject: [PATCH 033/307] Build IFD in the build store when using eval-store. Previously, IFDs would be built within the eval store, even though one is typically using `--eval-store` precisely to *avoid* local builds. Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. --- doc/manual/rl-next/ifd-eval-store.md | 8 ++++++++ src/libexpr/primops.cc | 19 +++++++++++++------ tests/functional/eval-store.sh | 8 ++++++++ 3 files changed, 29 insertions(+), 6 deletions(-) create mode 100644 doc/manual/rl-next/ifd-eval-store.md diff --git a/doc/manual/rl-next/ifd-eval-store.md b/doc/manual/rl-next/ifd-eval-store.md new file mode 100644 index 000000000..835e7e7a3 --- /dev/null +++ b/doc/manual/rl-next/ifd-eval-store.md @@ -0,0 +1,8 @@ +--- +synopsis: import-from-derivation builds the derivation in the build store +prs: 9661 +--- + +When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. + +Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a1502da45..58826b3bd 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -84,14 +84,14 @@ StringMap EvalState::realiseContext(const NixStringContext & context) /* Build/substitute the context. */ std::vector buildReqs; for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d }); - store->buildPaths(buildReqs); + buildStore->buildPaths(buildReqs, bmNormal, store); + + StorePathSet outputsToCopyAndAllow; for (auto & drv : drvs) { - auto outputs = resolveDerivedPath(*store, drv); + auto outputs = resolveDerivedPath(*buildStore, drv, &*store); for (auto & [outputName, outputPath] : outputs) { - /* Add the output of this derivations to the allowed - paths. */ - allowPath(store->toRealPath(outputPath)); + outputsToCopyAndAllow.insert(outputPath); /* Get all the output paths corresponding to the placeholders we had */ if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) { @@ -101,12 +101,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context) .drvPath = drv.drvPath, .output = outputName, }).render(), - store->printStorePath(outputPath) + buildStore->printStorePath(outputPath) ); } } } + if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow); + for (auto & outputPath : outputsToCopyAndAllow) { + /* Add the output of this derivations to the allowed + paths. */ + allowPath(store->toRealPath(outputPath)); + } + return res; } diff --git a/tests/functional/eval-store.sh b/tests/functional/eval-store.sh index ec99fd953..9937ecbce 100644 --- a/tests/functional/eval-store.sh +++ b/tests/functional/eval-store.sh @@ -40,3 +40,11 @@ if [[ ! -n "${NIX_TESTS_CA_BY_DEFAULT:-}" ]]; then (! 
ls $NIX_STORE_DIR/*.drv) fi ls $eval_store/nix/store/*.drv + +clearStore +rm -rf "$eval_store" + +# Confirm that import-from-derivation builds on the build store +[[ $(nix eval --eval-store "$eval_store?require-sigs=false" --impure --raw --file ./ifd.nix) = hi ]] +ls $NIX_STORE_DIR/*dependencies-top/foobar +(! ls $eval_store/nix/store/*dependencies-top/foobar) From e2399fc94935c9bc1ae6670c5d445214e039ac84 Mon Sep 17 00:00:00 2001 From: Brian Le Date: Tue, 26 Dec 2023 17:12:28 -0500 Subject: [PATCH 034/307] Change "dervation" typos to "derivation" --- doc/manual/src/language/derivations.md | 2 +- src/libstore/remote-store.cc | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md index 2aded5527..cbb30d074 100644 --- a/doc/manual/src/language/derivations.md +++ b/doc/manual/src/language/derivations.md @@ -274,7 +274,7 @@ The [`builder`](#attr-builder) is executed as follows: directory (typically, `/nix/store`). - `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs` - is set to `true` for the dervation. A detailed explanation of this + is set to `true` for the derivation. A detailed explanation of this behavior can be found in the [section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs). diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 4d0113594..f0df646ca 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -186,7 +186,7 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source, if (m.find("parsing derivation") != std::string::npos && m.find("expected string") != std::string::npos && m.find("Derive([") != std::string::npos) - throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw dervation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); + throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw derivation is in the form '%s'", std::move(m), "DrvWithVersion(..)"); } throw; } From b6313f64f7be11e0fe74b17cb31dbbf12b2e7725 Mon Sep 17 00:00:00 2001 From: DavHau Date: Wed, 27 Dec 2023 19:57:27 +0700 Subject: [PATCH 035/307] saner default for log-lines: change to 25 This seems to be a much saner default. 10 lines are just not enough in so many cases. --- src/libstore/globals.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b35dc37a1..c12998f8e 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -144,7 +144,7 @@ public: */ bool verboseBuild = true; - Setting logLines{this, 10, "log-lines", + Setting logLines{this, 25, "log-lines", "The number of lines of the tail of " "the log to show if a build fails."}; From 99a691c8a1abffd30077bd5f005cb8d4bbafae5c Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 24 Dec 2023 21:14:08 +0100 Subject: [PATCH 036/307] don't use istreams in hot paths MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit istream sentry objects are very expensive for single-character operations, and since we don't configure exception masks for the istreams used here they don't even do anything. all we need is end-of-string checks and an advancing position in an immutable memory buffer, both of which can be had for much cheaper than istreams allow. 
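the replacement is essentially a cursor over a string_view. as a rough standalone sketch of the interface shape (the real implementation is the StringViewStream struct added below; the name Cursor and the exact signatures here are illustrative only):

    #include <cstdio>       // EOF
    #include <string_view>

    // a minimal "stream": end-of-input checks plus an advancing position over
    // an immutable buffer. no sentries, no locales, no exception masks.
    struct Cursor {
        std::string_view rest;

        int peek() const { return rest.empty() ? EOF : rest[0]; }

        int get() {
            if (rest.empty()) return EOF;
            char c = rest[0];
            rest.remove_prefix(1);
            return c;
        }
    };

fixed-string matches can then use starts_with() on the remaining view instead of reading character by character, which is what the new expect() does.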
the effect of this change is most apparent on empty stores. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 7.167 s ± 0.013 s [User: 5.528 s, System: 1.431 s] Range (min … max): 7.147 s … 7.182 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.963 s ± 0.011 s [User: 5.330 s, System: 1.421 s] Range (min … max): 6.943 s … 6.974 s 10 runs --- src/libstore/derivations.cc | 47 +++++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 8a7d660ff..973ce5211 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -154,18 +154,39 @@ StorePath writeDerivation(Store & store, } -/* Read string `s' from stream `str'. */ -static void expect(std::istream & str, std::string_view s) -{ - for (auto & c : s) { - if (str.get() != c) - throw FormatError("expected string '%1%'", s); +namespace { +/** + * This mimics std::istream to some extent. We use this much smaller implementation + * instead of plain istreams because the sentry object overhead is too high. + */ +struct StringViewStream { + std::string_view remaining; + + int peek() const { + return remaining.empty() ? EOF : remaining[0]; } + + int get() { + if (remaining.empty()) return EOF; + char c = remaining[0]; + remaining.remove_prefix(1); + return c; + } +}; +} + + +/* Read string `s' from stream `str'. */ +static void expect(StringViewStream & str, std::string_view s) +{ + if (!str.remaining.starts_with(s)) + throw FormatError("expected string '%1%'", s); + str.remaining.remove_prefix(s.size()); } /* Read a C-style string from stream `str'. */ -static std::string parseString(std::istream & str) +static std::string parseString(StringViewStream & str) { std::string res; expect(str, "\""); @@ -187,7 +208,7 @@ static void validatePath(std::string_view s) { throw FormatError("bad path '%1%' in derivation", s); } -static Path parsePath(std::istream & str) +static Path parsePath(StringViewStream & str) { auto s = parseString(str); validatePath(s); @@ -195,7 +216,7 @@ static Path parsePath(std::istream & str) } -static bool endOfList(std::istream & str) +static bool endOfList(StringViewStream & str) { if (str.peek() == ',') { str.get(); @@ -209,7 +230,7 @@ static bool endOfList(std::istream & str) } -static StringSet parseStrings(std::istream & str, bool arePaths) +static StringSet parseStrings(StringViewStream & str, bool arePaths) { StringSet res; expect(str, "["); @@ -267,7 +288,7 @@ static DerivationOutput parseDerivationOutput( } static DerivationOutput parseDerivationOutput( - const StoreDirConfig & store, std::istringstream & str, + const StoreDirConfig & store, StringViewStream & str, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings) { expect(str, ","); const auto pathS = parseString(str); @@ -297,7 +318,7 @@ enum struct DerivationATermVersion { static DerivedPathMap::ChildNode parseDerivedPathMapNode( const StoreDirConfig & store, - std::istringstream & str, + StringViewStream & str, DerivationATermVersion version) { DerivedPathMap::ChildNode node; @@ -349,7 +370,7 @@ Derivation parseDerivation( Derivation drv; drv.name = name; - std::istringstream str(std::move(s)); + StringViewStream str{s}; expect(str, "D"); DerivationATermVersion version; switch (str.peek()) { From 2cfc4ace35d1c8cca917c487be3cfddfcf3bba01 Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 17:40:55 +0100 Subject: 
[PATCH 037/307] malloc/memset even less MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit more buffers that can be uninitialized and on the stack. small difference, but still worth doing. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.963 s ± 0.011 s [User: 5.330 s, System: 1.421 s] Range (min … max): 6.943 s … 6.974 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.952 s ± 0.015 s [User: 5.294 s, System: 1.452 s] Range (min … max): 6.926 s … 6.974 s 10 runs --- src/libutil/archive.cc | 2 +- src/libutil/file-system.cc | 2 +- src/libutil/serialise.cc | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 465df2073..712ea51c7 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -140,7 +140,7 @@ static void parseContents(ParseSink & sink, Source & source, const Path & path) sink.preallocateContents(size); uint64_t left = size; - std::vector buf(65536); + std::array buf; while (left) { checkInterrupt(); diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index c96effff9..4cac35ace 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -307,7 +307,7 @@ void writeFile(const Path & path, Source & source, mode_t mode, bool sync) if (!fd) throw SysError("opening file '%1%'", path); - std::vector buf(64 * 1024); + std::array buf; try { while (true) { diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index f465bd0de..76b378e18 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -82,7 +82,7 @@ void Source::operator () (std::string_view data) void Source::drainInto(Sink & sink) { std::string s; - std::vector buf(8192); + std::array buf; while (true) { size_t n; try { From 7434caca0545bd6194bb52eebf6fdf0424755eb0 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 15 Dec 2023 11:52:21 -0800 Subject: [PATCH 038/307] Fix segfault on infinite recursion in some cases MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes a segfault on infinite function call recursion (rather than infinite thunk recursion) by tracking the function call depth in `EvalState`. Additionally, to avoid printing extremely long stack traces, stack frames are now deduplicated, with a `(19997 duplicate traces omitted)` message. This should only really be triggered in infinite recursion scenarios. 
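The depth limit itself is enforced with a small RAII counter in EvalState::callFunction(). A simplified sketch (the helper added in this patch is called CallDepth; the guard name and comments below are illustrative):

    #include <cstddef>

    // Increment the evaluator's call depth for the duration of one function
    // call; the destructor restores it even if evaluation throws.
    class CallDepthGuard {
        size_t & count;
    public:
        explicit CallDepthGuard(size_t & count) : count(count) { ++count; }
        ~CallDepthGuard() { --count; }
    };

    // In callFunction(), roughly:
    //   if (callDepth > evalSettings.maxCallDepth)
    //       error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow<EvalError>();
    //   CallDepthGuard _level(callDepth);

The before/after output below shows the user-visible difference.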
Before: $ nix-instantiate --eval --expr '(x: x x) (x: x x)' Segmentation fault: 11 After: $ nix-instantiate --eval --expr '(x: x x) (x: x x)' error: stack overflow at «string»:1:14: 1| (x: x x) (x: x x) | ^ $ nix-instantiate --eval --expr '(x: x x) (x: x x)' --show-trace error: … from call site at «string»:1:1: 1| (x: x x) (x: x x) | ^ … while calling anonymous lambda at «string»:1:2: 1| (x: x x) (x: x x) | ^ … from call site at «string»:1:5: 1| (x: x x) (x: x x) | ^ … while calling anonymous lambda at «string»:1:11: 1| (x: x x) (x: x x) | ^ … from call site at «string»:1:14: 1| (x: x x) (x: x x) | ^ (19997 duplicate traces omitted) error: stack overflow at «string»:1:14: 1| (x: x x) (x: x x) | ^ --- .../rl-next/stack-overflow-segfaults.md | 32 +++++ src/libexpr/eval-settings.hh | 3 + src/libexpr/eval.cc | 18 +++ src/libexpr/eval.hh | 5 + src/libutil/error.cc | 111 +++++++++++++++++- src/libutil/error.hh | 8 ++ .../lang/eval-fail-duplicate-traces.err.exp | 44 +++++++ .../lang/eval-fail-duplicate-traces.nix | 9 ++ ...val-fail-infinite-recursion-lambda.err.exp | 38 ++++++ .../eval-fail-infinite-recursion-lambda.nix | 1 + .../lang/eval-fail-mutual-recursion.err.exp | 57 +++++++++ .../lang/eval-fail-mutual-recursion.nix | 36 ++++++ 12 files changed, 358 insertions(+), 4 deletions(-) create mode 100644 doc/manual/rl-next/stack-overflow-segfaults.md create mode 100644 tests/functional/lang/eval-fail-duplicate-traces.err.exp create mode 100644 tests/functional/lang/eval-fail-duplicate-traces.nix create mode 100644 tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp create mode 100644 tests/functional/lang/eval-fail-infinite-recursion-lambda.nix create mode 100644 tests/functional/lang/eval-fail-mutual-recursion.err.exp create mode 100644 tests/functional/lang/eval-fail-mutual-recursion.nix diff --git a/doc/manual/rl-next/stack-overflow-segfaults.md b/doc/manual/rl-next/stack-overflow-segfaults.md new file mode 100644 index 000000000..3d9753248 --- /dev/null +++ b/doc/manual/rl-next/stack-overflow-segfaults.md @@ -0,0 +1,32 @@ +--- +synopsis: Some stack overflow segfaults are fixed +issues: 9616 +prs: 9617 +--- + +The number of nested function calls has been restricted, to detect and report +infinite function call recursions. The default maximum call depth is 10,000 and +can be set with [the `max-call-depth` +option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). 
+ +This fixes segfaults or the following unhelpful error message in many cases: + + error: stack overflow (possible infinite recursion) + +Before: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +Segmentation fault: 11 +``` + +After: + +``` +$ nix-instantiate --eval --expr '(x: x x) (x: x x)' +error: stack overflow + + at «string»:1:14: + 1| (x: x x) (x: x x) + | ^ +``` diff --git a/src/libexpr/eval-settings.hh b/src/libexpr/eval-settings.hh index ad187ca01..2f6c12d45 100644 --- a/src/libexpr/eval-settings.hh +++ b/src/libexpr/eval-settings.hh @@ -124,6 +124,9 @@ struct EvalSettings : Config Setting traceVerbose{this, false, "trace-verbose", "Whether `builtins.traceVerbose` should trace its first argument when evaluated."}; + + Setting maxCallDepth{this, 10000, "max-call-depth", + "The maximum function call depth to allow before erroring."}; }; extern EvalSettings evalSettings; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..f73e22ba0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1505,9 +1505,27 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v) v.mkLambda(&env, this); } +namespace { +/** Increments a count on construction and decrements on destruction. + */ +class CallDepth { + size_t & count; +public: + CallDepth(size_t & count) : count(count) { + ++count; + } + ~CallDepth() { + --count; + } +}; +}; void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { + if (callDepth > evalSettings.maxCallDepth) + error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + CallDepth _level(callDepth); + auto trace = evalSettings.traceFunctionCalls ? std::make_unique(positions[pos]) : nullptr; diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..7dbffe38c 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -622,6 +622,11 @@ private: const SourcePath & basePath, std::shared_ptr & staticEnv); + /** + * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack. + */ + size_t callDepth = 0; + public: /** diff --git a/src/libutil/error.cc b/src/libutil/error.cc index bc0194d59..e42925c2b 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -50,6 +50,32 @@ std::ostream & operator <<(std::ostream & str, const AbstractPos & pos) return str; } +/** + * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container. + */ +inline bool operator<(const Trace& lhs, const Trace& rhs) +{ + // `std::shared_ptr` does not have value semantics for its comparison + // functions, so we need to check for nulls and compare the dereferenced + // values here. + if (lhs.pos != rhs.pos) { + if (!lhs.pos) + return true; + if (!rhs.pos) + return false; + if (*lhs.pos != *rhs.pos) + return *lhs.pos < *rhs.pos; + } + // This formats a freshly formatted hint string and then throws it away, which + // shouldn't be much of a problem because it only runs when pos is equal, and this function is + // used for trace printing, which is infrequent. 
+ return std::forward_as_tuple(lhs.hint.str(), lhs.frame) + < std::forward_as_tuple(rhs.hint.str(), rhs.frame); +} +inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; } +inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); } +inline bool operator>=(const Trace& lhs, const Trace& rhs) { return !(lhs < rhs); } + std::optional AbstractPos::getCodeLines() const { if (line == 0) @@ -185,6 +211,69 @@ static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std return hasPos; } +void printTrace( + std::ostream & output, + const std::string_view & indent, + size_t & count, + const Trace & trace) +{ + output << "\n" << "… " << trace.hint.str() << "\n"; + + if (printPosMaybe(output, indent, trace.pos)) + count++; +} + +void printSkippedTracesMaybe( + std::ostream & output, + const std::string_view & indent, + size_t & count, + std::vector & skippedTraces, + std::set tracesSeen) +{ + if (skippedTraces.size() > 0) { + // If we only skipped a few frames, print them out normally; + // messages like "1 duplicate frames omitted" aren't helpful. + if (skippedTraces.size() <= 5) { + for (auto & trace : skippedTraces) { + printTrace(output, indent, count, trace); + } + } else { + output << "\n" << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n"; + // Clear the set of "seen" traces after printing a chunk of + // `duplicate frames omitted`. + // + // Consider a mutually recursive stack trace with: + // - 10 entries of A + // - 10 entries of B + // - 10 entries of A + // + // If we don't clear `tracesSeen` here, we would print output like this: + // - 1 entry of A + // - (9 duplicate frames omitted) + // - 1 entry of B + // - (19 duplicate frames omitted) + // + // This would obscure the control flow, which went from A, + // to B, and back to A again. + // + // In contrast, if we do clear `tracesSeen`, the output looks like this: + // - 1 entry of A + // - (9 duplicate frames omitted) + // - 1 entry of B + // - (9 duplicate frames omitted) + // - 1 entry of A + // - (9 duplicate frames omitted) + // + // See: `tests/functional/lang/eval-fail-mutual-recursion.nix` + tracesSeen.clear(); + } + } + // We've either printed each trace in `skippedTraces` normally, or + // printed a chunk of `duplicate frames omitted`. Either way, we've + // processed these traces and can clear them. + skippedTraces.clear(); +} + std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace) { std::string prefix; @@ -333,7 +422,13 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s bool frameOnly = false; if (!einfo.traces.empty()) { + // Stack traces seen since we last printed a chunk of `duplicate frames + // omitted`. + std::set tracesSeen; + // A consecutive sequence of stack traces that are all in `tracesSeen`. 
+ std::vector skippedTraces; size_t count = 0; + for (const auto & trace : einfo.traces) { if (trace.hint.str().empty()) continue; if (frameOnly && !trace.frame) continue; @@ -343,14 +438,21 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s break; } + if (tracesSeen.count(trace)) { + skippedTraces.push_back(trace); + continue; + } + tracesSeen.insert(trace); + + printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen); + count++; frameOnly = trace.frame; - oss << "\n" << "… " << trace.hint.str() << "\n"; - - if (printPosMaybe(oss, ellipsisIndent, trace.pos)) - count++; + printTrace(oss, ellipsisIndent, count, trace); } + + printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen); oss << "\n" << prefix; } @@ -369,4 +471,5 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s return out; } + } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index c04dcbd77..baffca128 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -25,6 +25,7 @@ #include #include #include +#include #include #include @@ -88,6 +89,8 @@ struct AbstractPos std::optional getCodeLines() const; virtual ~AbstractPos() = default; + + inline auto operator<=>(const AbstractPos& rhs) const = default; }; std::ostream & operator << (std::ostream & str, const AbstractPos & pos); @@ -103,6 +106,11 @@ struct Trace { bool frame; }; +inline bool operator<(const Trace& lhs, const Trace& rhs); +inline bool operator> (const Trace& lhs, const Trace& rhs); +inline bool operator<=(const Trace& lhs, const Trace& rhs); +inline bool operator>=(const Trace& lhs, const Trace& rhs); + struct ErrorInfo { Verbosity level; hintformat msg; diff --git a/tests/functional/lang/eval-fail-duplicate-traces.err.exp b/tests/functional/lang/eval-fail-duplicate-traces.err.exp new file mode 100644 index 000000000..32ad9b376 --- /dev/null +++ b/tests/functional/lang/eval-fail-duplicate-traces.err.exp @@ -0,0 +1,44 @@ +error: + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:9:3: + 8| in + 9| throwAfter 2 + | ^ + 10| + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + … from call site + at /pwd/lang/eval-fail-duplicate-traces.nix:6:10: + 5| if n > 0 + 6| then throwAfter (n - 1) + | ^ + 7| else throw "Uh oh!"; + + … while calling 'throwAfter' + at /pwd/lang/eval-fail-duplicate-traces.nix:4:16: + 3| let + 4| throwAfter = n: + | ^ + 5| if n > 0 + + error: Uh oh! diff --git a/tests/functional/lang/eval-fail-duplicate-traces.nix b/tests/functional/lang/eval-fail-duplicate-traces.nix new file mode 100644 index 000000000..17ce374ec --- /dev/null +++ b/tests/functional/lang/eval-fail-duplicate-traces.nix @@ -0,0 +1,9 @@ +# Check that we only omit duplicate stack traces when there's a bunch of them. +# Here, there's only a couple duplicate entries, so we output them all. 
+let + throwAfter = n: + if n > 0 + then throwAfter (n - 1) + else throw "Uh oh!"; +in + throwAfter 2 diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp new file mode 100644 index 000000000..5d843d827 --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.err.exp @@ -0,0 +1,38 @@ +error: + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:1: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:2: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:5: + 1| (x: x x) (x: x x) + | ^ + 2| + + … while calling anonymous lambda + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:11: + 1| (x: x x) (x: x x) + | ^ + 2| + + … from call site + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| + + (19997 duplicate frames omitted) + + error: stack overflow; max-call-depth exceeded + at /pwd/lang/eval-fail-infinite-recursion-lambda.nix:1:14: + 1| (x: x x) (x: x x) + | ^ + 2| diff --git a/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix new file mode 100644 index 000000000..dd0a8bf2e --- /dev/null +++ b/tests/functional/lang/eval-fail-infinite-recursion-lambda.nix @@ -0,0 +1 @@ +(x: x x) (x: x x) diff --git a/tests/functional/lang/eval-fail-mutual-recursion.err.exp b/tests/functional/lang/eval-fail-mutual-recursion.err.exp new file mode 100644 index 000000000..dc2e11766 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.err.exp @@ -0,0 +1,57 @@ +error: + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:36:3: + 35| in + 36| throwAfterA true 10 + | ^ + 37| + + … while calling 'throwAfterA' + at /pwd/lang/eval-fail-mutual-recursion.nix:29:26: + 28| + 29| throwAfterA = recurse: n: + | ^ + 30| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:31:10: + 30| if n > 0 + 31| then throwAfterA recurse (n - 1) + | ^ + 32| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:33:10: + 32| else if recurse + 33| then throwAfterB true 10 + | ^ + 34| else throw "Uh oh!"; + + … while calling 'throwAfterB' + at /pwd/lang/eval-fail-mutual-recursion.nix:22:26: + 21| let + 22| throwAfterB = recurse: n: + | ^ + 23| if n > 0 + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:24:10: + 23| if n > 0 + 24| then throwAfterB recurse (n - 1) + | ^ + 25| else if recurse + + (19 duplicate frames omitted) + + … from call site + at /pwd/lang/eval-fail-mutual-recursion.nix:26:10: + 25| else if recurse + 26| then throwAfterA false 10 + | ^ + 27| else throw "Uh oh!"; + + (21 duplicate frames omitted) + + error: Uh oh! diff --git a/tests/functional/lang/eval-fail-mutual-recursion.nix b/tests/functional/lang/eval-fail-mutual-recursion.nix new file mode 100644 index 000000000..d090d3158 --- /dev/null +++ b/tests/functional/lang/eval-fail-mutual-recursion.nix @@ -0,0 +1,36 @@ +# Check that stack frame deduplication only affects consecutive intervals, and +# that they are reported independently of any preceding sections, even if +# they're indistinguishable. +# +# In terms of the current implementation, we check that we clear the set of +# "seen frames" after eliding a group of frames. 
+# +# Suppose we have: +# - 10 frames in a function A +# - 10 frames in a function B +# - 10 frames in a function A +# +# We want to output: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest) +# - a few frames of A (skip the rest) +# +# If we implemented this in the naive manner, we'd instead get: +# - a few frames of A (skip the rest) +# - a few frames of B (skip the rest, _and_ skip the remaining frames of A) +let + throwAfterB = recurse: n: + if n > 0 + then throwAfterB recurse (n - 1) + else if recurse + then throwAfterA false 10 + else throw "Uh oh!"; + + throwAfterA = recurse: n: + if n > 0 + then throwAfterA recurse (n - 1) + else if recurse + then throwAfterB true 10 + else throw "Uh oh!"; +in + throwAfterA true 10 From 79d3d412cacd210bc9a0e9ba5407eea67c8e3868 Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 22:18:42 +0100 Subject: [PATCH 039/307] optimize derivation string parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit a bunch of derivation strings contain no escape sequences. we can optimize for this fact by first scanning for the end of a derivation string and simply returning the contents unmodified if no escape sequences were found. to make this even more efficient we can also use BackedStringViews to avoid copies, avoiding heap allocations for transient data. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.952 s ± 0.015 s [User: 5.294 s, System: 1.452 s] Range (min … max): 6.926 s … 6.974 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.907 s ± 0.012 s [User: 5.272 s, System: 1.429 s] Range (min … max): 6.893 s … 6.926 s 10 runs --- doc/manual/rl-next/drv-string-parse-hang.md | 6 ++ src/libstore/derivations.cc | 65 +++++++++++++-------- 2 files changed, 48 insertions(+), 23 deletions(-) create mode 100644 doc/manual/rl-next/drv-string-parse-hang.md diff --git a/doc/manual/rl-next/drv-string-parse-hang.md b/doc/manual/rl-next/drv-string-parse-hang.md new file mode 100644 index 000000000..1e041d3e9 --- /dev/null +++ b/doc/manual/rl-next/drv-string-parse-hang.md @@ -0,0 +1,6 @@ +--- +synopsis: Fix handling of truncated `.drv` files. +prs: 9673 +--- + +Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 973ce5211..89d902917 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -2,6 +2,7 @@ #include "downstream-placeholder.hh" #include "store-api.hh" #include "globals.hh" +#include "types.hh" #include "util.hh" #include "split.hh" #include "common-protocol.hh" @@ -186,20 +187,38 @@ static void expect(StringViewStream & str, std::string_view s) /* Read a C-style string from stream `str'. 
*/ -static std::string parseString(StringViewStream & str) +static BackedStringView parseString(StringViewStream & str) { - std::string res; expect(str, "\""); - int c; - while ((c = str.get()) != '"') - if (c == '\\') { - c = str.get(); - if (c == 'n') res += '\n'; - else if (c == 'r') res += '\r'; - else if (c == 't') res += '\t'; - else res += c; + auto c = str.remaining.begin(), end = str.remaining.end(); + bool escaped = false; + for (; c != end && *c != '"'; c++) { + if (*c == '\\') { + c++; + if (c == end) + throw FormatError("unterminated string in derivation"); + escaped = true; } - else res += c; + } + + const auto contentLen = c - str.remaining.begin(); + const auto content = str.remaining.substr(0, contentLen); + str.remaining.remove_prefix(contentLen + 1); + + if (!escaped) + return content; + + std::string res; + res.reserve(content.size()); + for (c = content.begin(), end = content.end(); c != end; c++) + if (*c == '\\') { + c++; + if (*c == 'n') res += '\n'; + else if (*c == 'r') res += '\r'; + else if (*c == 't') res += '\t'; + else res += *c; + } + else res += *c; return res; } @@ -210,7 +229,7 @@ static void validatePath(std::string_view s) { static Path parsePath(StringViewStream & str) { - auto s = parseString(str); + auto s = parseString(str).toOwned(); validatePath(s); return s; } @@ -235,7 +254,7 @@ static StringSet parseStrings(StringViewStream & str, bool arePaths) StringSet res; expect(str, "["); while (!endOfList(str)) - res.insert(arePaths ? parsePath(str) : parseString(str)); + res.insert(arePaths ? parsePath(str) : parseString(str).toOwned()); return res; } @@ -296,7 +315,7 @@ static DerivationOutput parseDerivationOutput( expect(str, ","); const auto hash = parseString(str); expect(str, ")"); - return parseDerivationOutput(store, pathS, hashAlgo, hash, xpSettings); + return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings); } /** @@ -344,7 +363,7 @@ static DerivedPathMap::ChildNode parseDerivedPathMapNode( expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - auto outputName = parseString(str); + auto outputName = parseString(str).toOwned(); expect(str, ","); node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version)); expect(str, ")"); @@ -381,12 +400,12 @@ Derivation parseDerivation( case 'r': { expect(str, "rvWithVersion("); auto versionS = parseString(str); - if (versionS == "xp-dyn-drv") { + if (*versionS == "xp-dyn-drv") { // Only verison we have so far version = DerivationATermVersion::DynamicDerivations; xpSettings.require(Xp::DynamicDerivations); } else { - throw FormatError("Unknown derivation ATerm format version '%s'", versionS); + throw FormatError("Unknown derivation ATerm format version '%s'", *versionS); } expect(str, ","); break; @@ -398,7 +417,7 @@ Derivation parseDerivation( /* Parse the list of outputs. */ expect(str, "["); while (!endOfList(str)) { - expect(str, "("); std::string id = parseString(str); + expect(str, "("); std::string id = parseString(str).toOwned(); auto output = parseDerivationOutput(store, str, xpSettings); drv.outputs.emplace(std::move(id), std::move(output)); } @@ -414,19 +433,19 @@ Derivation parseDerivation( } expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true)); - expect(str, ","); drv.platform = parseString(str); - expect(str, ","); drv.builder = parseString(str); + expect(str, ","); drv.platform = parseString(str).toOwned(); + expect(str, ","); drv.builder = parseString(str).toOwned(); /* Parse the builder arguments. 
*/ expect(str, ",["); while (!endOfList(str)) - drv.args.push_back(parseString(str)); + drv.args.push_back(parseString(str).toOwned()); /* Parse the environment variables. */ expect(str, ",["); while (!endOfList(str)) { - expect(str, "("); auto name = parseString(str); - expect(str, ","); auto value = parseString(str); + expect(str, "("); auto name = parseString(str).toOwned(); + expect(str, ","); auto value = parseString(str).toOwned(); expect(str, ")"); drv.env[name] = value; } From 02c64abf1e892220cb62ce3b7e1598030fb6a61c Mon Sep 17 00:00:00 2001 From: pennae Date: Tue, 26 Dec 2023 05:44:52 +0100 Subject: [PATCH 040/307] use translation table for drv string parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit the table is very small compared to cache sizes and a single indexed load is faster than three comparisons. before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.907 s ± 0.012 s [User: 5.272 s, System: 1.429 s] Range (min … max): 6.893 s … 6.926 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.883 s ± 0.016 s [User: 5.250 s, System: 1.424 s] Range (min … max): 6.860 s … 6.905 s 10 runs --- src/libstore/derivations.cc | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 89d902917..89a345057 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -174,6 +174,17 @@ struct StringViewStream { return c; } }; + +constexpr struct Escapes { + char map[256]; + constexpr Escapes() { + for (int i = 0; i < 256; i++) map[i] = (char) (unsigned char) i; + map[(int) (unsigned char) 'n'] = '\n'; + map[(int) (unsigned char) 'r'] = '\r'; + map[(int) (unsigned char) 't'] = '\t'; + } + char operator[](char c) const { return map[(unsigned char) c]; } +} escapes; } @@ -213,10 +224,7 @@ static BackedStringView parseString(StringViewStream & str) for (c = content.begin(), end = content.end(); c != end; c++) if (*c == '\\') { c++; - if (*c == 'n') res += '\n'; - else if (*c == 'r') res += '\r'; - else if (*c == 't') res += '\t'; - else res += *c; + res += escapes[*c]; } else res += *c; return res; From c62686a95bd3ebbf3f5104c27222e751e84b84a3 Mon Sep 17 00:00:00 2001 From: pennae Date: Wed, 27 Dec 2023 04:26:50 +0100 Subject: [PATCH 041/307] reduce copies during drv parsing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit many paths need not be heap-allocated, and derivation env name/valye pairs can be moved into the map. 
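as a minimal, self-contained illustration of the map-insertion part of this change (simplified standalone code, not the actual parser):

```c++
#include <map>
#include <string>
#include <utility>

int main()
{
    std::map<std::string, std::string> env;

    std::string name = "builder";
    std::string value = "/bin/bash";

    // env[name] = value;                  // copies both strings into the map
    env.insert_or_assign(std::move(name), std::move(value)); // moves them instead
}
```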
before: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.883 s ± 0.016 s [User: 5.250 s, System: 1.424 s] Range (min … max): 6.860 s … 6.905 s 10 runs after: Benchmark 1: nix eval --raw --impure --expr 'with import {}; system' Time (mean ± σ): 6.868 s ± 0.027 s [User: 5.194 s, System: 1.466 s] Range (min … max): 6.828 s … 6.913 s 10 runs --- src/libstore/derivations.cc | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 89a345057..2fafcb8e7 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -235,10 +235,10 @@ static void validatePath(std::string_view s) { throw FormatError("bad path '%1%' in derivation", s); } -static Path parsePath(StringViewStream & str) +static BackedStringView parsePath(StringViewStream & str) { - auto s = parseString(str).toOwned(); - validatePath(s); + auto s = parseString(str); + validatePath(*s); return s; } @@ -262,7 +262,7 @@ static StringSet parseStrings(StringViewStream & str, bool arePaths) StringSet res; expect(str, "["); while (!endOfList(str)) - res.insert(arePaths ? parsePath(str) : parseString(str).toOwned()); + res.insert((arePaths ? parsePath(str) : parseString(str)).toOwned()); return res; } @@ -434,9 +434,9 @@ Derivation parseDerivation( expect(str, ",["); while (!endOfList(str)) { expect(str, "("); - Path drvPath = parsePath(str); + auto drvPath = parsePath(str); expect(str, ","); - drv.inputDrvs.map.insert_or_assign(store.parseStorePath(drvPath), parseDerivedPathMapNode(store, str, version)); + drv.inputDrvs.map.insert_or_assign(store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version)); expect(str, ")"); } @@ -455,7 +455,7 @@ Derivation parseDerivation( expect(str, "("); auto name = parseString(str).toOwned(); expect(str, ","); auto value = parseString(str).toOwned(); expect(str, ")"); - drv.env[name] = value; + drv.env.insert_or_assign(std::move(name), std::move(value)); } expect(str, ")"); From 1fe66852ff87e98615f35e8aac64675ff988fb5a Mon Sep 17 00:00:00 2001 From: pennae Date: Fri, 22 Dec 2023 18:19:53 +0100 Subject: [PATCH 042/307] reduce the size of Env by one pointer MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit since `up` and `values` are both pointer-aligned the type field will also be pointer-aligned, wasting 48 bits of space on most machines. we can get away with removing the type field altogether by encoding some information into the `with` expr that created the env to begin with, reducing the GC load for the absolutely massive amount of single-entry envs we create for lambdas. this reduces memory usage of system eval by quite a bit (reducing heap size of our system eval from 8.4GB to 8.23GB) and gives similar savings in eval time. 
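the layout change, as a rough sketch for a typical 64-bit machine (the struct names below are made up for the comparison; the fields match the old and new `Env` in eval.hh):

```c++
struct Value;

// before: the two bit-fields force padding up to 16 bytes
struct EnvBefore {
    EnvBefore * up;
    unsigned short prevWith:14;                            // levels up to the next `with` env
    enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2;
    Value * values[0];
};

// after: the `with` bookkeeping lives in ExprWith, leaving one pointer per Env
struct EnvAfter {
    EnvAfter * up;
    Value * values[0];
};
```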
running `nix eval --raw --impure --expr 'with import {}; system'` before: Time (mean ± σ): 5.576 s ± 0.003 s [User: 5.197 s, System: 0.378 s] Range (min … max): 5.572 s … 5.581 s 10 runs after: Time (mean ± σ): 5.408 s ± 0.002 s [User: 5.019 s, System: 0.388 s] Range (min … max): 5.405 s … 5.411 s 10 runs --- doc/manual/rl-next/env-size-reduction.md | 7 +++++ doc/manual/rl-next/with-error-reporting.md | 31 ++++++++++++++++++++++ src/libcmd/repl.cc | 2 +- src/libexpr/eval-inline.hh | 2 -- src/libexpr/eval.cc | 29 ++++++++++---------- src/libexpr/eval.hh | 5 ---- src/libexpr/nixexpr.cc | 18 ++++++++----- src/libexpr/nixexpr.hh | 13 ++++++--- src/libexpr/primops.cc | 2 +- 9 files changed, 75 insertions(+), 34 deletions(-) create mode 100644 doc/manual/rl-next/env-size-reduction.md create mode 100644 doc/manual/rl-next/with-error-reporting.md diff --git a/doc/manual/rl-next/env-size-reduction.md b/doc/manual/rl-next/env-size-reduction.md new file mode 100644 index 000000000..40a58bc28 --- /dev/null +++ b/doc/manual/rl-next/env-size-reduction.md @@ -0,0 +1,7 @@ +--- +synopsis: Reduce eval memory usage and wall time +prs: 9658 +--- + +Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. +This reduces memory usage during eval by around 2% and wall time by around 3%. diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md new file mode 100644 index 000000000..10b020956 --- /dev/null +++ b/doc/manual/rl-next/with-error-reporting.md @@ -0,0 +1,31 @@ +--- +synopsis: Better error reporting for `with` expressions +prs: 9658 +--- + +`with` expressions using non-attrset values to resolve variables are now reported with proper positions. + +Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: + +``` +nix-repl> with 1; a +error: + … + + at «none»:0: (source not available) + + error: value is an integer while a set was expected +``` + +Now position information is preserved and reported as with most other errors: + +``` +nix-repl> with 1; a +error: + … while evaluating the first subexpression of a with expression + at «string»:1:1: + 1| with 1; a + | ^ + + error: value is an integer while a set was expected +``` diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 97d709ff4..dea91ba63 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -112,7 +112,7 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref store, refstaticBaseEnv.get())) + , staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get())) , historyFile(getDataDir() + "/nix/repl-history") { } diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 52aa75b5f..f7710f819 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -73,8 +73,6 @@ Env & EvalState::allocEnv(size_t size) #endif env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *)); - env->type = Env::Plain; - /* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. 
*/ return *env; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..ee1a87d9a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -543,7 +543,7 @@ EvalState::EvalState( , env1AllocCache(std::allocate_shared(traceable_allocator(), nullptr)) #endif , baseEnv(allocEnv(128)) - , staticBaseEnv{std::make_shared(false, nullptr)} + , staticBaseEnv{std::make_shared(nullptr, nullptr)} { corepkgsFS->setPathDisplay(""); internalFS->setPathDisplay("«nix-internal»", ""); @@ -781,7 +781,7 @@ void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se) // just for the current level of Env, not the whole chain. void printWithBindings(const SymbolTable & st, const Env & env) { - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { std::cout << "with: "; std::cout << ANSI_MAGENTA; Bindings::iterator j = env.values[0]->attrs->begin(); @@ -835,7 +835,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (env.type == Env::HasWithAttrs) { + if (!env.values[0]->isThunk()) { // add 'with' bindings. Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { @@ -973,22 +973,23 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) if (!var.fromWith) return env->values[var.displ]; + // This early exit defeats the `maybeThunk` optimization for variables from `with`, + // The added complexity of handling this appears to be similarly in cost, or + // the cases where applicable were insignificant in the first place. + if (noEval) return nullptr; + + auto * fromWith = var.fromWith; while (1) { - if (env->type == Env::HasWithExpr) { - if (noEval) return 0; - Value * v = allocValue(); - evalAttrs(*env->up, (Expr *) env->values[0], *v, noPos, ""); - env->values[0] = v; - env->type = Env::HasWithAttrs; - } + forceAttrs(*env->values[0], fromWith->pos, "while evaluating the first subexpression of a with expression"); Bindings::iterator j = env->values[0]->attrs->find(var.name); if (j != env->values[0]->attrs->end()) { if (countCalls) attrSelects[j->pos]++; return j->value; } - if (!env->prevWith) + if (!fromWith->parentWith) error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); - for (size_t l = env->prevWith; l; --l, env = env->up) ; + for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; + fromWith = fromWith->parentWith; } } @@ -1816,9 +1817,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v) { Env & env2(state.allocEnv(1)); env2.up = &env; - env2.prevWith = prevWith; - env2.type = Env::HasWithExpr; - env2.values[0] = (Value *) attrs; + env2.values[0] = attrs->maybeThunk(state, env); body->eval(state, env2, v); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..db606ebae 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -116,11 +116,6 @@ struct Constant struct Env { Env * up; - /** - * Number of of levels up to next `with` environment - */ - unsigned short prevWith:14; - enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2; Value * values[0]; }; diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 84860b30f..ede070cff 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -333,6 +333,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + fromWith = nullptr; + /* Check whether the 
variable appears in the environment. If so, set its level and displacement. */ const StaticEnv * curEnv; @@ -344,7 +346,6 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & } else { auto i = curEnv->find(name); if (i != curEnv->vars.end()) { - fromWith = false; this->level = level; displ = i->second; return; @@ -360,7 +361,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), .errPos = es.positions[pos] }); - fromWith = true; + for (auto * e = env.get(); e && !fromWith; e = e->up) + fromWith = e->isWith; this->level = withLevel; } @@ -393,7 +395,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); if (recursive) { - auto newEnv = std::make_shared(false, env.get(), recursive ? attrs.size() : 0); + auto newEnv = std::make_shared(nullptr, env.get(), recursive ? attrs.size() : 0); Displacement displ = 0; for (auto & i : attrs) @@ -435,7 +437,7 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr es.exprEnvs.insert(std::make_pair(this, env)); auto newEnv = std::make_shared( - false, env.get(), + nullptr, env.get(), (hasFormals() ? formals->formals.size() : 0) + (!arg ? 0 : 1)); @@ -471,7 +473,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared(false, env.get(), attrs->attrs.size()); + auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; for (auto & i : attrs->attrs) @@ -490,6 +492,10 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & if (es.debugRepl) es.exprEnvs.insert(std::make_pair(this, env)); + parentWith = nullptr; + for (auto * e = env.get(); e && !parentWith; e = e->up) + parentWith = e->isWith; + /* Does this `with' have an enclosing `with'? If so, record its level so that `lookupVar' can look up variables in the previous `with' if this one doesn't contain the desired attribute. */ @@ -506,7 +512,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & es.exprEnvs.insert(std::make_pair(this, env)); attrs->bindVars(es, env); - auto newEnv = std::make_shared(true, env.get()); + auto newEnv = std::make_shared(this, env.get()); body->bindVars(es, newEnv); } diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1e57fec7a..e50a157ee 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -138,6 +138,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos); struct Env; struct Value; class EvalState; +struct ExprWith; struct StaticEnv; @@ -226,8 +227,11 @@ struct ExprVar : Expr Symbol name; /* Whether the variable comes from an environment (e.g. a rec, let - or function argument) or from a "with". */ - bool fromWith; + or function argument) or from a "with". + + `nullptr`: Not from a `with`. + Valid pointer: the nearest, innermost `with` expression to query first. */ + ExprWith * fromWith; /* In the former case, the value is obtained by going `level` levels up from the current environment and getting the @@ -385,6 +389,7 @@ struct ExprWith : Expr PosIdx pos; Expr * attrs, * body; size_t prevWith; + ExprWith * parentWith; ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { }; PosIdx getPos() const override { return pos; } COMMON_METHODS @@ -478,14 +483,14 @@ extern ExprBlackHole eBlackHole; runtime. 
*/ struct StaticEnv { - bool isWith; + ExprWith * isWith; const StaticEnv * up; // Note: these must be in sorted order. typedef std::vector> Vars; Vars vars; - StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { + StaticEnv(ExprWith * isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) { vars.reserve(expectedSize); }; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index a1502da45..924de3184 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -214,7 +214,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v Env * env = &state.allocEnv(vScope->attrs->size()); env->up = &state.baseEnv; - auto staticEnv = std::make_shared(false, state.staticBaseEnv.get(), vScope->attrs->size()); + auto staticEnv = std::make_shared(nullptr, state.staticBaseEnv.get(), vScope->attrs->size()); unsigned int displ = 0; for (auto & attr : *vScope->attrs) { From 3f796514b37a1e723a395fce8271428410e93f5f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 2 Jan 2024 12:39:16 +0100 Subject: [PATCH 043/307] Optimize empty list constants MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This avoids a Value allocation for empty list constants. During a `nix search nixpkgs`, about 82% of all thunked lists are empty, so this removes about 3 million Value allocations. Performance comparison on `nix search github:NixOS/nixpkgs/e1fa12d4f6c6fe19ccb59cac54b5b3f25e160870 --no-eval-cache`: maximum RSS: median = 3845432.0000 mean = 3845432.0000 stddev = 0.0000 min = 3845432.0000 max = 3845432.0000 [rejected?, p=0.00000, Δ=-70084.00000±0.00000] soft page faults: median = 965395.0000 mean = 965394.6667 stddev = 1.1181 min = 965392.0000 max = 965396.0000 [rejected?, p=0.00000, Δ=-17929.77778±38.59610] system CPU time: median = 1.8029 mean = 1.7702 stddev = 0.0621 min = 1.6749 max = 1.8417 [rejected, p=0.00064, Δ=-0.12873±0.09905] user CPU time: median = 14.1022 mean = 14.0633 stddev = 0.1869 min = 13.8118 max = 14.3190 [not rejected, p=0.03006, Δ=-0.18248±0.24928] elapsed time: median = 15.8205 mean = 15.8618 stddev = 0.2312 min = 15.5033 max = 16.1670 [not rejected, p=0.00558, Δ=-0.28963±0.29434] --- src/libexpr/eval.cc | 11 +++++++++++ src/libexpr/eval.hh | 3 +++ src/libexpr/nixexpr.hh | 1 + 3 files changed, 15 insertions(+) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 810843995..494b8338f 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -554,6 +554,8 @@ EvalState::EvalState( static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes"); + vEmptyList.mkList(0); + /* Initialise the Nix expression search path. */ if (!evalSettings.pureEval) { for (auto & i : _searchPath.elements) @@ -1384,6 +1386,15 @@ void ExprList::eval(EvalState & state, Env & env, Value & v) } +Value * ExprList::maybeThunk(EvalState & state, Env & env) +{ + if (elems.empty()) { + return &state.vEmptyList; + } + return Expr::maybeThunk(state, env); +} + + void ExprVar::eval(EvalState & state, Env & env, Value & v) { Value * v2 = state.lookupVar(&env, *this, false); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index da2d256db..bf85b50c8 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -305,6 +305,9 @@ public: return *errorBuilder; } + /* Empty list constant. 
*/ + Value vEmptyList; + private: /* Cache for calls to addToStore(); maps source paths to the store diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 1e57fec7a..55e930758 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -299,6 +299,7 @@ struct ExprList : Expr std::vector elems; ExprList() { }; COMMON_METHODS + Value * maybeThunk(EvalState & state, Env & env) override; PosIdx getPos() const override { From 2b20f36f9515882589975d14a94ba1fd2b5c513a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:33:51 -0500 Subject: [PATCH 044/307] Fix NetBSD build There was still a mistake after my earlier a7115a47ef0d83ea81b494f6bc5b11d8286e0672 and e13fc0bbdb1e1eefeb33ff4d18310958041b1ad5. This finally gets it right. --- configure.ac | 7 ++++++- src/libstore/globals.hh | 2 ++ src/libstore/posix-fs-canonicalise.cc | 4 ++-- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/configure.ac b/configure.ac index 1bc4f17b0..b97e25bbd 100644 --- a/configure.ac +++ b/configure.ac @@ -308,7 +308,12 @@ AC_SUBST(HAVE_SECCOMP, [$have_seccomp]) # Optional dependencies for better normalizing file system data AC_CHECK_HEADERS([sys/xattr.h]) -AC_CHECK_FUNCS([llistxattr lremovexattr]) +AS_IF([test "$ac_cv_header_sys_xattr_h" = "yes"],[ + AC_CHECK_FUNCS([llistxattr lremovexattr]) + AS_IF([test "$ac_cv_func_llistxattr" = "yes" && test "$ac_cv_func_lremovexattr" = "yes"],[ + AC_DEFINE([HAVE_ACL_SUPPORT], [1], [Define if we can manipulate file system Access Control Lists]) + ]) +]) # Look for aws-cpp-sdk-s3. AC_LANG_PUSH(C++) diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index b35dc37a1..cf34ae354 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -946,7 +946,9 @@ public: may be useful in certain scenarios (e.g. to spin up containers or set up userspace network interfaces in tests). )"}; +#endif +#if HAVE_ACL_SUPPORT Setting ignoredAcls{ this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls", R"( diff --git a/src/libstore/posix-fs-canonicalise.cc b/src/libstore/posix-fs-canonicalise.cc index 5edda0157..8b29e90d4 100644 --- a/src/libstore/posix-fs-canonicalise.cc +++ b/src/libstore/posix-fs-canonicalise.cc @@ -1,4 +1,4 @@ -#if HAVE_SYS_XATTR_H +#if HAVE_ACL_SUPPORT # include #endif @@ -78,7 +78,7 @@ static void canonicalisePathMetaData_( if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode))) throw Error("file '%1%' has an unsupported type", path); -#if HAVE_SYS_XATTR_H && HAVE_LLISTXATTR && HAVE_LREMOVEXATTR +#if HAVE_ACL_SUPPORT /* Remove extended attributes / ACLs. */ ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0); From 86e924443722a04f7d458594e3332ffaa73edb1d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:41:53 -0500 Subject: [PATCH 045/307] Fix `buildNoTest` `checkInputs` is not right for this because we don't just need these deps when `doTest`, we also need them when `installUnitTests`. 
--- package.nix | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/package.nix b/package.nix index b5ff45083..56276ecc4 100644 --- a/package.nix +++ b/package.nix @@ -214,6 +214,9 @@ in { ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ editline lowdown + ] ++ lib.optionals buildUnitTests [ + gtest + rapidcheck ] ++ lib.optional stdenv.isLinux libseccomp ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid # There have been issues building these dependencies @@ -232,11 +235,6 @@ in { dontBuild = !attrs.doBuild; doCheck = attrs.doCheck; - checkInputs = [ - gtest - rapidcheck - ]; - nativeCheckInputs = [ git mercurial From 7b8af5f916a73aa5927b103ff712280023cea840 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 2 Jan 2024 12:50:48 -0500 Subject: [PATCH 046/307] `buildNoTests`: Restore intent The thing we wanted to test was that building Nix without building or running tests, and without depending on libraries only needed by tests, works. But since 6c8f4ef3502aa214557541ec00538e41aeced6e3, we can also install unit tests, and during the conversion to using `package.nix` this started happening more often (they go to a separate output though, so this should be fine). This adds more `... = false` to restore the original intent: don't run unit test or functional tests, and don't install unit tests. --- flake.nix | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.nix b/flake.nix index a8fc105e8..9217de9af 100644 --- a/flake.nix +++ b/flake.nix @@ -234,11 +234,11 @@ buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];})); buildNoTests = forAllSystems (system: - self.packages.${system}.nix.overrideAttrs (a: { - doCheck = - assert ! a?dontCheck; - false; - }) + self.packages.${system}.nix.override { + doCheck = false; + doInstallCheck = false; + installUnitTests = false; + } ); # Perl bindings for various platforms. From 484881f3021856cd0d0c0cb42d4473b3c7ea0051 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Jan 2024 10:23:27 +0100 Subject: [PATCH 047/307] Move empty list constant --- src/libexpr/eval.hh | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index bf85b50c8..e2180f00d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -218,6 +218,11 @@ public: Bindings emptyBindings; + /** + * Empty list constant. + */ + Value vEmptyList; + /** * The accessor for the root filesystem. */ @@ -305,9 +310,6 @@ public: return *errorBuilder; } - /* Empty list constant. */ - Value vEmptyList; - private: /* Cache for calls to addToStore(); maps source paths to the store From 24e70489e59f9ab75310382dc59df09796ea8df4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 3 Jan 2024 19:14:50 +0100 Subject: [PATCH 048/307] withFramedSink(): Receive interrupts on the stderr thread Otherwise Nix deadlocks when Ctrl-C is received in withFramedSink(): the parent thread will wait forever for the stderr thread to shut down. Fixes the hang reported in https://github.com/NixOS/nix/issues/7245#issuecomment-1770560923. 
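A reduced sketch of the pattern used by the fix (the thread body and the helper below are invented for illustration; `ReceiveInterrupts` is the RAII helper from `signals.hh` that the patch adds to the stderr thread):

```c++
// Any helper thread that the parent joins on shutdown should register as an
// interrupt receiver; otherwise Ctrl-C only wakes the parent, which then
// blocks forever in join() waiting for the helper to finish.
std::thread stderrThread([&]()
{
    ReceiveInterrupts receiveInterrupts; // propagate interrupts to this thread
    copyStderrUntilEof();                // hypothetical work loop
});

stderrThread.join(); // no longer hangs after Ctrl-C
```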
--- src/libstore/remote-store.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index f0df646ca..078b9fe00 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -16,6 +16,8 @@ #include "logging.hh" #include "callback.hh" #include "filetransfer.hh" +#include "signals.hh" + #include namespace nix { @@ -1066,6 +1068,7 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function Date: Wed, 3 Jan 2024 19:30:02 +0100 Subject: [PATCH 049/307] Make some more threads receive interrupts Shouldn't hurt to do this. In particular, this should speed up shutting down the PathSubstitutionGoal thread if it's copying from a remote store. --- src/libstore/build/substitution-goal.cc | 3 +++ src/libutil/thread-pool.cc | 2 ++ 2 files changed, 5 insertions(+) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index 93867007d..c7e8e2825 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -2,6 +2,7 @@ #include "substitution-goal.hh" #include "nar-info.hh" #include "finally.hh" +#include "signals.hh" namespace nix { @@ -217,6 +218,8 @@ void PathSubstitutionGoal::tryToRun() thr = std::thread([this]() { try { + ReceiveInterrupts receiveInterrupts; + /* Wake up the worker loop when we're done. */ Finally updateStats([this]() { outPipe.writeSide.close(); }); diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index c5e735617..9a7dfee56 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -79,6 +79,8 @@ void ThreadPool::process() void ThreadPool::doWork(bool mainThread) { + ReceiveInterrupts receiveInterrupts; + if (!mainThread) interruptCheck = [&]() { return (bool) quit; }; From 12bb8cdd381156456a712e4a5a8af3b6bc852eab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Jan 2024 15:02:20 -0500 Subject: [PATCH 050/307] Signer infrastructure: Prep for #9076 This sets up infrastructure in libutil to allow for signing other than by a secret key in memory. #9076 uses this to implement remote signing. (Split from that PR to allow reviewing in smaller chunks.) 
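As a sketch of the extension point this introduces (the class name and the remote call are hypothetical; only the `Signer` interface comes from this patch), a signer backed by an external service could look roughly like:

```c++
struct RemoteSigner : Signer
{
    RemoteSigner(PublicKey pub) : publicKey(std::move(pub)) { }

    std::string signDetached(std::string_view data) const override
    {
        // hypothetical: send `data` to an external signing service and
        // return the detached signature it produces
        return requestSignatureFromService(data);
    }

    const PublicKey & getPublicKey() override
    {
        return publicKey;
    }

private:
    PublicKey publicKey;
};
```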
Co-Authored-By: Raito Bezarius --- perl/lib/Nix/Store.xs | 1 - src/libstore/binary-cache-store.cc | 5 +- src/libstore/binary-cache-store.hh | 5 +- src/libstore/globals.cc | 5 -- src/libstore/keys.cc | 31 ++++++++++ src/libstore/keys.hh | 10 +++ src/libstore/local-store.cc | 7 ++- src/libstore/local.mk | 2 +- src/libstore/path-info.cc | 4 +- src/libstore/path-info.hh | 4 +- src/libstore/path.cc | 6 +- src/libstore/realisation.cc | 5 +- src/libstore/realisation.hh | 4 +- src/libstore/store-api.cc | 2 +- src/libutil/hash.cc | 9 +++ src/libutil/hash.hh | 6 +- src/libutil/local.mk | 7 ++- .../signature/local-keys.cc} | 54 +++++++--------- .../signature/local-keys.hh} | 42 +++++++++++-- src/libutil/signature/signer.cc | 23 +++++++ src/libutil/signature/signer.hh | 61 +++++++++++++++++++ src/libutil/util.cc | 4 ++ src/nix/sigs.cc | 5 +- src/nix/verify.cc | 1 + 24 files changed, 233 insertions(+), 70 deletions(-) create mode 100644 src/libstore/keys.cc create mode 100644 src/libstore/keys.hh rename src/{libstore/crypto.cc => libutil/signature/local-keys.cc} (64%) rename src/{libstore/crypto.hh => libutil/signature/local-keys.hh} (51%) create mode 100644 src/libutil/signature/signer.cc create mode 100644 src/libutil/signature/signer.hh diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 4964b8a34..423c01cf7 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -12,7 +12,6 @@ #include "realisation.hh" #include "globals.hh" #include "store-api.hh" -#include "crypto.hh" #include "posix-source-accessor.hh" #include diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 8a3052433..ea1279e2e 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -28,7 +28,8 @@ BinaryCacheStore::BinaryCacheStore(const Params & params) , Store(params) { if (secretKeyFile != "") - secretKey = std::unique_ptr(new SecretKey(readFile(secretKeyFile))); + signer = std::make_unique( + SecretKey { readFile(secretKeyFile) }); StringSink sink; sink << narVersionMagic1; @@ -274,7 +275,7 @@ ref BinaryCacheStore::addToStoreCommon( stats.narWriteCompressionTimeMs += duration; /* Atomically write the NAR info file.*/ - if (secretKey) narInfo->sign(*this, *secretKey); + if (signer) narInfo->sign(*this, *signer); writeNarInfo(narInfo); diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index 98e43ee6a..00ab73905 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "crypto.hh" +#include "signature/local-keys.hh" #include "store-api.hh" #include "log-store.hh" @@ -57,8 +57,7 @@ class BinaryCacheStore : public virtual BinaryCacheStoreConfig, { private: - - std::unique_ptr secretKey; + std::unique_ptr signer; protected: diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index f401d076d..50584e06c 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -15,8 +15,6 @@ #include -#include - #ifdef __GLIBC__ # include # include @@ -409,9 +407,6 @@ void initLibStore() { initLibUtil(); - if (sodium_init() == -1) - throw Error("could not initialise libsodium"); - loadConfFile(); preloadNSS(); diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc new file mode 100644 index 000000000..2cc50970f --- /dev/null +++ b/src/libstore/keys.cc @@ -0,0 +1,31 @@ +#include "file-system.hh" +#include "globals.hh" +#include "keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys() +{ + PublicKeys publicKeys; + + // 
FIXME: filter duplicates + + for (auto s : settings.trustedPublicKeys.get()) { + PublicKey key(s); + publicKeys.emplace(key.name, key); + } + + for (auto secretKeyFile : settings.secretKeyFiles.get()) { + try { + SecretKey secretKey(readFile(secretKeyFile)); + publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); + } catch (SysError & e) { + /* Ignore unreadable key files. That's normal in a + multi-user installation. */ + } + } + + return publicKeys; +} + +} diff --git a/src/libstore/keys.hh b/src/libstore/keys.hh new file mode 100644 index 000000000..3da19493f --- /dev/null +++ b/src/libstore/keys.hh @@ -0,0 +1,10 @@ +#pragma once +///@file + +#include "signature/local-keys.hh" + +namespace nix { + +PublicKeys getDefaultPublicKeys(); + +} diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 63e90ea1e..0f3c37c8a 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -14,6 +14,7 @@ #include "signals.hh" #include "posix-fs-canonicalise.hh" #include "posix-source-accessor.hh" +#include "keys.hh" #include #include @@ -1578,7 +1579,8 @@ void LocalStore::signRealisation(Realisation & realisation) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - realisation.sign(secretKey); + LocalSigner signer(std::move(secretKey)); + realisation.sign(signer); } } @@ -1590,7 +1592,8 @@ void LocalStore::signPathInfo(ValidPathInfo & info) for (auto & secretKeyFile : secretKeyFiles.get()) { SecretKey secretKey(readFile(secretKeyFile)); - info.sign(*this, secretKey); + LocalSigner signer(std::move(secretKey)); + info.sign(*this, signer); } } diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 68ccdc409..675976314 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) -pthread ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index f58e31bfd..d82ccd0c9 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -38,9 +38,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const } -void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey) +void ValidPathInfo::sign(const Store & store, const Signer & signer) { - sigs.insert(secretKey.signDetached(fingerprint(store))); + sigs.insert(signer.signDetached(fingerprint(store))); } std::optional ValidPathInfo::contentAddressWithReferences() const diff --git a/src/libstore/path-info.hh b/src/libstore/path-info.hh index 077abc7e1..b6dc0855d 100644 --- a/src/libstore/path-info.hh +++ b/src/libstore/path-info.hh @@ -1,7 +1,7 @@ #pragma once ///@file -#include "crypto.hh" +#include "signature/signer.hh" #include "path.hh" #include "hash.hh" #include "content-address.hh" @@ -107,7 +107,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo { */ std::string fingerprint(const Store & store) const; - void sign(const Store & store, const SecretKey & secretKey); + void sign(const Store & store, const Signer & signer); /** * @return The `ContentAddressWithReferences` that determines the diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 1afd10af7..a15a78545 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -1,7 +1,5 @@ #include "store-dir-config.hh" -#include - namespace nix { static void 
checkName(std::string_view path, std::string_view name) @@ -49,9 +47,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x"); StorePath StorePath::random(std::string_view name) { - Hash hash(HashAlgorithm::SHA1); - randombytes_buf(hash.hash, hash.hashSize); - return StorePath(hash, name); + return StorePath(Hash::random(HashAlgorithm::SHA1), name); } StorePath StoreDirConfig::parseStorePath(std::string_view path) const diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index 93ddb5b20..86bfdd1a8 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -1,6 +1,7 @@ #include "realisation.hh" #include "store-api.hh" #include "closure.hh" +#include "signature/local-keys.hh" #include namespace nix { @@ -113,9 +114,9 @@ std::string Realisation::fingerprint() const return serialized.dump(); } -void Realisation::sign(const SecretKey & secretKey) +void Realisation::sign(const Signer &signer) { - signatures.insert(secretKey.signDetached(fingerprint())); + signatures.insert(signer.signDetached(fingerprint())); } bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const diff --git a/src/libstore/realisation.hh b/src/libstore/realisation.hh index 4ba2123d8..ddb4af770 100644 --- a/src/libstore/realisation.hh +++ b/src/libstore/realisation.hh @@ -8,7 +8,7 @@ #include "derived-path.hh" #include #include "comparator.hh" -#include "crypto.hh" +#include "signature/signer.hh" namespace nix { @@ -64,7 +64,7 @@ struct Realisation { static Realisation fromJSON(const nlohmann::json& json, const std::string& whence); std::string fingerprint() const; - void sign(const SecretKey &); + void sign(const Signer &); bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const; size_t checkSignatures(const PublicKeys & publicKeys) const; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c2516afb5..c48bfc248 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1,4 +1,4 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" #include "source-accessor.hh" #include "globals.hh" #include "derived-path.hh" diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc index 502afbda2..d067da969 100644 --- a/src/libutil/hash.cc +++ b/src/libutil/hash.cc @@ -14,6 +14,8 @@ #include #include +#include + namespace nix { static size_t regularHashSize(HashAlgorithm type) { @@ -261,6 +263,13 @@ Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI) throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo)); } +Hash Hash::random(HashAlgorithm algo) +{ + Hash hash(algo); + randombytes_buf(hash.hash, hash.hashSize); + return hash; +} + Hash newHashAllowEmpty(std::string_view hashStr, std::optional ha) { if (hashStr.empty()) { diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index 2fe9a53f5..f7e8eb265 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -5,7 +5,6 @@ #include "serialise.hh" #include "file-system.hh" - namespace nix { @@ -143,6 +142,11 @@ public: } static Hash dummy; + + /** + * @return a random hash with hash algorithm `algo` + */ + static Hash random(HashAlgorithm algo); }; /** diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 81efaafec..0fdebaf5c 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -4,14 +4,17 @@ libutil_NAME = libnixutil libutil_DIR := $(d) -libutil_SOURCES := $(wildcard $(d)/*.cc) +libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc) libutil_CXXFLAGS += 
-I src/libutil -libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += -pthread $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context $(foreach i, $(wildcard $(d)/args/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) +$(foreach i, $(wildcard $(d)/signature/*.hh), \ + $(eval $(call install-file-in, $(i), $(includedir)/nix/signature, 0644))) + ifeq ($(HAVE_LIBCPUID), 1) libutil_LDFLAGS += -lcpuid diff --git a/src/libstore/crypto.cc b/src/libutil/signature/local-keys.cc similarity index 64% rename from src/libstore/crypto.cc rename to src/libutil/signature/local-keys.cc index 1b705733c..858b036f5 100644 --- a/src/libstore/crypto.cc +++ b/src/libutil/signature/local-keys.cc @@ -1,13 +1,12 @@ -#include "crypto.hh" +#include "signature/local-keys.hh" + #include "file-system.hh" #include "util.hh" -#include "globals.hh" - #include namespace nix { -static std::pair split(std::string_view s) +BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s) { size_t colon = s.find(':'); if (colon == std::string::npos || colon == 0) @@ -17,10 +16,10 @@ static std::pair split(std::string_view s) Key::Key(std::string_view s) { - auto ss = split(s); + auto ss = BorrowedCryptoValue::parse(s); - name = ss.first; - key = ss.second; + name = ss.name; + key = ss.payload; if (name == "" || key == "") throw Error("secret key is corrupt"); @@ -73,45 +72,34 @@ PublicKey::PublicKey(std::string_view s) throw Error("public key is not valid"); } -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys) +bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) const { - auto ss = split(sig); + auto ss = BorrowedCryptoValue::parse(sig); - auto key = publicKeys.find(std::string(ss.first)); - if (key == publicKeys.end()) return false; + if (ss.name != std::string_view { name }) return false; - auto sig2 = base64Decode(ss.second); + return verifyDetachedAnon(data, ss.payload); +} + +bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const +{ + auto sig2 = base64Decode(sig); if (sig2.size() != crypto_sign_BYTES) throw Error("signature is not valid"); return crypto_sign_verify_detached((unsigned char *) sig2.data(), (unsigned char *) data.data(), data.size(), - (unsigned char *) key->second.key.data()) == 0; + (unsigned char *) key.data()) == 0; } -PublicKeys getDefaultPublicKeys() +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys) { - PublicKeys publicKeys; + auto ss = BorrowedCryptoValue::parse(sig); - // FIXME: filter duplicates + auto key = publicKeys.find(std::string(ss.name)); + if (key == publicKeys.end()) return false; - for (auto s : settings.trustedPublicKeys.get()) { - PublicKey key(s); - publicKeys.emplace(key.name, key); - } - - for (auto secretKeyFile : settings.secretKeyFiles.get()) { - try { - SecretKey secretKey(readFile(secretKeyFile)); - publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); - } catch (SysError & e) { - /* Ignore unreadable key files. That's normal in a - multi-user installation. 
*/ - } - } - - return publicKeys; + return key->second.verifyDetachedAnon(data, ss.payload); } } diff --git a/src/libstore/crypto.hh b/src/libutil/signature/local-keys.hh similarity index 51% rename from src/libstore/crypto.hh rename to src/libutil/signature/local-keys.hh index 35216d470..4aafc1239 100644 --- a/src/libstore/crypto.hh +++ b/src/libutil/signature/local-keys.hh @@ -7,6 +7,25 @@ namespace nix { +/** + * Except where otherwise noted, Nix serializes keys and signatures in + * the form: + * + * ``` + * : + * ``` + */ +struct BorrowedCryptoValue { + std::string_view name; + std::string_view payload; + + /** + * This splits on the colon, the user can then separated decode the + * Base64 payload separately. + */ + static BorrowedCryptoValue parse(std::string_view); +}; + struct Key { std::string name; @@ -49,21 +68,36 @@ struct PublicKey : Key { PublicKey(std::string_view data); + /** + * @return true iff `sig` and this key's names match, and `sig` is a + * correct signature over `data` using the given public key. + */ + bool verifyDetached(std::string_view data, std::string_view sigs) const; + + /** + * @return true iff `sig` is a correct signature over `data` using the + * given public key. + * + * @param just the Base64 signature itself, not a colon-separated pair of a + * public key name and signature. + */ + bool verifyDetachedAnon(std::string_view data, std::string_view sigs) const; + private: PublicKey(std::string_view name, std::string && key) : Key(name, std::move(key)) { } friend struct SecretKey; }; +/** + * Map from key names to public keys + */ typedef std::map PublicKeys; /** * @return true iff ‘sig’ is a correct signature over ‘data’ using one * of the given public keys. */ -bool verifyDetached(const std::string & data, const std::string & sig, - const PublicKeys & publicKeys); - -PublicKeys getDefaultPublicKeys(); +bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys); } diff --git a/src/libutil/signature/signer.cc b/src/libutil/signature/signer.cc new file mode 100644 index 000000000..0d26867b5 --- /dev/null +++ b/src/libutil/signature/signer.cc @@ -0,0 +1,23 @@ +#include "signature/signer.hh" +#include "error.hh" + +#include + +namespace nix { + +LocalSigner::LocalSigner(SecretKey && privateKey) + : privateKey(privateKey) + , publicKey(privateKey.toPublicKey()) +{ } + +std::string LocalSigner::signDetached(std::string_view s) const +{ + return privateKey.signDetached(s); +} + +const PublicKey & LocalSigner::getPublicKey() +{ + return publicKey; +} + +} diff --git a/src/libutil/signature/signer.hh b/src/libutil/signature/signer.hh new file mode 100644 index 000000000..e50170fe2 --- /dev/null +++ b/src/libutil/signature/signer.hh @@ -0,0 +1,61 @@ +#pragma once + +#include "types.hh" +#include "signature/local-keys.hh" + +#include +#include + +namespace nix { + +/** + * An abstract signer + * + * Derive from this class to implement a custom signature scheme. + * + * It is only necessary to implement signature of bytes and provide a + * public key. + */ +struct Signer +{ + virtual ~Signer() = default; + + /** + * Sign the given data, creating a (detached) signature. + * + * @param data data to be signed. + * + * @return the [detached + * signature](https://en.wikipedia.org/wiki/Detached_signature), + * i.e. just the signature itself without a copy of the signed data. + */ + virtual std::string signDetached(std::string_view data) const = 0; + + /** + * View the public key associated with this `Signer`. 
+ */ + virtual const PublicKey & getPublicKey() = 0; +}; + +using Signers = std::map; + +/** + * Local signer + * + * The private key is held in this machine's RAM + */ +struct LocalSigner : Signer +{ + LocalSigner(SecretKey && privateKey); + + std::string signDetached(std::string_view s) const override; + + const PublicKey & getPublicKey() override; + +private: + + SecretKey privateKey; + PublicKey publicKey; +}; + +} diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 5bb3f374b..7b4b1d031 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -7,6 +7,7 @@ #include #include +#include namespace nix { @@ -28,6 +29,9 @@ void initLibUtil() { } // This is not actually the main point of this check, but let's make sure anyway: assert(caught); + + if (sodium_init() == -1) + throw Error("could not initialise libsodium"); } ////////////////////////////////////////////////////////////////////// diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index a57a407e6..dfef44869 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -112,7 +112,7 @@ struct CmdSign : StorePathsCommand std::string description() override { - return "sign store paths"; + return "sign store paths with a local key"; } void run(ref store, StorePaths && storePaths) override @@ -121,6 +121,7 @@ struct CmdSign : StorePathsCommand throw UsageError("you must specify a secret key file using '-k'"); SecretKey secretKey(readFile(secretKeyFile)); + LocalSigner signer(std::move(secretKey)); size_t added{0}; @@ -129,7 +130,7 @@ struct CmdSign : StorePathsCommand auto info2(*info); info2.sigs.clear(); - info2.sign(*store, secretKey); + info2.sign(*store, signer); assert(!info2.sigs.empty()); if (!info->sigs.count(*info2.sigs.begin())) { diff --git a/src/nix/verify.cc b/src/nix/verify.cc index f0234f7be..2a0cbd19f 100644 --- a/src/nix/verify.cc +++ b/src/nix/verify.cc @@ -5,6 +5,7 @@ #include "thread-pool.hh" #include "references.hh" #include "signals.hh" +#include "keys.hh" #include From 37ea1612c78b88884f7baecbb1bf81e65e571592 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 3 Jan 2024 19:38:22 -0500 Subject: [PATCH 051/307] flake: Go back to regular `nixos-23.05-small` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Finally get off the ad-hoc staging commit! 
Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/36c4ac09e9bebcec1fa7b7539cddb0c9e837409c' (2023-11-30) → 'github:NixOS/nixpkgs/2c9c58e98243930f8cb70387934daa4bc8b00373' (2023-12-31) --- flake.lock | 8 ++++---- flake.nix | 12 +----------- tests/nixos/default.nix | 1 - 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/flake.lock b/flake.lock index db1a72c14..ae98d789a 100644 --- a/flake.lock +++ b/flake.lock @@ -34,16 +34,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1701355166, - "narHash": "sha256-4V7XMI0Gd+y0zsi++cEHd99u3GNL0xSTGRmiWKzGnUQ=", + "lastModified": 1704018918, + "narHash": "sha256-erjg/HrpC9liEfm7oLqb8GXCqsxaFwIIPqCsknW5aFY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "36c4ac09e9bebcec1fa7b7539cddb0c9e837409c", + "rev": "2c9c58e98243930f8cb70387934daa4bc8b00373", "type": "github" }, "original": { "owner": "NixOS", - "ref": "staging-23.05", + "ref": "nixos-23.05-small", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 9217de9af..e6a88af9f 100644 --- a/flake.nix +++ b/flake.nix @@ -1,17 +1,7 @@ { description = "The purely functional package manager"; - # TODO Go back to nixos-23.05-small once - # https://github.com/NixOS/nixpkgs/pull/271202 is merged. - # - # Also, do not grab arbitrary further staging commits. This PR was - # carefully made to be based on release-23.05 and just contain - # rebuild-causing changes to packages that Nix actually uses. - # - # Once this is updated to something containing - # https://github.com/NixOS/nixpkgs/pull/271423, don't forget - # to remove the `nix.checkAllErrors = false;` line in the tests. - inputs.nixpkgs.url = "github:NixOS/nixpkgs/staging-23.05"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; }; inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; }; diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 2645cac8e..4459aa664 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -10,7 +10,6 @@ let hostPkgs = nixpkgsFor.${system}.native; defaults = { nixpkgs.pkgs = nixpkgsFor.${system}.native; - nix.checkAllErrors = false; }; _module.args.nixpkgs = nixpkgs; }; From d8a2b06e2068b5209264dfc6d74d5cadf88b8684 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 4 Jan 2024 11:31:09 -0800 Subject: [PATCH 052/307] Remove `clang11Stdenv` Clang 11 doesn't have support for three-way-comparisons (<=>, "spaceship operator", "consistent comparisons") and is older than `clangStdenv`. `clangStdenv` is currently 12 on FreeBSD and Android and 16 on other platforms: https://github.com/NixOS/nixpkgs/blob/32e718f00c26c811be0062dd0777066f02406940/pkgs/top-level/all-packages.nix#L16629-L16644 Let's start by removing Clang 11 from our distribution. 
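For reference, a minimal sketch of the feature in question (illustrative
only, not code from this repository); current GCC and newer Clang accept
it, while Clang 11 does not handle it reliably:

```c++
#include <compare>

struct Version
{
    unsigned majorVer, minorVer;

    // A single defaulted three-way comparison provides
    // <, <=, >, >=, == and != consistently.
    auto operator<=>(const Version &) const = default;
};

static_assert(Version{2, 19} < Version{2, 20});
```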
Next we can consider upgrading to Clang 17, which fully supports the spaceship operator: https://releases.llvm.org/17.0.1/tools/clang/docs/ReleaseNotes.html#what-s-new-in-clang-release --- doc/manual/src/contributing/hacking.md | 4 ++-- flake.nix | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index dce0422dc..9a03ac9b6 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -31,7 +31,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix develop .#native-clang11StdenvPackages +$ nix develop .#native-clangStdenvPackages ``` > **Note** @@ -96,7 +96,7 @@ $ nix-shell To get a shell with one of the other [supported compilation environments](#compilation-environments): ```console -$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages +$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages ``` > **Note** diff --git a/flake.nix b/flake.nix index e6a88af9f..32354a88f 100644 --- a/flake.nix +++ b/flake.nix @@ -52,7 +52,6 @@ stdenvs = [ "ccacheStdenv" - "clang11Stdenv" "clangStdenv" "gccStdenv" "libcxxStdenv" From 388c79d546db0a2e636aa56e4d4b9a5dfde50db5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 5 Jan 2024 15:15:25 +0100 Subject: [PATCH 053/307] Don't pull in libboost_regex We're not using and we don't want to pull in libicu (37 MiB). --- package.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.nix b/package.nix index 56276ecc4..dfebdb0e4 100644 --- a/package.nix +++ b/package.nix @@ -248,7 +248,7 @@ in { # Copy libboost_context so we don't get all of Boost in our closure. # https://github.com/NixOS/nixpkgs/issues/45462 mkdir -p $out/lib - cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*,libboost_regex*} $out/lib + cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib rm -f $out/lib/*.a '' + lib.optionalString stdenv.hostPlatform.isLinux '' chmod u+w $out/lib/*.so.* From a4d33e816ef6c5baaed4eb65e826cd5aa75c0343 Mon Sep 17 00:00:00 2001 From: wiki-me <68199012+wiki-me@users.noreply.github.com> Date: Sat, 6 Jan 2024 20:01:10 +0200 Subject: [PATCH 054/307] Improve documentation around upgrading nix (#9679) * Improve documentation around upgrading nix, add replacing nix channel with new one Co-authored-by: Valentin Gagarin --- doc/manual/src/installation/upgrading.md | 35 ++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 6d09f54d8..d1b64b80b 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -1,5 +1,40 @@ # Upgrading Nix +> **Note** +> +> These upgrade instructions apply for regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md). 
+ +First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed: + +```console +$ nix-channel --list +``` + +By default this should return an entry for Nixpkgs: + +```console +nixpkgs https://nixos.org/channels/nixpkgs-23.05 +``` + +Check which Nix version will be installed: + +```console +$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version" +nix (Nix) 2.18.1 +``` + +> **Warning** +> +> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema! +> Reverting to an older version of Nix may therefore require purging the store database before it can be used. + +Update the channel entry: + +```console +$ nix-channel --remove nixpkgs +$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs +``` + Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c 'nix-channel --update && nix-env --install --attr nixpkgs.nix && From 8e865f3aba526394ca333efe7258bd8db0050fbb Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 6 Jan 2024 22:45:25 +0100 Subject: [PATCH 055/307] deduplicate installation instructions (#9507) * deduplicate installation instructions - reorder sections to present pinned installation more prominently - remove outdated notes on the macOS installer rework - update instructions to handle the installer tarball Co-authored-by: Travis A. Everett --- .../src/installation/installing-binary.md | 162 +++++++++--------- doc/manual/src/quick-start.md | 1 - 2 files changed, 77 insertions(+), 86 deletions(-) diff --git a/doc/manual/src/installation/installing-binary.md b/doc/manual/src/installation/installing-binary.md index ffabb250a..0dc989159 100644 --- a/doc/manual/src/installation/installing-binary.md +++ b/doc/manual/src/installation/installing-binary.md @@ -1,26 +1,60 @@ # Installing a Binary Distribution -The easiest way to install Nix is to run the following command: +To install the latest version Nix, run the following command: ```console $ curl -L https://nixos.org/nix/install | sh ``` -This will run the installer interactively (causing it to explain what -it is doing more explicitly), and perform the default "type" of install -for your platform: -- single-user on Linux -- multi-user on macOS +This performs the default type of installation for your platform: - > **Notes on read-only filesystem root in macOS 10.15 Catalina +** - > - > - It took some time to support this cleanly. You may see posts, - > examples, and tutorials using obsolete workarounds. - > - Supporting it cleanly made macOS installs too complex to qualify - > as single-user, so this type is no longer supported on macOS. +- [Multi-user](#multi-user-installation): + - Linux with systemd and without SELinux + - macOS +- [Single-user](#single-user-installation): + - Linux without systemd + - Linux with SELinux -We recommend the multi-user install if it supports your platform and -you can authenticate with `sudo`. +We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`. + +The installer can configured with various command line arguments and environment variables. 
+To show available command line flags: + +```console +$ curl -L https://nixos.org/nix/install | sh -s -- --help +``` + +To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball). + +# Installing a pinned Nix version from a URL + +Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). +The directory for each version contains the corresponding SHA-256 hash. + +All installation scripts are invoked the same way: + +```console +$ export VERSION=2.19.2 +$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh +``` + +# Multi User Installation + +The multi-user Nix installation creates system users and a system service for the Nix daemon. + +Supported systems: + +- Linux running systemd, with SELinux disabled +- macOS + +To explicitly instruct the installer to perform a multi-user installation on your system: + +```console +$ curl -L https://nixos.org/nix/install | sh -s -- --daemon +``` + +You can run this under your usual user account or `root`. +The script will invoke `sudo` as needed. # Single User Installation @@ -30,60 +64,48 @@ To explicitly select a single-user installation on your system: $ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon ``` -This will perform a single-user installation of Nix, meaning that `/nix` -is owned by the invoking user. You can run this under your usual user -account or root. The script will invoke `sudo` to create `/nix` -if it doesn’t already exist. If you don’t have `sudo`, you should -manually create `/nix` first as root, e.g.: +In a single-user installation, `/nix` is owned by the invoking user. +The script will invoke `sudo` to create `/nix` if it doesn’t already exist. +If you don’t have `sudo`, manually create `/nix` as `root`: ```console -$ mkdir /nix -$ chown alice /nix +$ su root +# mkdir /nix +# chown alice /nix ``` -The install script will modify the first writable file from amongst -`.bash_profile`, `.bash_login` and `.profile` to source -`~/.nix-profile/etc/profile.d/nix.sh`. You can set the -`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing -the install script to disable this behaviour. +# Installing from a binary tarball -# Multi User Installation +You can also download a binary tarball that contains Nix and all its dependencies: +- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../contributing/hacking.md#platforms) +- Download and unpack the tarball +- Run the installer -The multi-user Nix installation creates system users, and a system -service for the Nix daemon. - -**Supported Systems** -- Linux running systemd, with SELinux disabled -- macOS - -You can instruct the installer to perform a multi-user installation on -your system: - -```console -$ curl -L https://nixos.org/nix/install | sh -s -- --daemon -``` - -The multi-user installation of Nix will create build users between the -user IDs 30001 and 30032, and a group with the group ID 30000. You -can run this under your usual user account or root. The script -will invoke `sudo` as needed. - -> **Note** +> **Example** > -> If you need Nix to use a different group ID or user ID set, you will -> have to download the tarball manually and [edit the install -> script](#installing-from-a-binary-tarball). 
+> ```console +> $ pushd $(mktemp -d) +> $ export VERSION=2.19.2 +> $ export SYSTEM=x86_64-linux +> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz +> $ tar xfj nix-$VERSION-$SYSTEM.tar.xz +> $ cd nix-$VERSION-$SYSTEM +> $ ./install +> $ popd +> ``` -The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist. -The installer will first back up these files with a `.backup-before-nix` -extension. The installer will also create `/etc/profile.d/nix.sh`. +The installer can be customised with the environment variables declared in the file named `install-multi-user`. + +## Native packages for Linux distributions + +The Nix community maintains installers for some Linux distributions in their native packaging format(https://nix-community.github.io/nix-installers/). # macOS Installation + []{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes} - -We believe we have ironed out how to cleanly support the read-only root +We believe we have ironed out how to cleanly support the read-only root file system on modern macOS. New installs will do this automatically. This section previously detailed the situation, options, and trade-offs, @@ -126,33 +148,3 @@ this to run the installer, but it may help if you run into trouble: boot process to avoid problems loading or restoring any programs that need access to your Nix store -# Installing a pinned Nix version from a URL - -Version-specific installation URLs for all Nix versions -since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/). -The corresponding SHA-256 hash can be found in the directory for the given version. - -These install scripts can be used the same as usual: - -```console -$ curl -L https://releases.nixos.org/nix/nix-/install | sh -``` - -# Installing from a binary tarball - -You can also download a binary tarball that contains Nix and all its -dependencies. (This is what the install script at - does automatically.) You should unpack -it somewhere (e.g. in `/tmp`), and then run the script named `install` -inside the binary tarball: - -```console -$ cd /tmp -$ tar xfj nix-1.8-x86_64-darwin.tar.bz2 -$ cd nix-1.8-x86_64-darwin -$ ./install -``` - -If you need to edit the multi-user installation script to use different -group ID or a different user ID range, modify the variables set in the -file named `install-multi-user`. diff --git a/doc/manual/src/quick-start.md b/doc/manual/src/quick-start.md index 04a0b7c96..75853ced7 100644 --- a/doc/manual/src/quick-start.md +++ b/doc/manual/src/quick-start.md @@ -10,7 +10,6 @@ For more in-depth information you are kindly referred to subsequent chapters. ``` The install script will use `sudo`, so make sure you have sufficient rights. - On Linux, `--daemon` can be omitted for a single-user install. For other installation methods, see the detailed [installation instructions](installation/index.md). From fe751fbde22aea0362993ab7212f96630443c307 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sat, 6 Jan 2024 23:44:15 +0100 Subject: [PATCH 056/307] don't show channels in upgrade instructions channels make everything more stateful, and therefore more complicated and potentially confusing, but aren't needed for this task, so don't encourage their use. 
--- doc/manual/src/installation/upgrading.md | 49 ++++++++++-------------- 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index d1b64b80b..47618e2f5 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -2,48 +2,39 @@ > **Note** > -> These upgrade instructions apply for regular Linux distributions where Nix was installed following the [installation instructions in this manual](./index.md). +> These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md). -First, find the name of the current [channel](@docroot@/command-ref/nix-channel.md) through which Nix is distributed: +Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`: ```console -$ nix-channel --list -``` - -By default this should return an entry for Nixpkgs: - -```console -nixpkgs https://nixos.org/channels/nixpkgs-23.05 -``` - -Check which Nix version will be installed: - -```console -$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-23.11 --run "nix --version" +$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version" nix (Nix) 2.18.1 ``` > **Warning** > -> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with `nix-build` or `nix-store --realise`, may change the database schema! +> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! > Reverting to an older version of Nix may therefore require purging the store database before it can be used. -Update the channel entry: +### Linux multi-user ```console -$ nix-channel --remove nixpkgs -$ nix-channel --add https://nixos.org/channels/nixpkgs-23.11 nixpkgs +$ sudo su +# nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable +# systemctl daemon-reload +# systemctl restart nix-daemon ``` -Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c -'nix-channel --update && -nix-env --install --attr nixpkgs.nix && -launchctl remove org.nixos.nix-daemon && -launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'` +## macOS multi-user -Single-user installations of Nix should run this: `nix-channel --update; -nix-env --install --attr nixpkgs.nix nixpkgs.cacert` +```console +$ sudo nix-env --install --file '' --attr nix -I nixpkgs=channel:nixpkgs-unstable +$ sudo launchctl remove org.nixos.nix-daemon +$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist +``` -Multi-user Nix users on Linux should run this with sudo: `nix-channel ---update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl -daemon-reload; systemctl restart nix-daemon` +## Single-user all platforms + +```console +$ nix-env --install --file '' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable +``` From eeb2f083c5646bd3a66344cff69be586fd89a450 Mon Sep 17 00:00:00 2001 From: Shea Levy Date: Sun, 24 Dec 2023 06:44:56 -0500 Subject: [PATCH 057/307] Improve error message for fixed-outputs with references. This codepath is possible, e.g. with a dockerTools.pullImage of an image with a Nix store. 
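For illustration, a hypothetical sketch of that scenario (it uses nixpkgs'
`dockerTools`, which is not part of this repository; the digest and hash
below are placeholders, not real values):

```nix
# The pulled image is a fixed-output derivation; if the image embeds a Nix
# store, its contents refer to other store paths and hit this code path.
{ pkgs ? import <nixpkgs> { } }:
pkgs.dockerTools.pullImage {
  imageName = "nixos/nix";
  imageDigest = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
  sha256 = pkgs.lib.fakeSha256;
}
```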
--- src/libstore/store-api.cc | 5 ++++- tests/functional/fixed.nix | 9 +++++++++ tests/functional/fixed.sh | 3 +++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c2516afb5..ad6e1cc0f 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -194,7 +194,10 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const Fixed if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) { return makeStorePath(makeType(*this, "source", info.references), info.hash, name); } else { - assert(info.references.size() == 0); + if (!info.references.empty()) { + throw Error("fixed output derivation '%s' is not allowed to refer to other store paths.\nYou may need to use the 'unsafeDiscardReferences' derivation attribute, see the manual for more details.", + name); + } return makeStorePath("output:out", hashString(HashAlgorithm::SHA256, "fixed:out:" diff --git a/tests/functional/fixed.nix b/tests/functional/fixed.nix index babe71504..5bdf79333 100644 --- a/tests/functional/fixed.nix +++ b/tests/functional/fixed.nix @@ -48,6 +48,15 @@ rec { (f ./fixed.builder1.sh "flat" "md5" "ddd8be4b179a529afa5f2ffae4b9858") ]; + badReferences = mkDerivation rec { + name = "bad-hash"; + builder = script; + script = builtins.toFile "installer.sh" "echo $script >$out"; + outputHash = "1ixr6yd3297ciyp9im522dfxpqbkhcw0pylkb2aab915278fqaik"; + outputHashAlgo = "sha256"; + outputHashMode = "flat"; + }; + # Test for building two derivations in parallel that produce the # same output path because they're fixed-output derivations. parallelSame = [ diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index f1e1ce420..2405d059c 100644 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -26,6 +26,9 @@ nix-build fixed.nix -A good2 --no-out-link echo 'testing reallyBad...' nix-instantiate fixed.nix -A reallyBad && fail "should fail" +echo 'testing fixed with references...' +expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths" + # While we're at it, check attribute selection a bit more. echo 'testing attribute selection...' test $(nix-instantiate fixed.nix -A good.1 | wc -l) = 1 From faf87b51f76ba9794e65e1d17dc3debf759052cd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 Jan 2024 14:14:36 +0100 Subject: [PATCH 058/307] Show why GC socket connection was refused Co-authored-by: John Ericson --- src/libstore/gc.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index b5b9e2049..38a9c708b 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -141,7 +141,7 @@ void LocalStore::addTempRoot(const StorePath & path) /* The garbage collector may have exited or not created the socket yet, so we need to restart. 
*/
             if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) {
-                debug("GC socket connection refused");
+                debug("GC socket connection refused: %s", e.msg());
                 fdRootsSocket->close();
                 goto restart;
             }

From c4c636284e4b7b057788383068967910c5a31856 Mon Sep 17 00:00:00 2001
From: John Ericson <John.Ericson@Obsidian.Systems>
Date: Mon, 8 Jan 2024 10:17:28 -0500
Subject: [PATCH 059/307] Only test bug fix with new enough daemon

---
 tests/functional/fixed.sh | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh
index 2405d059c..d98d4cd15 100644
--- a/tests/functional/fixed.sh
+++ b/tests/functional/fixed.sh
@@ -26,8 +26,10 @@ nix-build fixed.nix -A good2 --no-out-link
 echo 'testing reallyBad...'
 nix-instantiate fixed.nix -A reallyBad && fail "should fail"
 
-echo 'testing fixed with references...'
-expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths"
+if isDaemonNewer "2.20pre20240108"; then
+    echo 'testing fixed with references...'
+    expectStderr 1 nix-build fixed.nix -A badReferences | grepQuiet "not allowed to refer to other store paths"
+fi
 
 # While we're at it, check attribute selection a bit more.
 echo 'testing attribute selection...'
 test $(nix-instantiate fixed.nix -A good.1 | wc -l) = 1

From 605eba3829946eb04f1aaf1160cf11a55183c677 Mon Sep 17 00:00:00 2001
From: Weijia Wang <9713184+wegank@users.noreply.github.com>
Date: Mon, 8 Jan 2024 17:31:27 +0100
Subject: [PATCH 060/307] Fix typo in configure.ac

---
 configure.ac | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/configure.ac b/configure.ac
index b97e25bbd..369d62552 100644
--- a/configure.ac
+++ b/configure.ac
@@ -160,7 +160,7 @@ AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation
 AC_SUBST(ENABLE_DOC_GEN)
 
 AS_IF(
-  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"],
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"],
   [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])
 
 # Building without API docs is the default as Nix' C++ interfaces are internal and unstable.

From 6a243e5ed281344135285d9093ef36969a867d73 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 8 Jan 2024 19:38:36 +0100 Subject: [PATCH 061/307] fix an old lost direct (#9458) this part must have been moved quite a while ago, but apparently so far no one noticed --- doc/manual/redirects.js | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js index 3b507adf3..d04f32b49 100644 --- a/doc/manual/redirects.js +++ b/doc/manual/redirects.js @@ -21,6 +21,7 @@ const redirects = { "chap-distributed-builds": "advanced-topics/distributed-builds.html", "chap-post-build-hook": "advanced-topics/post-build-hook.html", "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats", + "chap-writing-nix-expressions": "language/index.html", "part-command-ref": "command-ref/command-ref.html", "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation", "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges", From 53fdcbca509b6c5dacaea3d3c465d86e49b0dd74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Luis=20Lafuente?= Date: Mon, 8 Jan 2024 19:46:38 +0100 Subject: [PATCH 062/307] Add clang format configuration --- .clang-format | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .clang-format diff --git a/.clang-format b/.clang-format new file mode 100644 index 000000000..9c0c0946a --- /dev/null +++ b/.clang-format @@ -0,0 +1,30 @@ +BasedOnStyle: LLVM +IndentWidth: 4 +BreakBeforeBraces: Custom +BraceWrapping: + AfterStruct: true + AfterClass: true + AfterFunction: true + AfterUnion: true + SplitEmptyRecord: false +PointerAlignment: Middle +FixNamespaceComments: false +SortIncludes: Never +#IndentPPDirectives: BeforeHash +SpaceAfterCStyleCast: true +SpaceAfterTemplateKeyword: false +AccessModifierOffset: -4 +AlignAfterOpenBracket: AlwaysBreak +AlignEscapedNewlines: DontAlign +ColumnLimit: 120 +BreakStringLiterals: false +BitFieldColonSpacing: None +AllowShortFunctionsOnASingleLine: Empty +AlwaysBreakTemplateDeclarations: Yes +BinPackParameters: false +BreakConstructorInitializers: BeforeComma +EmptyLineAfterAccessModifier: Leave # change to always/never later? +EmptyLineBeforeAccessModifier: Leave +#PackConstructorInitializers: BinPack +BreakBeforeBinaryOperators: NonAssignment +AlwaysBreakBeforeMultilineStrings: true From 4feb7d9f715021784952bea57b37a8628c9b6860 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 13:14:42 -0800 Subject: [PATCH 063/307] Combine `AbstractPos`, `PosAdapter`, and `Pos` Also move `SourcePath` into `libutil`. These changes allow `error.hh` and `error.cc` to access source path and position information, which we can use to produce better error messages (for example, we could consider omitting filenames when two or more consecutive stack frames originate from the same file). 
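As a usage sketch (not part of this patch): clang-format discovers this
file automatically from the repository root, so individual files can be
reformatted in place, for example:

```console
$ clang-format -i src/libutil/error.cc
```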
--- src/libcmd/editor-for.cc | 1 + src/libcmd/editor-for.hh | 2 +- src/libcmd/installable-value.cc | 3 +- src/libcmd/repl.cc | 2 +- src/libexpr/eval.cc | 9 +- src/libexpr/eval.hh | 2 +- src/libexpr/nixexpr.cc | 63 ------- src/libexpr/nixexpr.hh | 26 +-- src/libexpr/primops.cc | 3 +- src/libexpr/value.hh | 1 + src/libfetchers/fetch-to-store.cc | 68 ++++++++ src/libfetchers/fetch-to-store.hh | 22 +++ src/libfetchers/fetchers.cc | 4 +- src/libfetchers/filtering-input-accessor.hh | 1 + src/libfetchers/fs-input-accessor.hh | 1 + src/libfetchers/input-accessor.cc | 129 --------------- src/libfetchers/input-accessor.hh | 174 -------------------- src/libfetchers/memory-input-accessor.cc | 1 + src/libfetchers/memory-input-accessor.hh | 1 + src/libstore/store-api.hh | 1 + src/libutil/error.cc | 55 +------ src/libutil/error.hh | 42 +---- src/libutil/input-accessor.hh | 27 +++ src/libutil/logging.cc | 6 +- src/libutil/position.cc | 112 +++++++++++++ src/libutil/position.hh | 74 +++++++++ src/libutil/ref.hh | 1 + src/{libstore => libutil}/repair-flag.hh | 0 src/libutil/source-path.cc | 105 ++++++++++++ src/libutil/source-path.hh | 114 +++++++++++++ 30 files changed, 561 insertions(+), 489 deletions(-) create mode 100644 src/libfetchers/fetch-to-store.cc create mode 100644 src/libfetchers/fetch-to-store.hh delete mode 100644 src/libfetchers/input-accessor.cc delete mode 100644 src/libfetchers/input-accessor.hh create mode 100644 src/libutil/input-accessor.hh create mode 100644 src/libutil/position.cc create mode 100644 src/libutil/position.hh rename src/{libstore => libutil}/repair-flag.hh (100%) create mode 100644 src/libutil/source-path.cc create mode 100644 src/libutil/source-path.hh diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 619d3673f..67653d9c9 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -1,5 +1,6 @@ #include "editor-for.hh" #include "environment-variables.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/editor-for.hh b/src/libcmd/editor-for.hh index fbf4307c9..8acd7011e 100644 --- a/src/libcmd/editor-for.hh +++ b/src/libcmd/editor-for.hh @@ -2,7 +2,7 @@ ///@file #include "types.hh" -#include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc index bdc34bbe3..c8a3e1b21 100644 --- a/src/libcmd/installable-value.cc +++ b/src/libcmd/installable-value.cc @@ -1,5 +1,6 @@ #include "installable-value.hh" #include "eval-cache.hh" +#include "fetch-to-store.hh" namespace nix { @@ -44,7 +45,7 @@ ref InstallableValue::require(ref installable) std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx) { if (v.type() == nPath) { - auto storePath = v.path().fetchToStore(*state->store); + auto storePath = fetchToStore(*state->store, v.path()); return {{ .path = DerivedPath::Opaque { .path = std::move(storePath), diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index dea91ba63..78c4538b2 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -221,7 +221,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi // prefer direct pos, but if noPos then try the expr. auto pos = dt.pos ? dt.pos - : static_cast>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]); + : positions[dt.expr.getPos() ? 
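As a rough sketch of the kind of improvement this enables (hypothetical,
not included in this change), using only the `Pos` interface introduced
below:

```c++
#include "position.hh"
#include <ostream>

// Print a trace position, but show the origin (file, string, stdin) only
// when it differs from the previous stack frame's origin.
void printTracePos(std::ostream & out, const nix::Pos & pos, const nix::Pos * prev)
{
    bool sameOrigin = prev && prev->origin == pos.origin;
    pos.print(out, /* showOrigin */ !sameOrigin);
}
```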
dt.expr.getPos() : noPos]; if (pos) { out << pos; diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 31f2d4952..d408f1adc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -19,6 +19,7 @@ #include "signals.hh" #include "gc-small-vector.hh" #include "url.hh" +#include "fetch-to-store.hh" #include #include @@ -870,7 +871,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : static_cast>(positions[expr.getPos()]), + .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -909,7 +910,7 @@ static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, - std::shared_ptr && pos, + std::shared_ptr && pos, const char * s, const std::string & s2) { @@ -1187,7 +1188,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) *this, *e, this->baseEnv, - e->getPos() ? static_cast>(positions[e->getPos()]) : nullptr, + e->getPos() ? std::make_shared(positions[e->getPos()]) : nullptr, "while evaluating the file '%1%':", resolvedPath.to_string()) : nullptr; @@ -2368,7 +2369,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? i->second : [&]() { - auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 6e3f08d55..5e0f1886d 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -142,7 +142,7 @@ struct RegexCache; std::shared_ptr makeRegexCache(); struct DebugTrace { - std::shared_ptr pos; + std::shared_ptr pos; const Expr & expr; const Env & env; hintformat hint; diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index ede070cff..964de6351 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -11,58 +11,6 @@ namespace nix { ExprBlackHole eBlackHole; -struct PosAdapter : AbstractPos -{ - Pos::Origin origin; - - PosAdapter(Pos::Origin origin) - : origin(std::move(origin)) - { - } - - std::optional getSource() const override - { - return std::visit(overloaded { - [](const Pos::none_tag &) -> std::optional { - return std::nullopt; - }, - [](const Pos::Stdin & s) -> std::optional { - // Get rid of the null terminators added by the parser. - return std::string(s.source->c_str()); - }, - [](const Pos::String & s) -> std::optional { - // Get rid of the null terminators added by the parser. 
- return std::string(s.source->c_str()); - }, - [](const SourcePath & path) -> std::optional { - try { - return path.readFile(); - } catch (Error &) { - return std::nullopt; - } - } - }, origin); - } - - void print(std::ostream & out) const override - { - std::visit(overloaded { - [&](const Pos::none_tag &) { out << "«none»"; }, - [&](const Pos::Stdin &) { out << "«stdin»"; }, - [&](const Pos::String & s) { out << "«string»"; }, - [&](const SourcePath & path) { out << path; } - }, origin); - } -}; - -Pos::operator std::shared_ptr() const -{ - auto pos = std::make_shared(origin); - pos->line = line; - pos->column = column; - return pos; -} - // FIXME: remove, because *symbols* are abstract and do not have a single // textual representation; see printIdentifier() std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol) @@ -268,17 +216,6 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const } -std::ostream & operator << (std::ostream & str, const Pos & pos) -{ - if (auto pos2 = (std::shared_ptr) pos) { - str << *pos2; - } else - str << "undefined position"; - - return str; -} - - std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) { std::ostringstream out; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 71ed9ef30..3cd46ca27 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "error.hh" #include "chunked-vector.hh" +#include "position.hh" namespace nix { @@ -28,27 +29,6 @@ public: using EvalError::EvalError; }; -/** - * Position objects. - */ -struct Pos -{ - uint32_t line; - uint32_t column; - - struct none_tag { }; - struct Stdin { ref source; }; - struct String { ref source; }; - - typedef std::variant Origin; - - Origin origin; - - explicit operator bool() const { return line > 0; } - - operator std::shared_ptr() const; -}; - class PosIdx { friend class PosTable; @@ -81,7 +61,7 @@ public: mutable uint32_t idx = std::numeric_limits::max(); // Used for searching in PosTable::[]. 
- explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {} + explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} public: const Pos::Origin origin; @@ -132,8 +112,6 @@ public: inline PosIdx noPos = {}; -std::ostream & operator << (std::ostream & str, const Pos & pos); - struct Env; struct Value; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index b2ffcc051..ee07e5568 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -16,6 +16,7 @@ #include "value-to-xml.hh" #include "primops.hh" #include "fs-input-accessor.hh" +#include "fetch-to-store.hh" #include #include @@ -2240,7 +2241,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair); + auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index d9860e921..c65b336b0 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -8,6 +8,7 @@ #include "symbol-table.hh" #include "value/context.hh" #include "input-accessor.hh" +#include "source-path.hh" #if HAVE_BOEHMGC #include diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc new file mode 100644 index 000000000..196489e05 --- /dev/null +++ b/src/libfetchers/fetch-to-store.cc @@ -0,0 +1,68 @@ +#include "fetch-to-store.hh" +#include "fetchers.hh" +#include "cache.hh" + +namespace nix { + +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name, + ContentAddressMethod method, + PathFilter * filter, + RepairFlag repair) +{ + // FIXME: add an optimisation for the case where the accessor is + // an FSInputAccessor pointing to a store path. + + std::optional cacheKey; + + if (!filter && path.accessor->fingerprint) { + cacheKey = fetchers::Attrs{ + {"_what", "fetchToStore"}, + {"store", store.storeDir}, + {"name", std::string(name)}, + {"fingerprint", *path.accessor->fingerprint}, + { + "method", + std::visit(overloaded { + [](const TextIngestionMethod &) { + return "text"; + }, + [](const FileIngestionMethod & fim) { + switch (fim) { + case FileIngestionMethod::Flat: return "flat"; + case FileIngestionMethod::Recursive: return "nar"; + default: assert(false); + } + }, + }, method.raw), + }, + {"path", path.path.abs()} + }; + if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { + debug("store path cache hit for '%s'", path); + return res->second; + } + } else + debug("source path '%s' is uncacheable", path); + + Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path)); + + auto filter2 = filter ? *filter : defaultPathFilter; + + auto storePath = + settings.readOnlyMode + ? 
store.computeStorePath( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first + : store.addToStore( + name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair); + + if (cacheKey) + fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); + + return storePath; +} + + +} diff --git a/src/libfetchers/fetch-to-store.hh b/src/libfetchers/fetch-to-store.hh new file mode 100644 index 000000000..e5e039340 --- /dev/null +++ b/src/libfetchers/fetch-to-store.hh @@ -0,0 +1,22 @@ +#pragma once + +#include "source-path.hh" +#include "store-api.hh" +#include "file-system.hh" +#include "repair-flag.hh" +#include "file-content-address.hh" + +namespace nix { + +/** + * Copy the `path` to the Nix store. + */ +StorePath fetchToStore( + Store & store, + const SourcePath & path, + std::string_view name = "source", + ContentAddressMethod method = FileIngestionMethod::Recursive, + PathFilter * filter = nullptr, + RepairFlag repair = NoRepair); + +} diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index f309e5993..7f282c972 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -1,6 +1,8 @@ #include "fetchers.hh" #include "store-api.hh" #include "input-accessor.hh" +#include "source-path.hh" +#include "fetch-to-store.hh" #include @@ -374,7 +376,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const std::pair InputScheme::fetch(ref store, const Input & input) { auto [accessor, input2] = getAccessor(store, input); - auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName()); + auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName()); return {storePath, input2}; } diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index e1b83c929..a352a33a6 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -1,6 +1,7 @@ #pragma once #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index ba5af5887..a98e83511 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -1,6 +1,7 @@ #pragma once #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc deleted file mode 100644 index a647f5915..000000000 --- a/src/libfetchers/input-accessor.cc +++ /dev/null @@ -1,129 +0,0 @@ -#include "input-accessor.hh" -#include "store-api.hh" -#include "cache.hh" - -namespace nix { - -StorePath InputAccessor::fetchToStore( - Store & store, - const CanonPath & path, - std::string_view name, - ContentAddressMethod method, - PathFilter * filter, - RepairFlag repair) -{ - // FIXME: add an optimisation for the case where the accessor is - // an FSInputAccessor pointing to a store path. 
- - std::optional cacheKey; - - if (!filter && fingerprint) { - cacheKey = fetchers::Attrs{ - {"_what", "fetchToStore"}, - {"store", store.storeDir}, - {"name", std::string(name)}, - {"fingerprint", *fingerprint}, - { - "method", - std::visit(overloaded { - [](const TextIngestionMethod &) { - return "text"; - }, - [](const FileIngestionMethod & fim) { - switch (fim) { - case FileIngestionMethod::Flat: return "flat"; - case FileIngestionMethod::Recursive: return "nar"; - default: assert(false); - } - }, - }, method.raw), - }, - {"path", path.abs()} - }; - if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) { - debug("store path cache hit for '%s'", showPath(path)); - return res->second; - } - } else - debug("source path '%s' is uncacheable", showPath(path)); - - Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path))); - - auto filter2 = filter ? *filter : defaultPathFilter; - - auto storePath = - settings.readOnlyMode - ? store.computeStorePath( - name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first - : store.addToStore( - name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair); - - if (cacheKey) - fetchers::getCache()->add(store, *cacheKey, {}, storePath, true); - - return storePath; -} - -std::ostream & operator << (std::ostream & str, const SourcePath & path) -{ - str << path.to_string(); - return str; -} - -StorePath SourcePath::fetchToStore( - Store & store, - std::string_view name, - ContentAddressMethod method, - PathFilter * filter, - RepairFlag repair) const -{ - return accessor->fetchToStore(store, path, name, method, filter, repair); -} - -std::string_view SourcePath::baseName() const -{ - return path.baseName().value_or("source"); -} - -SourcePath SourcePath::parent() const -{ - auto p = path.parent(); - assert(p); - return {accessor, std::move(*p)}; -} - -SourcePath SourcePath::resolveSymlinks() const -{ - auto res = SourcePath(accessor); - - int linksAllowed = 1024; - - std::list todo; - for (auto & c : path) - todo.push_back(std::string(c)); - - while (!todo.empty()) { - auto c = *todo.begin(); - todo.pop_front(); - if (c == "" || c == ".") - ; - else if (c == "..") - res.path.pop(); - else { - res.path.push(c); - if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { - if (!linksAllowed--) - throw Error("infinite symlink recursion in path '%s'", path); - auto target = res.readLink(); - res.path.pop(); - if (hasPrefix(target, "/")) - res.path = CanonPath::root; - todo.splice(todo.begin(), tokenizeString>(target, "/")); - } - } - } - - return res; -} - -} diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh deleted file mode 100644 index d2a21cb4b..000000000 --- a/src/libfetchers/input-accessor.hh +++ /dev/null @@ -1,174 +0,0 @@ -#pragma once -///@file - -#include "source-accessor.hh" -#include "ref.hh" -#include "types.hh" -#include "file-system.hh" -#include "repair-flag.hh" -#include "content-address.hh" - -namespace nix { - -MakeError(RestrictedPathError, Error); - -struct SourcePath; -class StorePath; -class Store; - -struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this -{ - std::optional fingerprint; - - /** - * Return the maximum last-modified time of the files in this - * tree, if available. 
- */ - virtual std::optional getLastModified() - { - return std::nullopt; - } - - StorePath fetchToStore( - Store & store, - const CanonPath & path, - std::string_view name = "source", - ContentAddressMethod method = FileIngestionMethod::Recursive, - PathFilter * filter = nullptr, - RepairFlag repair = NoRepair); -}; - -/** - * An abstraction for accessing source files during - * evaluation. Currently, it's just a wrapper around `CanonPath` that - * accesses files in the regular filesystem, but in the future it will - * support fetching files in other ways. - */ -struct SourcePath -{ - ref accessor; - CanonPath path; - - SourcePath(ref accessor, CanonPath path = CanonPath::root) - : accessor(std::move(accessor)) - , path(std::move(path)) - { } - - std::string_view baseName() const; - - /** - * Construct the parent of this `SourcePath`. Aborts if `this` - * denotes the root. - */ - SourcePath parent() const; - - /** - * If this `SourcePath` denotes a regular file (not a symlink), - * return its contents; otherwise throw an error. - */ - std::string readFile() const - { return accessor->readFile(path); } - - /** - * Return whether this `SourcePath` denotes a file (of any type) - * that exists - */ - bool pathExists() const - { return accessor->pathExists(path); } - - /** - * Return stats about this `SourcePath`, or throw an exception if - * it doesn't exist. - */ - InputAccessor::Stat lstat() const - { return accessor->lstat(path); } - - /** - * Return stats about this `SourcePath`, or std::nullopt if it - * doesn't exist. - */ - std::optional maybeLstat() const - { return accessor->maybeLstat(path); } - - /** - * If this `SourcePath` denotes a directory (not a symlink), - * return its directory entries; otherwise throw an error. - */ - InputAccessor::DirEntries readDirectory() const - { return accessor->readDirectory(path); } - - /** - * If this `SourcePath` denotes a symlink, return its target; - * otherwise throw an error. - */ - std::string readLink() const - { return accessor->readLink(path); } - - /** - * Dump this `SourcePath` to `sink` as a NAR archive. - */ - void dumpPath( - Sink & sink, - PathFilter & filter = defaultPathFilter) const - { return accessor->dumpPath(path, sink, filter); } - - /** - * Copy this `SourcePath` to the Nix store. - */ - StorePath fetchToStore( - Store & store, - std::string_view name = "source", - ContentAddressMethod method = FileIngestionMethod::Recursive, - PathFilter * filter = nullptr, - RepairFlag repair = NoRepair) const; - - /** - * Return the location of this path in the "real" filesystem, if - * it has a physical location. - */ - std::optional getPhysicalPath() const - { return accessor->getPhysicalPath(path); } - - std::string to_string() const - { return accessor->showPath(path); } - - /** - * Append a `CanonPath` to this path. - */ - SourcePath operator + (const CanonPath & x) const - { return {accessor, path + x}; } - - /** - * Append a single component `c` to this path. `c` must not - * contain a slash. A slash is implicitly added between this path - * and `c`. 
- */ - SourcePath operator + (std::string_view c) const - { return {accessor, path + c}; } - - bool operator == (const SourcePath & x) const - { - return std::tie(accessor, path) == std::tie(x.accessor, x.path); - } - - bool operator != (const SourcePath & x) const - { - return std::tie(accessor, path) != std::tie(x.accessor, x.path); - } - - bool operator < (const SourcePath & x) const - { - return std::tie(accessor, path) < std::tie(x.accessor, x.path); - } - - /** - * Resolve any symlinks in this `SourcePath` (including its - * parents). The result is a `SourcePath` in which no element is a - * symlink. - */ - SourcePath resolveSymlinks() const; -}; - -std::ostream & operator << (std::ostream & str, const SourcePath & path); - -} diff --git a/src/libfetchers/memory-input-accessor.cc b/src/libfetchers/memory-input-accessor.cc index 057f3e37f..88a2e34e8 100644 --- a/src/libfetchers/memory-input-accessor.cc +++ b/src/libfetchers/memory-input-accessor.cc @@ -1,5 +1,6 @@ #include "memory-input-accessor.hh" #include "memory-source-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libfetchers/memory-input-accessor.hh b/src/libfetchers/memory-input-accessor.hh index b75b02bfd..508b07722 100644 --- a/src/libfetchers/memory-input-accessor.hh +++ b/src/libfetchers/memory-input-accessor.hh @@ -1,4 +1,5 @@ #include "input-accessor.hh" +#include "source-path.hh" namespace nix { diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 96a7ebd7b..9667b5e9e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -13,6 +13,7 @@ #include "path-info.hh" #include "repair-flag.hh" #include "store-dir-config.hh" +#include "source-path.hh" #include #include diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e42925c2b..bd2f6b840 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -2,6 +2,7 @@ #include "environment-variables.hh" #include "signals.hh" #include "terminal.hh" +#include "position.hh" #include #include @@ -10,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -41,15 +42,6 @@ std::ostream & operator <<(std::ostream & os, const hintformat & hf) return os << hf.str(); } -std::ostream & operator <<(std::ostream & str, const AbstractPos & pos) -{ - pos.print(str); - str << ":" << pos.line; - if (pos.column > 0) - str << ":" << pos.column; - return str; -} - /** * An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container. */ @@ -76,49 +68,10 @@ inline bool operator> (const Trace& lhs, const Trace& rhs) { return rhs < lhs; } inline bool operator<=(const Trace& lhs, const Trace& rhs) { return !(lhs > rhs); } inline bool operator>=(const Trace& lhs, const Trace& rhs) { return !(lhs < rhs); } -std::optional AbstractPos::getCodeLines() const -{ - if (line == 0) - return std::nullopt; - - if (auto source = getSource()) { - - std::istringstream iss(*source); - // count the newlines. 
- int count = 0; - std::string curLine; - int pl = line - 1; - - LinesOfCode loc; - - do { - std::getline(iss, curLine); - ++count; - if (count < pl) - ; - else if (count == pl) { - loc.prevLineOfCode = curLine; - } else if (count == pl + 1) { - loc.errLineOfCode = curLine; - } else if (count == pl + 2) { - loc.nextLineOfCode = curLine; - break; - } - - if (!iss.good()) - break; - } while (true); - - return loc; - } - - return std::nullopt; -} - // print lines of code to the ostream, indicating the error column. void printCodeLines(std::ostream & out, const std::string & prefix, - const AbstractPos & errPos, + const Pos & errPos, const LinesOfCode & loc) { // previous line of code. @@ -196,7 +149,7 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").h * * @return true if a position was printed. */ -static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { +static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr & pos) { bool hasPos = pos && *pos; if (hasPos) { oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":"; diff --git a/src/libutil/error.hh b/src/libutil/error.hh index baffca128..234cbe1f6 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,45 +63,15 @@ struct LinesOfCode { std::optional nextLineOfCode; }; -/** - * An abstract type that represents a location in a source file. - */ -struct AbstractPos -{ - uint32_t line = 0; - uint32_t column = 0; - - /** - * An AbstractPos may be a "null object", representing an unknown position. - * - * Return true if this position is known. - */ - inline operator bool() const { return line != 0; }; - - /** - * Return the contents of the source file. - */ - virtual std::optional getSource() const - { return std::nullopt; }; - - virtual void print(std::ostream & out) const = 0; - - std::optional getCodeLines() const; - - virtual ~AbstractPos() = default; - - inline auto operator<=>(const AbstractPos& rhs) const = default; -}; - -std::ostream & operator << (std::ostream & str, const AbstractPos & pos); +struct Pos; void printCodeLines(std::ostream & out, const std::string & prefix, - const AbstractPos & errPos, + const Pos & errPos, const LinesOfCode & loc); struct Trace { - std::shared_ptr pos; + std::shared_ptr pos; hintformat hint; bool frame; }; @@ -114,7 +84,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; hintformat msg; - std::shared_ptr errPos; + std::shared_ptr errPos; std::list traces; Suggestions suggestions; @@ -185,12 +155,12 @@ public: } template - void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... args) + void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... 
args) { addTrace(std::move(e), hintfmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); + void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } diff --git a/src/libutil/input-accessor.hh b/src/libutil/input-accessor.hh new file mode 100644 index 000000000..55b7c2f2f --- /dev/null +++ b/src/libutil/input-accessor.hh @@ -0,0 +1,27 @@ +#pragma once +///@file + +#include "source-accessor.hh" +#include "ref.hh" +#include "repair-flag.hh" + +namespace nix { + +MakeError(RestrictedPathError, Error); + +struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this +{ + std::optional fingerprint; + + /** + * Return the maximum last-modified time of the files in this + * tree, if available. + */ + virtual std::optional getLastModified() + { + return std::nullopt; + } + +}; + +} diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index 60b0865bf..183aee2dc 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -4,6 +4,8 @@ #include "terminal.hh" #include "util.hh" #include "config.hh" +#include "source-path.hh" +#include "position.hh" #include #include @@ -136,13 +138,13 @@ Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type, logger.startActivity(id, lvl, type, s, fields, parent); } -void to_json(nlohmann::json & json, std::shared_ptr pos) +void to_json(nlohmann::json & json, std::shared_ptr pos) { if (pos) { json["line"] = pos->line; json["column"] = pos->column; std::ostringstream str; - pos->print(str); + pos->print(str, true); json["file"] = str.str(); } else { json["line"] = nullptr; diff --git a/src/libutil/position.cc b/src/libutil/position.cc new file mode 100644 index 000000000..b39a5a1d4 --- /dev/null +++ b/src/libutil/position.cc @@ -0,0 +1,112 @@ +#include "position.hh" + +namespace nix { + +Pos::Pos(const Pos * other) +{ + if (!other) { + return; + } + line = other->line; + column = other->column; + origin = std::move(other->origin); +} + +Pos::operator std::shared_ptr() const +{ + return std::make_shared(&*this); +} + +bool Pos::operator<(const Pos &rhs) const +{ + return std::forward_as_tuple(line, column, origin) + < std::forward_as_tuple(rhs.line, rhs.column, rhs.origin); +} + +std::optional Pos::getCodeLines() const +{ + if (line == 0) + return std::nullopt; + + if (auto source = getSource()) { + + std::istringstream iss(*source); + // count the newlines. + int count = 0; + std::string curLine; + int pl = line - 1; + + LinesOfCode loc; + + do { + std::getline(iss, curLine); + ++count; + if (count < pl) + ; + else if (count == pl) { + loc.prevLineOfCode = curLine; + } else if (count == pl + 1) { + loc.errLineOfCode = curLine; + } else if (count == pl + 2) { + loc.nextLineOfCode = curLine; + break; + } + + if (!iss.good()) + break; + } while (true); + + return loc; + } + + return std::nullopt; +} + + +std::optional Pos::getSource() const +{ + return std::visit(overloaded { + [](const std::monostate &) -> std::optional { + return std::nullopt; + }, + [](const Pos::Stdin & s) -> std::optional { + // Get rid of the null terminators added by the parser. + return std::string(s.source->c_str()); + }, + [](const Pos::String & s) -> std::optional { + // Get rid of the null terminators added by the parser. 
+ return std::string(s.source->c_str()); + }, + [](const SourcePath & path) -> std::optional { + try { + return path.readFile(); + } catch (Error &) { + return std::nullopt; + } + } + }, origin); +} + +void Pos::print(std::ostream & out, bool showOrigin) const +{ + if (showOrigin) { + std::visit(overloaded { + [&](const std::monostate &) { out << "«none»"; }, + [&](const Pos::Stdin &) { out << "«stdin»"; }, + [&](const Pos::String & s) { out << "«string»"; }, + [&](const SourcePath & path) { out << path; } + }, origin); + out << ":"; + } + out << line; + if (column > 0) + out << ":" << column; +} + +std::ostream & operator<<(std::ostream & str, const Pos & pos) +{ + pos.print(str, true); + return str; +} + +} diff --git a/src/libutil/position.hh b/src/libutil/position.hh new file mode 100644 index 000000000..a184997ed --- /dev/null +++ b/src/libutil/position.hh @@ -0,0 +1,74 @@ +#pragma once +/** + * @file + * + * @brief Pos and AbstractPos + */ + +#include +#include + +#include "source-path.hh" + +namespace nix { + +/** + * A position and an origin for that position (like a source file). + */ +struct Pos +{ + uint32_t line = 0; + uint32_t column = 0; + + struct Stdin { + ref source; + bool operator==(const Stdin & rhs) const + { return *source == *rhs.source; } + bool operator!=(const Stdin & rhs) const + { return *source != *rhs.source; } + bool operator<(const Stdin & rhs) const + { return *source < *rhs.source; } + }; + struct String { + ref source; + bool operator==(const String & rhs) const + { return *source == *rhs.source; } + bool operator!=(const String & rhs) const + { return *source != *rhs.source; } + bool operator<(const String & rhs) const + { return *source < *rhs.source; } + }; + + typedef std::variant Origin; + + Origin origin = std::monostate(); + + Pos() { } + Pos(uint32_t line, uint32_t column, Origin origin) + : line(line), column(column), origin(origin) { } + Pos(Pos & other) = default; + Pos(const Pos & other) = default; + Pos(Pos && other) = default; + Pos(const Pos * other); + + explicit operator bool() const { return line > 0; } + + operator std::shared_ptr() const; + + /** + * Return the contents of the source file. 
+ */ + std::optional getSource() const; + + void print(std::ostream & out, bool showOrigin) const; + + std::optional getCodeLines() const; + + bool operator==(const Pos & rhs) const = default; + bool operator!=(const Pos & rhs) const = default; + bool operator<(const Pos & rhs) const; +}; + +std::ostream & operator<<(std::ostream & str, const Pos & pos); + +} diff --git a/src/libutil/ref.hh b/src/libutil/ref.hh index af5f8304c..5d0c3696d 100644 --- a/src/libutil/ref.hh +++ b/src/libutil/ref.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include #include #include #include diff --git a/src/libstore/repair-flag.hh b/src/libutil/repair-flag.hh similarity index 100% rename from src/libstore/repair-flag.hh rename to src/libutil/repair-flag.hh diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc new file mode 100644 index 000000000..d85b0b7fe --- /dev/null +++ b/src/libutil/source-path.cc @@ -0,0 +1,105 @@ +#include "source-path.hh" + +namespace nix { + +std::string_view SourcePath::baseName() const +{ return path.baseName().value_or("source"); } + +SourcePath SourcePath::parent() const +{ + auto p = path.parent(); + assert(p); + return {accessor, std::move(*p)}; +} + +std::string SourcePath::readFile() const +{ return accessor->readFile(path); } + +bool SourcePath::pathExists() const +{ return accessor->pathExists(path); } + +InputAccessor::Stat SourcePath::lstat() const +{ return accessor->lstat(path); } + +std::optional SourcePath::maybeLstat() const +{ return accessor->maybeLstat(path); } + +InputAccessor::DirEntries SourcePath::readDirectory() const +{ return accessor->readDirectory(path); } + +std::string SourcePath::readLink() const +{ return accessor->readLink(path); } + +void SourcePath::dumpPath( + Sink & sink, + PathFilter & filter) const +{ return accessor->dumpPath(path, sink, filter); } + +std::optional SourcePath::getPhysicalPath() const +{ return accessor->getPhysicalPath(path); } + +std::string SourcePath::to_string() const +{ return accessor->showPath(path); } + +SourcePath SourcePath::operator+(const CanonPath & x) const +{ return {accessor, path + x}; } + +SourcePath SourcePath::operator+(std::string_view c) const +{ return {accessor, path + c}; } + +bool SourcePath::operator==(const SourcePath & x) const +{ + return std::tie(*accessor, path) == std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator!=(const SourcePath & x) const +{ + return std::tie(*accessor, path) != std::tie(*x.accessor, x.path); +} + +bool SourcePath::operator<(const SourcePath & x) const +{ + return std::tie(*accessor, path) < std::tie(*x.accessor, x.path); +} + +SourcePath SourcePath::resolveSymlinks() const +{ + auto res = SourcePath(accessor); + + int linksAllowed = 1024; + + std::list todo; + for (auto & c : path) + todo.push_back(std::string(c)); + + while (!todo.empty()) { + auto c = *todo.begin(); + todo.pop_front(); + if (c == "" || c == ".") + ; + else if (c == "..") + res.path.pop(); + else { + res.path.push(c); + if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) { + if (!linksAllowed--) + throw Error("infinite symlink recursion in path '%s'", path); + auto target = res.readLink(); + res.path.pop(); + if (hasPrefix(target, "/")) + res.path = CanonPath::root; + todo.splice(todo.begin(), tokenizeString>(target, "/")); + } + } + } + + return res; +} + +std::ostream & operator<<(std::ostream & str, const SourcePath & path) +{ + str << path.to_string(); + return str; +} + +} diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh new file mode 
100644 index 000000000..bf5625ca5 --- /dev/null +++ b/src/libutil/source-path.hh @@ -0,0 +1,114 @@ +#pragma once +/** + * @file + * + * @brief SourcePath + */ + +#include "ref.hh" +#include "canon-path.hh" +#include "input-accessor.hh" + +namespace nix { + +/** + * An abstraction for accessing source files during + * evaluation. Currently, it's just a wrapper around `CanonPath` that + * accesses files in the regular filesystem, but in the future it will + * support fetching files in other ways. + */ +struct SourcePath +{ + ref accessor; + CanonPath path; + + SourcePath(ref accessor, CanonPath path = CanonPath::root) + : accessor(std::move(accessor)) + , path(std::move(path)) + { } + + std::string_view baseName() const; + + /** + * Construct the parent of this `SourcePath`. Aborts if `this` + * denotes the root. + */ + SourcePath parent() const; + + /** + * If this `SourcePath` denotes a regular file (not a symlink), + * return its contents; otherwise throw an error. + */ + std::string readFile() const; + + /** + * Return whether this `SourcePath` denotes a file (of any type) + * that exists + */ + bool pathExists() const; + + /** + * Return stats about this `SourcePath`, or throw an exception if + * it doesn't exist. + */ + InputAccessor::Stat lstat() const; + + /** + * Return stats about this `SourcePath`, or std::nullopt if it + * doesn't exist. + */ + std::optional maybeLstat() const; + + /** + * If this `SourcePath` denotes a directory (not a symlink), + * return its directory entries; otherwise throw an error. + */ + InputAccessor::DirEntries readDirectory() const; + + /** + * If this `SourcePath` denotes a symlink, return its target; + * otherwise throw an error. + */ + std::string readLink() const; + + /** + * Dump this `SourcePath` to `sink` as a NAR archive. + */ + void dumpPath( + Sink & sink, + PathFilter & filter = defaultPathFilter) const; + + /** + * Return the location of this path in the "real" filesystem, if + * it has a physical location. + */ + std::optional getPhysicalPath() const; + + std::string to_string() const; + + /** + * Append a `CanonPath` to this path. + */ + SourcePath operator + (const CanonPath & x) const; + + /** + * Append a single component `c` to this path. `c` must not + * contain a slash. A slash is implicitly added between this path + * and `c`. + */ + SourcePath operator+(std::string_view c) const; + bool operator==(const SourcePath & x) const; + bool operator!=(const SourcePath & x) const; + bool operator<(const SourcePath & x) const; + + /** + * Resolve any symlinks in this `SourcePath` (including its + * parents). The result is a `SourcePath` in which no element is a + * symlink. + */ + SourcePath resolveSymlinks() const; +}; + +std::ostream & operator << (std::ostream & str, const SourcePath & path); + +} From bbd0a959e17e988ef1ec2fadd1ab5bb66420fd6f Mon Sep 17 00:00:00 2001 From: Weijia Wang <9713184+wegank@users.noreply.github.com> Date: Mon, 8 Jan 2024 20:37:42 +0100 Subject: [PATCH 064/307] Make lowdown optional Co-authored-by: John Ericson --- configure.ac | 16 +++++++++++++++- package.nix | 5 +++++ src/libcmd/markdown.cc | 6 ++++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index b97e25bbd..929750932 100644 --- a/configure.ac +++ b/configure.ac @@ -374,7 +374,21 @@ PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9]) # Look for lowdown library. 
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"]) +AC_ARG_ENABLE([markdown], AS_HELP_STRING([--enable-markdown], [Enable Markdown rendering in the Nix binary (requires lowdown) [default=auto]]), + enable_markdown=$enableval, enable_markdown=auto) +AS_CASE(["$enable_markdown"], + [yes | auto], [ + PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [ + CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS" + have_lowdown=1 + AC_DEFINE(HAVE_LOWDOWN, 1, [Whether lowdown is available and should be used for Markdown rendering.]) + ], [ + AS_IF([test "x$enable_markdown" == "xyes"], [AC_MSG_ERROR([--enable-markdown was specified, but lowdown was not found.])]) + ]) + ], + [no], [have_lowdown=], + [AC_MSG_ERROR([--enable-markdown must be one of: yes, no, auto])]) +AC_SUBST(HAVE_LOWDOWN, [$have_lowdown]) # Look for libgit2. diff --git a/package.nix b/package.nix index dfebdb0e4..dd37809d0 100644 --- a/package.nix +++ b/package.nix @@ -68,6 +68,9 @@ # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled +# Whether to enable Markdown rendering in the Nix binary. +, enableMarkdown ? !stdenv.hostPlatform.isWindows + # Whether to compile `rl-next.md`, the release notes for the next # not-yet-released version of Nix in the manul, from the individual # change log entries in the directory. @@ -213,6 +216,7 @@ in { xz ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ editline + ] ++ lib.optionals enableMarkdown [ lowdown ] ++ lib.optionals buildUnitTests [ gtest @@ -269,6 +273,7 @@ in { (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") + (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" diff --git a/src/libcmd/markdown.cc b/src/libcmd/markdown.cc index 8b3bbc1b5..a4e3c5a77 100644 --- a/src/libcmd/markdown.cc +++ b/src/libcmd/markdown.cc @@ -4,12 +4,15 @@ #include "terminal.hh" #include +#if HAVE_LOWDOWN #include +#endif namespace nix { std::string renderMarkdownToTerminal(std::string_view markdown) { +#if HAVE_LOWDOWN int windowWidth = getWindowSize().second; struct lowdown_opts opts { @@ -48,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown) throw Error("allocation error while rendering Markdown"); return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI()); +#else + return std::string(markdown); +#endif } } From 29eb5ed1dc54ec45ab23b50ef259d2b370e8b1e8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 8 Jan 2024 14:47:42 -0500 Subject: [PATCH 065/307] Fix Internal API docs Because of source filtering, they were empty. Fixes #9694 --- package.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/package.nix b/package.nix index dfebdb0e4..aad022b32 100644 --- a/package.nix +++ b/package.nix @@ -164,6 +164,10 @@ in { ./doc/manual ] ++ lib.optionals enableInternalAPIDocs [ ./doc/internal-api + # Source might not be compiled, but still must be available + # for Doxygen to gather comments. 
+ ./src + ./tests/unit ] ++ lib.optionals buildUnitTests [ ./tests/unit ] ++ lib.optionals doInstallCheck [ From 3d9e0c60e4cf135943d2c72a990ff2c0e3e311a7 Mon Sep 17 00:00:00 2001 From: DavHau Date: Tue, 9 Jan 2024 18:36:09 +0700 Subject: [PATCH 066/307] gitignore: add result-* --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d9f9d949b..a47b195bb 100644 --- a/.gitignore +++ b/.gitignore @@ -141,6 +141,7 @@ compile_commands.json nix-rust/target result +result-* # IDE .vscode/ From 2cea88dbc8c277d7403e6dd65f482fd2eb931e52 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 15:56:37 -0400 Subject: [PATCH 067/307] Improve build sytem support for readline instead of editline Changes: - CPP variable is now `USE_READLINE` not `READLINE` - `configure.ac` supports with new CLI flag - `package.nix` supports with new configuration option - `flake.nix` CIs this (along with no markdown) Remove old Ubuntu 16.04 stop-gap too, as that is now quite old. Motivation: - editline does not build for Windows, but readline *should*. (I am still working on this in Nixpkgs at this time, however. So there will be a follow-up Nix PR removing the windows-only skipping of the readline library once I am done.) - Per https://salsa.debian.org/debian/nix/-/blob/master/debian/rules?ref_type=heads#L27 and #2551, Debian builds Nix with readline. Now we better support and CI that build configuration. This is picking up where #2551 left off, ensuring we test a few more things not merely have CPP for them. Co-authored-by: Weijia Wang <9713184+wegank@users.noreply.github.com> --- configure.ac | 29 ++++++++++++++++++----------- flake.nix | 9 +++++++++ package.nix | 12 +++++++++++- src/libcmd/repl.cc | 6 +++--- 4 files changed, 41 insertions(+), 15 deletions(-) diff --git a/configure.ac b/configure.ac index fdbb2629e..2594544ab 100644 --- a/configure.ac +++ b/configure.ac @@ -251,17 +251,25 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX # Look for libcurl, a required dependency. PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"]) -# Look for editline, a required dependency. +# Look for editline or readline, a required dependency. # The the libeditline.pc file was added only in libeditline >= 1.15.2, # see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607, -# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for -# editline.h when the pkg-config approach fails. -PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [ - AC_CHECK_HEADERS([editline.h], [true], - [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])]) - AC_SEARCH_LIBS([readline read_history], [editline], [], - [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])]) -]) +# Older versions are no longer supported. 
+AC_ARG_WITH( + [readline-flavor], + AS_HELP_STRING([--with-readline-flavor],[Which library to use for nice line editting with the Nix language REPL" [default=editline]]), + [readline_flavor=$withval], + [readline_flavor=editline]) +AS_CASE(["$readline_flavor"], + [editline], [ + readline_flavor_pc=libeditline + ], + [readline], [ + readline_flavor_pc=readline + AC_DEFINE([USE_READLINE], [1], [Use readline instead of editline]) + ], + [AC_MSG_ERROR([bad value "$readline_flavor" for --with-readline-flavor, must be one of: editline, readline])]) +PKG_CHECK_MODULES([EDITLINE], [$readline_flavor_pc], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"]) # Look for libsodium. PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"]) @@ -387,8 +395,7 @@ AS_CASE(["$enable_markdown"], ]) ], [no], [have_lowdown=], - [AC_MSG_ERROR([--enable-markdown must be one of: yes, no, auto])]) -AC_SUBST(HAVE_LOWDOWN, [$have_lowdown]) + [AC_MSG_ERROR([bad value "$enable_markdown" for --enable-markdown, must be one of: yes, no, auto])]) # Look for libgit2. diff --git a/flake.nix b/flake.nix index 32354a88f..c7aee7541 100644 --- a/flake.nix +++ b/flake.nix @@ -230,6 +230,15 @@ } ); + # Toggles some settings for better coverage. Windows needs these + # library combinations, and Debian build Nix with GNU readline too. + buildReadlineNoMarkdown = forAllSystems (system: + self.packages.${system}.nix.override { + enableMarkdown = false; + readlineFlavor = "readline"; + } + ); + # Perl bindings for various platforms. perlBindings = forAllSystems (system: nixpkgsFor.${system}.native.nix.perl-bindings); diff --git a/package.nix b/package.nix index 727f5e646..d0b9fc3f3 100644 --- a/package.nix +++ b/package.nix @@ -13,6 +13,7 @@ , changelog-d , curl , editline +, readline , fileset , flex , git @@ -71,6 +72,14 @@ # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? !stdenv.hostPlatform.isWindows +# Which interactive line editor library to use for Nix's repl. +# +# Currently supported choices are: +# +# - editline (default) +# - readline +, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" + # Whether to compile `rl-next.md`, the release notes for the next # not-yet-released version of Nix in the manul, from the individual # change log entries in the directory. 
@@ -219,7 +228,7 @@ in { sqlite xz ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [ - editline + ({ inherit readline editline; }.${readlineFlavor}) ] ++ lib.optionals enableMarkdown [ lowdown ] ++ lib.optionals buildUnitTests [ @@ -279,6 +288,7 @@ in { (lib.enableFeature enableManual "doc-gen") (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") + (lib.withFeatureAs true "readline-flavor" readlineFlavor) ] ++ lib.optionals (!forDevShell) [ "--sysconfdir=/etc" ] ++ lib.optionals installUnitTests [ diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index dea91ba63..9c92f2b6e 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -5,7 +5,7 @@ #include -#ifdef READLINE +#ifdef USE_READLINE #include #include #else @@ -249,14 +249,14 @@ void NixRepl::mainLoop() } catch (SysError & e) { logWarning(e.info()); } -#ifndef READLINE +#ifndef USE_READLINE el_hist_size = 1000; #endif read_history(historyFile.c_str()); auto oldRepl = curRepl; curRepl = this; Finally restoreRepl([&] { curRepl = oldRepl; }); -#ifndef READLINE +#ifndef USE_READLINE rl_set_complete_func(completionCallback); rl_set_list_possib_func(listPossibleCallback); #endif From 0c3ce237549d43de52e897f12e6d6c8ee59ac227 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 17:31:40 -0500 Subject: [PATCH 068/307] Improve the build without GC We don't just want to pass `--enable-gc=no`; we also want to make sure boehmgc is not a dependency. Creating a nix-level configuration option to do both, and then using that for the CI job, is more robust. --- flake.nix | 4 +++- package.nix | 12 ++++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index c7aee7541..49f214e72 100644 --- a/flake.nix +++ b/flake.nix @@ -220,7 +220,9 @@ buildCross = forAllCrossSystems (crossSystem: lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}")); - buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];})); + buildNoGc = forAllSystems (system: + self.packages.${system}.nix.override { enableGC = false; } + ); buildNoTests = forAllSystems (system: self.packages.${system}.nix.override { diff --git a/package.nix b/package.nix index d0b9fc3f3..71ee80e33 100644 --- a/package.nix +++ b/package.nix @@ -69,6 +69,14 @@ # Whether to build the regular manual , enableManual ? __forDefaults.canRunInstalled +# Whether to use garbage collection for the Nix language evaluator. +# +# If it is disabled, we just leak memory, but this is not as bad as it +# sounds so long as evaluation just takes places within short-lived +# processes. (When the process exits, the memory is reclaimed; it is +# only leaked *within* the process.) +, enableGC ? true + # Whether to enable Markdown rendering in the Nix binary. , enableMarkdown ? 
!stdenv.hostPlatform.isWindows @@ -245,9 +253,8 @@ in { ; propagatedBuildInputs = [ - boehmgc nlohmann_json - ]; + ] ++ lib.optional enableGC boehmgc; dontBuild = !attrs.doBuild; doCheck = attrs.doCheck; @@ -286,6 +293,7 @@ in { (lib.enableFeature doInstallCheck "functional-tests") (lib.enableFeature enableInternalAPIDocs "internal-api-docs") (lib.enableFeature enableManual "doc-gen") + (lib.enableFeature enableGC "gc") (lib.enableFeature enableMarkdown "markdown") (lib.enableFeature installUnitTests "install-unit-tests") (lib.withFeatureAs true "readline-flavor" readlineFlavor) From 57dc4fc878bc74dfb38cd9d435a85c560b43cebb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 2 Sep 2023 16:21:44 -0400 Subject: [PATCH 069/307] Make more expressive `HOST_*` macro system --- mk/lib.mk | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/mk/lib.mk b/mk/lib.mk index 3d503364f..a5a067e48 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -14,20 +14,34 @@ install-tests-groups := ifdef HOST_OS HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) + ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) + HOST_MINGW = 1 + HOST_WINDOWS = 1 + endif ifeq ($(HOST_KERNEL), cygwin) HOST_CYGWIN = 1 + HOST_WINDOWS = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) HOST_DARWIN = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) HOST_FREEBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) + HOST_NETBSD = 1 + HOST_UNIX = 1 endif ifeq ($(HOST_KERNEL), linux) HOST_LINUX = 1 + HOST_UNIX = 1 endif ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) HOST_SOLARIS = 1 + HOST_UNIX = 1 endif endif From f9e5eb5f0a61555d24fe85b8edccf49f0b176152 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 10 Jan 2024 20:26:02 -0500 Subject: [PATCH 070/307] Make indentation in makesfiles consistent Tab (as required) for rules, two spaces for `if`...`endif`. --- src/libexpr/local.mk | 2 +- src/libstore/local.mk | 12 ++++++------ src/libutil/local.mk | 2 +- tests/functional/local.mk | 10 +++++----- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index ed6bc761a..b60936a0e 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -18,7 +18,7 @@ libexpr_LIBS = libutil libstore libfetchers libexpr_LDFLAGS += -lboost_context -pthread ifdef HOST_LINUX - libexpr_LDFLAGS += -ldl + libexpr_LDFLAGS += -ldl endif # The dependency on libgc must be propagated (i.e. 
meaning that diff --git a/src/libstore/local.mk b/src/libstore/local.mk index 675976314..f447e190d 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -16,15 +16,15 @@ endif $(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox))) ifeq ($(ENABLE_S3), 1) - libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp + libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp endif ifdef HOST_SOLARIS - libstore_LDFLAGS += -lsocket + libstore_LDFLAGS += -lsocket endif ifeq ($(HAVE_SECCOMP), 1) - libstore_LDFLAGS += $(LIBSECCOMP_LIBS) + libstore_LDFLAGS += $(LIBSECCOMP_LIBS) endif libstore_CXXFLAGS += \ @@ -48,9 +48,9 @@ $(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell) $(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp @mv $@.tmp $@ else -ifneq ($(sandbox_shell),) -libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" -endif + ifneq ($(sandbox_shell),) + libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\"" + endif endif $(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 0fdebaf5c..6e3d6d052 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -17,5 +17,5 @@ $(foreach i, $(wildcard $(d)/signature/*.hh), \ ifeq ($(HAVE_LIBCPUID), 1) - libutil_LDFLAGS += -lcpuid + libutil_LDFLAGS += -lcpuid endif diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 192e275e3..25fcbcfe7 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -129,15 +129,15 @@ nix_tests = \ impure-env.sh ifeq ($(HAVE_LIBCPUID), 1) - nix_tests += compute-levels.sh + nix_tests += compute-levels.sh endif ifeq ($(ENABLE_BUILD), yes) - nix_tests += test-libstoreconsumer.sh + nix_tests += test-libstoreconsumer.sh - ifeq ($(BUILD_SHARED_LIBS), 1) - nix_tests += plugins.sh - endif + ifeq ($(BUILD_SHARED_LIBS), 1) + nix_tests += plugins.sh + endif endif $(d)/test-libstoreconsumer.sh.test $(d)/test-libstoreconsumer.sh.test-debug: \ From 423484ad26850046c101affc9ff6ac4c36ccda06 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 12:29:37 -0500 Subject: [PATCH 071/307] Only link with `-pthread` on Unix We don't want this with MinGW. --- mk/libraries.mk | 6 ++++++ src/libcmd/local.mk | 2 +- src/libexpr/local.mk | 2 +- src/libfetchers/local.mk | 2 +- src/libstore/local.mk | 2 +- src/libutil/local.mk | 2 +- src/nix/local.mk | 2 +- tests/functional/test-libstoreconsumer/local.mk | 2 +- tests/unit/libexpr-support/local.mk | 2 +- tests/unit/libstore-support/local.mk | 2 +- tests/unit/libutil-support/local.mk | 2 +- 11 files changed, 16 insertions(+), 10 deletions(-) diff --git a/mk/libraries.mk b/mk/libraries.mk index 1bc73d7f7..515a481f6 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -10,6 +10,12 @@ else endif endif +ifdef HOST_UNIX + THREAD_LDFLAGS = -pthread +else + THREAD_LDFLAGS = +endif + # Build a library with symbolic name $(1). 
The library is defined by # various variables prefixed by ‘$(1)_’: # diff --git a/src/libcmd/local.mk b/src/libcmd/local.mk index afd35af08..abb7459a7 100644 --- a/src/libcmd/local.mk +++ b/src/libcmd/local.mk @@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc) libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers -libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread +libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS) libcmd_LIBS = libstore libutil libexpr libmain libfetchers diff --git a/src/libexpr/local.mk b/src/libexpr/local.mk index b60936a0e..0c3e36750 100644 --- a/src/libexpr/local.mk +++ b/src/libexpr/local.mk @@ -16,7 +16,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib libexpr_LIBS = libutil libstore libfetchers -libexpr_LDFLAGS += -lboost_context -pthread +libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS) ifdef HOST_LINUX libexpr_LDFLAGS += -ldl endif diff --git a/src/libfetchers/local.mk b/src/libfetchers/local.mk index 266e7a211..e54db4937 100644 --- a/src/libfetchers/local.mk +++ b/src/libfetchers/local.mk @@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc) libfetchers_CXXFLAGS += -I src/libutil -I src/libstore -libfetchers_LDFLAGS += -pthread $(LIBGIT2_LIBS) -larchive +libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive libfetchers_LIBS = libutil libstore diff --git a/src/libstore/local.mk b/src/libstore/local.mk index f447e190d..f86643849 100644 --- a/src/libstore/local.mk +++ b/src/libstore/local.mk @@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc) libstore_LIBS = libutil -libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) -pthread +libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS) ifdef HOST_LINUX libstore_LDFLAGS += -ldl endif diff --git a/src/libutil/local.mk b/src/libutil/local.mk index 6e3d6d052..200026c1e 100644 --- a/src/libutil/local.mk +++ b/src/libutil/local.mk @@ -8,7 +8,7 @@ libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc) libutil_CXXFLAGS += -I src/libutil -libutil_LDFLAGS += -pthread $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context +libutil_LDFLAGS += $(THREAD_LDFLAGS) $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context $(foreach i, $(wildcard $(d)/args/*.hh), \ $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644))) diff --git a/src/nix/local.mk b/src/nix/local.mk index a21aa705f..1d6f560d6 100644 --- a/src/nix/local.mk +++ b/src/nix/local.mk @@ -18,7 +18,7 @@ nix_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/libexpr nix_LIBS = libexpr libmain libfetchers libstore libutil libcmd -nix_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) +nix_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) $(foreach name, \ nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \ diff --git a/tests/functional/test-libstoreconsumer/local.mk b/tests/functional/test-libstoreconsumer/local.mk index edc140723..a1825c405 100644 --- a/tests/functional/test-libstoreconsumer/local.mk +++ b/tests/functional/test-libstoreconsumer/local.mk @@ -12,4 +12,4 @@ test-libstoreconsumer_CXXFLAGS += -I src/libutil -I src/libstore test-libstoreconsumer_LIBS = 
libstore libutil -test-libstoreconsumer_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) +test-libstoreconsumer_LDFLAGS = $(THREAD_LDFLAGS) $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) $(LOWDOWN_LIBS) diff --git a/tests/unit/libexpr-support/local.mk b/tests/unit/libexpr-support/local.mk index 12a76206a..0501de33c 100644 --- a/tests/unit/libexpr-support/local.mk +++ b/tests/unit/libexpr-support/local.mk @@ -20,4 +20,4 @@ libexpr-test-support_LIBS = \ libstore-test-support libutil-test-support \ libexpr libstore libutil -libexpr-test-support_LDFLAGS := -pthread -lrapidcheck +libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libstore-support/local.mk b/tests/unit/libstore-support/local.mk index ff075c96a..56dedd825 100644 --- a/tests/unit/libstore-support/local.mk +++ b/tests/unit/libstore-support/local.mk @@ -18,4 +18,4 @@ libstore-test-support_LIBS = \ libutil-test-support \ libstore libutil -libstore-test-support_LDFLAGS := -pthread -lrapidcheck +libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck diff --git a/tests/unit/libutil-support/local.mk b/tests/unit/libutil-support/local.mk index 2ee2cdb6c..5f7835c9f 100644 --- a/tests/unit/libutil-support/local.mk +++ b/tests/unit/libutil-support/local.mk @@ -16,4 +16,4 @@ libutil-test-support_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES) libutil-test-support_LIBS = libutil -libutil-test-support_LDFLAGS := -pthread -lrapidcheck +libutil-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck From 813c113b9ecfab917e6d43ac5831a4f207fecaf0 Mon Sep 17 00:00:00 2001 From: DavHau Date: Fri, 29 Dec 2023 15:15:16 +0700 Subject: [PATCH 072/307] initialize test suite for git fetchers solves #9388 This utilizes nixos vm tests to allow: - writing tests for fetchTree and fetchGit involving actual networking. - writing small independent test cases by automating local and remote repository setup per test case. This adds: - a gitea module setting up a gitea server - a setup module that simplifies writing test cases by automating the repo setup. - a simple git http test case Other improvements: For all nixos tests, add capability of overriding the nix version to test against. This should make it easier to prevent regressions. If a new test is added it can simply be ran against any older nix version without having to backport the test. 
For example, for running the container tests against nix 2.12.0: `nix build "$(nix eval --raw .#hydraJobs.tests.containers --impure --apply 't: (t.forNix "2.12.0").drvPath')^*" -L` --- tests/nixos/default.nix | 30 ++++-- tests/nixos/fetch-git/default.nix | 60 +++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 63 ++++++++++++ tests/nixos/fetch-git/testsupport/setup.nix | 106 ++++++++++++++++++++ 4 files changed, 252 insertions(+), 7 deletions(-) create mode 100644 tests/nixos/fetch-git/default.nix create mode 100644 tests/nixos/fetch-git/testsupport/gitea.nix create mode 100644 tests/nixos/fetch-git/testsupport/setup.nix diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 4459aa664..1a42f886c 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -5,14 +5,28 @@ let nixos-lib = import (nixpkgs + "/nixos/lib") { }; # https://nixos.org/manual/nixos/unstable/index.html#sec-calling-nixos-tests - runNixOSTestFor = system: test: nixos-lib.runTest { - imports = [ test ]; - hostPkgs = nixpkgsFor.${system}.native; - defaults = { - nixpkgs.pkgs = nixpkgsFor.${system}.native; + runNixOSTestFor = system: test: + (nixos-lib.runTest { + imports = [ test ]; + hostPkgs = nixpkgsFor.${system}.native; + defaults = { + nixpkgs.pkgs = nixpkgsFor.${system}.native; + nix.checkAllErrors = false; + }; + _module.args.nixpkgs = nixpkgs; + _module.args.system = system; + }) + // { + # allow running tests against older nix versions via `nix eval --apply` + # Example: + # nix build "$(nix eval --raw --impure .#hydraJobs.tests.fetch-git --apply 't: (t.forNix "2.19.2").drvPath')^*" + forNix = nixVersion: runNixOSTestFor system { + imports = [test]; + defaults.nixpkgs.overlays = [(curr: prev: { + nix = (builtins.getFlake "nix/${nixVersion}").packages.${system}.nix; + })]; + }; }; - _module.args.nixpkgs = nixpkgs; - }; in @@ -40,4 +54,6 @@ in setuid = lib.genAttrs ["i686-linux" "x86_64-linux"] (system: runNixOSTestFor system ./setuid.nix); + + fetch-git = runNixOSTestFor "x86_64-linux" ./fetch-git; } diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix new file mode 100644 index 000000000..abeefb0e3 --- /dev/null +++ b/tests/nixos/fetch-git/default.nix @@ -0,0 +1,60 @@ +{ lib, config, ... 
}: +{ + name = "fetch-git"; + + imports = [ + ./testsupport/gitea.nix + ]; + + /* + Test cases + The following is set up automatically for each test case: + - a repo with the {name} is created on the gitea server + - a repo with the {name} is created on the client + - the client repo is configured to push to the server repo + Python variables: + - repo.path: the path to the directory of the client repo + - repo.git: the git command with the client repo as the working directory + - repo.remote: the url to the server repo + */ + testCases = [ + { + name = "simple-http"; + description = "can fetch a git repo via http"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched + ''; + } + ]; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix new file mode 100644 index 000000000..d2bd622e4 --- /dev/null +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -0,0 +1,63 @@ +{ lib, nixpkgs, system, ... }: { + imports = [ + ../testsupport/setup.nix + ]; + nodes = { + gitea = { pkgs, ... }: { + services.gitea.enable = true; + services.gitea.settings.service.DISABLE_REGISTRATION = true; + services.gitea.settings.log.LEVEL = "Info"; + services.gitea.settings.database.LOG_SQL = false; + networking.firewall.allowedTCPPorts = [ 3000 ]; + environment.systemPackages = [ pkgs.gitea ]; + + # TODO: remove this after updating to nixos-23.11 + nixpkgs.pkgs = lib.mkForce (import nixpkgs { + inherit system; + config.permittedInsecurePackages = [ + "gitea-1.19.4" + ]; + }); + }; + client = { pkgs, ... }: { + environment.systemPackages = [ pkgs.git ]; + }; + }; + defaults = { pkgs, ... 
}: { + environment.systemPackages = [ pkgs.jq ]; + }; + + setupScript = '' + import shlex + + gitea.wait_for_unit("gitea.service") + + gitea_admin = "test" + gitea_admin_password = "test123test" + + gitea.succeed(f""" + gitea --version >&2 + su -l gitea -c 'GITEA_WORK_DIR=/var/lib/gitea gitea admin user create \ + --username {gitea_admin} --password {gitea_admin_password} --email test@client' + """) + + client.wait_for_unit("multi-user.target") + gitea.wait_for_open_port(3000) + + gitea_admin_token = gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/users/test/tokens \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( '{"name":"token", "scopes":["all"]}' )} \ + | jq -r '.sha1' + """).strip() + + client.succeed(f""" + echo "http://{gitea_admin}:{gitea_admin_password}@gitea:3000" >~/.git-credentials-admin + git config --global credential.helper 'store --file ~/.git-credentials-admin' + git config --global user.email "test@client" + git config --global user.name "Test User" + git config --global gc.autodetach 0 + git config --global gc.auto 0 + """) + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix new file mode 100644 index 000000000..f2fbd737d --- /dev/null +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -0,0 +1,106 @@ +{ lib, config, extendModules, ... }: +let + inherit (lib) + mkOption + types + ; + + indent = lib.replaceStrings ["\n"] ["\n "]; + + execTestCase = testCase: '' + + ### TEST ${testCase.name}: ${testCase.description} ### + + with subtest("${testCase.description}"): + repo = Repo("${testCase.name}") + ${indent testCase.script} + ''; +in +{ + + options = { + setupScript = mkOption { + type = types.lines; + description = '' + Python code that runs before the main test. + + Variables defined by this code will be available in the test. + ''; + default = ""; + }; + testCases = mkOption { + description = '' + The test cases. See `testScript`. + ''; + type = types.listOf (types.submodule { + options.name = mkOption { + type = types.str; + description = '' + The name of the test case. + + A repository with that name will be set up on the gitea server and locally. + + This name can also be used to execute only a single test case via: + `nix build .#hydraJobs.fetch-git.{test-case-name}` + ''; + }; + options.description = mkOption { + type = types.str; + description = '' + A description of the test case. + ''; + }; + options.script = mkOption { + type = types.lines; + description = '' + Python code that runs the test. + + Variables defined by `setupScript` will be available here. + ''; + }; + }); + }; + }; + + config = { + nodes.client = { + environment.variables = { + _NIX_FORCE_HTTP = "1"; + }; + nix.settings.experimental-features = ["nix-command" "flakes"]; + }; + setupScript = '' + class Repo: + """ + A class to create a git repository on the gitea server and locally. 
+ """ + def __init__(self, name): + self.name = name + self.path = "/tmp/repos/" + name + self.remote = "http://gitea:3000/test/" + name + self.git = f"git -C {self.path}" + self.create() + + def create(self): + gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + """) + client.succeed(f""" + mkdir -p {self.path} \ + && git init -b main {self.path} \ + && {self.git} remote add origin {self.remote} + """) + ''; + testScript = '' + start_all(); + + ${config.setupScript} + + ### SETUP COMPLETE ### + + ${lib.concatStringsSep "\n" (map execTestCase config.testCases)} + ''; + }; +} From 0f95330fde6ebad95b5a50be3aacb0a1d11363af Mon Sep 17 00:00:00 2001 From: DavHau Date: Thu, 11 Jan 2024 14:41:35 +0700 Subject: [PATCH 073/307] fetchGit: add simple test for ssh fetching Also move tests to separate files which are auto-imported. This should allow people adding tests concurrently without introducing merge conflicts --- tests/nixos/fetch-git/default.nix | 52 +++++-------------- .../test-cases/http-simple/default.nix | 37 +++++++++++++ .../test-cases/ssh-simple/default.nix | 41 +++++++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 41 ++++++++++++++- tests/nixos/fetch-git/testsupport/setup.nix | 10 +++- 5 files changed, 138 insertions(+), 43 deletions(-) create mode 100644 tests/nixos/fetch-git/test-cases/http-simple/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/ssh-simple/default.nix diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix index abeefb0e3..254fecaaf 100644 --- a/tests/nixos/fetch-git/default.nix +++ b/tests/nixos/fetch-git/default.nix @@ -8,53 +8,25 @@ /* Test cases + + Test cases are automatically imported from ./test-cases/{name} + The following is set up automatically for each test case: - a repo with the {name} is created on the gitea server - a repo with the {name} is created on the client - the client repo is configured to push to the server repo + Python variables: - repo.path: the path to the directory of the client repo - repo.git: the git command with the client repo as the working directory - repo.remote: the url to the server repo */ - testCases = [ - { - name = "simple-http"; - description = "can fetch a git repo via http"; - script = '' - # add a file to the repo - client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ - && {repo.git} commit -m 'commit1' - """) - - # memoize the revision - rev1 = client.succeed(f""" - {repo.git} rev-parse HEAD - """).strip() - - # push to the server - client.succeed(f""" - {repo.git} push origin main - """) - - # fetch the repo via nix - fetched1 = client.succeed(f""" - nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" - """) - - # check if the committed file is there - client.succeed(f""" - test -f {fetched1}/thailand - """) - - # check if the revision is the same - rev1_fetched = client.succeed(f""" - nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" - """).strip() - assert rev1 == rev1_fetched - ''; - } - ]; + testCases = + map + (testCaseName: {...}: { + imports = ["${./test-cases}/${testCaseName}"]; + # ensures tests are named like their directories they are defined in + name = testCaseName; + }) + (lib.attrNames (builtins.readDir ./test-cases)); } diff --git 
a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix new file mode 100644 index 000000000..1bd5bbba2 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -0,0 +1,37 @@ +{ + description = "can fetch a git repo via http"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix new file mode 100644 index 000000000..0e4494ae0 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -0,0 +1,41 @@ +{ + description = "can fetch a git repo via ssh"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin-ssh main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit "{repo.remote_ssh}").outPath + ' + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit "{repo.remote_ssh}").rev + ' + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index d2bd622e4..2ea23961e 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -1,4 +1,18 @@ -{ lib, nixpkgs, system, ... }: { +{ lib, nixpkgs, system, pkgs, ... 
}: let + clientPrivateKey = pkgs.writeText "id_ed25519" '' + -----BEGIN OPENSSH PRIVATE KEY----- + b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW + QyNTUxOQAAACBbeWvHh/AWGWI6EIc1xlSihyXtacNQ9KeztlW/VUy8wQAAAJAwVQ5VMFUO + VQAAAAtzc2gtZWQyNTUxOQAAACBbeWvHh/AWGWI6EIc1xlSihyXtacNQ9KeztlW/VUy8wQ + AAAEB7lbfkkdkJoE+4TKHPdPQWBKLSx+J54Eg8DaTr+3KoSlt5a8eH8BYZYjoQhzXGVKKH + Je1pw1D0p7O2Vb9VTLzBAAAACGJmb0BtaW5pAQIDBAU= + -----END OPENSSH PRIVATE KEY----- + ''; + + clientPublicKey = + "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFt5a8eH8BYZYjoQhzXGVKKHJe1pw1D0p7O2Vb9VTLzB"; + +in { imports = [ ../testsupport/setup.nix ]; @@ -8,8 +22,11 @@ services.gitea.settings.service.DISABLE_REGISTRATION = true; services.gitea.settings.log.LEVEL = "Info"; services.gitea.settings.database.LOG_SQL = false; + services.openssh.enable = true; networking.firewall.allowedTCPPorts = [ 3000 ]; - environment.systemPackages = [ pkgs.gitea ]; + environment.systemPackages = [ pkgs.git pkgs.gitea ]; + + users.users.root.openssh.authorizedKeys.keys = [clientPublicKey]; # TODO: remove this after updating to nixos-23.11 nixpkgs.pkgs = lib.mkForce (import nixpkgs { @@ -59,5 +76,25 @@ git config --global gc.autodetach 0 git config --global gc.auto 0 """) + + # add client's private key to ~/.ssh + client.succeed(""" + mkdir -p ~/.ssh + chmod 700 ~/.ssh + cat ${clientPrivateKey} >~/.ssh/id_ed25519 + chmod 600 ~/.ssh/id_ed25519 + """) + + client.succeed(""" + echo "Host gitea" >>~/.ssh/config + echo " StrictHostKeyChecking no" >>~/.ssh/config + echo " UserKnownHostsFile /dev/null" >>~/.ssh/config + echo " User root" >>~/.ssh/config + """) + + # ensure ssh from client to gitea works + client.succeed(""" + ssh root@gitea true + """) ''; } diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index f2fbd737d..2f74f51f8 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -78,19 +78,27 @@ in self.name = name self.path = "/tmp/repos/" + name self.remote = "http://gitea:3000/test/" + name + self.remote_ssh = "ssh://gitea/root/" + name self.git = f"git -C {self.path}" self.create() def create(self): + # create ssh remote repo + gitea.succeed(f""" + git init --bare -b main /root/{self.name} + """) + # create http remote repo gitea.succeed(f""" curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} """) + # setup git remotes on client client.succeed(f""" mkdir -p {self.path} \ && git init -b main {self.path} \ - && {self.git} remote add origin {self.remote} + && {self.git} remote add origin {self.remote} \ + && {self.git} remote add origin-ssh root@gitea:{self.name} """) ''; testScript = '' From a923444a9462cd2fabcd816fa2e9cb54c485f13f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 13:55:28 -0500 Subject: [PATCH 074/307] packages.nix: Fix `installUnitTests` condition The intent was we install the tests when we can *not* run them. Instead, we were installing them when we can. --- package.nix | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/package.nix b/package.nix index 71ee80e33..37410dc2f 100644 --- a/package.nix +++ b/package.nix @@ -100,7 +100,7 @@ # Whether to install unit tests. This is useful when cross compiling # since we cannot run them natively during the build, but can do so # later. 
-, installUnitTests ? __forDefaults.canRunInstalled +, installUnitTests ? doBuild && !__forDefaults.canExecuteHost # For running the functional tests against a pre-built Nix. Probably # want to use in conjunction with `doBuild = false;`. @@ -113,7 +113,8 @@ # Not a real argument, just the only way to approximate let-binding some # stuff for argument defaults. , __forDefaults ? { - canRunInstalled = doBuild && stdenv.buildPlatform.canExecute stdenv.hostPlatform; + canExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform; + canRunInstalled = doBuild && __forDefaults.canExecuteHost; } }: From c9125603a535f82cc9a53f47533f0a3d174e7008 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 10:34:19 -0800 Subject: [PATCH 075/307] Unindent `print.hh` declarations --- src/libexpr/print.hh | 82 +++++++++++++++++++++++--------------------- 1 file changed, 42 insertions(+), 40 deletions(-) diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index 3b72ae201..abf830864 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -10,45 +10,47 @@ #include namespace nix { - /** - * Print a string as a Nix string literal. - * - * Quotes and fairly minimal escaping are added. - * - * @param s The logical string - */ - std::ostream & printLiteralString(std::ostream & o, std::string_view s); - inline std::ostream & printLiteralString(std::ostream & o, const char * s) { - return printLiteralString(o, std::string_view(s)); - } - inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) { - return printLiteralString(o, std::string_view(s)); - } - /** Print `true` or `false`. */ - std::ostream & printLiteralBool(std::ostream & o, bool b); - - /** - * Print a string as an attribute name in the Nix expression language syntax. - * - * Prints a quoted string if necessary. - */ - std::ostream & printAttributeName(std::ostream & o, std::string_view s); - - /** - * Returns `true' is a string is a reserved keyword which requires quotation - * when printing attribute set field names. - */ - bool isReservedKeyword(const std::string_view str); - - /** - * Print a string as an identifier in the Nix expression language syntax. - * - * FIXME: "identifier" is ambiguous. Identifiers do not have a single - * textual representation. They can be used in variable references, - * let bindings, left-hand sides or attribute names in a select - * expression, or something else entirely, like JSON. Use one of the - * `print*` functions instead. - */ - std::ostream & printIdentifier(std::ostream & o, std::string_view s); +/** + * Print a string as a Nix string literal. + * + * Quotes and fairly minimal escaping are added. + * + * @param s The logical string + */ +std::ostream & printLiteralString(std::ostream & o, std::string_view s); +inline std::ostream & printLiteralString(std::ostream & o, const char * s) { + return printLiteralString(o, std::string_view(s)); +} +inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) { + return printLiteralString(o, std::string_view(s)); +} + +/** Print `true` or `false`. */ +std::ostream & printLiteralBool(std::ostream & o, bool b); + +/** + * Print a string as an attribute name in the Nix expression language syntax. + * + * Prints a quoted string if necessary. + */ +std::ostream & printAttributeName(std::ostream & o, std::string_view s); + +/** + * Returns `true' is a string is a reserved keyword which requires quotation + * when printing attribute set field names. 
+ */ +bool isReservedKeyword(const std::string_view str); + +/** + * Print a string as an identifier in the Nix expression language syntax. + * + * FIXME: "identifier" is ambiguous. Identifiers do not have a single + * textual representation. They can be used in variable references, + * let bindings, left-hand sides or attribute names in a select + * expression, or something else entirely, like JSON. Use one of the + * `print*` functions instead. + */ +std::ostream & printIdentifier(std::ostream & o, std::string_view s); + } From 0fa08b451682fb3311fe58112ff05c4fe5bee3a4 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 12 Dec 2023 13:57:36 -0800 Subject: [PATCH 076/307] Unify and refactor value printing Previously, there were two mostly-identical value printers -- one in `libexpr/eval.cc` (which didn't force values) and one in `libcmd/repl.cc` (which did force values and also printed ANSI color codes). This PR unifies both of these printers into `print.cc` and provides a `PrintOptions` struct for controlling the output, which allows for toggling whether values are forced, whether repeated values are tracked, and whether ANSI color codes are displayed. Additionally, `PrintOptions` allows tuning the maximum number of attributes, list items, and bytes in a string that will be displayed; this makes it ideal for contexts where printing too much output (e.g. all of Nixpkgs) is distracting. (As requested by @roberth in https://github.com/NixOS/nix/pull/9554#issuecomment-1845095735) Please read the tests for example output. Future work: - It would be nice to provide this function as a builtin, perhaps `builtins.toStringDebug` -- a printing function that never fails would be useful when debugging Nix code. - It would be nice to support customizing `PrintOptions` members on the command line, e.g. `--option to-string-max-attrs 1000`. 
--- src/libcmd/repl.cc | 158 +---- src/libexpr/eval.cc | 126 +--- src/libexpr/eval.hh | 4 +- src/libexpr/print-options.hh | 52 ++ src/libexpr/print.cc | 416 +++++++++++- src/libexpr/print.hh | 6 + src/libexpr/value.hh | 17 +- src/libutil/english.cc | 18 + src/libutil/english.hh | 18 + src/nix-env/user-env.cc | 5 +- src/nix-instantiate/nix-instantiate.cc | 2 +- tests/functional/lang/eval-okay-print.err.exp | 2 +- tests/functional/lang/eval-okay-print.exp | 2 +- .../lang/eval-okay-repeated-empty-attrs.exp | 1 + .../lang/eval-okay-repeated-empty-attrs.nix | 2 + .../lang/eval-okay-repeated-empty-list.exp | 1 + .../lang/eval-okay-repeated-empty-list.nix | 1 + tests/unit/libexpr/value/print.cc | 621 +++++++++++++++++- 18 files changed, 1174 insertions(+), 278 deletions(-) create mode 100644 src/libexpr/print-options.hh create mode 100644 src/libutil/english.cc create mode 100644 src/libutil/english.hh create mode 100644 tests/functional/lang/eval-okay-repeated-empty-attrs.exp create mode 100644 tests/functional/lang/eval-okay-repeated-empty-attrs.nix create mode 100644 tests/functional/lang/eval-okay-repeated-empty-list.exp create mode 100644 tests/functional/lang/eval-okay-repeated-empty-list.nix diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 7a1df74ef..72e3559df 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -93,9 +93,17 @@ struct NixRepl void evalString(std::string s, Value & v); void loadDebugTraceEnv(DebugTrace & dt); - typedef std::set ValuesSeen; - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth); - std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen); + void printValue(std::ostream & str, + Value & v, + unsigned int maxDepth = std::numeric_limits::max()) + { + ::nix::printValue(*state, str, v, PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true, + .maxDepth = maxDepth + }); + } }; std::string removeWhitespace(std::string s) @@ -708,7 +716,8 @@ bool NixRepl::processLine(std::string line) else if (command == ":p" || command == ":print") { Value v; evalString(arg, v); - printValue(std::cout, v, 1000000000) << std::endl; + printValue(std::cout, v); + std::cout << std::endl; } else if (command == ":q" || command == ":quit") { @@ -770,7 +779,8 @@ bool NixRepl::processLine(std::string line) } else { Value v; evalString(line, v); - printValue(std::cout, v, 1) << std::endl; + printValue(std::cout, v, 1); + std::cout << std::endl; } } @@ -892,144 +902,6 @@ void NixRepl::evalString(std::string s, Value & v) } -std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth) -{ - ValuesSeen seen; - return printValue(str, v, maxDepth, seen); -} - - - - -// FIXME: lot of cut&paste from Nix's eval.cc. -std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen) -{ - str.flush(); - checkInterrupt(); - - state->forceValue(v, v.determinePos(noPos)); - - switch (v.type()) { - - case nInt: - str << ANSI_CYAN << v.integer << ANSI_NORMAL; - break; - - case nBool: - str << ANSI_CYAN; - printLiteralBool(str, v.boolean); - str << ANSI_NORMAL; - break; - - case nString: - str << ANSI_WARNING; - printLiteralString(str, v.string_view()); - str << ANSI_NORMAL; - break; - - case nPath: - str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping? 
- break; - - case nNull: - str << ANSI_CYAN "null" ANSI_NORMAL; - break; - - case nAttrs: { - seen.insert(&v); - - bool isDrv = state->isDerivation(v); - - if (isDrv) { - str << "«derivation "; - Bindings::iterator i = v.attrs->find(state->sDrvPath); - NixStringContext context; - if (i != v.attrs->end()) - str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation")); - else - str << "???"; - str << "»"; - } - - else if (maxDepth > 0) { - str << "{ "; - - typedef std::map Sorted; - Sorted sorted; - for (auto & i : *v.attrs) - sorted.emplace(state->symbols[i.name], i.value); - - for (auto & i : sorted) { - printAttributeName(str, i.first); - str << " = "; - if (seen.count(i.second)) - str << "«repeated»"; - else - try { - printValue(str, *i.second, maxDepth - 1, seen); - } catch (AssertionError & e) { - str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL; - } - str << "; "; - } - - str << "}"; - } else - str << "{ ... }"; - - break; - } - - case nList: - seen.insert(&v); - - str << "[ "; - if (maxDepth > 0) - for (auto elem : v.listItems()) { - if (seen.count(elem)) - str << "«repeated»"; - else - try { - printValue(str, *elem, maxDepth - 1, seen); - } catch (AssertionError & e) { - str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL; - } - str << " "; - } - else - str << "... "; - str << "]"; - break; - - case nFunction: - if (v.isLambda()) { - std::ostringstream s; - s << state->positions[v.lambda.fun->pos]; - str << ANSI_BLUE "«lambda @ " << filterANSIEscapes(s.str()) << "»" ANSI_NORMAL; - } else if (v.isPrimOp()) { - str << ANSI_MAGENTA "«primop»" ANSI_NORMAL; - } else if (v.isPrimOpApp()) { - str << ANSI_BLUE "«primop-app»" ANSI_NORMAL; - } else { - abort(); - } - break; - - case nFloat: - str << v.fpoint; - break; - - case nThunk: - case nExternal: - default: - str << ANSI_RED "«unknown»" ANSI_NORMAL; - break; - } - - return str; -} - - std::unique_ptr AbstractNixRepl::create( const SearchPath & searchPath, nix::ref store, ref state, std::function getValues) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index d408f1adc..0659a2173 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -105,117 +105,23 @@ RootValue allocRootValue(Value * v) #endif } -void Value::print(const SymbolTable &symbols, std::ostream &str, - std::set *seen, int depth) const - -{ - checkInterrupt(); - - if (depth <= 0) { - str << "«too deep»"; - return; - } - switch (internalType) { - case tInt: - str << integer; - break; - case tBool: - printLiteralBool(str, boolean); - break; - case tString: - printLiteralString(str, string_view()); - break; - case tPath: - str << path().to_string(); // !!! escaping? 
- break; - case tNull: - str << "null"; - break; - case tAttrs: { - if (seen && !attrs->empty() && !seen->insert(attrs).second) - str << "«repeated»"; - else { - str << "{ "; - for (auto & i : attrs->lexicographicOrder(symbols)) { - str << symbols[i->name] << " = "; - i->value->print(symbols, str, seen, depth - 1); - str << "; "; - } - str << "}"; - } - break; - } - case tList1: - case tList2: - case tListN: - if (seen && listSize() && !seen->insert(listElems()).second) - str << "«repeated»"; - else { - str << "[ "; - for (auto v2 : listItems()) { - if (v2) - v2->print(symbols, str, seen, depth - 1); - else - str << "(nullptr)"; - str << " "; - } - str << "]"; - } - break; - case tThunk: - case tApp: - if (!isBlackhole()) { - str << ""; - } else { - // Although we know for sure that it's going to be an infinite recursion - // when this value is accessed _in the current context_, it's likely - // that the user will misinterpret a simpler «infinite recursion» output - // as a definitive statement about the value, while in fact it may be - // a valid value after `builtins.trace` and perhaps some other steps - // have completed. - str << "«potential infinite recursion»"; - } - break; - case tLambda: - str << ""; - break; - case tPrimOp: - str << ""; - break; - case tPrimOpApp: - str << ""; - break; - case tExternal: - str << *external; - break; - case tFloat: - str << fpoint; - break; - default: - printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType); - abort(); - } -} - -void Value::print(const SymbolTable &symbols, std::ostream &str, - bool showRepeated, int depth) const { - std::set seen; - print(symbols, str, showRepeated ? nullptr : &seen, depth); -} - // Pretty print types for assertion errors std::ostream & operator << (std::ostream & os, const ValueType t) { os << showType(t); return os; } -std::string printValue(const EvalState & state, const Value & v) +std::string printValue(EvalState & state, Value & v) { std::ostringstream out; - v.print(state.symbols, out); + v.print(state, out); return out.str(); } +void Value::print(EvalState & state, std::ostream & str, PrintOptions options) +{ + printValue(state, str, *this, options); +} const Value * getPrimOp(const Value &v) { const Value * primOp = &v; @@ -710,6 +616,26 @@ void PrimOp::check() } +std::ostream & operator<<(std::ostream & output, PrimOp & primOp) +{ + output << "primop " << primOp.name; + return output; +} + + +PrimOp * Value::primOpAppPrimOp() const +{ + Value * left = primOpApp.left; + while (left && !left->isPrimOp()) { + left = left->primOpApp.left; + } + + if (!left) + return nullptr; + return left->primOp; +} + + void Value::mkPrimOp(PrimOp * p) { p->check(); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 5e0f1886d..9141156b1 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -84,6 +84,8 @@ struct PrimOp void check(); }; +std::ostream & operator<<(std::ostream & output, PrimOp & primOp); + /** * Info about a constant */ @@ -127,7 +129,7 @@ std::unique_ptr mapStaticEnvBindings(const SymbolTable & st, const Stati void copyContext(const Value & v, NixStringContext & context); -std::string printValue(const EvalState & state, const Value & v); +std::string printValue(EvalState & state, Value & v); std::ostream & operator << (std::ostream & os, const ValueType t); diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh new file mode 100644 index 000000000..11ff9ae87 --- /dev/null +++ b/src/libexpr/print-options.hh @@ -0,0 +1,52 @@ +#pragma 
once +/** + * @file + * @brief Options for printing Nix values. + */ + +#include + +namespace nix { + +/** + * Options for printing Nix values. + */ +struct PrintOptions +{ + /** + * If true, output ANSI color sequences. + */ + bool ansiColors = false; + /** + * If true, force values. + */ + bool force = false; + /** + * If true and `force` is set, print derivations as + * `«derivation /nix/store/...»` instead of as attribute sets. + */ + bool derivationPaths = false; + /** + * If true, track which values have been printed and skip them on + * subsequent encounters. Useful for self-referential values. + */ + bool trackRepeated = true; + /** + * Maximum depth to evaluate to. + */ + size_t maxDepth = std::numeric_limits::max(); + /** + * Maximum number of attributes in an attribute set to print. + */ + size_t maxAttrs = std::numeric_limits::max(); + /** + * Maximum number of list items to print. + */ + size_t maxListItems = std::numeric_limits::max(); + /** + * Maximum string length to print. + */ + size_t maxStringLength = std::numeric_limits::max(); +}; + +} diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 53ba70bdd..db26ed4c2 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -1,24 +1,66 @@ -#include "print.hh" +#include #include +#include "print.hh" +#include "ansicolor.hh" +#include "signals.hh" +#include "store-api.hh" +#include "terminal.hh" +#include "english.hh" + namespace nix { -std::ostream & -printLiteralString(std::ostream & str, const std::string_view string) +void printElided( + std::ostream & output, + unsigned int value, + const std::string_view single, + const std::string_view plural, + bool ansiColors) { + if (ansiColors) + output << ANSI_FAINT; + output << " «"; + pluralize(output, value, single, plural); + output << " elided»"; + if (ansiColors) + output << ANSI_NORMAL; +} + + +std::ostream & +printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors) +{ + size_t charsPrinted = 0; + if (ansiColors) + str << ANSI_MAGENTA; str << "\""; for (auto i = string.begin(); i != string.end(); ++i) { + if (charsPrinted >= maxLength) { + str << "\""; + printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors); + return str; + } + if (*i == '\"' || *i == '\\') str << "\\" << *i; else if (*i == '\n') str << "\\n"; else if (*i == '\r') str << "\\r"; else if (*i == '\t') str << "\\t"; else if (*i == '$' && *(i+1) == '{') str << "\\" << *i; else str << *i; + charsPrinted++; } str << "\""; + if (ansiColors) + str << ANSI_NORMAL; return str; } +std::ostream & +printLiteralString(std::ostream & str, const std::string_view string) +{ + return printLiteralString(str, string, std::numeric_limits::max(), false); +} + std::ostream & printLiteralBool(std::ostream & str, bool boolean) { @@ -90,5 +132,373 @@ printAttributeName(std::ostream & str, std::string_view name) { return str; } +bool isImportantAttrName(const std::string& attrName) +{ + return attrName == "type" || attrName == "_type"; +} + +typedef std::pair AttrPair; + +struct ImportantFirstAttrNameCmp +{ + + bool operator()(const AttrPair& lhs, const AttrPair& rhs) const + { + auto lhsIsImportant = isImportantAttrName(lhs.first); + auto rhsIsImportant = isImportantAttrName(rhs.first); + return std::forward_as_tuple(!lhsIsImportant, lhs.first) + < std::forward_as_tuple(!rhsIsImportant, rhs.first); + } +}; + +typedef std::set ValuesSeen; + +class Printer +{ +private: + std::ostream & output; + EvalState & state; + PrintOptions options; + 
std::optional seen; + + void printRepeated() + { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«repeated»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printNullptr() + { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«nullptr»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printElided(unsigned int value, const std::string_view single, const std::string_view plural) + { + ::nix::printElided(output, value, single, plural, options.ansiColors); + } + + void printInt(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + output << v.integer; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printFloat(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + output << v.fpoint; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printBool(Value & v) + { + if (options.ansiColors) + output << ANSI_CYAN; + printLiteralBool(output, v.boolean); + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printString(Value & v) + { + printLiteralString(output, v.string_view(), options.maxStringLength, options.ansiColors); + } + + void printPath(Value & v) + { + if (options.ansiColors) + output << ANSI_GREEN; + output << v.path().to_string(); // !!! escaping? + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printNull() + { + if (options.ansiColors) + output << ANSI_CYAN; + output << "null"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printDerivation(Value & v) + { + try { + Bindings::iterator i = v.attrs->find(state.sDrvPath); + NixStringContext context; + std::string storePath; + if (i != v.attrs->end()) + storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation")); + + if (options.ansiColors) + output << ANSI_GREEN; + output << "«derivation"; + if (!storePath.empty()) { + output << " " << storePath; + } + output << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } catch (BaseError & e) { + printError_(e); + } + } + + void printAttrs(Value & v, size_t depth) + { + if (seen && !seen->insert(&v).second) { + printRepeated(); + return; + } + + if (options.force && options.derivationPaths && state.isDerivation(v)) { + printDerivation(v); + } else if (depth < options.maxDepth) { + output << "{ "; + + std::vector> sorted; + for (auto & i : *v.attrs) + sorted.emplace_back(std::pair(state.symbols[i.name], i.value)); + + if (options.maxAttrs == std::numeric_limits::max()) + std::sort(sorted.begin(), sorted.end()); + else + std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); + + size_t attrsPrinted = 0; + for (auto & i : sorted) { + if (attrsPrinted >= options.maxAttrs) { + printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); + break; + } + + printAttributeName(output, i.first); + output << " = "; + print(*i.second, depth + 1); + output << "; "; + attrsPrinted++; + } + + output << "}"; + } else + output << "{ ... 
}"; + } + + void printList(Value & v, size_t depth) + { + if (seen && v.listSize() && !seen->insert(&v).second) { + printRepeated(); + return; + } + + output << "[ "; + if (depth < options.maxDepth) { + size_t listItemsPrinted = 0; + for (auto elem : v.listItems()) { + if (listItemsPrinted >= options.maxListItems) { + printElided(v.listSize() - listItemsPrinted, "item", "items"); + break; + } + + if (elem) { + print(*elem, depth + 1); + } else { + printNullptr(); + } + output << " "; + listItemsPrinted++; + } + } + else + output << "... "; + output << "]"; + } + + void printFunction(Value & v) + { + if (options.ansiColors) + output << ANSI_BLUE; + output << "«"; + + if (v.isLambda()) { + output << "lambda"; + if (v.lambda.fun) { + if (v.lambda.fun->name) { + output << " " << state.symbols[v.lambda.fun->name]; + } + + std::ostringstream s; + s << state.positions[v.lambda.fun->pos]; + output << " @ " << filterANSIEscapes(s.str()); + } + } else if (v.isPrimOp()) { + if (v.primOp) + output << *v.primOp; + else + output << "primop"; + } else if (v.isPrimOpApp()) { + output << "partially applied "; + auto primOp = v.primOpAppPrimOp(); + if (primOp) + output << *primOp; + else + output << "primop"; + } else { + abort(); + } + + output << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printThunk(Value & v) + { + if (v.isBlackhole()) { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + if (options.ansiColors) + output << ANSI_RED; + output << "«potential infinite recursion»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } else if (v.isThunk() || v.isApp()) { + if (options.ansiColors) + output << ANSI_MAGENTA; + output << "«thunk»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } else { + abort(); + } + } + + void printExternal(Value & v) + { + v.external->print(output); + } + + void printUnknown() + { + if (options.ansiColors) + output << ANSI_RED; + output << "«unknown»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void printError_(BaseError & e) + { + if (options.ansiColors) + output << ANSI_RED; + output << "«" << e.msg() << "»"; + if (options.ansiColors) + output << ANSI_NORMAL; + } + + void print(Value & v, size_t depth) + { + output.flush(); + checkInterrupt(); + + if (options.force) { + try { + state.forceValue(v, v.determinePos(noPos)); + } catch (BaseError & e) { + printError_(e); + return; + } + } + + switch (v.type()) { + + case nInt: + printInt(v); + break; + + case nFloat: + printFloat(v); + break; + + case nBool: + printBool(v); + break; + + case nString: + printString(v); + break; + + case nPath: + printPath(v); + break; + + case nNull: + printNull(); + break; + + case nAttrs: + printAttrs(v, depth); + break; + + case nList: + printList(v, depth); + break; + + case nFunction: + printFunction(v); + break; + + case nThunk: + printThunk(v); + break; + + case nExternal: + printExternal(v); + break; + + default: + printUnknown(); + break; + } + } + +public: + Printer(std::ostream & output, EvalState & state, PrintOptions options) + : output(output), state(state), options(options) { } + + void print(Value & v) + { + if (options.trackRepeated) { + seen.emplace(); + } else { + seen.reset(); + } + + ValuesSeen 
seen; + print(v, 0); + } +}; + +void printValue(EvalState & state, std::ostream & output, Value & v, PrintOptions options) +{ + Printer(output, state, options).print(v); +} } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index abf830864..40207d777 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,6 +9,9 @@ #include +#include "eval.hh" +#include "print-options.hh" + namespace nix { /** @@ -16,6 +19,7 @@ namespace nix { * * Quotes and fairly minimal escaping are added. * + * @param o The output stream to print to * @param s The logical string */ std::ostream & printLiteralString(std::ostream & o, std::string_view s); @@ -53,4 +57,6 @@ bool isReservedKeyword(const std::string_view str); */ std::ostream & printIdentifier(std::ostream & o, std::string_view s); +void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {}); + } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index c65b336b0..214d52271 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ -9,6 +9,7 @@ #include "value/context.hh" #include "input-accessor.hh" #include "source-path.hh" +#include "print-options.hh" #if HAVE_BOEHMGC #include @@ -70,7 +71,7 @@ struct Pos; class StorePath; class EvalState; class XMLWriter; - +class Printer; typedef int64_t NixInt; typedef double NixFloat; @@ -82,6 +83,7 @@ typedef double NixFloat; class ExternalValueBase { friend std::ostream & operator << (std::ostream & str, const ExternalValueBase & v); + friend class Printer; protected: /** * Print out the value @@ -139,11 +141,9 @@ private: friend std::string showType(const Value & v); - void print(const SymbolTable &symbols, std::ostream &str, std::set *seen, int depth) const; - public: - void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const; + void print(EvalState &state, std::ostream &str, PrintOptions options = PrintOptions {}); // Functions needed to distinguish the type // These should be removed eventually, by putting the functionality that's @@ -364,10 +364,15 @@ public: inline void mkPrimOpApp(Value * l, Value * r) { internalType = tPrimOpApp; - app.left = l; - app.right = r; + primOpApp.left = l; + primOpApp.right = r; } + /** + * For a `tPrimOpApp` value, get the original `PrimOp` value. + */ + PrimOp * primOpAppPrimOp() const; + inline void mkExternal(ExternalValueBase * e) { clearValue(); diff --git a/src/libutil/english.cc b/src/libutil/english.cc new file mode 100644 index 000000000..8c93c9156 --- /dev/null +++ b/src/libutil/english.cc @@ -0,0 +1,18 @@ +#include "english.hh" + +namespace nix { + +std::ostream & pluralize( + std::ostream & output, + unsigned int count, + const std::string_view single, + const std::string_view plural) +{ + if (count == 1) + output << "1 " << single; + else + output << count << " " << plural; + return output; +} + +} diff --git a/src/libutil/english.hh b/src/libutil/english.hh new file mode 100644 index 000000000..9c6c93571 --- /dev/null +++ b/src/libutil/english.hh @@ -0,0 +1,18 @@ +#pragma once + +#include + +namespace nix { + +/** + * Pluralize a given value. + * + * If `count == 1`, prints `1 {single}` to `output`, otherwise prints `{count} {plural}`. 
+ */ +std::ostream & pluralize( + std::ostream & output, + unsigned int count, + const std::string_view single, + const std::string_view plural); + +} diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 9f4d063d2..3d07cab7a 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -8,6 +8,8 @@ #include "eval.hh" #include "eval-inline.hh" #include "profiles.hh" +#include "print-ambiguous.hh" +#include namespace nix { @@ -106,7 +108,8 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, environment. */ auto manifestFile = ({ std::ostringstream str; - manifest.print(state.symbols, str, true); + std::set seen; + printAmbiguous(manifest, state.symbols, str, &seen, std::numeric_limits::max()); // TODO with C++20 we can use str.view() instead and avoid copy. std::string str2 = str.str(); StringSource source { str2 }; diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index ab590b3a6..9b36dccc6 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -56,7 +56,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } else { if (strict) state.forceValueDeep(vRes); - vRes.print(state.symbols, std::cout); + vRes.print(state, std::cout); std::cout << std::endl; } } else { diff --git a/tests/functional/lang/eval-okay-print.err.exp b/tests/functional/lang/eval-okay-print.err.exp index 3fc99be3e..80aa17c6e 100644 --- a/tests/functional/lang/eval-okay-print.err.exp +++ b/tests/functional/lang/eval-okay-print.err.exp @@ -1 +1 @@ -trace: [ ] +trace: [ «thunk» ] diff --git a/tests/functional/lang/eval-okay-print.exp b/tests/functional/lang/eval-okay-print.exp index 0d960fb70..aa1b2379e 100644 --- a/tests/functional/lang/eval-okay-print.exp +++ b/tests/functional/lang/eval-okay-print.exp @@ -1 +1 @@ -[ null [ [ «repeated» ] ] ] +[ null «primop toString» «partially applied primop deepSeq» «lambda @ /pwd/lang/eval-okay-print.nix:1:61» [ [ «repeated» ] ] ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-attrs.exp b/tests/functional/lang/eval-okay-repeated-empty-attrs.exp new file mode 100644 index 000000000..d21e6db6b --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-attrs.exp @@ -0,0 +1 @@ +[ { } { } ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-attrs.nix b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix new file mode 100644 index 000000000..030a3b85c --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-attrs.nix @@ -0,0 +1,2 @@ +# Tests that empty attribute sets are not printed as `«repeated»`. 
+[ {} {} ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-list.exp b/tests/functional/lang/eval-okay-repeated-empty-list.exp new file mode 100644 index 000000000..701fc7e20 --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-list.exp @@ -0,0 +1 @@ +[ [ ] [ ] ] diff --git a/tests/functional/lang/eval-okay-repeated-empty-list.nix b/tests/functional/lang/eval-okay-repeated-empty-list.nix new file mode 100644 index 000000000..376c51be8 --- /dev/null +++ b/tests/functional/lang/eval-okay-repeated-empty-list.nix @@ -0,0 +1 @@ +[ [] [] ] diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index a4f6fc014..98131112e 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -1,6 +1,7 @@ #include "tests/libexpr.hh" #include "value.hh" +#include "print.hh" namespace nix { @@ -12,7 +13,7 @@ struct ValuePrintingTests : LibExprTest void test(Value v, std::string_view expected, A... args) { std::stringstream out; - v.print(state.symbols, out, args...); + v.print(state, out, args...); ASSERT_EQ(out.str(), expected); } }; @@ -84,7 +85,7 @@ TEST_F(ValuePrintingTests, tList) vList.bigList.elems[1] = &vTwo; vList.bigList.size = 3; - test(vList, "[ 1 2 (nullptr) ]"); + test(vList, "[ 1 2 «nullptr» ]"); } TEST_F(ValuePrintingTests, vThunk) @@ -92,7 +93,7 @@ TEST_F(ValuePrintingTests, vThunk) Value vThunk; vThunk.mkThunk(nullptr, nullptr); - test(vThunk, ""); + test(vThunk, "«thunk»"); } TEST_F(ValuePrintingTests, vApp) @@ -100,32 +101,55 @@ TEST_F(ValuePrintingTests, vApp) Value vApp; vApp.mkApp(nullptr, nullptr); - test(vApp, ""); + test(vApp, "«thunk»"); } TEST_F(ValuePrintingTests, vLambda) { - Value vLambda; - vLambda.mkLambda(nullptr, nullptr); + Env env { + .up = nullptr, + .values = { } + }; + PosTable::Origin origin((std::monostate())); + auto posIdx = state.positions.add(origin, 1, 1); + auto body = ExprInt(0); + auto formals = Formals {}; - test(vLambda, ""); + ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); + + Value vLambda; + vLambda.mkLambda(&env, &eLambda); + + test(vLambda, "«lambda @ «none»:1:1»"); + + eLambda.setName(createSymbol("puppy")); + + test(vLambda, "«lambda puppy @ «none»:1:1»"); } TEST_F(ValuePrintingTests, vPrimOp) { Value vPrimOp; - PrimOp primOp{}; + PrimOp primOp{ + .name = "puppy" + }; vPrimOp.mkPrimOp(&primOp); - test(vPrimOp, ""); + test(vPrimOp, "«primop puppy»"); } TEST_F(ValuePrintingTests, vPrimOpApp) { - Value vPrimOpApp; - vPrimOpApp.mkPrimOpApp(nullptr, nullptr); + PrimOp primOp{ + .name = "puppy" + }; + Value vPrimOp; + vPrimOp.mkPrimOp(&primOp); - test(vPrimOpApp, ""); + Value vPrimOpApp; + vPrimOpApp.mkPrimOpApp(&vPrimOp, nullptr); + + test(vPrimOpApp, "«partially applied primop puppy»"); } TEST_F(ValuePrintingTests, vExternal) @@ -176,9 +200,14 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vTwo; vTwo.mkInt(2); + BindingsBuilder builderEmpty(state, state.allocBindings(0)); + Value vAttrsEmpty; + vAttrsEmpty.mkAttrs(builderEmpty.finish()); + BindingsBuilder builder(state, state.allocBindings(10)); builder.insert(state.symbols.create("one"), &vOne); builder.insert(state.symbols.create("two"), &vTwo); + builder.insert(state.symbols.create("nested"), &vAttrsEmpty); Value vAttrs; vAttrs.mkAttrs(builder.finish()); @@ -191,10 +220,10 @@ TEST_F(ValuePrintingTests, depthAttrs) Value vNested; vNested.mkAttrs(builder2.finish()); - test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1); - test(vNested, "{ nested = { one = «too deep»; two 
= «too deep»; }; one = 1; two = 2; }", false, 2); - test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3); - test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4); + test(vNested, "{ nested = { ... }; one = 1; two = 2; }", PrintOptions { .maxDepth = 1 }); + test(vNested, "{ nested = { nested = { ... }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 2 }); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 3 }); + test(vNested, "{ nested = { nested = { }; one = 1; two = 2; }; one = 1; two = 2; }", PrintOptions { .maxDepth = 4 }); } TEST_F(ValuePrintingTests, depthList) @@ -227,11 +256,561 @@ TEST_F(ValuePrintingTests, depthList) vList.bigList.elems[2] = &vNested; vList.bigList.size = 3; - test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1); - test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2); - test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4); - test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5); + test(vList, "[ 1 2 { ... } ]", PrintOptions { .maxDepth = 1 }); + test(vList, "[ 1 2 { nested = { ... }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 2 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 3 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 4 }); + test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", PrintOptions { .maxDepth = 5 }); +} + +struct StringPrintingTests : LibExprTest +{ + template + void test(std::string_view literal, std::string_view expected, unsigned int maxLength, A... args) + { + Value v; + v.mkString(literal); + + std::stringstream out; + printValue(state, out, v, PrintOptions { + .maxStringLength = maxLength + }); + ASSERT_EQ(out.str(), expected); + } +}; + +TEST_F(StringPrintingTests, maxLengthTruncation) +{ + test("abcdefghi", "\"abcdefghi\"", 10); + test("abcdefghij", "\"abcdefghij\"", 10); + test("abcdefghijk", "\"abcdefghij\" «1 byte elided»", 10); + test("abcdefghijkl", "\"abcdefghij\" «2 bytes elided»", 10); + test("abcdefghijklm", "\"abcdefghij\" «3 bytes elided»", 10); +} + +// Check that printing an attrset shows 'important' attributes like `type` +// first, but only reorder the attrs when we have a maxAttrs budget. 
+TEST_F(ValuePrintingTests, attrsTypeFirst) +{ + Value vType; + vType.mkString("puppy"); + + Value vApple; + vApple.mkString("apple"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("type"), &vType); + builder.insert(state.symbols.create("apple"), &vApple); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ type = \"puppy\"; apple = \"apple\"; }", + PrintOptions { + .maxAttrs = 100 + }); + + test(vAttrs, + "{ apple = \"apple\"; type = \"puppy\"; }", + PrintOptions { }); +} + +TEST_F(ValuePrintingTests, ansiColorsInt) +{ + Value v; + v.mkInt(10); + + test(v, + ANSI_CYAN "10" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsFloat) +{ + Value v; + v.mkFloat(1.6); + + test(v, + ANSI_CYAN "1.6" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsBool) +{ + Value v; + v.mkBool(true); + + test(v, + ANSI_CYAN "true" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsString) +{ + Value v; + v.mkString("puppy"); + + test(v, + ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsStringElided) +{ + Value v; + v.mkString("puppy"); + + test(v, + ANSI_MAGENTA "\"pup\"" ANSI_FAINT " «2 bytes elided»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .maxStringLength = 3 + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPath) +{ + Value v; + v.mkPath(state.rootPath(CanonPath("puppy"))); + + test(v, + ANSI_GREEN "/puppy" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsNull) +{ + Value v; + v.mkNull(); + + test(v, + ANSI_CYAN "null" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrs) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("one"), &vOne); + builder.insert(state.symbols.create("two"), &vTwo); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; two = " ANSI_CYAN "2" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsDerivation) +{ + Value vDerivation; + vDerivation.mkString("derivation"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.sType, &vDerivation); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + ANSI_GREEN "«derivation»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true + }); + + test(vAttrs, + "{ type = " ANSI_MAGENTA "\"derivation\"" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsError) +{ + Value throw_ = state.getBuiltin("throw"); + Value message; + message.mkString("uh oh!"); + Value vError; + vError.mkApp(&throw_, &message); + + test(vError, + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + }); +} + +TEST_F(ValuePrintingTests, ansiColorsDerivationError) +{ + Value throw_ = state.getBuiltin("throw"); + Value message; + message.mkString("uh oh!"); + Value vError; + 
vError.mkApp(&throw_, &message); + + Value vDerivation; + vDerivation.mkString("derivation"); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.sType, &vDerivation); + builder.insert(state.sDrvPath, &vError); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ drvPath = " + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + ANSI_NORMAL + "; type = " + ANSI_MAGENTA + "\"derivation\"" + ANSI_NORMAL + "; }", + PrintOptions { + .ansiColors = true, + .force = true + }); + + test(vAttrs, + ANSI_RED + "«" + ANSI_RED + "error:" + ANSI_NORMAL + "\n … while calling the '" + ANSI_MAGENTA + "throw" + ANSI_NORMAL + "' builtin\n\n " + ANSI_RED + "error:" + ANSI_NORMAL + " uh oh!»" + ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true, + .derivationPaths = true, + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAssert) +{ + ExprVar eFalse(state.symbols.create("false")); + eFalse.bindVars(state, state.staticBaseEnv); + ExprInt eInt(1); + + ExprAssert expr(noPos, &eFalse, &eInt); + + Value v; + state.mkThunk_(v, &expr); + + test(v, + ANSI_RED "«" ANSI_RED "error:" ANSI_NORMAL " assertion '" ANSI_MAGENTA "false" ANSI_NORMAL "' failed»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsList) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + Value vList; + state.mkList(vList, 5); + vList.bigList.elems[0] = &vOne; + vList.bigList.elems[1] = &vTwo; + vList.bigList.size = 3; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_CYAN "2" ANSI_NORMAL " " ANSI_MAGENTA "«nullptr»" ANSI_NORMAL " ]", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsLambda) +{ + Env env { + .up = nullptr, + .values = { } + }; + PosTable::Origin origin((std::monostate())); + auto posIdx = state.positions.add(origin, 1, 1); + auto body = ExprInt(0); + auto formals = Formals {}; + + ExprLambda eLambda(posIdx, createSymbol("a"), &formals, &body); + + Value vLambda; + vLambda.mkLambda(&env, &eLambda); + + test(vLambda, + ANSI_BLUE "«lambda @ «none»:1:1»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); + + eLambda.setName(createSymbol("puppy")); + + test(vLambda, + ANSI_BLUE "«lambda puppy @ «none»:1:1»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true, + .force = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPrimOp) +{ + PrimOp primOp{ + .name = "puppy" + }; + Value v; + v.mkPrimOp(&primOp); + + test(v, + ANSI_BLUE "«primop puppy»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsPrimOpApp) +{ + PrimOp primOp{ + .name = "puppy" + }; + Value vPrimOp; + vPrimOp.mkPrimOp(&primOp); + + Value v; + v.mkPrimOpApp(&vPrimOp, nullptr); + + test(v, + ANSI_BLUE "«partially applied primop puppy»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsThunk) +{ + Value v; + v.mkThunk(nullptr, nullptr); + + test(v, + ANSI_MAGENTA "«thunk»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsBlackhole) +{ + Value v; + v.mkBlackhole(); + + test(v, + ANSI_RED "«potential infinite recursion»" ANSI_NORMAL, + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrsRepeated) +{ + BindingsBuilder emptyBuilder(state, 
state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("a"), &vEmpty); + builder.insert(state.symbols.create("b"), &vEmpty); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ a = { }; b = " ANSI_MAGENTA "«repeated»" ANSI_NORMAL "; }", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, ansiColorsListRepeated) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + Value vList; + state.mkList(vList, 3); + vList.bigList.elems[0] = &vEmpty; + vList.bigList.elems[1] = &vEmpty; + vList.bigList.size = 2; + + test(vList, + "[ { } " ANSI_MAGENTA "«repeated»" ANSI_NORMAL " ]", + PrintOptions { + .ansiColors = true + }); +} + +TEST_F(ValuePrintingTests, listRepeated) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vEmpty; + vEmpty.mkAttrs(emptyBuilder.finish()); + + Value vList; + state.mkList(vList, 3); + vList.bigList.elems[0] = &vEmpty; + vList.bigList.elems[1] = &vEmpty; + vList.bigList.size = 2; + + test(vList, "[ { } «repeated» ]", PrintOptions { }); + test(vList, + "[ { } { } ]", + PrintOptions { + .trackRepeated = false + }); +} + +TEST_F(ValuePrintingTests, ansiColorsAttrsElided) +{ + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + BindingsBuilder builder(state, state.allocBindings(10)); + builder.insert(state.symbols.create("one"), &vOne); + builder.insert(state.symbols.create("two"), &vTwo); + + Value vAttrs; + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «1 attribute elided»" ANSI_NORMAL "}", + PrintOptions { + .ansiColors = true, + .maxAttrs = 1 + }); + + Value vThree; + vThree.mkInt(3); + + builder.insert(state.symbols.create("three"), &vThree); + vAttrs.mkAttrs(builder.finish()); + + test(vAttrs, + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «2 attributes elided»" ANSI_NORMAL "}", + PrintOptions { + .ansiColors = true, + .maxAttrs = 1 + }); +} + +TEST_F(ValuePrintingTests, ansiColorsListElided) +{ + BindingsBuilder emptyBuilder(state, state.allocBindings(1)); + + Value vOne; + vOne.mkInt(1); + + Value vTwo; + vTwo.mkInt(2); + + Value vList; + state.mkList(vList, 4); + vList.bigList.elems[0] = &vOne; + vList.bigList.elems[1] = &vTwo; + vList.bigList.size = 2; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «1 item elided»" ANSI_NORMAL "]", + PrintOptions { + .ansiColors = true, + .maxListItems = 1 + }); + + Value vThree; + vThree.mkInt(3); + + vList.bigList.elems[2] = &vThree; + vList.bigList.size = 3; + + test(vList, + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «2 items elided»" ANSI_NORMAL "]", + PrintOptions { + .ansiColors = true, + .maxListItems = 1 + }); } } // namespace nix From df84dd4d8dd3fd6381ac2ca3064432ab31a16b79 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 9 Jan 2024 11:13:45 -0800 Subject: [PATCH 077/307] Restore ambiguous value printer for `nix-instantiate` The Nix team has requested that this output format remain unchanged. I've added a warning to the man page explaining that `nix-instantiate --eval` output will not parse correctly in many situations. 
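As an illustrative sketch (not part of this change), code that needs the old output keeps
calling the restored entry point instead of the new printer. `printLegacy` is a made-up
helper, and the template arguments used for `seen` and `numeric_limits` are assumptions of
this sketch:

```cpp
#include <iostream>
#include <limits>
#include <set>

#include "print-ambiguous.hh"
#include "symbol-table.hh"

// Hypothetical helper (not part of this patch): emit the legacy
// `nix-instantiate --eval`-style rendering of an already-evaluated value.
void printLegacy(nix::Value & v, const nix::SymbolTable & symbols)
{
    std::set<const void *> seen; // element type assumed; tracks values shown as «repeated»
    nix::printAmbiguous(v, symbols, std::cout, &seen,
        std::numeric_limits<int>::max());
    std::cout << std::endl;
}
```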
--- doc/manual/src/command-ref/nix-instantiate.md | 80 ++++++++++---- src/libexpr/print-ambiguous.cc | 100 ++++++++++++++++++ src/libexpr/print-ambiguous.hh | 24 +++++ src/nix-env/user-env.cc | 3 +- src/nix-instantiate/nix-instantiate.cc | 6 +- tests/functional/lang/eval-okay-print.exp | 2 +- 6 files changed, 189 insertions(+), 26 deletions(-) create mode 100644 src/libexpr/print-ambiguous.cc create mode 100644 src/libexpr/print-ambiguous.hh diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index e1b4a3e80..483150aa8 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -35,13 +35,50 @@ standard input. - `--parse`\ Just parse the input files, and print their abstract syntax trees on - standard output in ATerm format. + standard output as a Nix expression. - `--eval`\ Just parse and evaluate the input files, and print the resulting values on standard output. No instantiation of store derivations takes place. + > **Warning** + > + > This option produces ambiguous output which is not suitable for machine + > consumption. For example, these two Nix expressions print the same result + > despite having different types: + > + > ```console + > $ nix-instantiate --eval --expr '{ a = {}; }' + > { a = ; } + > $ nix-instantiate --eval --expr '{ a = ; }' + > { a = ; } + > ``` + > + > For human-readable output, `nix eval` (experimental) is more informative: + > + > ```console + > $ nix-instantiate --eval --expr 'a: a' + > + > $ nix eval --expr 'a: a' + > «lambda @ «string»:1:1» + > ``` + > + > For machine-readable output, the `--xml` option produces unambiguous + > output: + > + > ```console + > $ nix-instantiate --eval --xml --expr '{ foo = ; }' + > + > + > + > + > + > + > + > + > ``` + - `--find-file`\ Look up the given files in Nix’s search path (as specified by the `NIX_PATH` environment variable). If found, print the corresponding @@ -61,11 +98,11 @@ standard input. - `--json`\ When used with `--eval`, print the resulting value as an JSON - representation of the abstract syntax tree rather than as an ATerm. + representation of the abstract syntax tree rather than as a Nix expression. - `--xml`\ When used with `--eval`, print the resulting value as an XML - representation of the abstract syntax tree rather than as an ATerm. + representation of the abstract syntax tree rather than as a Nix expression. The schema is the same as that used by the [`toXML` built-in](../language/builtins.md). @@ -133,28 +170,29 @@ $ nix-instantiate --eval --xml --expr '1 + 2' The difference between non-strict and strict evaluation: ```console -$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }' -... - - - - - - -... +$ nix-instantiate --eval --xml --expr '{ x = {}; }' + + + + + + + + ``` Note that `y` is left unevaluated (the XML representation doesn’t attempt to show non-normal forms). ```console -$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }' -... - - - - - - -... 
+$ nix-instantiate --eval --xml --strict --expr '{ x = {}; }' + + + + + + + + + ``` diff --git a/src/libexpr/print-ambiguous.cc b/src/libexpr/print-ambiguous.cc new file mode 100644 index 000000000..07c398dd2 --- /dev/null +++ b/src/libexpr/print-ambiguous.cc @@ -0,0 +1,100 @@ +#include "print-ambiguous.hh" +#include "print.hh" +#include "signals.hh" + +namespace nix { + +// See: https://github.com/NixOS/nix/issues/9730 +void printAmbiguous( + Value &v, + const SymbolTable &symbols, + std::ostream &str, + std::set *seen, + int depth) +{ + checkInterrupt(); + + if (depth <= 0) { + str << "«too deep»"; + return; + } + switch (v.type()) { + case nInt: + str << v.integer; + break; + case nBool: + printLiteralBool(str, v.boolean); + break; + case nString: + printLiteralString(str, v.string_view()); + break; + case nPath: + str << v.path().to_string(); // !!! escaping? + break; + case nNull: + str << "null"; + break; + case nAttrs: { + if (seen && !v.attrs->empty() && !seen->insert(v.attrs).second) + str << "«repeated»"; + else { + str << "{ "; + for (auto & i : v.attrs->lexicographicOrder(symbols)) { + str << symbols[i->name] << " = "; + printAmbiguous(*i->value, symbols, str, seen, depth - 1); + str << "; "; + } + str << "}"; + } + break; + } + case nList: + if (seen && v.listSize() && !seen->insert(v.listElems()).second) + str << "«repeated»"; + else { + str << "[ "; + for (auto v2 : v.listItems()) { + if (v2) + printAmbiguous(*v2, symbols, str, seen, depth - 1); + else + str << "(nullptr)"; + str << " "; + } + str << "]"; + } + break; + case nThunk: + if (!v.isBlackhole()) { + str << ""; + } else { + // Although we know for sure that it's going to be an infinite recursion + // when this value is accessed _in the current context_, it's likely + // that the user will misinterpret a simpler «infinite recursion» output + // as a definitive statement about the value, while in fact it may be + // a valid value after `builtins.trace` and perhaps some other steps + // have completed. + str << "«potential infinite recursion»"; + } + break; + case nFunction: + if (v.isLambda()) { + str << ""; + } else if (v.isPrimOp()) { + str << ""; + } else if (v.isPrimOpApp()) { + str << ""; + } + break; + case nExternal: + str << *v.external; + break; + case nFloat: + str << v.fpoint; + break; + default: + printError("Nix evaluator internal error: printAmbiguous: invalid value type"); + abort(); + } +} + +} diff --git a/src/libexpr/print-ambiguous.hh b/src/libexpr/print-ambiguous.hh new file mode 100644 index 000000000..50c260a9b --- /dev/null +++ b/src/libexpr/print-ambiguous.hh @@ -0,0 +1,24 @@ +#pragma once + +#include "value.hh" + +namespace nix { + +/** + * Print a value in the deprecated format used by `nix-instantiate --eval` and + * `nix-env` (for manifests). + * + * This output can't be changed because it's part of the `nix-instantiate` API, + * but it produces ambiguous output; unevaluated thunks and lambdas (and a few + * other types) are printed as Nix path syntax like ``. + * + * See: https://github.com/NixOS/nix/issues/9730 + */ +void printAmbiguous( + Value &v, + const SymbolTable &symbols, + std::ostream &str, + std::set *seen, + int depth); + +} diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 3d07cab7a..973b6ee2b 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -108,8 +108,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, environment. 
*/ auto manifestFile = ({ std::ostringstream str; - std::set seen; - printAmbiguous(manifest, state.symbols, str, &seen, std::numeric_limits::max()); + printAmbiguous(manifest, state.symbols, str, nullptr, std::numeric_limits::max()); // TODO with C++20 we can use str.view() instead and avoid copy. std::string str2 = str.str(); StringSource source { str2 }; diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 9b36dccc6..87bc986e8 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -1,9 +1,11 @@ #include "globals.hh" +#include "print-ambiguous.hh" #include "shared.hh" #include "eval.hh" #include "eval-inline.hh" #include "get-drvs.hh" #include "attr-path.hh" +#include "signals.hh" #include "value-to-xml.hh" #include "value-to-json.hh" #include "store-api.hh" @@ -24,7 +26,6 @@ static int rootNr = 0; enum OutputKind { okPlain, okXML, okJSON }; - void processExpr(EvalState & state, const Strings & attrPaths, bool parseOnly, bool strict, Bindings & autoArgs, bool evalOnly, OutputKind output, bool location, Expr * e) @@ -56,7 +57,8 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } else { if (strict) state.forceValueDeep(vRes); - vRes.print(state, std::cout); + std::set seen; + printAmbiguous(vRes, state.symbols, std::cout, &seen, std::numeric_limits::max()); std::cout << std::endl; } } else { diff --git a/tests/functional/lang/eval-okay-print.exp b/tests/functional/lang/eval-okay-print.exp index aa1b2379e..0d960fb70 100644 --- a/tests/functional/lang/eval-okay-print.exp +++ b/tests/functional/lang/eval-okay-print.exp @@ -1 +1 @@ -[ null «primop toString» «partially applied primop deepSeq» «lambda @ /pwd/lang/eval-okay-print.nix:1:61» [ [ «repeated» ] ] ] +[ null [ [ «repeated» ] ] ] From 34bb6dcab1334ebc6ac0afaf4fe6f9e6f13de4b5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 11:14:13 -0500 Subject: [PATCH 078/307] makefiles: Support `.exe` executable prefix on Windows --- mk/programs.mk | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/mk/programs.mk b/mk/programs.mk index 6235311e9..623caaf55 100644 --- a/mk/programs.mk +++ b/mk/programs.mk @@ -1,5 +1,11 @@ programs-list := +ifdef HOST_WINDOWS + EXE_EXT = .exe +else + EXE_EXT = +endif + # Build a program with symbolic name $(1). The program is defined by # various variables prefixed by ‘$(1)_’: # @@ -31,7 +37,7 @@ define build-program _srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src))) $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH))) - $(1)_PATH := $$(_d)/$$($(1)_NAME) + $(1)_PATH := $$(_d)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$(_d))) @@ -42,7 +48,7 @@ define build-program ifdef $(1)_INSTALL_DIR - $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME) + $(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)$(EXE_EXT) $$(eval $$(call create-dir, $$($(1)_INSTALL_DIR))) From af0345df3688494d1e53a659eacb16fc4b9915b5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 11:14:13 -0500 Subject: [PATCH 079/307] makefiles: Do some HOST_CYGWIN -> HOST_WINDOWS These bits are not Cygwin-specific. 
--- mk/libraries.mk | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mk/libraries.mk b/mk/libraries.mk index 515a481f6..b99ba2782 100644 --- a/mk/libraries.mk +++ b/mk/libraries.mk @@ -3,7 +3,7 @@ libs-list := ifdef HOST_DARWIN SO_EXT = dylib else - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS SO_EXT = dll else SO_EXT = so @@ -65,7 +65,7 @@ define build-library $(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs)))) _libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH)) - ifdef HOST_CYGWIN + ifdef HOST_WINDOWS $(1)_INSTALL_DIR ?= $$(bindir) else $(1)_INSTALL_DIR ?= $$(libdir) @@ -85,7 +85,7 @@ define build-library endif else ifndef HOST_DARWIN - ifndef HOST_CYGWIN + ifndef HOST_WINDOWS $(1)_LDFLAGS += -Wl,-z,defs endif endif From 90fdbfc601a8d005f57c984284c5922dc38480eb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jan 2024 12:41:42 -0500 Subject: [PATCH 080/307] Build Windows DLLs with `-Wl,--export-all-symbols` This is not the most elegant, but will match the SOs in exporting everything for now. Later we can refine what is public/private to clean up the interface. --- Makefile | 37 ++++++++++++++++++++++++++++++++----- mk/lib.mk | 33 +-------------------------------- mk/platform.mk | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 37 deletions(-) create mode 100644 mk/platform.mk diff --git a/Makefile b/Makefile index 1fdb6e897..7bbfbddbe 100644 --- a/Makefile +++ b/Makefile @@ -1,8 +1,12 @@ +# External build directory support + include mk/build-dir.mk -include $(buildprefix)Makefile.config clean-files += $(buildprefix)Makefile.config +# List makefiles + ifeq ($(ENABLE_BUILD), yes) makefiles = \ mk/precompiled-headers.mk \ @@ -43,6 +47,8 @@ makefiles += \ tests/functional/plugins/local.mk endif +# Miscellaneous global Flags + OPTIMIZE = 1 ifeq ($(OPTIMIZE), 1) @@ -52,9 +58,29 @@ else GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE endif +include mk/platform.mk + +ifdef HOST_WINDOWS + # Windows DLLs are stricter about symbol visibility than Unix shared + # objects --- see https://gcc.gnu.org/wiki/Visibility for details. + # This is a temporary sledgehammer to export everything like on Unix, + # and not detail with this yet. + # + # TODO do not do this, and instead do fine-grained export annotations. + GLOBAL_LDFLAGS += -Wl,--export-all-symbols +endif + +GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src + +# Include the main lib, causing rules to be defined + include mk/lib.mk -# Must be included after `mk/lib.mk` so isn't the default target. +# Fallback stub rules for better UX when things are disabled +# +# These must be defined after `mk/lib.mk`. Otherwise the first rule +# incorrectly becomes the default target. + ifneq ($(ENABLE_UNIT_TESTS), yes) .PHONY: check check: @@ -69,8 +95,11 @@ installcheck: @exit 1 endif -# Must be included after `mk/lib.mk` so rules refer to variables defined -# by the library. Rules are not "lazy" like variables, unfortunately. +# Documentation or else fallback stub rules. +# +# The documentation makefiles be included after `mk/lib.mk` so rules +# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like +# variables, unfortunately. ifeq ($(ENABLE_DOC_GEN), yes) $(eval $(call include-sub-makefile, doc/manual/local.mk)) @@ -89,5 +118,3 @@ internal-api-html: @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." 
@exit 1 endif - -GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src diff --git a/mk/lib.mk b/mk/lib.mk index a5a067e48..10ce8d436 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -12,38 +12,7 @@ man-pages := install-tests := install-tests-groups := -ifdef HOST_OS - HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) - ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) - HOST_MINGW = 1 - HOST_WINDOWS = 1 - endif - ifeq ($(HOST_KERNEL), cygwin) - HOST_CYGWIN = 1 - HOST_WINDOWS = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) - HOST_DARWIN = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) - HOST_FREEBSD = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) - HOST_NETBSD = 1 - HOST_UNIX = 1 - endif - ifeq ($(HOST_KERNEL), linux) - HOST_LINUX = 1 - HOST_UNIX = 1 - endif - ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) - HOST_SOLARIS = 1 - HOST_UNIX = 1 - endif -endif +include mk/platform.mk # Hack to define a literal space. space := diff --git a/mk/platform.mk b/mk/platform.mk new file mode 100644 index 000000000..fe960dedf --- /dev/null +++ b/mk/platform.mk @@ -0,0 +1,32 @@ +ifdef HOST_OS + HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS))) + ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),) + HOST_MINGW = 1 + HOST_WINDOWS = 1 + endif + ifeq ($(HOST_KERNEL), cygwin) + HOST_CYGWIN = 1 + HOST_WINDOWS = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),) + HOST_DARWIN = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),) + HOST_FREEBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),) + HOST_NETBSD = 1 + HOST_UNIX = 1 + endif + ifeq ($(HOST_KERNEL), linux) + HOST_LINUX = 1 + HOST_UNIX = 1 + endif + ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),) + HOST_SOLARIS = 1 + HOST_UNIX = 1 + endif +endif From 3e237598342dee46188c83fba49cc30d509ee553 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 12:38:55 +0100 Subject: [PATCH 081/307] gc-non-blocking.sh: Add explanation Also name the _NIX_TEST_GC_SYNC environment variables logically. --- src/libstore/gc.cc | 10 +++++----- tests/functional/gc-non-blocking.sh | 20 ++++++++++++++------ 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index bd64e238d..80e036e7e 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -511,7 +511,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) /* Synchronisation point to test ENOENT handling in addTempRoot(), see tests/gc-non-blocking.sh. */ - if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + if (auto p = getEnv("_NIX_TEST_GC_SYNC_1")) readFile(*p); /* Start the server for receiving new roots. */ @@ -637,6 +637,10 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) roots.insert(root.first); } + /* Synchronisation point for testing, see tests/functional/gc-non-blocking.sh. */ + if (auto p = getEnv("_NIX_TEST_GC_SYNC_2")) + readFile(*p); + /* Helper function that deletes a path from the store and throws GCLimitReached if we've deleted enough garbage. */ auto deleteFromStore = [&](std::string_view baseName) @@ -783,10 +787,6 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) } }; - /* Synchronisation point for testing, see tests/functional/gc-non-blocking.sh. */ - if (auto p = getEnv("_NIX_TEST_GC_SYNC")) - readFile(*p); - /* Either delete all garbage paths, or just the specified paths (for gcDeleteSpecific). 
*/ if (options.action == GCOptions::gcDeleteSpecific) { diff --git a/tests/functional/gc-non-blocking.sh b/tests/functional/gc-non-blocking.sh index 7f2aebb8b..ec280badb 100644 --- a/tests/functional/gc-non-blocking.sh +++ b/tests/functional/gc-non-blocking.sh @@ -6,10 +6,14 @@ needLocalStore "the GC test needs a synchronisation point" clearStore -fifo=$TEST_ROOT/test.fifo -mkfifo "$fifo" +# This FIFO is read just after the global GC lock has been acquired, +# but before the root server is started. +fifo1=$TEST_ROOT/test2.fifo +mkfifo "$fifo1" -fifo2=$TEST_ROOT/test2.fifo +# This FIFO is read just after the roots have been read, but before +# the actual GC starts. +fifo2=$TEST_ROOT/test.fifo mkfifo "$fifo2" dummy=$(nix store add-path ./simple.nix) @@ -17,19 +21,23 @@ dummy=$(nix store add-path ./simple.nix) running=$TEST_ROOT/running touch $running -(_NIX_TEST_GC_SYNC=$fifo _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & +# Start GC. +(_NIX_TEST_GC_SYNC_1=$fifo1 _NIX_TEST_GC_SYNC_2=$fifo2 nix-store --gc -vvvvv; rm $running) & pid=$! sleep 2 -(sleep 1; echo > $fifo2) & +# Delay the start of the root server to check that the build below +# correctly handles ENOENT when connecting to the root server. +(sleep 1; echo > $fifo1) & pid2=$! +# Start a build. This should not be blocked by the GC in progress. outPath=$(nix-build --max-silent-time 60 -o "$TEST_ROOT/result" -E " with import ./config.nix; mkDerivation { name = \"non-blocking\"; - buildCommand = \"set -x; test -e $running; mkdir \$out; echo > $fifo\"; + buildCommand = \"set -x; test -e $running; mkdir \$out; echo > $fifo2\"; }") wait $pid From 7c6f093abcb68a2d07cd6450672c120f33ab96d6 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 13:00:53 +0100 Subject: [PATCH 082/307] .data() -> .c_str() to be on the safe side --- src/nix/develop.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 5e25833eb..1f2891378 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -603,7 +603,7 @@ struct CmdDevelop : Common, MixEnvironment setEnviron(); // prevent garbage collection until shell exits - setenv("NIX_GCROOT", gcroot.data(), 1); + setenv("NIX_GCROOT", gcroot.c_str(), 1); Path shell = "bash"; @@ -648,7 +648,7 @@ struct CmdDevelop : Common, MixEnvironment // Override SHELL with the one chosen for this environment. // This is to make sure the system shell doesn't leak into the build environment. 
- setenv("SHELL", shell.data(), 1); + setenv("SHELL", shell.c_str(), 1); // If running a phase or single command, don't want an interactive shell running after // Ctrl-C, so don't pass --rcfile From 4d0ecda33e29520756fdb7ccb7549205ed1afd52 Mon Sep 17 00:00:00 2001 From: DavHau Date: Sun, 19 Nov 2023 20:37:42 +0700 Subject: [PATCH 083/307] fetchTree/fetchGit: add test for .gitattributes ...with the intention to prevent future regressions in fetchGit --- tests/functional/fetchGit.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index 4985c7764..f0438f548 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -229,6 +229,15 @@ rev_tag2=$(git -C $repo rev-parse refs/tags/tag2) [[ $rev_tag2_nix = $rev_tag2 ]] unset _NIX_FORCE_HTTP +# Ensure .gitattributes is respected +touch $repo/not-exported-file +echo "/not-exported-file export-ignore" >> $repo/.gitattributes +git -C $repo add not-exported-file .gitattributes +git -C $repo commit -m 'Bla6' +rev5=$(git -C $repo rev-parse HEAD) +path12=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev5\"; }).outPath") +[[ ! -e $path12/not-exported-file ]] + # should fail if there is no repo rm -rf $repo/.git (! nix eval --impure --raw --expr "(builtins.fetchGit \"file://$repo\").outPath") From ce6d58a97cf6f975a0b930605605fab153445b22 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 27 Nov 2023 22:34:41 +0100 Subject: [PATCH 084/307] git fetcher: Add exportIgnore parameter Enabled for fetchGit, which historically had this behavior, among other behaviors we do not want in fetchGit. fetchTree disables this parameter by default. It can choose the simpler behavior, as it is still experimental. I am not confident that the filtering implementation is future proof. It should reuse a source filtering wrapper, which I believe Eelco has already written, but not merged yet. --- src/libexpr/primops/fetchTree.cc | 14 ++++++++ src/libfetchers/git-utils.cc | 57 +++++++++++++++++++++++++++----- src/libfetchers/git-utils.hh | 4 +-- src/libfetchers/git.cc | 15 +++++++-- tests/functional/fetchGit.sh | 5 ++- 5 files changed, 81 insertions(+), 14 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index eb2df8626..e00c4f190 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -39,6 +39,10 @@ void emitTreeAttrs( attrs.alloc("submodules").mkBool( fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); + if (input.getType() == "git") + attrs.alloc("exportIgnore").mkBool( + fetchers::maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)); + if (!forceDirty) { if (auto rev = input.getRev()) { @@ -112,6 +116,11 @@ static void fetchTree( attrs.emplace("type", type.value()); + if (params.isFetchGit) { + // Default value; user attrs are assigned later. + attrs.emplace("exportIgnore", Explicit{true}); + } + for (auto & attr : *args[0]->attrs) { if (attr.name == state.sType) continue; state.forceValue(*attr.value, attr.pos); @@ -593,6 +602,11 @@ static RegisterPrimOp primop_fetchGit({ A Boolean parameter that specifies whether submodules should be checked out. + - `exportIgnore` (default: `true`) + + A Boolean parameter that specifies whether `export-ignore` from `.gitattributes` should be applied. + This approximates part of the `git archive` behavior. 
+ - `shallow` (default: `false`) A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 65f7b45ef..4dc749504 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -7,6 +7,7 @@ #include +#include #include #include #include @@ -21,6 +22,7 @@ #include #include +#include #include #include #include @@ -307,7 +309,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return std::nullopt; } - std::vector> getSubmodules(const Hash & rev) override; + std::vector> getSubmodules(const Hash & rev, bool exportIgnore) override; std::string resolveSubmoduleUrl( const std::string & url, @@ -340,7 +342,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return true; } - ref getAccessor(const Hash & rev) override; + ref getAccessor(const Hash & rev, bool exportIgnore) override; static int sidebandProgressCallback(const char * str, int len, void * payload) { @@ -460,10 +462,12 @@ struct GitInputAccessor : InputAccessor { ref repo; Tree root; + bool exportIgnore; - GitInputAccessor(ref repo_, const Hash & rev) + GitInputAccessor(ref repo_, const Hash & rev, bool exportIgnore) : repo(repo_) , root(peelObject(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE)) + , exportIgnore(exportIgnore) { } @@ -492,7 +496,7 @@ struct GitInputAccessor : InputAccessor return Stat { .type = tDirectory }; auto entry = lookup(path); - if (!entry) + if (!entry || isExportIgnored(path)) return std::nullopt; auto mode = git_tree_entry_filemode(entry); @@ -527,6 +531,12 @@ struct GitInputAccessor : InputAccessor for (size_t n = 0; n < count; ++n) { auto entry = git_tree_entry_byindex(tree.get(), n); + if (exportIgnore) { + if (isExportIgnored(path + git_tree_entry_name(entry))) { + continue; + } + } + // FIXME: add to cache res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); } @@ -556,6 +566,33 @@ struct GitInputAccessor : InputAccessor std::unordered_map lookupCache; + bool isExportIgnored(const CanonPath & path) { + if (!exportIgnore) + return false; + + const char *exportIgnoreEntry = nullptr; + + // GIT_ATTR_CHECK_INDEX_ONLY: + // > It will use index only for creating archives or for a bare repo + // > (if an index has been specified for the bare repo). + // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 + if (git_attr_get(&exportIgnoreEntry, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY, + std::string(path.rel()).c_str(), + "export-ignore")) { + if (git_error_last()->klass == GIT_ENOTFOUND) + return false; + else + throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); + } + else { + // Official git will silently reject export-ignore lines that have + // values. We do the same. + return GIT_ATTR_IS_TRUE(exportIgnoreEntry); + } + } + /* Recursively look up 'path' relative to the root. 
*/ git_tree_entry * lookup(const CanonPath & path) { @@ -569,6 +606,10 @@ struct GitInputAccessor : InputAccessor throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); } + if (entry && isExportIgnored(path)) { + entry.reset(); + } + i = lookupCache.emplace(path, std::move(entry)).first; } @@ -644,17 +685,17 @@ struct GitInputAccessor : InputAccessor } }; -ref GitRepoImpl::getAccessor(const Hash & rev) +ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { - return make_ref(ref(shared_from_this()), rev); + return make_ref(ref(shared_from_this()), rev, exportIgnore); } -std::vector> GitRepoImpl::getSubmodules(const Hash & rev) +std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) { /* Read the .gitmodules files from this revision. */ CanonPath modulesFile(".gitmodules"); - auto accessor = getAccessor(rev); + auto accessor = getAccessor(rev, exportIgnore); if (!accessor->pathExists(modulesFile)) return {}; /* Parse it and get the revision of each submodule. */ diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 1def82071..f1cb48065 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -57,7 +57,7 @@ struct GitRepo * Return the submodules of this repo at the indicated revision, * along with the revision of each submodule. */ - virtual std::vector> getSubmodules(const Hash & rev) = 0; + virtual std::vector> getSubmodules(const Hash & rev, bool exportIgnore) = 0; virtual std::string resolveSubmoduleUrl( const std::string & url, @@ -71,7 +71,7 @@ struct GitRepo virtual bool hasObject(const Hash & oid) = 0; - virtual ref getAccessor(const Hash & rev) = 0; + virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; virtual void fetch( const std::string & url, diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 79270c317..fb8bf5bf4 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -174,7 +174,7 @@ struct GitInputScheme : InputScheme for (auto & [name, value] : url.query) { if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys") attrs.emplace(name, value); - else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit") + else if (name == "shallow" || name == "submodules" || name == "exportIgnore" || name == "allRefs" || name == "verifyCommit") attrs.emplace(name, Explicit { value == "1" }); else url2.query.emplace(name, value); @@ -199,6 +199,7 @@ struct GitInputScheme : InputScheme "rev", "shallow", "submodules", + "exportIgnore", "lastModified", "revCount", "narHash", @@ -250,6 +251,8 @@ struct GitInputScheme : InputScheme url.query.insert_or_assign("shallow", "1"); if (getSubmodulesAttr(input)) url.query.insert_or_assign("submodules", "1"); + if (maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)) + url.query.insert_or_assign("exportIgnore", "1"); if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false)) url.query.insert_or_assign("verifyCommit", "1"); auto publicKeys = getPublicKeys(input.attrs); @@ -372,6 +375,11 @@ struct GitInputScheme : InputScheme return maybeGetBoolAttr(input.attrs, "submodules").value_or(false); } + bool getExportIgnoreAttr(const Input & input) const + { + return maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false); + } + bool getAllRefsAttr(const Input & input) const { return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false); @@ -600,7 +608,8 @@ struct GitInputScheme : InputScheme 
verifyCommit(input, repo); - auto accessor = repo->getAccessor(rev); + bool exportIgnore = getExportIgnoreAttr(input); + auto accessor = repo->getAccessor(rev, exportIgnore); accessor->setPathDisplay("«" + input.to_string() + "»"); @@ -610,7 +619,7 @@ struct GitInputScheme : InputScheme if (getSubmodulesAttr(input)) { std::map> mounts; - for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) { + for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) { auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url); debug("Git submodule %s: %s %s %s -> %s", submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved); diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index f0438f548..46532c025 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -231,12 +231,15 @@ unset _NIX_FORCE_HTTP # Ensure .gitattributes is respected touch $repo/not-exported-file +touch $repo/exported-wonky echo "/not-exported-file export-ignore" >> $repo/.gitattributes -git -C $repo add not-exported-file .gitattributes +echo "/exported-wonky export-ignore=wonk" >> $repo/.gitattributes +git -C $repo add not-exported-file exported-wonky .gitattributes git -C $repo commit -m 'Bla6' rev5=$(git -C $repo rev-parse HEAD) path12=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \"$rev5\"; }).outPath") [[ ! -e $path12/not-exported-file ]] +[[ -e $path12/exported-wonky ]] # should fail if there is no repo rm -rf $repo/.git From 1c6bb609af3277ff3f747f49d04be80463d1f153 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 28 Nov 2023 00:41:01 +0100 Subject: [PATCH 085/307] fetchTree: allow larger output attrsets Intentionally dumb change ahead of architectural improvements. --- src/libexpr/primops/fetchTree.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index e00c4f190..d04908b77 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -25,7 +25,7 @@ void emitTreeAttrs( { assert(input.isLocked()); - auto attrs = state.buildBindings(10); + auto attrs = state.buildBindings(100); state.mkStorePathString(storePath, attrs.alloc(state.sOutPath)); From f6b1d155804a946ff2965b5fd1a57159770e8b58 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:27:13 +0100 Subject: [PATCH 086/307] MakeNotAllowedError: Touch up doc --- src/libfetchers/filtering-input-accessor.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index a352a33a6..2e2495c78 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -6,7 +6,7 @@ namespace nix { /** - * A function that should throw an exception of type + * A function that returns an exception of type * `RestrictedPathError` explaining that access to `path` is * forbidden. */ From cd5e752fa72bf15ba8fe6fcdae92c77ac6dc2375 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:30:10 +0100 Subject: [PATCH 087/307] GitRepoImpl::getSubmodules: Access getSubmoduleRev without cast This will be needed because the accessor will be wrapped, and therefore not be an instance of GitInputAccessor anymore. 
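
To make the motivation concrete, here is a stand-alone sketch (with made-up
toy types, not the real Nix classes) of why the old dynamic_pointer_cast
pattern stops working once the accessor is wrapped:

    #include <iostream>
    #include <memory>

    struct InputAccessor { virtual ~InputAccessor() = default; };
    struct GitInputAccessor : InputAccessor { int getSubmoduleRev() { return 42; } };
    struct FilteringInputAccessor : InputAccessor {
        std::shared_ptr<InputAccessor> next;
        explicit FilteringInputAccessor(std::shared_ptr<InputAccessor> n) : next(std::move(n)) {}
    };

    int main() {
        // Once the Git accessor is wrapped by a filtering accessor...
        std::shared_ptr<InputAccessor> accessor =
            std::make_shared<FilteringInputAccessor>(std::make_shared<GitInputAccessor>());
        // ...casting it back to the concrete Git type yields a null pointer.
        auto asGit = std::dynamic_pointer_cast<GitInputAccessor>(accessor);
        std::cout << (asGit ? "cast succeeded" : "cast returned null") << std::endl;
    }

Hence getSubmodules() now asks the repo for a dedicated raw accessor instead
of downcasting the (possibly wrapped) accessor it is handed.
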
--- src/libfetchers/git-utils.cc | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 4dc749504..d8a4f1778 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -52,6 +52,8 @@ bool operator == (const git_oid & oid1, const git_oid & oid2) namespace nix { +struct GitInputAccessor; + // Some wrapper types that ensure that the git_*_free functions get called. template struct Deleter @@ -342,6 +344,11 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return true; } + /** + * A 'GitInputAccessor' with no regard for export-ignore or any other transformations. + */ + ref getRawAccessor(const Hash & rev); + ref getAccessor(const Hash & rev, bool exportIgnore) override; static int sidebandProgressCallback(const char * str, int len, void * payload) @@ -685,6 +692,12 @@ struct GitInputAccessor : InputAccessor } }; +ref GitRepoImpl::getRawAccessor(const Hash & rev) +{ + auto self = ref(shared_from_this()); + return make_ref(self, rev); +} + ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { return make_ref(ref(shared_from_this()), rev, exportIgnore); @@ -706,8 +719,10 @@ std::vector> GitRepoImpl::getSubmodules std::vector> result; + auto rawAccessor = getRawAccessor(rev); + for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) { - auto rev = accessor.dynamic_pointer_cast()->getSubmoduleRev(submodule.path); + auto rev = rawAccessor->getSubmoduleRev(submodule.path); result.push_back({std::move(submodule), rev}); } From 467c62a96eaabe2f71939a07d923a759f82a466f Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:32:18 +0100 Subject: [PATCH 088/307] GitRepoImpl: Move exportIgnore into a filtering accessor --- src/libfetchers/git-utils.cc | 96 ++++++++++++++++++++---------------- 1 file changed, 54 insertions(+), 42 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index d8a4f1778..f8b2afeef 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1,5 +1,6 @@ #include "git-utils.hh" #include "input-accessor.hh" +#include "filtering-input-accessor.hh" #include "cache.hh" #include "finally.hh" #include "processes.hh" @@ -465,16 +466,17 @@ ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) return make_ref(path, create, bare); } +/** + * Raw git tree input accessor. 
+ */ struct GitInputAccessor : InputAccessor { ref repo; Tree root; - bool exportIgnore; - GitInputAccessor(ref repo_, const Hash & rev, bool exportIgnore) + GitInputAccessor(ref repo_, const Hash & rev) : repo(repo_) , root(peelObject(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE)) - , exportIgnore(exportIgnore) { } @@ -503,7 +505,7 @@ struct GitInputAccessor : InputAccessor return Stat { .type = tDirectory }; auto entry = lookup(path); - if (!entry || isExportIgnored(path)) + if (!entry) return std::nullopt; auto mode = git_tree_entry_filemode(entry); @@ -538,12 +540,6 @@ struct GitInputAccessor : InputAccessor for (size_t n = 0; n < count; ++n) { auto entry = git_tree_entry_byindex(tree.get(), n); - if (exportIgnore) { - if (isExportIgnored(path + git_tree_entry_name(entry))) { - continue; - } - } - // FIXME: add to cache res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{}); } @@ -573,33 +569,6 @@ struct GitInputAccessor : InputAccessor std::unordered_map lookupCache; - bool isExportIgnored(const CanonPath & path) { - if (!exportIgnore) - return false; - - const char *exportIgnoreEntry = nullptr; - - // GIT_ATTR_CHECK_INDEX_ONLY: - // > It will use index only for creating archives or for a bare repo - // > (if an index has been specified for the bare repo). - // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 - if (git_attr_get(&exportIgnoreEntry, - *repo, - GIT_ATTR_CHECK_INDEX_ONLY, - std::string(path.rel()).c_str(), - "export-ignore")) { - if (git_error_last()->klass == GIT_ENOTFOUND) - return false; - else - throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); - } - else { - // Official git will silently reject export-ignore lines that have - // values. We do the same. - return GIT_ATTR_IS_TRUE(exportIgnoreEntry); - } - } - /* Recursively look up 'path' relative to the root. */ git_tree_entry * lookup(const CanonPath & path) { @@ -613,10 +582,6 @@ struct GitInputAccessor : InputAccessor throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); } - if (entry && isExportIgnored(path)) { - entry.reset(); - } - i = lookupCache.emplace(path, std::move(entry)).first; } @@ -692,6 +657,46 @@ struct GitInputAccessor : InputAccessor } }; +struct GitExportIgnoreInputAccessor : FilteringInputAccessor { + ref repo; + + GitExportIgnoreInputAccessor(ref repo, ref next) + : FilteringInputAccessor(next, [&](const CanonPath & path) { + return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); + }) + , repo(repo) + { } + + bool isExportIgnored(const CanonPath & path) { + const char *exportIgnoreEntry = nullptr; + + // GIT_ATTR_CHECK_INDEX_ONLY: + // > It will use index only for creating archives or for a bare repo + // > (if an index has been specified for the bare repo). + // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 + if (git_attr_get(&exportIgnoreEntry, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY, + std::string(path.rel()).c_str(), + "export-ignore")) { + if (git_error_last()->klass == GIT_ENOTFOUND) + return false; + else + throw Error("looking up '%s': %s", showPath(path), git_error_last()->message); + } + else { + // Official git will silently reject export-ignore lines that have + // values. We do the same. 
+ return GIT_ATTR_IS_TRUE(exportIgnoreEntry); + } + } + + bool isAllowed(const CanonPath & path) override { + return !isExportIgnored(path); + } + +}; + ref GitRepoImpl::getRawAccessor(const Hash & rev) { auto self = ref(shared_from_this()); @@ -700,7 +705,14 @@ ref GitRepoImpl::getRawAccessor(const Hash & rev) ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) { - return make_ref(ref(shared_from_this()), rev, exportIgnore); + auto self = ref(shared_from_this()); + ref rawGitAccessor = getRawAccessor(rev); + if (exportIgnore) { + return make_ref(self, rawGitAccessor); + } + else { + return rawGitAccessor; + } } std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) From 8024b954d702e0693b532650230037e398453693 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 19:42:46 +0100 Subject: [PATCH 089/307] fetchTree: Recommend against exportIgnore --- src/libexpr/primops/fetchTree.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index d04908b77..2e4b72c9f 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -607,6 +607,8 @@ static RegisterPrimOp primop_fetchGit({ A Boolean parameter that specifies whether `export-ignore` from `.gitattributes` should be applied. This approximates part of the `git archive` behavior. + Enabling this option is not recommended because it is unknown whether the Git developers commit to the reproducibility of `export-ignore` in newer Git versions. + - `shallow` (default: `false`) A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. From 7774eff10e0ec1f540a6dc22d8fd78de40714bdf Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:28:53 +0100 Subject: [PATCH 090/307] libfetchers/git: Move workdir accessor into GitRepo::getAccessor --- src/libfetchers/git-utils.cc | 19 +++++++++++++++++++ src/libfetchers/git-utils.hh | 3 +++ src/libfetchers/git.cc | 8 ++++---- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index f8b2afeef..d218276b4 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1,4 +1,5 @@ #include "git-utils.hh" +#include "fs-input-accessor.hh" #include "input-accessor.hh" #include "filtering-input-accessor.hh" #include "cache.hh" @@ -352,6 +353,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this ref getAccessor(const Hash & rev, bool exportIgnore) override; + ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override; + static int sidebandProgressCallback(const char * str, int len, void * payload) { auto act = (Activity *) payload; @@ -715,6 +718,22 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) } } +ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) +{ + auto self = ref(shared_from_this()); + ref fileAccessor = + AllowListInputAccessor::create( + makeFSInputAccessor(path), + std::set { wd.files }, + std::move(makeNotAllowedError)); + if (exportIgnore) { + return make_ref(self, fileAccessor); + } + else { + return fileAccessor; + } +} + std::vector> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore) { /* Read the .gitmodules files from this revision. 
*/ diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index f1cb48065..768554780 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -1,5 +1,6 @@ #pragma once +#include "filtering-input-accessor.hh" #include "input-accessor.hh" namespace nix { @@ -73,6 +74,8 @@ struct GitRepo virtual ref getAccessor(const Hash & rev, bool exportIgnore) = 0; + virtual ref getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0; + virtual void fetch( const std::string & url, const std::string & refspec, diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index fb8bf5bf4..d7818988f 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -9,7 +9,6 @@ #include "processes.hh" #include "git.hh" #include "fs-input-accessor.hh" -#include "filtering-input-accessor.hh" #include "mounted-input-accessor.hh" #include "git-utils.hh" #include "logging.hh" @@ -659,10 +658,11 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); + auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + ref accessor = - AllowListInputAccessor::create( - makeFSInputAccessor(CanonPath(repoInfo.url)), - std::move(repoInfo.workdirInfo.files), + repo->getAccessor(repoInfo.workdirInfo, + getExportIgnoreAttr(input), makeNotAllowedError(repoInfo.url)); /* If the repo has submodules, return a mounted input accessor From 1bbe8371849f33da4edba23289de7b7e3c5d6c83 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:35:11 +0100 Subject: [PATCH 091/307] fetchTree: Add isFetchGit exportIgnore --- src/libexpr/primops/fetchTree.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 2e4b72c9f..c167444b0 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -161,6 +161,7 @@ static void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); + attrs.emplace("exportIgnore", Explicit{true}); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) From 99bd12f0b18b1a2a94639134c49c478c9ab56b3b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 11 Dec 2023 22:36:08 +0100 Subject: [PATCH 092/307] fetchGit/fetchTree: Improve exportIgnore, submodule interaction Also fingerprint and some preparatory improvements. Testing is still not up to scratch because lots of logic is duplicated between the workdir and commit cases. --- src/libexpr/primops/fetchTree.cc | 16 ++++++---- src/libfetchers/fetchers.hh | 7 +++++ src/libfetchers/git-utils.cc | 43 +++++++++++++++++++++----- src/libfetchers/git.cc | 9 ++++-- tests/functional/fetchGitSubmodules.sh | 42 +++++++++++++++++++++++++ 5 files changed, 101 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index c167444b0..7a4725334 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -116,11 +116,6 @@ static void fetchTree( attrs.emplace("type", type.value()); - if (params.isFetchGit) { - // Default value; user attrs are assigned later. 
- attrs.emplace("exportIgnore", Explicit{true}); - } - for (auto & attr : *args[0]->attrs) { if (attr.name == state.sType) continue; state.forceValue(*attr.value, attr.pos); @@ -144,6 +139,12 @@ static void fetchTree( state.symbols[attr.name], showType(*attr.value))); } + if (params.isFetchGit && !attrs.contains("exportIgnore")) { + // Default value; user attrs are assigned later. + // FIXME: exportIgnore := !submodules + attrs.emplace("exportIgnore", Explicit{true}); + } + if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) state.debugThrowLastTrace(EvalError({ @@ -161,7 +162,10 @@ static void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); - attrs.emplace("exportIgnore", Explicit{true}); + if (!attrs.contains("exportIgnore")) { + // FIXME: exportIgnore := !submodules + attrs.emplace("exportIgnore", Explicit{true}); + } input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) diff --git a/src/libfetchers/fetchers.hh b/src/libfetchers/fetchers.hh index 5f3254b6d..036647830 100644 --- a/src/libfetchers/fetchers.hh +++ b/src/libfetchers/fetchers.hh @@ -187,6 +187,13 @@ struct InputScheme virtual bool isDirect(const Input & input) const { return true; } + /** + * A sufficiently unique string that can be used as a cache key to identify the `input`. + * + * Only known-equivalent inputs should return the same fingerprint. + * + * This is not a stable identifier between Nix versions, but not guaranteed to change either. + */ virtual std::optional getFingerprint(ref store, const Input & input) const { return std::nullopt; } }; diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index d218276b4..cd65e0fda 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -662,14 +662,45 @@ struct GitInputAccessor : InputAccessor struct GitExportIgnoreInputAccessor : FilteringInputAccessor { ref repo; + std::optional rev; - GitExportIgnoreInputAccessor(ref repo, ref next) + GitExportIgnoreInputAccessor(ref repo, ref next, std::optional rev) : FilteringInputAccessor(next, [&](const CanonPath & path) { return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); }) , repo(repo) + , rev(rev) { } + bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut) + { + std::string pathStr {path.rel()}; + const char * pathCStr = pathStr.c_str(); + + if (rev) { + git_attr_options opts = GIT_ATTR_OPTIONS_INIT; + opts.attr_commit_id = hashToOID(*rev); + // TODO: test that gitattributes from global and system are not used + // (ie more or less: home and etc - both of them!) + opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM; + return git_attr_get_ext( + &valueOut, + *repo, + &opts, + pathCStr, + attrName + ); + } + else { + return git_attr_get( + &valueOut, + *repo, + GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM, + pathCStr, + attrName); + } + } + bool isExportIgnored(const CanonPath & path) { const char *exportIgnoreEntry = nullptr; @@ -677,11 +708,7 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { // > It will use index only for creating archives or for a bare repo // > (if an index has been specified for the bare repo). 
// -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48 - if (git_attr_get(&exportIgnoreEntry, - *repo, - GIT_ATTR_CHECK_INDEX_ONLY, - std::string(path.rel()).c_str(), - "export-ignore")) { + if (gitAttrGet(path, "export-ignore", exportIgnoreEntry)) { if (git_error_last()->klass == GIT_ENOTFOUND) return false; else @@ -711,7 +738,7 @@ ref GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore) auto self = ref(shared_from_this()); ref rawGitAccessor = getRawAccessor(rev); if (exportIgnore) { - return make_ref(self, rawGitAccessor); + return make_ref(self, rawGitAccessor, rev); } else { return rawGitAccessor; @@ -727,7 +754,7 @@ ref GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportI std::set { wd.files }, std::move(makeNotAllowedError)); if (exportIgnore) { - return make_ref(self, fileAccessor); + return make_ref(self, fileAccessor, std::nullopt); } else { return fileAccessor; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index d7818988f..10c0aef97 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -628,6 +628,7 @@ struct GitInputScheme : InputScheme if (submodule.branch != "") attrs.insert_or_assign("ref", submodule.branch); attrs.insert_or_assign("rev", submoduleRev.gitRev()); + attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); @@ -660,9 +661,11 @@ struct GitInputScheme : InputScheme auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + auto exportIgnore = getExportIgnoreAttr(input); + ref accessor = repo->getAccessor(repoInfo.workdirInfo, - getExportIgnoreAttr(input), + exportIgnore, makeNotAllowedError(repoInfo.url)); /* If the repo has submodules, return a mounted input accessor @@ -676,6 +679,8 @@ struct GitInputScheme : InputScheme fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.abs()); + attrs.insert_or_assign("exportIgnore", Explicit{ exportIgnore }); + auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs)); auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); @@ -747,7 +752,7 @@ struct GitInputScheme : InputScheme std::optional getFingerprint(ref store, const Input & input) const override { if (auto rev = input.getRev()) - return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : ""); + return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? 
";e" : ""); else return std::nullopt; } diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 369cdc5db..1b425820e 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -118,3 +118,45 @@ cloneRepo=$TEST_ROOT/a/b/gitSubmodulesClone # NB /a/b to make the relative path git clone $rootRepo $cloneRepo pathIndirect=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$cloneRepo; rev = \"$rev2\"; submodules = true; }).outPath") [[ $pathIndirect = $pathWithRelative ]] + +# Test submodule export-ignore interaction +git -C $rootRepo/sub config user.email "foobar@example.com" +git -C $rootRepo/sub config user.name "Foobar" + +echo "/exclude-from-root export-ignore" >> $rootRepo/.gitattributes +echo nope > $rootRepo/exclude-from-root +git -C $rootRepo add .gitattributes exclude-from-root +git -C $rootRepo commit -m "Add export-ignore" + +echo "/exclude-from-sub export-ignore" >> $rootRepo/sub/.gitattributes +echo nope > $rootRepo/sub/exclude-from-sub +git -C $rootRepo/sub add .gitattributes exclude-from-sub +git -C $rootRepo/sub commit -m "Add export-ignore (sub)" + +git -C $rootRepo add sub +git -C $rootRepo commit -m "Update submodule" + +git -C $rootRepo status + +# exportIgnore can be used with submodules +pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") +# find $pathWithExportIgnore +# git -C $rootRepo archive --format=tar HEAD | tar -t +# cp -a $rootRepo /tmp/rootRepo + +[[ -e $pathWithExportIgnore/sub/content ]] +[[ ! -e $pathWithExportIgnore/exclude-from-root ]] +[[ ! -e $pathWithExportIgnore/sub/exclude-from-sub ]] + +# exportIgnore can be explicitly disabled with submodules +pathWithoutExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = false; }).outPath") +# find $pathWithoutExportIgnore + +[[ -e $pathWithoutExportIgnore/exclude-from-root ]] +[[ -e $pathWithoutExportIgnore/sub/exclude-from-sub ]] + +# exportIgnore defaults to false when submodules = true +pathWithSubmodules=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; }).outPath") + +[[ -e $pathWithoutExportIgnore/exclude-from-root ]] +[[ -e $pathWithoutExportIgnore/sub/exclude-from-sub ]] From 71d08af15bb2973dc2a1cb7fee18f94d779dfed7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 5 Jan 2024 19:01:12 +0100 Subject: [PATCH 093/307] rl-next: Add *general* note about git fetcher reimpl --- doc/manual/rl-next/git-fetcher.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 doc/manual/rl-next/git-fetcher.md diff --git a/doc/manual/rl-next/git-fetcher.md b/doc/manual/rl-next/git-fetcher.md new file mode 100644 index 000000000..54c0d216d --- /dev/null +++ b/doc/manual/rl-next/git-fetcher.md @@ -0,0 +1,18 @@ +--- +synopsis: "Nix now uses `libgit2` for Git fetching" +prs: + - 9240 + - 9241 + - 9258 + - 9480 +issues: + - 5313 +--- + +Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. +The existing implementation based on the Git CLI had issues regarding reproducibility and performance. + +Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. + +Known issues: +- The `export-subst` behavior has not been reimplemented. 
[Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. From 692e9197bc91f874ec30f839b1ae6d1beefa1eeb Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 5 Jan 2024 19:49:39 +0100 Subject: [PATCH 094/307] fetchTree: Disallow combination of submodules and exportIgnore for now --- src/libexpr/primops/fetchTree.cc | 8 +++----- src/libfetchers/git.cc | 11 +++++++++++ tests/functional/fetchGitSubmodules.sh | 26 ++++++++++++++++++-------- 3 files changed, 32 insertions(+), 13 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 7a4725334..4d22ca01e 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -1,3 +1,4 @@ +#include "libfetchers/attrs.hh" #include "primops.hh" #include "eval-inline.hh" #include "eval-settings.hh" @@ -139,9 +140,7 @@ static void fetchTree( state.symbols[attr.name], showType(*attr.value))); } - if (params.isFetchGit && !attrs.contains("exportIgnore")) { - // Default value; user attrs are assigned later. - // FIXME: exportIgnore := !submodules + if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } @@ -162,8 +161,7 @@ static void fetchTree( fetchers::Attrs attrs; attrs.emplace("type", "git"); attrs.emplace("url", fixGitURL(url)); - if (!attrs.contains("exportIgnore")) { - // FIXME: exportIgnore := !submodules + if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { attrs.emplace("exportIgnore", Explicit{true}); } input = fetchers::Input::fromAttrs(std::move(attrs)); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 10c0aef97..6ecb7a4ea 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -1,3 +1,4 @@ +#include "error.hh" #include "fetchers.hh" #include "users.hh" #include "cache.hh" @@ -739,6 +740,16 @@ struct GitInputScheme : InputScheme auto repoInfo = getRepoInfo(input); + if (getExportIgnoreAttr(input) + && getSubmodulesAttr(input)) { + /* In this situation, we don't have a git CLI behavior that we can copy. + `git archive` does not support submodules, so it is unclear whether + rules from the parent should affect the submodule or not. + When git may eventually implement this, we need Nix to match its + behavior. */ + throw UnimplementedError("exportIgnore and submodules are not supported together yet"); + } + auto [accessor, final] = input.getRef() || input.getRev() || !repoInfo.isLocal ? 
getAccessorFromCommit(store, repoInfo, std::move(input)) diff --git a/tests/functional/fetchGitSubmodules.sh b/tests/functional/fetchGitSubmodules.sh index 1b425820e..cd180815d 100644 --- a/tests/functional/fetchGitSubmodules.sh +++ b/tests/functional/fetchGitSubmodules.sh @@ -124,12 +124,16 @@ git -C $rootRepo/sub config user.email "foobar@example.com" git -C $rootRepo/sub config user.name "Foobar" echo "/exclude-from-root export-ignore" >> $rootRepo/.gitattributes +# TBD possible semantics for submodules + exportIgnore +# echo "/sub/exclude-deep export-ignore" >> $rootRepo/.gitattributes echo nope > $rootRepo/exclude-from-root git -C $rootRepo add .gitattributes exclude-from-root git -C $rootRepo commit -m "Add export-ignore" echo "/exclude-from-sub export-ignore" >> $rootRepo/sub/.gitattributes echo nope > $rootRepo/sub/exclude-from-sub +# TBD possible semantics for submodules + exportIgnore +# echo aye > $rootRepo/sub/exclude-from-root git -C $rootRepo/sub add .gitattributes exclude-from-sub git -C $rootRepo/sub commit -m "Add export-ignore (sub)" @@ -138,15 +142,21 @@ git -C $rootRepo commit -m "Update submodule" git -C $rootRepo status -# exportIgnore can be used with submodules -pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") -# find $pathWithExportIgnore -# git -C $rootRepo archive --format=tar HEAD | tar -t -# cp -a $rootRepo /tmp/rootRepo +# # TBD: not supported yet, because semantics are undecided and current implementation leaks rules from the root to submodules +# # exportIgnore can be used with submodules +# pathWithExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = true; }).outPath") +# # find $pathWithExportIgnore +# # git -C $rootRepo archive --format=tar HEAD | tar -t +# # cp -a $rootRepo /tmp/rootRepo + +# [[ -e $pathWithExportIgnore/sub/content ]] +# [[ ! -e $pathWithExportIgnore/exclude-from-root ]] +# [[ ! -e $pathWithExportIgnore/sub/exclude-from-sub ]] +# TBD possible semantics for submodules + exportIgnore +# # root .gitattribute has no power across submodule boundary +# [[ -e $pathWithExportIgnore/sub/exclude-from-root ]] +# [[ -e $pathWithExportIgnore/sub/exclude-deep ]] -[[ -e $pathWithExportIgnore/sub/content ]] -[[ ! -e $pathWithExportIgnore/exclude-from-root ]] -[[ ! 
-e $pathWithExportIgnore/sub/exclude-from-sub ]] # exportIgnore can be explicitly disabled with submodules pathWithoutExportIgnore=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = file://$rootRepo; submodules = true; exportIgnore = false; }).outPath") From 469cf263c7d1b7991a9122b76b827f3d65a02301 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 14:02:58 +0100 Subject: [PATCH 095/307] Format --- src/libfetchers/git-utils.cc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index cd65e0fda..b416c3b52 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -701,7 +701,8 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isExportIgnored(const CanonPath & path) { + bool isExportIgnored(const CanonPath & path) + { const char *exportIgnoreEntry = nullptr; // GIT_ATTR_CHECK_INDEX_ONLY: @@ -721,7 +722,8 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isAllowed(const CanonPath & path) override { + bool isAllowed(const CanonPath & path) override + { return !isExportIgnored(path); } From f68ad5acbb74c32d7ae6019bc17931940456603a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 16:05:36 +0100 Subject: [PATCH 096/307] fetchTree/git: Don't expose exportIgnore attr --- src/libexpr/primops/fetchTree.cc | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index 4d22ca01e..7251cbbbe 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -40,10 +40,6 @@ void emitTreeAttrs( attrs.alloc("submodules").mkBool( fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false)); - if (input.getType() == "git") - attrs.alloc("exportIgnore").mkBool( - fetchers::maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false)); - if (!forceDirty) { if (auto rev = input.getRev()) { From 8c7e2ed77c3c14f8a7c82ab6ab7b20ebcfb943a0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 16:21:07 +0100 Subject: [PATCH 097/307] Update release notes --- doc/manual/rl-next/nix-profile-names.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md index f5953bd72..b7ad4b5d7 100644 --- a/doc/manual/rl-next/nix-profile-names.md +++ b/doc/manual/rl-next/nix-profile-names.md @@ -3,4 +3,6 @@ synopsis: "`nix profile` now allows referring to elements by human-readable name prs: 8678 --- -[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version. +[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. 
+ +**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. From 72560f7bbef2ab3c02b8ca040fe084328bdd5fbe Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 12 Jan 2024 16:33:15 +0100 Subject: [PATCH 098/307] Add profile migration test --- tests/functional/nix-profile.sh | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 003af5174..6f304bd9a 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -193,3 +193,12 @@ nix profile install $flake2Dir --priority 0 clearProfiles nix profile install $(nix build $flake1Dir --no-link --print-out-paths) expect 1 nix profile install --impure --expr "(builtins.getFlake ''$flake2Dir'').packages.$system.default" + +# Test upgrading from profile version 2. +clearProfiles +mkdir -p $TEST_ROOT/import-profile +outPath=$(nix build --no-link --print-out-paths $flake1Dir/flake.nix^out) +printf '{ "version": 2, "elements": [ { "active": true, "attrPath": "legacyPackages.x86_64-linux.hello", "originalUrl": "flake:nixpkgs", "outputs": null, "priority": 5, "storePaths": [ "%s" ], "url": "github:NixOS/nixpkgs/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ] }' "$outPath" > $TEST_ROOT/import-profile/manifest.json +nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) +nix profile list | grep -A4 'Name:.*hello' | grep "Store paths:.*$outPath" +nix profile remove hello 2>&1 | grep 'removed 1 packages, kept 0 packages' From d80c582b783e4c189432a2afd383be39cc09f17c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 17:16:59 +0100 Subject: [PATCH 099/307] libfetchers: Add CachingFilteringInputAccessor Co-authored-by: Eelco Dolstra --- src/libfetchers/filtering-input-accessor.cc | 9 +++++++++ src/libfetchers/filtering-input-accessor.hh | 14 ++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc index 5ae416fd3..581ce3c1d 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-input-accessor.cc @@ -80,4 +80,13 @@ ref AllowListInputAccessor::create( return make_ref(next, std::move(allowedPaths), std::move(makeNotAllowedError)); } +bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path) +{ + auto i = cache.find(path); + if (i != cache.end()) return i->second; + auto res = isAllowedUncached(path); + cache.emplace(path, res); + return res; +} + } diff --git a/src/libfetchers/filtering-input-accessor.hh b/src/libfetchers/filtering-input-accessor.hh index 2e2495c78..8a9b206ee 100644 --- a/src/libfetchers/filtering-input-accessor.hh +++ b/src/libfetchers/filtering-input-accessor.hh @@ -71,4 +71,18 @@ struct AllowListInputAccessor : public FilteringInputAccessor using FilteringInputAccessor::FilteringInputAccessor; }; +/** + * A wrapping `InputAccessor` mix-in where `isAllowed()` caches the result of virtual `isAllowedUncached()`. 
+ */ +struct CachingFilteringInputAccessor : FilteringInputAccessor +{ + std::map cache; + + using FilteringInputAccessor::FilteringInputAccessor; + + bool isAllowed(const CanonPath & path) override; + + virtual bool isAllowedUncached(const CanonPath & path) = 0; +}; + } From 274d887feee7e8bc3d4a7e6c5087fbe5aec4fd4e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 12 Jan 2024 17:18:56 +0100 Subject: [PATCH 100/307] fetchTree/git: Cache export-ignore filter --- src/libfetchers/git-utils.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index b416c3b52..bfc7059fe 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -660,12 +660,12 @@ struct GitInputAccessor : InputAccessor } }; -struct GitExportIgnoreInputAccessor : FilteringInputAccessor { +struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor { ref repo; std::optional rev; GitExportIgnoreInputAccessor(ref repo, ref next, std::optional rev) - : FilteringInputAccessor(next, [&](const CanonPath & path) { + : CachingFilteringInputAccessor(next, [&](const CanonPath & path) { return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path)); }) , repo(repo) @@ -722,7 +722,7 @@ struct GitExportIgnoreInputAccessor : FilteringInputAccessor { } } - bool isAllowed(const CanonPath & path) override + bool isAllowedUncached(const CanonPath & path) override { return !isExportIgnored(path); } From 25c889baacd6a8b9b66ce4776ec729a40e39cf77 Mon Sep 17 00:00:00 2001 From: Mel Zuser Date: Thu, 11 Jan 2024 14:40:54 -0800 Subject: [PATCH 101/307] Fix performance of builtins.substring for empty substrings When returning a 0-length substring, avoid calling coerceToString, since it returns a string_view with the string's length, which is expensive to compute for large strings. 
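
As a rough illustration of the idiom this is meant to speed up (the names
below are made up for the example), an empty substring is sometimes used to
capture just the string context of a large string:

    let
      # Any string carrying context works here; toFile is just a convenient
      # way to get one in a self-contained expression.
      s = "${builtins.toFile "example.txt" "hello"}";
      ctxOnly = builtins.substring 0 0 s;   # evaluates to "" but keeps s's context
    in
      builtins.getContext ctxOnly == builtins.getContext s   # true

With this change, evaluating such a zero-length substring no longer has to
compute the length of the (possibly very large) source string.
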
--- src/libexpr/primops.cc | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index ee07e5568..c08aea898 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -3712,9 +3712,6 @@ static RegisterPrimOp primop_toString({ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v) { int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); - int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); - NixStringContext context; - auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); if (start < 0) state.debugThrowLastTrace(EvalError({ @@ -3722,6 +3719,22 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, .errPos = state.positions[pos] })); + + int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); + + // Special-case on empty substring to avoid O(n) strlen + // This allows for the use of empty substrings to efficently capture string context + if (len == 0) { + state.forceValue(*args[2], pos); + if (args[2]->type() == nString) { + v.mkString("", args[2]->context()); + return; + } + } + + NixStringContext context; + auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); + v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context); } From 6208ca72093a0e05c56561dab349423f9bff069b Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 1 Dec 2023 17:03:28 -0500 Subject: [PATCH 102/307] Separate `SystemError` from `SysError` Most of this is a `catch SysError` -> `catch SystemError` sed. This is a rather pure-churn change I would like to get out of the way. **The intersting part is `src/libutil/error.hh`.** On Unix, we will only throw the `SysError` concrete class, which has the same constructors that `SystemError` used to have. On Windows, we will throw `WinError` *and* `SysError`. `WinError` (which will be created in a later PR), will use a `DWORD` instead of `int` error value, and `GetLastError()`, which is the Windows equivalent of the `errno` machinery. Windows will *also* use `SysError` because Window's "libc" (MSVCRT) implements the POSIX interface, and we use it too. As the docs describe, while we *throw* one of the 3 choices above (2 concrete classes or the alias), we should always *catch* `SystemError`. This ensures no matter how the implementation changes for Windows (e.g. between `SysError` and `WinError`) the catching logic stays the same and stays correct. 
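
A self-contained sketch of the throw-concrete / catch-general pattern (toy
stand-in classes, not the real definitions from error.hh):

    #include <cerrno>
    #include <cstring>
    #include <iostream>
    #include <stdexcept>
    #include <string>

    struct SystemError : std::runtime_error { using std::runtime_error::runtime_error; };

    struct SysError : SystemError {
        int errNo;
        SysError(int errNo, const std::string & msg)
            : SystemError(msg + ": " + std::strerror(errNo)), errNo(errNo) {}
    };

    int main() {
        try {
            // Implementations throw a concrete class (SysError here, WinError later on Windows)...
            throw SysError(ENOENT, "reading the netrc file");
        } catch (SystemError & e) {
            // ...but call sites catch SystemError, so they keep working either way.
            std::cout << "ignored: " << e.what() << std::endl;
        }
    }
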
Co-Authored-By volth Co-Authored-By Eugene Butler --- src/libcmd/repl.cc | 2 +- src/libstore/build/local-derivation-goal.cc | 4 +- src/libstore/gc.cc | 2 +- src/libstore/globals.cc | 2 +- src/libstore/keys.cc | 2 +- src/libstore/local-store.cc | 2 +- src/libstore/optimise-store.cc | 2 +- src/libstore/remote-fs-accessor.cc | 4 +- src/libutil/args.cc | 2 +- src/libutil/cgroup.cc | 2 +- src/libutil/config.cc | 2 +- src/libutil/error.hh | 42 ++++++++++++++++++--- src/libutil/file-descriptor.cc | 2 +- src/libutil/logging.cc | 2 +- src/libutil/serialise.cc | 2 +- src/libutil/util.cc | 2 +- src/nix-build/nix-build.cc | 2 +- src/nix/config-check.cc | 2 +- tests/unit/libutil/logging.cc | 6 +-- 19 files changed, 59 insertions(+), 27 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 72e3559df..918b2e53a 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -254,7 +254,7 @@ void NixRepl::mainLoop() rl_readline_name = "nix-repl"; try { createDirs(dirOf(historyFile)); - } catch (SysError & e) { + } catch (SystemError & e) { logWarning(e.info()); } #ifndef USE_READLINE diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index b01d9e237..f85301950 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -1495,7 +1495,7 @@ void LocalDerivationGoal::startDaemon() daemon::processConnection(store, from, to, NotTrusted, daemon::Recursive); debug("terminated daemon connection"); - } catch (SysError &) { + } catch (SystemError &) { ignoreException(); } }); @@ -1707,7 +1707,7 @@ void LocalDerivationGoal::runChild() try { if (drv->isBuiltin() && drv->builder == "builtin:fetchurl") netrcData = readFile(settings.netrcFile); - } catch (SysError &) { } + } catch (SystemError &) { } #if __linux__ if (useChroot) { diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 2bd3a2edc..5cbce0748 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -413,7 +413,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor) auto env_end = std::sregex_iterator{}; for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i) unchecked[i->str()].emplace(envFile); - } catch (SysError & e) { + } catch (SystemError & e) { if (errno == ENOENT || errno == EACCES || errno == ESRCH) continue; throw; diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 50584e06c..d22ae4ca0 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -118,7 +118,7 @@ void loadConfFile() try { std::string contents = readFile(path); globalConfig.applyConfig(contents, path); - } catch (SysError &) { } + } catch (SystemError &) { } }; applyConfigFile(settings.nixConfDir + "/nix.conf"); diff --git a/src/libstore/keys.cc b/src/libstore/keys.cc index 2cc50970f..70478e7ad 100644 --- a/src/libstore/keys.cc +++ b/src/libstore/keys.cc @@ -19,7 +19,7 @@ PublicKeys getDefaultPublicKeys() try { SecretKey secretKey(readFile(secretKeyFile)); publicKeys.emplace(secretKey.name, secretKey.toPublicKey()); - } catch (SysError & e) { + } catch (SystemError & e) { /* Ignore unreadable key files. That's normal in a multi-user installation. 
*/ } diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 0f3c37c8a..5a399c8be 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -276,7 +276,7 @@ LocalStore::LocalStore(const Params & params) [[gnu::unused]] auto res2 = ftruncate(fd.get(), settings.reservedSize); } } - } catch (SysError & e) { /* don't care about errors */ + } catch (SystemError & e) { /* don't care about errors */ } /* Acquire the big fat lock in shared mode to make sure that no diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index a494e6ecc..78e4f6d86 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -242,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats, /* Atomically replace the old file with the new hard link. */ try { renameFile(tempLink, path); - } catch (SysError & e) { + } catch (SystemError & e) { if (unlink(tempLink.c_str()) == -1) printError("unable to unlink '%1%'", tempLink); if (errno == EMLINK) { diff --git a/src/libstore/remote-fs-accessor.cc b/src/libstore/remote-fs-accessor.cc index 03e57a565..b44edfe89 100644 --- a/src/libstore/remote-fs-accessor.cc +++ b/src/libstore/remote-fs-accessor.cc @@ -87,13 +87,13 @@ std::pair, CanonPath> RemoteFSAccessor::fetch(const CanonPat nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SysError &) { } + } catch (SystemError &) { } try { auto narAccessor = makeNarAccessor(nix::readFile(cacheFile)); nars.emplace(storePath.hashPart(), narAccessor); return {narAccessor, restPath}; - } catch (SysError &) { } + } catch (SystemError &) { } } StringSink sink; diff --git a/src/libutil/args.cc b/src/libutil/args.cc index e2668c673..5187e7396 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -304,7 +304,7 @@ void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang) for (auto pos = savedArgs.begin(); pos != savedArgs.end();pos++) cmdline.push_back(*pos); } - } catch (SysError &) { } + } catch (SystemError &) { } } for (auto pos = cmdline.begin(); pos != cmdline.end(); ) { diff --git a/src/libutil/cgroup.cc b/src/libutil/cgroup.cc index 4c2bf31ff..de83b5ad1 100644 --- a/src/libutil/cgroup.cc +++ b/src/libutil/cgroup.cc @@ -95,7 +95,7 @@ static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats) using namespace std::string_literals; warn("killing stray builder process %d (%s)...", pid, trim(replaceStrings(cmdline, "\0"s, " "))); - } catch (SysError &) { + } catch (SystemError &) { } } // FIXME: pid wraparound diff --git a/src/libutil/config.cc b/src/libutil/config.cc index a3310f4ec..37f5b50c7 100644 --- a/src/libutil/config.cc +++ b/src/libutil/config.cc @@ -124,7 +124,7 @@ static void applyConfigInner(const std::string & contents, const std::string & p try { std::string includedContents = readFile(path); applyConfigInner(includedContents, p, parsedContents); - } catch (SysError &) { + } catch (SystemError &) { // TODO: Do we actually want to ignore this? Or is it better to fail? } } else if (!ignoreMissing) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 234cbe1f6..764fac1ce 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -178,20 +178,50 @@ MakeError(Error, BaseError); MakeError(UsageError, Error); MakeError(UnimplementedError, Error); -class SysError : public Error +/** + * To use in catch-blocks. + */ +MakeError(SystemError, Error); + +/** + * POSIX system error, created using `errno`, `strerror` friends. 
+ *
+ * Throw this, but prefer not to catch this, and catch `SystemError`
+ * instead. This allows implementations to freely switch between this
+ * and `WinError` without breaking catch blocks.
+ *
+ * However, it is permissible to catch this and rethrow so long as
+ * certain conditions are not met (e.g. to catch only if `errNo =
+ * EFooBar`). In that case, try to also catch the equivalent `WinError`
+ * code.
+ *
+ * @todo Rename this to `PosixError` or similar. At this point Windows
+ * support is too WIP to justify the code churn, but if it is finished
+ * then a better identifier becomes more worth it.
+ */
+class SysError : public SystemError
 {
 public:
     int errNo;
 
+    /**
+     * Construct using the explicitly-provided error number. `strerror`
+     * will be used to try to add additional information to the message.
+     */
     template
-    SysError(int errNo_, const Args & ... args)
-        : Error("")
+    SysError(int errNo, const Args & ... args)
+        : SystemError(""), errNo(errNo)
     {
-        errNo = errNo_;
         auto hf = hintfmt(args...);
         err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
     }
 
+    /**
+     * Construct using the ambient `errno`.
+     *
+     * Be sure to not perform another `errno`-modifying operation before
+     * calling this constructor!
+     */
     template
     SysError(const Args & ... args)
         : SysError(errno, args ...)
@@ -199,7 +229,9 @@ public:
     {
     }
 };
 
-/** Throw an exception for the purpose of checking that exception handling works; see 'initLibUtil()'.
+/**
+ * Throw an exception for the purpose of checking that exception
+ * handling works; see 'initLibUtil()'.
  */
 void throwExceptionSelfCheck();
 
diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc
index 38dd70c8e..692be3383 100644
--- a/src/libutil/file-descriptor.cc
+++ b/src/libutil/file-descriptor.cc
@@ -231,7 +231,7 @@ void closeMostFDs(const std::set & exceptions)
             }
         }
         return;
-    } catch (SysError &) {
+    } catch (SystemError &) {
     }
 #endif
 
diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc
index 183aee2dc..d68ddacc0 100644
--- a/src/libutil/logging.cc
+++ b/src/libutil/logging.cc
@@ -116,7 +116,7 @@ void writeToStderr(std::string_view s)
 {
     try {
         writeFull(STDERR_FILENO, s, false);
-    } catch (SysError & e) {
+    } catch (SystemError & e) {
         /* Ignore failing writes to stderr.  We need to ignore write
            errors to ensure that cleanup code that logs to stderr runs
            to completion if the other side of stderr has been closed
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 76b378e18..316105603 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -53,7 +53,7 @@ void FdSink::writeUnbuffered(std::string_view data)
     written += data.size();
     try {
         writeFull(fd, data);
-    } catch (SysError & e) {
+    } catch (SystemError & e) {
         _good = false;
         throw;
     }
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 7b4b1d031..b23362b5c 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -20,7 +20,7 @@ void initLibUtil() {
     // When exception handling fails, the message tends to be printed by the
     // C++ runtime, followed by an abort.
     // For example on macOS we might see an error such as
-    // libc++abi: terminating with uncaught exception of type nix::SysError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded.
+    // libc++abi: terminating with uncaught exception of type nix::SystemError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded.
bool caught = false; try { throwExceptionSelfCheck(); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index ee2addb72..1ad4b387c 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -148,7 +148,7 @@ static void main_nix_build(int argc, char * * argv) args.push_back(word); } } - } catch (SysError &) { } + } catch (SystemError &) { } } struct MyArgs : LegacyArgs, MixEvalArgs diff --git a/src/nix/config-check.cc b/src/nix/config-check.cc index 410feca2f..8d4717e15 100644 --- a/src/nix/config-check.cc +++ b/src/nix/config-check.cc @@ -107,7 +107,7 @@ struct CmdConfigCheck : StoreCommand if (profileDir.find("/profiles/") == std::string::npos) dirs.insert(dir); } - } catch (SysError &) {} + } catch (SystemError &) {} } if (!dirs.empty()) { diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index c6dfe63d3..8950a26d4 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -73,7 +73,7 @@ namespace nix { } - TEST(logEI, picksUpSysErrorExitCode) { + TEST(logEI, picksUpSystemErrorExitCode) { MakeError(TestError, Error); ErrorInfo::programName = std::optional("error-unit-test"); @@ -81,12 +81,12 @@ namespace nix { try { auto x = readFile(-1); } - catch (SysError &e) { + catch (SystemError &e) { testing::internal::CaptureStderr(); logError(e.info()); auto str = testing::internal::GetCapturedStderr(); - ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n"); + ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SystemError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n"); } } From 1996105e91d8d2022869c4e66c0a0734e363052b Mon Sep 17 00:00:00 2001 From: Mel Zuser Date: Fri, 12 Jan 2024 08:57:08 -0800 Subject: [PATCH 103/307] added test for empty substring special case --- tests/functional/lang/eval-okay-substring-context.exp | 1 + tests/functional/lang/eval-okay-substring-context.nix | 11 +++++++++++ 2 files changed, 12 insertions(+) create mode 100644 tests/functional/lang/eval-okay-substring-context.exp create mode 100644 tests/functional/lang/eval-okay-substring-context.nix diff --git a/tests/functional/lang/eval-okay-substring-context.exp b/tests/functional/lang/eval-okay-substring-context.exp new file mode 100644 index 000000000..2fe7f71fa --- /dev/null +++ b/tests/functional/lang/eval-okay-substring-context.exp @@ -0,0 +1 @@ +"okay" diff --git a/tests/functional/lang/eval-okay-substring-context.nix b/tests/functional/lang/eval-okay-substring-context.nix new file mode 100644 index 000000000..d0ef70d4e --- /dev/null +++ b/tests/functional/lang/eval-okay-substring-context.nix @@ -0,0 +1,11 @@ +with builtins; + +let + + s = "${builtins.derivation { name = "test"; builder = "/bin/sh"; system = "x86_64-linux"; }}"; + +in + +if getContext s == getContext "${substring 0 0 s + unsafeDiscardStringContext s}" +then "okay" +else throw "empty substring should preserve context" From b29be1ff57e6e358b2925012a13d7d4a0312560e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 10:01:55 -0800 Subject: [PATCH 104/307] Document unit tests in hacking.md --- doc/manual/src/contributing/hacking.md | 5 ++++- doc/manual/src/contributing/testing.md | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9a03ac9b6..0fa59e891 100644 --- 
a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -51,11 +51,14 @@ To install it in `$(pwd)/outputs` and test it: ```console [nix-shell]$ make install -[nix-shell]$ make installcheck -j $NIX_BUILD_CORES +[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES [nix-shell]$ nix --version nix (Nix) 2.12 ``` +For more information on running and filtering tests, see +[`testing.md`](./testing.md). + To build a release version of Nix for the current operating system and CPU architecture: ```console diff --git a/doc/manual/src/contributing/testing.md b/doc/manual/src/contributing/testing.md index d8d162379..31c39c16c 100644 --- a/doc/manual/src/contributing/testing.md +++ b/doc/manual/src/contributing/testing.md @@ -77,7 +77,7 @@ there is no risk of any build-system wildcards for the library accidentally pick ### Running tests You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`. -Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable. +Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable, e.g. `GTEST_FILTER='ErrorTraceTest.*' make check`. ### Characterisation testing { #characaterisation-testing-unit } From 2d96c7a51f04755dc22856be012bd73dec13ad13 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 11:27:31 -0800 Subject: [PATCH 105/307] Remove outdated reference to `y` in `nix-instantiate` man page --- doc/manual/src/command-ref/nix-instantiate.md | 5 ----- 1 file changed, 5 deletions(-) diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index 483150aa8..89e106bb0 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -179,12 +179,7 @@ $ nix-instantiate --eval --xml --expr '{ x = {}; }' -``` -Note that `y` is left unevaluated (the XML representation doesn’t -attempt to show non-normal forms). - -```console $ nix-instantiate --eval --xml --strict --expr '{ x = {}; }' From f73e50144f21adc9a6344bc4a5f8ded757d781fd Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 12 Jan 2024 11:31:49 -0800 Subject: [PATCH 106/307] Clarify ambiguity in `nix-instantiate` man page --- doc/manual/src/command-ref/nix-instantiate.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/doc/manual/src/command-ref/nix-instantiate.md b/doc/manual/src/command-ref/nix-instantiate.md index 89e106bb0..479c9abcf 100644 --- a/doc/manual/src/command-ref/nix-instantiate.md +++ b/doc/manual/src/command-ref/nix-instantiate.md @@ -44,9 +44,10 @@ standard input. > **Warning** > - > This option produces ambiguous output which is not suitable for machine - > consumption. For example, these two Nix expressions print the same result - > despite having different types: + > This option produces output which can be parsed as a Nix expression which + > will produce a different result than the input expression when evaluated. 
+  > For example, these two Nix expressions print the same result despite
+  > having different meaning:
   >
   > ```console
   > $ nix-instantiate --eval --expr '{ a = {}; }'
From 15f7bdaf276252f7b536c189b9b3eef73ad0e6e7 Mon Sep 17 00:00:00 2001
From: Robert Hensing
Date: Fri, 12 Jan 2024 22:55:37 +0100
Subject: [PATCH 107/307] CanonPath: Add rel_c_str()

Defensively because isRoot() is also defensive.
---
 src/libfetchers/git-utils.cc | 3 +--
 src/libutil/canon-path.hh    | 7 +++++++
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc
index bfc7059fe..6726407b5 100644
--- a/src/libfetchers/git-utils.cc
+++ b/src/libfetchers/git-utils.cc
@@ -674,8 +674,7 @@ struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor {

     bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut)
     {
-        std::string pathStr {path.rel()};
-        const char * pathCStr = pathStr.c_str();
+        const char * pathCStr = path.rel_c_str();

         if (rev) {
             git_attr_options opts = GIT_ATTR_OPTIONS_INIT;
diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh
index 6aff4ec0d..997c8c731 100644
--- a/src/libutil/canon-path.hh
+++ b/src/libutil/canon-path.hh
@@ -88,6 +88,13 @@ public:
     std::string_view rel() const
     { return ((std::string_view) path).substr(1); }

+    const char * rel_c_str() const
+    {
+        auto cs = path.c_str();
+        assert(cs[0]); // for safety if invariant is broken
+        return &cs[1];
+    }
+
     struct Iterator
     {
         std::string_view remaining;
From dd7e7b0a30a0564741c40e70f33cbf1cd6130106 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Thu, 11 Jan 2024 11:26:03 -0500
Subject: [PATCH 108/307] Newer Nixpkgs, get `readline` on Windows
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Now `nix repl` can, in principle, work on that platform too.

Flake lock file updates:

• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/2c9c58e98243930f8cb70387934daa4bc8b00373' (2023-12-31)
  → 'github:NixOS/nixpkgs/86501af7f1d51915e6c335f90f2cab73d7704ef3' (2024-01-11)
---
 flake.lock  | 6 +++---
 package.nix | 1 -
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/flake.lock b/flake.lock
index ae98d789a..65e468e8b 100644
--- a/flake.lock
+++ b/flake.lock
@@ -34,11 +34,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1704018918,
-        "narHash": "sha256-erjg/HrpC9liEfm7oLqb8GXCqsxaFwIIPqCsknW5aFY=",
+        "lastModified": 1704982786,
+        "narHash": "sha256-w62+4HyaHafLWjvrC2Eto7bSnSJQtia8oqs3//mkpCU=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "2c9c58e98243930f8cb70387934daa4bc8b00373",
+        "rev": "86501af7f1d51915e6c335f90f2cab73d7704ef3",
         "type": "github"
       },
       "original": {
diff --git a/package.nix b/package.nix
index 37410dc2f..a632fd6ec 100644
--- a/package.nix
+++ b/package.nix
@@ -236,7 +236,6 @@ in {
     openssl
     sqlite
     xz
-  ] ++ lib.optionals (!stdenv.hostPlatform.isWindows) [
     ({ inherit readline editline; }.${readlineFlavor})
   ] ++ lib.optionals enableMarkdown [
     lowdown
From e739a5002dab199a6cf207e6e62b394fa77f8cb2 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Fri, 12 Jan 2024 19:46:48 -0500
Subject: [PATCH 109/307] Avoid Windows macros in the parser and lexer

`FLOAT`, `INT`, and `IN` are identifiers taken by macros. The name
`IN_KW` is chosen to match `OR_KW`, which is presumably named that way
for the same reason of dodging macros.
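To make the clash concrete, here is a small hedged sketch; the macro definition and token values below are illustrative assumptions, not code taken from this patch or from any particular Windows header:

```cpp
// Hypothetical illustration of the collision this commit avoids.
// Some platform headers define annotation macros roughly like:
//
//     #define IN
//
// With such a macro in scope, a Bison-generated token enum like
//
//     enum yytokentype { ID = 258, IN = 259 };
//
// preprocesses to "enum yytokentype { ID = 258,  = 259 };" and no longer
// compiles. Suffixed token names stay clear of any such macro:
enum yytokentype {
    ID = 258,
    IN_KW = 259,
    INT_LIT = 260,
    FLOAT_LIT = 261,
};
```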
--- src/libexpr/lexer.l | 6 +++--- src/libexpr/parser.y | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index df2cbd06f..9addb3ae8 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -130,7 +130,7 @@ else { return ELSE; } assert { return ASSERT; } with { return WITH; } let { return LET; } -in { return IN; } +in { return IN_KW; } rec { return REC; } inherit { return INHERIT; } or { return OR_KW; } @@ -156,7 +156,7 @@ or { return OR_KW; } .errPos = data->state.positions[CUR_POS], }); } - return INT; + return INT_LIT; } {FLOAT} { errno = 0; yylval->nf = strtod(yytext, 0); @@ -165,7 +165,7 @@ or { return OR_KW; } .msg = hintfmt("invalid float '%1%'", yytext), .errPos = data->state.positions[CUR_POS], }); - return FLOAT; + return FLOAT_LIT; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index b331776f0..60bcfebf9 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -365,11 +365,11 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err %type attr %token ID %token STR IND_STR -%token INT -%token FLOAT +%token INT_LIT +%token FLOAT_LIT %token PATH HPATH SPATH PATH_END %token URI -%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW +%token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW %token DOLLAR_CURLY /* == ${ */ %token IND_STRING_OPEN IND_STRING_CLOSE %token ELLIPSIS @@ -412,7 +412,7 @@ expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } | WITH expr ';' expr_function { $$ = new ExprWith(CUR_POS, $2, $4); } - | LET binds IN expr_function + | LET binds IN_KW expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), @@ -482,8 +482,8 @@ expr_simple else $$ = new ExprVar(CUR_POS, data->symbols.create($1)); } - | INT { $$ = new ExprInt($1); } - | FLOAT { $$ = new ExprFloat($1); } + | INT_LIT { $$ = new ExprInt($1); } + | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { $$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2)); From cbd5553d57ebf5d0532047165a2d81825424bd76 Mon Sep 17 00:00:00 2001 From: Qyriad Date: Sat, 13 Jan 2024 04:20:08 -0700 Subject: [PATCH 110/307] doc: provide context in glossary definitions (#9378) --- doc/manual/src/glossary.md | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 07891175a..1fdb8b4dd 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -3,10 +3,10 @@ - [derivation]{#gloss-derivation} A description of a build task. The result of a derivation is a - store object. Derivations are typically specified in Nix expressions + store object. Derivations declared in Nix expressions are specified using the [`derivation` primitive](./language/derivations.md). These are translated into low-level *store derivations* (implicitly by - `nix-env` and `nix-build`, or explicitly by `nix-instantiate`). + `nix-build`, or explicitly by `nix-instantiate`). [derivation]: #gloss-derivation @@ -14,6 +14,7 @@ A [derivation] represented as a `.drv` file in the [store]. It has a [store path], like any [store object]. + It is the [instantiated][instantiate] form of a derivation. 
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv` @@ -23,9 +24,9 @@ - [instantiate]{#gloss-instantiate}, instantiation - Translate a [derivation] into a [store derivation]. + Save an evaluated [derivation] as a [store derivation] in the Nix [store]. - See [`nix-instantiate`](./command-ref/nix-instantiate.md). + See [`nix-instantiate`](./command-ref/nix-instantiate.md), which produces a store derivation from a Nix expression that evaluates to a derivation. [instantiate]: #gloss-instantiate @@ -66,7 +67,7 @@ From the perspective of the location where Nix is invoked, the Nix store can be referred to _local_ or _remote_. Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`. - Local stores can be used for building [derivations](#derivation). + Local stores can be used for building [derivations](#gloss-derivation). See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details. [store]: #gloss-store @@ -168,9 +169,10 @@ A high-level description of software packages and compositions thereof. Deploying software using Nix entails writing Nix - expressions for your packages. Nix expressions are translated to - derivations that are stored in the Nix store. These derivations can - then be built. + expressions for your packages. Nix expressions specify [derivations][derivation], + which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. + These derivations can then be [realised][realise] to produce + [outputs][output]. - [reference]{#gloss-reference} @@ -222,6 +224,9 @@ The [store derivation] that produced an [output path]. + The deriver for an output path can be queried with the `--deriver` option to + [`nix-store --query`](@docroot@/command-ref/nix-store/query.md). + - [validity]{#gloss-validity} A store path is valid if all [store object]s in its [closure] can be read from the [store]. From e838ac98d4fc54774bcaaa30a72cd9d3da01befc Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 13 Jan 2024 19:41:27 +0100 Subject: [PATCH 111/307] doc/glossary: Nix expression can be language expression --- doc/manual/src/glossary.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 1fdb8b4dd..870b2c3c6 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -167,12 +167,13 @@ - [Nix expression]{#gloss-nix-expression} - A high-level description of software packages and compositions - thereof. Deploying software using Nix entails writing Nix - expressions for your packages. Nix expressions specify [derivations][derivation], - which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. - These derivations can then be [realised][realise] to produce - [outputs][output]. + 1. Commonly, a high-level description of software packages and compositions + thereof. Deploying software using Nix entails writing Nix + expressions for your packages. Nix expressions specify [derivations][derivation], + which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation]. + These derivations can then be [realised][realise] to produce [outputs][output]. + + 2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression. 
- [reference]{#gloss-reference} @@ -287,3 +288,6 @@ These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting. See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md). + + +[Nix language]: ./language/index.md \ No newline at end of file From 7e5fa5c25ce585da5399038bc92980fddbb65d8b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 13 Jan 2024 20:00:06 +0100 Subject: [PATCH 112/307] doc/glossary: Define package and package attribute set A small step towards https://github.com/NixOS/nix/issues/6507 I believe this incomplete definition is one that can be agreed on. It would be nice to define more, but considering that the issue also proposes changes to the design, I believe we should hold off on those. As for the wording, we're dealing with some very general and vague terms, that have to be treated with exactly the right amount of vagueness to be effective. I start out with a fairly abstract definition of package. 1. to establish a baseline so we know what we're talking about 2. so that we can go in and clarify that we have an extra, Nix-specific definition. "Software" is notoriously ill-defined, so it makes a great qualifier for package, which we don't really want to pin down either, because that would just get us lost in discussion. We can come back to this after we've done 6057 and a few years in a desert cave. Then comes the "package attribute set" definition. I can already hear Valentin say "That's not even Nix's responsibility!" and on some days I might even agree. However, in our current reality, we have `nix-env`, `nix-build` and `nix profile`, which query the `outputName` attribute - among others - which just don't exist in the derivation. For those who can't believe what they're reading: $ nix-build --expr 'with import ./. {}; bind // {outputName = "lib";}' --no-out-link this path will be fetched (1.16 MiB download, 3.72 MiB unpacked): /nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib copying path '/nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib' from 'https://cache.nixos.org'... /nix/store/rfk6klfx3z972gavxlw6iypnj6j806ma-bind-9.18.21-lib and let me tell you that bind is not a library. So anyway, that's also proof of why calling this a "derivation attrset" would be wrong, despite the type attribute. --- doc/manual/src/glossary.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 1fdb8b4dd..5e3c0e024 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -271,6 +271,21 @@ The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute. +- [package]{#package} + + 1. A software package; typically a collection of programs, files and data. + + 2. A [package attribute set]. + +- [package attribute set]{#package-attribute-set} + + An [attribute set] containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as + - attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output), + - attributes that declare something about how the package is supposed to be installed or used, + - other metadata or arbitrary attributes. 
+
+  [package attribute set]: #package-attribute-set
+
 - [string interpolation]{#gloss-string-interpolation}

     Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
From bbcd9fcfc1216bd7d88fef7933766e616c7111d0 Mon Sep 17 00:00:00 2001
From: Cole Helbling
Date: Sat, 13 Jan 2024 11:27:04 -0800
Subject: [PATCH 113/307] Arbitrarily bring back some nix-daemon calls

This means that both `nix daemon` and `nix-daemon` will be (somewhat)
tested.
---
 tests/functional/build-remote-trustless-should-pass-1.sh | 2 +-
 tests/functional/nix-daemon-untrusting.sh                | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/functional/build-remote-trustless-should-pass-1.sh b/tests/functional/build-remote-trustless-should-pass-1.sh
index 736e280e4..516bdf092 100644
--- a/tests/functional/build-remote-trustless-should-pass-1.sh
+++ b/tests/functional/build-remote-trustless-should-pass-1.sh
@@ -2,7 +2,7 @@ source common.sh

 # Remote trusts us
 file=build-hook.nix
-prog='nix%20daemon'
+prog=nix-daemon
 proto=ssh-ng

 source build-remote-trustless.sh
diff --git a/tests/functional/nix-daemon-untrusting.sh b/tests/functional/nix-daemon-untrusting.sh
index c339b5833..bcdb70989 100755
--- a/tests/functional/nix-daemon-untrusting.sh
+++ b/tests/functional/nix-daemon-untrusting.sh
@@ -1,3 +1,3 @@
 #!/bin/sh

-exec nix daemon --force-untrusted "$@"
+exec nix-daemon --force-untrusted "$@"
From f61d951909a619b7a430d8d8aa739e310c7bf472 Mon Sep 17 00:00:00 2001
From: Las Safin
Date: Sat, 13 Jan 2024 19:27:20 +0000
Subject: [PATCH 114/307] Avoid unnecessary copy of goal log

The data was (accidentally?) copied into a std::string, even though
the string is immediately converted into a std::string_view.
The code has been changed to construct a std::string_view directly,
such that one copy less happens.
---
 src/libstore/build/worker.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 399ad47fd..974a9f510 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -449,7 +449,7 @@ void Worker::waitForInput()
                 } else {
                     printMsg(lvlVomit, "%1%: read %2% bytes",
                         goal->getName(), rd);
-                    std::string data((char *) buffer.data(), rd);
+                    std::string_view data((char *) buffer.data(), rd);
                     j->lastOutput = after;
                     goal->handleChildOutput(k, data);
                 }
From 03a6ca9b253c35b33e041dce595239968224e0d3 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Sun, 14 Jan 2024 15:25:24 -0500
Subject: [PATCH 115/307] `tests/functional/nix-profile.sh`: Add missing
 `--no-link`

Otherwise we get a stray `tests/functional/result`, which can cause
spurious failures later.

(I got a failure because the test temp dir affecting the store dir
changed. This caused a test failure later because Nix didn't want to
remove the old `result` because it wasn't pointing inside the new Nix
store.)
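As a standalone sketch of the copy-avoidance described above in "Avoid unnecessary copy of goal log"; the buffer and handler below are hypothetical stand-ins, not the actual `Worker` members:

```cpp
#include <cstdio>
#include <string>
#include <string_view>
#include <vector>

// Hypothetical stand-in for the goal's log sink; not the real Nix API.
static void handleChildOutput(std::string_view data)
{
    std::printf("got %zu bytes\n", data.size());
}

int main()
{
    std::vector<unsigned char> buffer{'l', 'o', 'g'};
    size_t rd = buffer.size();

    // Before: materializes a std::string (allocation + copy), only for the
    // callee to view it as a string_view anyway.
    std::string copied((char *) buffer.data(), rd);
    handleChildOutput(copied);

    // After: the view simply points into `buffer`, so no copy is made.
    // This is safe here because `buffer` outlives the call.
    std::string_view viewed((char *) buffer.data(), rd);
    handleChildOutput(viewed);
    return 0;
}
```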
--- tests/functional/nix-profile.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 6f304bd9a..35a62fbe2 100644 --- a/tests/functional/nix-profile.sh +++ b/tests/functional/nix-profile.sh @@ -199,6 +199,6 @@ clearProfiles mkdir -p $TEST_ROOT/import-profile outPath=$(nix build --no-link --print-out-paths $flake1Dir/flake.nix^out) printf '{ "version": 2, "elements": [ { "active": true, "attrPath": "legacyPackages.x86_64-linux.hello", "originalUrl": "flake:nixpkgs", "outputs": null, "priority": 5, "storePaths": [ "%s" ], "url": "github:NixOS/nixpkgs/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" } ] }' "$outPath" > $TEST_ROOT/import-profile/manifest.json -nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) +nix build --profile $TEST_HOME/.nix-profile $(nix store add-path $TEST_ROOT/import-profile) --no-link nix profile list | grep -A4 'Name:.*hello' | grep "Store paths:.*$outPath" nix profile remove hello 2>&1 | grep 'removed 1 packages, kept 0 packages' From dd42a4e3e9ec6d76d393e24f449f161b62579dc5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 15 Jan 2024 08:04:46 -0500 Subject: [PATCH 116/307] flake.lock: Update MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/86501af7f1d51915e6c335f90f2cab73d7704ef3' (2024-01-11) → 'github:NixOS/nixpkgs/a1982c92d8980a0114372973cbdfe0a307f1bdea' (2024-01-12) --- flake.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/flake.lock b/flake.lock index 65e468e8b..f0efb4036 100644 --- a/flake.lock +++ b/flake.lock @@ -34,11 +34,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1704982786, - "narHash": "sha256-w62+4HyaHafLWjvrC2Eto7bSnSJQtia8oqs3//mkpCU=", + "lastModified": 1705033721, + "narHash": "sha256-K5eJHmL1/kev6WuqyqqbS1cdNnSidIZ3jeqJ7GbrYnQ=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "86501af7f1d51915e6c335f90f2cab73d7704ef3", + "rev": "a1982c92d8980a0114372973cbdfe0a307f1bdea", "type": "github" }, "original": { From 9b9ecdee3424056cb854bc8f1aa49fe330c08c83 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 11 Jan 2024 23:50:03 -0500 Subject: [PATCH 117/307] Simplify RapidCheck configure No more `RAPIDCHECK_HEADERS`! --- Makefile.config.in | 1 - configure.ac | 21 +-------------------- doc/internal-api/doxygen.cfg.in | 2 +- package.nix | 2 +- 4 files changed, 3 insertions(+), 23 deletions(-) diff --git a/Makefile.config.in b/Makefile.config.in index 21a9f41ec..d5c382630 100644 --- a/Makefile.config.in +++ b/Makefile.config.in @@ -29,7 +29,6 @@ LOWDOWN_LIBS = @LOWDOWN_LIBS@ OPENSSL_LIBS = @OPENSSL_LIBS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_VERSION = @PACKAGE_VERSION@ -RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@ SHELL = @bash@ SODIUM_LIBS = @SODIUM_LIBS@ SQLITE3_LIBS = @SQLITE3_LIBS@ diff --git a/configure.ac b/configure.ac index 2594544ab..f46cff732 100644 --- a/configure.ac +++ b/configure.ac @@ -353,27 +353,8 @@ AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. PKG_CHECK_MODULES([GTEST], [gtest_main]) - # Look for rapidcheck. 
-AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK]) -# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302 -AC_LANG_PUSH(C++) -AC_SUBST(RAPIDCHECK_HEADERS) -[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"] -[LIBS="-lrapidcheck -lgtest $LIBS"] -AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include ]) -dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols -AC_LINK_IFELSE([ - AC_LANG_PROGRAM([[ - #include - #include - ]], [[ - return RUN_ALL_TESTS(); - ]]) - ], - [], - [AC_MSG_ERROR([librapidcheck is not found.])]) -AC_LANG_POP(C++) +PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest]) ]) diff --git a/doc/internal-api/doxygen.cfg.in b/doc/internal-api/doxygen.cfg.in index ad5af97e6..6c6c325bd 100644 --- a/doc/internal-api/doxygen.cfg.in +++ b/doc/internal-api/doxygen.cfg.in @@ -81,7 +81,7 @@ EXPAND_ONLY_PREDEF = YES # RECURSIVE has no effect here. # This tag requires that the tag SEARCH_INCLUDES is set to YES. -INCLUDE_PATH = @RAPIDCHECK_HEADERS@ +INCLUDE_PATH = # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # tag can be used to specify a list of macro names that should be expanded. The diff --git a/package.nix b/package.nix index a632fd6ec..a1188ba9c 100644 --- a/package.nix +++ b/package.nix @@ -309,7 +309,7 @@ in { ] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) "LDFLAGS=-fuse-ld=gold" ++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell" - ++ lib.optional buildUnitTests "RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"; + ; enableParallelBuilding = true; From beed00c04e136e8d685905e4b2b1116ecdf42f63 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 13 Jan 2024 13:08:38 -0500 Subject: [PATCH 118/307] `absPath`: just take a `std::string_view` 1. Slightly more efficient 2. Easier to call Co-authored-by: Cole Helbling --- src/libutil/canon-path.cc | 6 +++--- src/libutil/file-system.cc | 14 +++++++++++--- src/libutil/file-system.hh | 2 +- 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 1e465f1f6..0a0f96a05 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -6,11 +6,11 @@ namespace nix { CanonPath CanonPath::root = CanonPath("/"); CanonPath::CanonPath(std::string_view raw) - : path(absPath((Path) raw, "/")) + : path(absPath(raw, "/")) { } CanonPath::CanonPath(std::string_view raw, const CanonPath & root) - : path(absPath((Path) raw, root.abs())) + : path(absPath(raw, root.abs())) { } CanonPath::CanonPath(const std::vector & elems) @@ -22,7 +22,7 @@ CanonPath::CanonPath(const std::vector & elems) CanonPath CanonPath::fromCwd(std::string_view path) { - return CanonPath(unchecked_t(), absPath((Path) path)); + return CanonPath(unchecked_t(), absPath(path)); } std::optional CanonPath::parent() const diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 4cac35ace..ab8d32275 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -21,9 +21,16 @@ namespace fs = std::filesystem; namespace nix { -Path absPath(Path path, std::optional dir, bool resolveSymlinks) +Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { + std::string scratch; + if (path[0] != '/') { + // In this case we need to call `canonPath` on a newly-created + // string. We set `scratch` to that string first, and then set + // `path` to `scratch`. 
This ensures the newly-created string + // lives long enough for the call to `canonPath`, and allows us + // to just accept a `std::string_view`. if (!dir) { #ifdef __GNU__ /* GNU (aka. GNU/Hurd) doesn't have any limitation on path @@ -35,12 +42,13 @@ Path absPath(Path path, std::optional dir, bool resolveSymlinks) if (!getcwd(buf, sizeof(buf))) #endif throw SysError("cannot get cwd"); - path = concatStrings(buf, "/", path); + scratch = concatStrings(buf, "/", path); #ifdef __GNU__ free(buf); #endif } else - path = concatStrings(*dir, "/", path); + scratch = concatStrings(*dir, "/", path); + path = scratch; } return canonPath(path, resolveSymlinks); } diff --git a/src/libutil/file-system.hh b/src/libutil/file-system.hh index 4637507b3..464efc242 100644 --- a/src/libutil/file-system.hh +++ b/src/libutil/file-system.hh @@ -41,7 +41,7 @@ struct Source; * specified directory, or the current directory otherwise. The path * is also canonicalised. */ -Path absPath(Path path, +Path absPath(PathView path, std::optional dir = {}, bool resolveSymlinks = false); From f07388bf985c2440413f398cf93d5f5840d1ec8c Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 119/307] remove ParserFormals this is a proper subset of Formals anyway, so let's just use those and avoid the extra allocations and moves. --- src/libexpr/parser.y | 30 ++++++++++-------------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 60bcfebf9..b7b25854b 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -63,11 +63,6 @@ namespace nix { std::optional error; }; - struct ParserFormals { - std::vector formals; - bool ellipsis = false; - }; - } // using C a struct allows us to avoid having to define the special @@ -179,7 +174,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, } -static Formals * toFormals(ParseData & data, ParserFormals * formals, +static Formals * validateFormals(ParseData & data, Formals * formals, PosIdx pos = noPos, Symbol arg = {}) { std::sort(formals->formals.begin(), formals->formals.end(), @@ -200,18 +195,13 @@ static Formals * toFormals(ParseData & data, ParserFormals * formals, .errPos = data.state.positions[duplicate->second] }); - Formals result; - result.ellipsis = formals->ellipsis; - result.formals = std::move(formals->formals); - - if (arg && result.has(arg)) + if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]), .errPos = data.state.positions[pos] }); - delete formals; - return new Formals(std::move(result)); + return formals; } @@ -339,7 +329,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err nix::Expr * e; nix::ExprList * list; nix::ExprAttrs * attrs; - nix::ParserFormals * formals; + nix::Formals * formals; nix::Formal * formal; nix::NixInt n; nix::NixFloat nf; @@ -397,16 +387,16 @@ expr_function : ID ':' expr_function { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); } + { $$ = new ExprLambda(CUR_POS, validateFormals(*data, $2), $5); } | '{' formals '}' '@' ID ':' expr_function { auto arg = data->symbols.create($5); - $$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { auto arg = 
data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -650,11 +640,11 @@ formals : formal ',' formals { $$ = $3; $$->formals.emplace_back(*$1); delete $1; } | formal - { $$ = new ParserFormals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; } + { $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; } | - { $$ = new ParserFormals; $$->ellipsis = false; } + { $$ = new Formals; $$->ellipsis = false; } | ELLIPSIS - { $$ = new ParserFormals; $$->ellipsis = true; } + { $$ = new Formals; $$->ellipsis = true; } ; formal From e8d9de967fe47a7f9324b0022a2ef50df59f419d Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 120/307] simplify parse error reporting since nix doesn't use the bison `error` terminal anywhere any invocation of yyerror will immediately cause a failure. since we're *already* leaking tons of memory whatever little bit bison allocates internally doesn't much matter any more, and we'll be replacing the parser soon anyway. coincidentally this now also matches the error behavior of URIs when they are disabled or ~/ paths in pure eval mode, duplicate attr detection etc. --- src/libexpr/parser.y | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index b7b25854b..44fae6880 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -18,6 +18,7 @@ #include +#include "finally.hh" #include "util.hh" #include "users.hh" @@ -60,7 +61,6 @@ namespace nix { Expr * result; SourcePath basePath; PosTable::Origin origin; - std::optional error; }; } @@ -315,10 +315,10 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error) { - data->error = { + throw ParseError({ .msg = hintfmt(error), .errPos = data->state.positions[makeCurPos(*loc, data)] - }; + }); } @@ -689,11 +689,10 @@ Expr * EvalState::parse( }; yylex_init(&scanner); - yy_scan_buffer(text, length, scanner); - int res = yyparse(scanner, &data); - yylex_destroy(scanner); + Finally _destroy([&] { yylex_destroy(scanner); }); - if (res) throw ParseError(data.error.value()); + yy_scan_buffer(text, length, scanner); + yyparse(scanner, &data); data.result->bindVars(*this, staticEnv); From 1b09b80afac27c67157d4b315c237fa7bb9b8d08 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 121/307] make parser utility functions members of ParseData all of them need access to parser state in some way. make them members to allow this without fussing so much. --- src/libexpr/parser.y | 126 ++++++++++++++++++++++--------------------- 1 file changed, 66 insertions(+), 60 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 44fae6880..beb660e36 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -27,6 +27,15 @@ #include "eval-settings.hh" #include "globals.hh" +// using C a struct allows us to avoid having to define the special +// members that using string_view here would implicitly delete. 
+struct StringToken { + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } +}; + namespace nix { #define YYLTYPE ::nix::ParserLocation @@ -61,19 +70,18 @@ namespace nix { Expr * result; SourcePath basePath; PosTable::Origin origin; + + void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); + void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); + void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); + Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); + Expr * stripIndentation(const PosIdx pos, + std::vector>> && es); + PosIdx makeCurPos(const ParserLocation & loc); }; } -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } -}; - #define YY_DECL int yylex \ (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data) @@ -94,7 +102,7 @@ using namespace nix; namespace nix { -static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) +void ParseData::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", @@ -103,7 +111,7 @@ static void dupAttr(const EvalState & state, const AttrPath & attrPath, const Po }); } -static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos) +void ParseData::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), @@ -112,8 +120,7 @@ static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, cons } -static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, - Expr * e, const PosIdx pos, const nix::EvalState & state) +void ParseData::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) { AttrPath::iterator i; // All attrpaths have at least one attr @@ -126,10 +133,10 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, if (j != attrs->attrs.end()) { if (!j->second.inherited) { ExprAttrs * attrs2 = dynamic_cast(j->second.e); - if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos); + if (!attrs2) dupAttr(attrPath, pos, j->second.pos); attrs = attrs2; } else - dupAttr(state, attrPath, pos, j->second.pos); + dupAttr(attrPath, pos, j->second.pos); } else { ExprAttrs * nested = new ExprAttrs; attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); @@ -156,12 +163,12 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, for (auto & ad : ae->attrs) { auto j2 = jAttrs->attrs.find(ad.first); if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. - dupAttr(state, ad.first, j2->second.pos, ad.second.pos); + dupAttr(ad.first, j2->second.pos, ad.second.pos); jAttrs->attrs.emplace(ad.first, ad.second); } jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); } else { - dupAttr(state, attrPath, pos, j->second.pos); + dupAttr(attrPath, pos, j->second.pos); } } else { // This attr path is not defined. Let's create it. 
@@ -174,8 +181,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath, } -static Formals * validateFormals(ParseData & data, Formals * formals, - PosIdx pos = noPos, Symbol arg = {}) +Formals * ParseData::validateFormals(Formals * formals, PosIdx pos, Symbol arg) { std::sort(formals->formals.begin(), formals->formals.end(), [] (const auto & a, const auto & b) { @@ -191,21 +197,21 @@ static Formals * validateFormals(ParseData & data, Formals * formals, } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]), - .errPos = data.state.positions[duplicate->second] + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .errPos = state.positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]), - .errPos = data.state.positions[pos] + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .errPos = state.positions[pos] }); return formals; } -static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols, +Expr * ParseData::stripIndentation(const PosIdx pos, std::vector>> && es) { if (es.empty()) return new ExprString(""); @@ -302,12 +308,12 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols, } -static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) +PosIdx ParseData::makeCurPos(const ParserLocation & loc) { - return data->state.positions.add(data->origin, loc.first_line, loc.first_column); + return state.positions.add(origin, loc.first_line, loc.first_column); } -#define CUR_POS makeCurPos(*yylocp, data) +#define CUR_POS data->makeCurPos(*yylocp) } @@ -317,7 +323,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err { throw ParseError({ .msg = hintfmt(error), - .errPos = data->state.positions[makeCurPos(*loc, data)] + .errPos = data->state.positions[data->makeCurPos(*loc)] }); } @@ -387,16 +393,16 @@ expr_function : ID ':' expr_function { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, validateFormals(*data, $2), $5); } + { $$ = new ExprLambda(CUR_POS, data->validateFormals($2), $5); } | '{' formals '}' '@' ID ':' expr_function { auto arg = data->symbols.create($5); - $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $2, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { auto arg = data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, validateFormals(*data, $4, CUR_POS, arg), $7); + $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -423,21 +429,21 @@ expr_op | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } - | 
expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } + | expr_op AND expr_op { $$ = new ExprOpAnd(data->makeCurPos(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(data->makeCurPos(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = new ExprOpImpl(data->makeCurPos(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(data->makeCurPos(@2), $1, $3); } | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); } + { $$ = new ExprConcatStrings(data->makeCurPos(@2), false, new std::vector >({{data->makeCurPos(@1), $1}, {data->makeCurPos(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(data->makeCurPos(@2), $1, $3); } | expr_app ; @@ -476,12 +482,12 @@ expr_simple | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2)); + $$ = data->stripIndentation(CUR_POS, std::move(*$2)); delete $2; } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {makeCurPos(@1, data), $1}); + $2->insert($2->begin(), {data->makeCurPos(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { @@ -520,13 +526,13 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(makeCurPos(@1, data), $2); 
} + { $$ = $1; $1->emplace_back(data->makeCurPos(@2), new ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(data->makeCurPos(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1))); - $$->emplace_back(makeCurPos(@2, data), $3); + $$->emplace_back(data->makeCurPos(@1), new ExprString(std::string($1))); + $$->emplace_back(data->makeCurPos(@2), $3); } ; @@ -551,19 +557,19 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); } + : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; addAttr($$, std::move(*$2), $4, makeCurPos(@2, data), data->state); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; data->addAttr($$, std::move(*$2), $4, data->makeCurPos(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos); - auto pos = makeCurPos(@3, data); + data->dupAttr(i.symbol, data->makeCurPos(@3), $$->attrs[i.symbol].pos); + auto pos = data->makeCurPos(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -573,12 +579,12 @@ binds /* !!! Should ensure sharing of the expression in $4. */ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data))); + data->dupAttr(i.symbol, data->makeCurPos(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), data->makeCurPos(@6))); } delete $6; } - | { $$ = new ExprAttrs(makeCurPos(@0, data)); } + | { $$ = new ExprAttrs(data->makeCurPos(@0)); } ; attrs @@ -592,7 +598,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = data->state.positions[makeCurPos(@2, data)] + .errPos = data->state.positions[data->makeCurPos(@2)] }); } | { $$ = new AttrPath; } From 007605616477f4f0d8a0064c375b1d3cf6188ac5 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 122/307] move ParseData to own header, rename to ParserState ParserState better describes what this struct really is. the parser really does modify its state (most notably position and symbol tables), so calling it that rather than obliquely "data" (which implies being input only) makes sense. 
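Taken together, these parser patches converge on one shape: all mutable parsing state lives in a single struct, the helpers that need it are members, and errors are reported by throwing immediately, since the grammar has no recovery path. A compressed sketch of that shape, using simplified stand-in types rather than the real Nix definitions:

```cpp
#include <stdexcept>
#include <string>
#include <vector>

// Simplified stand-ins; the real ParserState carries symbol and position
// tables and throws nix::ParseError with source positions attached.
struct ParseError : std::runtime_error {
    using std::runtime_error::runtime_error;
};

struct ParserState {
    std::vector<std::string> symbols;  // stand-in for the symbol table
    std::string origin;                // stand-in for position/origin info

    size_t createSymbol(std::string name)
    {
        symbols.push_back(std::move(name));
        return symbols.size() - 1;
    }

    // Helpers report problems by throwing; there is no error recovery.
    [[noreturn]] void dupAttr(const std::string & attr)
    {
        throw ParseError("attribute '" + attr + "' already defined in " + origin);
    }
};

int main()
{
    ParserState st;
    st.origin = "example.nix";
    st.createSymbol("x");
    try {
        st.dupAttr("x");
    } catch (const ParseError & e) {
        // e.what() == "attribute 'x' already defined in example.nix"
    }
    return 0;
}
```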
--- src/libexpr/lexer.l | 17 +- src/libexpr/parser-state.hh | 262 +++++++++++++++++++++++ src/libexpr/parser.y | 413 +++++++----------------------------- 3 files changed, 339 insertions(+), 353 deletions(-) create mode 100644 src/libexpr/parser-state.hh diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index 9addb3ae8..cfd61c90e 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -29,12 +29,7 @@ using namespace nix; namespace nix { -static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data) -{ - return data->state.positions.add(data->origin, loc.first_line, loc.first_column); -} - -#define CUR_POS makeCurPos(*yylloc, data) +#define CUR_POS state->makeCurPos(*yylloc) static void initLoc(YYLTYPE * loc) { @@ -153,7 +148,7 @@ or { return OR_KW; } } catch (const boost::bad_lexical_cast &) { throw ParseError({ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); } return INT_LIT; @@ -163,7 +158,7 @@ or { return OR_KW; } if (errno != 0) throw ParseError({ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); return FLOAT_LIT; } @@ -186,7 +181,7 @@ or { return OR_KW; } /* It is impossible to match strings ending with '$' with one regex because trailing contexts are only valid at the end of a rule. (A sane but undocumented limitation.) */ - yylval->str = unescapeStr(data->symbols, yytext, yyleng); + yylval->str = unescapeStr(state->symbols, yytext, yyleng); return STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -214,7 +209,7 @@ or { return OR_KW; } return IND_STR; } \'\'\\{ANY} { - yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2); + yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2); return IND_STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -292,7 +287,7 @@ or { return OR_KW; } <> { throw ParseError({ .msg = hintfmt("path has a trailing slash"), - .errPos = data->state.positions[CUR_POS], + .errPos = state->state.positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh new file mode 100644 index 000000000..b33311743 --- /dev/null +++ b/src/libexpr/parser-state.hh @@ -0,0 +1,262 @@ +#pragma once + +#include "eval.hh" + +namespace nix { + +// using C a struct allows us to avoid having to define the special +// members that using string_view here would implicitly delete. 
+struct StringToken { + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } +}; + +struct ParserLocation { + int first_line, first_column; + int last_line, last_column; + + // backup to recover from yyless(0) + int stashed_first_line, stashed_first_column; + int stashed_last_line, stashed_last_column; + + void stash() { + stashed_first_line = first_line; + stashed_first_column = first_column; + stashed_last_line = last_line; + stashed_last_column = last_column; + } + + void unstash() { + first_line = stashed_first_line; + first_column = stashed_first_column; + last_line = stashed_last_line; + last_column = stashed_last_column; + } +}; + +struct ParserState { + EvalState & state; + SymbolTable & symbols; + Expr * result; + SourcePath basePath; + PosTable::Origin origin; + + void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); + void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); + void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); + Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); + Expr * stripIndentation(const PosIdx pos, + std::vector>> && es); + PosIdx makeCurPos(const ParserLocation & loc); +}; + +inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) +{ + throw ParseError({ + .msg = hintfmt("attribute '%1%' already defined at %2%", + showAttrPath(state.symbols, attrPath), state.positions[prevPos]), + .errPos = state.positions[pos] + }); +} + +inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) +{ + throw ParseError({ + .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), + .errPos = state.positions[pos] + }); +} + +inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) +{ + AttrPath::iterator i; + // All attrpaths have at least one attr + assert(!attrPath.empty()); + // Checking attrPath validity. + // =========================== + for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { + if (i->symbol) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); + if (j != attrs->attrs.end()) { + if (!j->second.inherited) { + ExprAttrs * attrs2 = dynamic_cast(j->second.e); + if (!attrs2) dupAttr(attrPath, pos, j->second.pos); + attrs = attrs2; + } else + dupAttr(attrPath, pos, j->second.pos); + } else { + ExprAttrs * nested = new ExprAttrs; + attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); + attrs = nested; + } + } else { + ExprAttrs *nested = new ExprAttrs; + attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); + attrs = nested; + } + } + // Expr insertion. + // ========================== + if (i->symbol) { + ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); + if (j != attrs->attrs.end()) { + // This attr path is already defined. However, if both + // e and the expr pointed by the attr path are two attribute sets, + // we want to merge them. + // Otherwise, throw an error. + auto ae = dynamic_cast(e); + auto jAttrs = dynamic_cast(j->second.e); + if (jAttrs && ae) { + for (auto & ad : ae->attrs) { + auto j2 = jAttrs->attrs.find(ad.first); + if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. 
+ dupAttr(ad.first, j2->second.pos, ad.second.pos); + jAttrs->attrs.emplace(ad.first, ad.second); + } + jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); + } else { + dupAttr(attrPath, pos, j->second.pos); + } + } else { + // This attr path is not defined. Let's create it. + attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos)); + e->setName(i->symbol); + } + } else { + attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); + } +} + +inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg) +{ + std::sort(formals->formals.begin(), formals->formals.end(), + [] (const auto & a, const auto & b) { + return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); + }); + + std::optional> duplicate; + for (size_t i = 0; i + 1 < formals->formals.size(); i++) { + if (formals->formals[i].name != formals->formals[i + 1].name) + continue; + std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos}; + duplicate = std::min(thisDup, duplicate.value_or(thisDup)); + } + if (duplicate) + throw ParseError({ + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .errPos = state.positions[duplicate->second] + }); + + if (arg && formals->has(arg)) + throw ParseError({ + .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .errPos = state.positions[pos] + }); + + return formals; +} + +inline Expr * ParserState::stripIndentation(const PosIdx pos, + std::vector>> && es) +{ + if (es.empty()) return new ExprString(""); + + /* Figure out the minimum indentation. Note that by design + whitespace-only final lines are not taken into account. (So + the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */ + bool atStartOfLine = true; /* = seen only whitespace in the current line */ + size_t minIndent = 1000000; + size_t curIndent = 0; + for (auto & [i_pos, i] : es) { + auto * str = std::get_if(&i); + if (!str || !str->hasIndentation) { + /* Anti-quotations and escaped characters end the current start-of-line whitespace. */ + if (atStartOfLine) { + atStartOfLine = false; + if (curIndent < minIndent) minIndent = curIndent; + } + continue; + } + for (size_t j = 0; j < str->l; ++j) { + if (atStartOfLine) { + if (str->p[j] == ' ') + curIndent++; + else if (str->p[j] == '\n') { + /* Empty line, doesn't influence minimum + indentation. */ + curIndent = 0; + } else { + atStartOfLine = false; + if (curIndent < minIndent) minIndent = curIndent; + } + } else if (str->p[j] == '\n') { + atStartOfLine = true; + curIndent = 0; + } + } + } + + /* Strip spaces from each line. */ + auto * es2 = new std::vector>; + atStartOfLine = true; + size_t curDropped = 0; + size_t n = es.size(); + auto i = es.begin(); + const auto trimExpr = [&] (Expr * e) { + atStartOfLine = false; + curDropped = 0; + es2->emplace_back(i->first, e); + }; + const auto trimString = [&] (const StringToken & t) { + std::string s2; + for (size_t j = 0; j < t.l; ++j) { + if (atStartOfLine) { + if (t.p[j] == ' ') { + if (curDropped++ >= minIndent) + s2 += t.p[j]; + } + else if (t.p[j] == '\n') { + curDropped = 0; + s2 += t.p[j]; + } else { + atStartOfLine = false; + curDropped = 0; + s2 += t.p[j]; + } + } else { + s2 += t.p[j]; + if (t.p[j] == '\n') atStartOfLine = true; + } + } + + /* Remove the last line if it is empty and consists only of + spaces. 
*/ + if (n == 1) { + std::string::size_type p = s2.find_last_of('\n'); + if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos) + s2 = std::string(s2, 0, p + 1); + } + + es2->emplace_back(i->first, new ExprString(std::move(s2))); + }; + for (; i != es.end(); ++i, --n) { + std::visit(overloaded { trimExpr, trimString }, i->second); + } + + /* If this is a single string, then don't do a concatenation. */ + if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { + auto *const result = (*es2)[0].second; + delete es2; + return result; + } + return new ExprConcatStrings(pos, true, es2); +} + +inline PosIdx ParserState::makeCurPos(const ParserLocation & loc) +{ + return state.positions.add(origin, loc.first_line, loc.first_column); +} + +} diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index beb660e36..7ce493df5 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -5,9 +5,9 @@ %defines /* %no-lines */ %parse-param { void * scanner } -%parse-param { nix::ParseData * data } +%parse-param { nix::ParserState * state } %lex-param { void * scanner } -%lex-param { nix::ParseData * data } +%lex-param { nix::ParserState * state } %expect 1 %expect-rr 1 @@ -26,64 +26,11 @@ #include "eval.hh" #include "eval-settings.hh" #include "globals.hh" - -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } -}; - -namespace nix { +#include "parser-state.hh" #define YYLTYPE ::nix::ParserLocation - struct ParserLocation - { - int first_line, first_column; - int last_line, last_column; - - // backup to recover from yyless(0) - int stashed_first_line, stashed_first_column; - int stashed_last_line, stashed_last_column; - - void stash() { - stashed_first_line = first_line; - stashed_first_column = first_column; - stashed_last_line = last_line; - stashed_last_column = last_column; - } - - void unstash() { - first_line = stashed_first_line; - first_column = stashed_first_column; - last_line = stashed_last_line; - last_column = stashed_last_column; - } - }; - - struct ParseData - { - EvalState & state; - SymbolTable & symbols; - Expr * result; - SourcePath basePath; - PosTable::Origin origin; - - void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); - void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); - void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos); - Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * stripIndentation(const PosIdx pos, - std::vector>> && es); - PosIdx makeCurPos(const ParserLocation & loc); - }; - -} - #define YY_DECL int yylex \ - (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data) + (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state) #endif @@ -98,232 +45,14 @@ YY_DECL; using namespace nix; - -namespace nix { +#define CUR_POS state->makeCurPos(*yylocp) -void ParseData::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) -{ - throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", - showAttrPath(state.symbols, attrPath), state.positions[prevPos]), - .errPos = state.positions[pos] - }); -} - -void ParseData::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) -{ - throw ParseError({ - .msg = 
hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), - .errPos = state.positions[pos] - }); -} - - -void ParseData::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos) -{ - AttrPath::iterator i; - // All attrpaths have at least one attr - assert(!attrPath.empty()); - // Checking attrPath validity. - // =========================== - for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) { - if (i->symbol) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); - if (j != attrs->attrs.end()) { - if (!j->second.inherited) { - ExprAttrs * attrs2 = dynamic_cast(j->second.e); - if (!attrs2) dupAttr(attrPath, pos, j->second.pos); - attrs = attrs2; - } else - dupAttr(attrPath, pos, j->second.pos); - } else { - ExprAttrs * nested = new ExprAttrs; - attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos); - attrs = nested; - } - } else { - ExprAttrs *nested = new ExprAttrs; - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos)); - attrs = nested; - } - } - // Expr insertion. - // ========================== - if (i->symbol) { - ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol); - if (j != attrs->attrs.end()) { - // This attr path is already defined. However, if both - // e and the expr pointed by the attr path are two attribute sets, - // we want to merge them. - // Otherwise, throw an error. - auto ae = dynamic_cast(e); - auto jAttrs = dynamic_cast(j->second.e); - if (jAttrs && ae) { - for (auto & ad : ae->attrs) { - auto j2 = jAttrs->attrs.find(ad.first); - if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error. - dupAttr(ad.first, j2->second.pos, ad.second.pos); - jAttrs->attrs.emplace(ad.first, ad.second); - } - jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end()); - } else { - dupAttr(attrPath, pos, j->second.pos); - } - } else { - // This attr path is not defined. Let's create it. - attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos)); - e->setName(i->symbol); - } - } else { - attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos)); - } -} - - -Formals * ParseData::validateFormals(Formals * formals, PosIdx pos, Symbol arg) -{ - std::sort(formals->formals.begin(), formals->formals.end(), - [] (const auto & a, const auto & b) { - return std::tie(a.name, a.pos) < std::tie(b.name, b.pos); - }); - - std::optional> duplicate; - for (size_t i = 0; i + 1 < formals->formals.size(); i++) { - if (formals->formals[i].name != formals->formals[i + 1].name) - continue; - std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos}; - duplicate = std::min(thisDup, duplicate.value_or(thisDup)); - } - if (duplicate) - throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = state.positions[duplicate->second] - }); - - if (arg && formals->has(arg)) - throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = state.positions[pos] - }); - - return formals; -} - - -Expr * ParseData::stripIndentation(const PosIdx pos, - std::vector>> && es) -{ - if (es.empty()) return new ExprString(""); - - /* Figure out the minimum indentation. Note that by design - whitespace-only final lines are not taken into account. (So - the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) 
*/ - bool atStartOfLine = true; /* = seen only whitespace in the current line */ - size_t minIndent = 1000000; - size_t curIndent = 0; - for (auto & [i_pos, i] : es) { - auto * str = std::get_if(&i); - if (!str || !str->hasIndentation) { - /* Anti-quotations and escaped characters end the current start-of-line whitespace. */ - if (atStartOfLine) { - atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; - } - continue; - } - for (size_t j = 0; j < str->l; ++j) { - if (atStartOfLine) { - if (str->p[j] == ' ') - curIndent++; - else if (str->p[j] == '\n') { - /* Empty line, doesn't influence minimum - indentation. */ - curIndent = 0; - } else { - atStartOfLine = false; - if (curIndent < minIndent) minIndent = curIndent; - } - } else if (str->p[j] == '\n') { - atStartOfLine = true; - curIndent = 0; - } - } - } - - /* Strip spaces from each line. */ - auto * es2 = new std::vector>; - atStartOfLine = true; - size_t curDropped = 0; - size_t n = es.size(); - auto i = es.begin(); - const auto trimExpr = [&] (Expr * e) { - atStartOfLine = false; - curDropped = 0; - es2->emplace_back(i->first, e); - }; - const auto trimString = [&] (const StringToken & t) { - std::string s2; - for (size_t j = 0; j < t.l; ++j) { - if (atStartOfLine) { - if (t.p[j] == ' ') { - if (curDropped++ >= minIndent) - s2 += t.p[j]; - } - else if (t.p[j] == '\n') { - curDropped = 0; - s2 += t.p[j]; - } else { - atStartOfLine = false; - curDropped = 0; - s2 += t.p[j]; - } - } else { - s2 += t.p[j]; - if (t.p[j] == '\n') atStartOfLine = true; - } - } - - /* Remove the last line if it is empty and consists only of - spaces. */ - if (n == 1) { - std::string::size_type p = s2.find_last_of('\n'); - if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos) - s2 = std::string(s2, 0, p + 1); - } - - es2->emplace_back(i->first, new ExprString(std::move(s2))); - }; - for (; i != es.end(); ++i, --n) { - std::visit(overloaded { trimExpr, trimString }, i->second); - } - - /* If this is a single string, then don't do a concatenation. */ - if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { - auto *const result = (*es2)[0].second; - delete es2; - return result; - } - return new ExprConcatStrings(pos, true, es2); -} - - -PosIdx ParseData::makeCurPos(const ParserLocation & loc) -{ - return state.positions.add(origin, loc.first_line, loc.first_column); -} - -#define CUR_POS data->makeCurPos(*yylocp) - - -} - - -void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error) +void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ .msg = hintfmt(error), - .errPos = data->state.positions[data->makeCurPos(*loc)] + .errPos = state->state.positions[state->makeCurPos(*loc)] }); } @@ -339,13 +68,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err nix::Formal * formal; nix::NixInt n; nix::NixFloat nf; - StringToken id; // !!! -> Symbol - StringToken path; - StringToken uri; - StringToken str; + nix::StringToken id; // !!! 
-> Symbol + nix::StringToken path; + nix::StringToken uri; + nix::StringToken str; std::vector * attrNames; std::vector> * string_parts; - std::vector>> * ind_string_parts; + std::vector>> * ind_string_parts; } %type start expr expr_function expr_if expr_op @@ -385,24 +114,24 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err %% -start: expr { data->result = $1; }; +start: expr { state->result = $1; }; expr: expr_function; expr_function : ID ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); } + { $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); } | '{' formals '}' ':' expr_function - { $$ = new ExprLambda(CUR_POS, data->validateFormals($2), $5); } + { $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); } | '{' formals '}' '@' ID ':' expr_function { - auto arg = data->symbols.create($5); - $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($2, CUR_POS, arg), $7); + auto arg = state->symbols.create($5); + $$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7); } | ID '@' '{' formals '}' ':' expr_function { - auto arg = data->symbols.create($1); - $$ = new ExprLambda(CUR_POS, arg, data->validateFormals($4, CUR_POS, arg), $7); + auto arg = state->symbols.create($1); + $$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7); } | ASSERT expr ';' expr_function { $$ = new ExprAssert(CUR_POS, $2, $4); } @@ -412,7 +141,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = data->state.positions[CUR_POS] + .errPos = state->state.positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -426,24 +155,24 @@ expr_if expr_op : '!' expr_op %prec NOT { $$ = new ExprOpNot($2); } - | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); } + | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(data->makeCurPos(@2), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(data->makeCurPos(@2), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(data->makeCurPos(@2), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(data->makeCurPos(@2), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new 
ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + | expr_op AND expr_op { $$ = new ExprOpAnd(state->makeCurPos(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(state->makeCurPos(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->makeCurPos(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->makeCurPos(@2), $1, $3); } | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(data->makeCurPos(@2), false, new std::vector >({{data->makeCurPos(@1), $1}, {data->makeCurPos(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(data->makeCurPos(@2), new ExprVar(data->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(data->makeCurPos(@2), $1, $3); } + { $$ = new ExprConcatStrings(state->makeCurPos(@2), false, new std::vector >({{state->makeCurPos(@1), $1}, {state->makeCurPos(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->makeCurPos(@2), $1, $3); } | expr_app ; @@ -466,7 +195,7 @@ expr_select | /* Backwards compatibility: because Nixpkgs has a rarely used function named ‘or’, allow stuff like ‘map or [...]’. */ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); } + { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->symbols.create("or"))}); } | expr_simple ; @@ -476,25 +205,25 @@ expr_simple if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0) $$ = new ExprPos(CUR_POS); else - $$ = new ExprVar(CUR_POS, data->symbols.create($1)); + $$ = new ExprVar(CUR_POS, state->symbols.create($1)); } | INT_LIT { $$ = new ExprInt($1); } | FLOAT_LIT { $$ = new ExprFloat($1); } | '"' string_parts '"' { $$ = $2; } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = data->stripIndentation(CUR_POS, std::move(*$2)); + $$ = state->stripIndentation(CUR_POS, std::move(*$2)); delete $2; } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {data->makeCurPos(@1), $1}); + $2->insert($2->begin(), {state->makeCurPos(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { std::string path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, - new ExprVar(data->symbols.create("__findFile")), - {new ExprVar(data->symbols.create("__nixPath")), + new ExprVar(state->symbols.create("__findFile")), + {new ExprVar(state->symbols.create("__nixPath")), new ExprString(std::move(path))}); } | URI { @@ -502,7 +231,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = data->state.positions[CUR_POS] + .errPos = state->state.positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -510,7 +239,7 @@ expr_simple /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. 
*/ | LET '{' binds '}' - { $3->recursive = true; $$ = new ExprSelect(noPos, $3, data->symbols.create("body")); } + { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->symbols.create("body")); } | REC '{' binds '}' { $3->recursive = true; $$ = $3; } | '{' binds '}' @@ -526,23 +255,23 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(data->makeCurPos(@2), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(data->makeCurPos(@1), $2); } + { $$ = $1; $1->emplace_back(state->makeCurPos(@2), new ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->makeCurPos(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(data->makeCurPos(@1), new ExprString(std::string($1))); - $$->emplace_back(data->makeCurPos(@2), $3); + $$->emplace_back(state->makeCurPos(@1), new ExprString(std::string($1))); + $$->emplace_back(state->makeCurPos(@2), $3); } ; path_start : PATH { - Path path(absPath({$1.p, $1.l}, data->basePath.path.abs())); + Path path(absPath({$1.p, $1.l}, state->basePath.path.abs())); /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new ExprPath(ref(data->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -552,24 +281,24 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(ref(data->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); } ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(data->makeCurPos(@2), $3); } + : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; data->addAttr($$, std::move(*$2), $4, data->makeCurPos(@2)); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->makeCurPos(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - data->dupAttr(i.symbol, data->makeCurPos(@3), $$->attrs[i.symbol].pos); - auto pos = data->makeCurPos(@3); + state->dupAttr(i.symbol, state->makeCurPos(@3), $$->attrs[i.symbol].pos); + auto pos = state->makeCurPos(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -579,48 +308,48 @@ binds /* !!! Should ensure sharing of the expression in $4. 
*/ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - data->dupAttr(i.symbol, data->makeCurPos(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), data->makeCurPos(@6))); + state->dupAttr(i.symbol, state->makeCurPos(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->makeCurPos(@6))); } delete $6; } - | { $$ = new ExprAttrs(data->makeCurPos(@0)); } + | { $$ = new ExprAttrs(state->makeCurPos(@0)); } ; attrs - : attrs attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($2))); } + : attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); } | attrs string_attr { $$ = $1; ExprString * str = dynamic_cast($2); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = data->state.positions[data->makeCurPos(@2)] + .errPos = state->state.positions[state->makeCurPos(@2)] }); } | { $$ = new AttrPath; } ; attrpath - : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($3))); } + : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); } | attrpath '.' string_attr { $$ = $1; ExprString * str = dynamic_cast($3); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else $$->push_back(AttrName($3)); } - | attr { $$ = new std::vector; $$->push_back(AttrName(data->symbols.create($1))); } + | attr { $$ = new std::vector; $$->push_back(AttrName(state->symbols.create($1))); } | string_attr { $$ = new std::vector; ExprString *str = dynamic_cast($1); if (str) { - $$->push_back(AttrName(data->symbols.create(str->s))); + $$->push_back(AttrName(state->symbols.create(str->s))); delete str; } else $$->push_back(AttrName($1)); @@ -654,8 +383,8 @@ formals ; formal - : ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; } - | ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; } + : ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; } + | ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; } ; %% @@ -687,7 +416,7 @@ Expr * EvalState::parse( std::shared_ptr & staticEnv) { yyscan_t scanner; - ParseData data { + ParserState state { .state = *this, .symbols = symbols, .basePath = basePath, @@ -698,11 +427,11 @@ Expr * EvalState::parse( Finally _destroy([&] { yylex_destroy(scanner); }); yy_scan_buffer(text, length, scanner); - yyparse(scanner, &data); + yyparse(scanner, &state); - data.result->bindVars(*this, staticEnv); + state.result->bindVars(*this, staticEnv); - return data.result; + return state.result; } From 835a6c7bcfd0b22acc16f31de5fc7bb650d52017 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 123/307] rename ParserState::{makeCurPos -> at} most instances of this being used do not refer to the "current" position, sometimes not even to one reasonably close by. it could also be called `makePos` instead, but `at` seems clear in context. 
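
For illustration (a before/after pair lifted from the grammar actions in the diff below; behaviour is unchanged, only the name is):

    /* before: reads as "make the *current* position", even though @2 is the
       location of the operator token in the middle of the rule */
    | expr_op AND expr_op { $$ = new ExprOpAnd(state->makeCurPos(@2), $1, $3); }

    /* after: reads as "the position at @2" */
    | expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); }
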
--- src/libexpr/lexer.l | 2 +- src/libexpr/parser-state.hh | 4 +-- src/libexpr/parser.y | 60 ++++++++++++++++++------------------- 3 files changed, 33 insertions(+), 33 deletions(-) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index cfd61c90e..fae0e7a85 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -29,7 +29,7 @@ using namespace nix; namespace nix { -#define CUR_POS state->makeCurPos(*yylloc) +#define CUR_POS state->at(*yylloc) static void initLoc(YYLTYPE * loc) { diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index b33311743..167d3f4ae 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -49,7 +49,7 @@ struct ParserState { Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {}); Expr * stripIndentation(const PosIdx pos, std::vector>> && es); - PosIdx makeCurPos(const ParserLocation & loc); + PosIdx at(const ParserLocation & loc); }; inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) @@ -254,7 +254,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, return new ExprConcatStrings(pos, true, es2); } -inline PosIdx ParserState::makeCurPos(const ParserLocation & loc) +inline PosIdx ParserState::at(const ParserLocation & loc) { return state.positions.add(origin, loc.first_line, loc.first_column); } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 7ce493df5..7763a72bc 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -45,14 +45,14 @@ YY_DECL; using namespace nix; -#define CUR_POS state->makeCurPos(*yylocp) +#define CUR_POS state->at(*yylocp) void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ .msg = hintfmt(error), - .errPos = state->state.positions[state->makeCurPos(*loc)] + .errPos = state->state.positions[state->at(*loc)] }); } @@ -158,21 +158,21 @@ expr_op | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } - | expr_op AND expr_op { $$ = new ExprOpAnd(state->makeCurPos(@2), $1, $3); } - | expr_op OR expr_op { $$ = new ExprOpOr(state->makeCurPos(@2), $1, $3); } - | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->makeCurPos(@2), $1, $3); } - | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->makeCurPos(@2), $1, $3); } + | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + 
| expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); } + | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } + | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } + | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); } | expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(state->makeCurPos(@2), false, new std::vector >({{state->makeCurPos(@1), $1}, {state->makeCurPos(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(state->makeCurPos(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } - | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->makeCurPos(@2), $1, $3); } + { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } + | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); } | expr_app ; @@ -216,7 +216,7 @@ expr_simple } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {state->makeCurPos(@1), $1}); + $2->insert($2->begin(), {state->at(@1), $1}); $$ = new ExprConcatStrings(CUR_POS, false, $2); } | SPATH { @@ -255,13 +255,13 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(state->makeCurPos(@2), new ExprString(std::string($2))); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->makeCurPos(@1), $2); } + { $$ = $1; $1->emplace_back(state->at(@2), new ExprString(std::string($2))); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } + | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$ = new std::vector>; - $$->emplace_back(state->makeCurPos(@1), new ExprString(std::string($1))); - $$->emplace_back(state->makeCurPos(@2), $3); + $$->emplace_back(state->at(@1), new ExprString(std::string($1))); + $$->emplace_back(state->at(@2), $3); } ; @@ -286,19 +286,19 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->makeCurPos(@2), $3); } + : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } | { $$ = new std::vector>>; } ; binds - : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->makeCurPos(@2)); delete $2; } + : binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; } | binds INHERIT attrs ';' { $$ = $1; for (auto & i : *$3) { if ($$->attrs.find(i.symbol) 
!= $$->attrs.end()) - state->dupAttr(i.symbol, state->makeCurPos(@3), $$->attrs[i.symbol].pos); - auto pos = state->makeCurPos(@3); + state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos); + auto pos = state->at(@3); $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true)); } delete $3; @@ -308,12 +308,12 @@ binds /* !!! Should ensure sharing of the expression in $4. */ for (auto & i : *$6) { if ($$->attrs.find(i.symbol) != $$->attrs.end()) - state->dupAttr(i.symbol, state->makeCurPos(@6), $$->attrs[i.symbol].pos); - $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->makeCurPos(@6))); + state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos); + $$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6))); } delete $6; } - | { $$ = new ExprAttrs(state->makeCurPos(@0)); } + | { $$ = new ExprAttrs(state->at(@0)); } ; attrs @@ -327,7 +327,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->state.positions[state->makeCurPos(@2)] + .errPos = state->state.positions[state->at(@2)] }); } | { $$ = new AttrPath; } From e1aa585964c3d864ebff0030584f3349a539d615 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 124/307] slim down parser.y most EvalState and Expr members defined here could be elsewhere, where they'd be easier to maintain (not being embedded in a file with arcane syntax) and *somewhat* more faithfully placed according to the path of the file they're defined in. --- src/libexpr/eval.cc | 164 ++++++++++++++++++++++++++++++++++++++ src/libexpr/nixexpr.cc | 2 + src/libexpr/parser.y | 176 ----------------------------------------- 3 files changed, 166 insertions(+), 176 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0659a2173..6eee7cdce 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -20,6 +20,8 @@ #include "gc-small-vector.hh" #include "url.hh" #include "fetch-to-store.hh" +#include "tarball.hh" +#include "flake/flakeref.hh" #include #include @@ -2636,6 +2638,168 @@ void EvalState::printStatistics() } +SourcePath resolveExprPath(SourcePath path) +{ + unsigned int followCount = 0, maxFollow = 1024; + + /* If `path' is a symlink, follow it. This is so that relative + path references work. */ + while (!path.path.isRoot()) { + // Basic cycle/depth limit to avoid infinite loops. + if (++followCount >= maxFollow) + throw Error("too many symbolic links encountered while traversing the path '%s'", path); + auto p = path.parent().resolveSymlinks() + path.baseName(); + if (p.lstat().type != InputAccessor::tSymlink) break; + path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; + } + + /* If `path' refers to a directory, append `/default.nix'. 
*/ + if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) + return path + "default.nix"; + + return path; +} + + +Expr * EvalState::parseExprFromFile(const SourcePath & path) +{ + return parseExprFromFile(path, staticBaseEnv); +} + + +Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +{ + auto buffer = path.resolveSymlinks().readFile(); + // readFile hopefully have left some extra space for terminators + buffer.append("\0\0", 2); + return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); +} + + +Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) +{ + auto s = make_ref(std::move(s_)); + s->append("\0\0", 2); + return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv); +} + + +Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) +{ + return parseExprFromString(std::move(s), basePath, staticBaseEnv); +} + + +Expr * EvalState::parseStdin() +{ + //Activity act(*logger, lvlTalkative, "parsing standard input"); + auto buffer = drainFD(0); + // drainFD should have left some extra space for terminators + buffer.append("\0\0", 2); + auto s = make_ref(std::move(buffer)); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); +} + + +SourcePath EvalState::findFile(const std::string_view path) +{ + return findFile(searchPath, path); +} + + +SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos) +{ + for (auto & i : searchPath.elements) { + auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); + + if (!suffixOpt) continue; + auto suffix = *suffixOpt; + + auto rOpt = resolveSearchPathPath(i.path); + if (!rOpt) continue; + auto r = *rOpt; + + Path res = suffix == "" ? r : concatStrings(r, "/", suffix); + if (pathExists(res)) return rootPath(CanonPath(canonPath(res))); + } + + if (hasPrefix(path, "nix/")) + return {corepkgsFS, CanonPath(path.substr(3))}; + + debugThrow(ThrownError({ + .msg = hintfmt(evalSettings.pureEval + ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" + : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", + path), + .errPos = positions[pos] + }), 0, 0); +} + + +std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) +{ + auto & value = value0.s; + auto i = searchPathResolved.find(value); + if (i != searchPathResolved.end()) return i->second; + + std::optional res; + + if (EvalSettings::isPseudoUrl(value)) { + try { + auto storePath = fetchers::downloadTarball( + store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath; + res = { store->toRealPath(storePath) }; + } catch (FileTransferError & e) { + logWarning({ + .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) + }); + } + } + + else if (hasPrefix(value, "flake:")) { + experimentalFeatureSettings.require(Xp::Flakes); + auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false); + debug("fetching flake search path element '%s''", value); + auto storePath = flakeRef.resolve(store).fetchTree(store).first; + res = { store->toRealPath(storePath) }; + } + + else { + auto path = absPath(value); + + /* Allow access to paths in the search path. 
*/ + if (initAccessControl) { + allowPath(path); + if (store->isInStore(path)) { + try { + StorePathSet closure; + store->computeFSClosure(store->toStorePath(path).first, closure); + for (auto & p : closure) + allowPath(p); + } catch (InvalidPath &) { } + } + } + + if (pathExists(path)) + res = { path }; + else { + logWarning({ + .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) + }); + res = std::nullopt; + } + } + + if (res) + debug("resolved search path element '%s' to '%s'", value, *res); + else + debug("failed to resolve search path element '%s'", value); + + searchPathResolved.emplace(value, res); + return res; +} + + std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 964de6351..6fe4ba81b 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -9,6 +9,8 @@ namespace nix { +unsigned long Expr::nrExprs = 0; + ExprBlackHole eBlackHole; // FIXME: remove, because *symbols* are abstract and do not have a single diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 7763a72bc..519d6b11f 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -389,25 +389,11 @@ formal %% - -#include -#include -#include -#include - #include "eval.hh" -#include "filetransfer.hh" -#include "tarball.hh" -#include "store-api.hh" -#include "flake/flake.hh" -#include "fs-input-accessor.hh" -#include "memory-input-accessor.hh" namespace nix { -unsigned long Expr::nrExprs = 0; - Expr * EvalState::parse( char * text, size_t length, @@ -435,166 +421,4 @@ Expr * EvalState::parse( } -SourcePath resolveExprPath(SourcePath path) -{ - unsigned int followCount = 0, maxFollow = 1024; - - /* If `path' is a symlink, follow it. This is so that relative - path references work. */ - while (!path.path.isRoot()) { - // Basic cycle/depth limit to avoid infinite loops. - if (++followCount >= maxFollow) - throw Error("too many symbolic links encountered while traversing the path '%s'", path); - auto p = path.parent().resolveSymlinks() + path.baseName(); - if (p.lstat().type != InputAccessor::tSymlink) break; - path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; - } - - /* If `path' refers to a directory, append `/default.nix'. 
*/ - if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) - return path + "default.nix"; - - return path; -} - - -Expr * EvalState::parseExprFromFile(const SourcePath & path) -{ - return parseExprFromFile(path, staticBaseEnv); -} - - -Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) -{ - auto buffer = path.resolveSymlinks().readFile(); - // readFile hopefully have left some extra space for terminators - buffer.append("\0\0", 2); - return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv); -} - - -Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr & staticEnv) -{ - auto s = make_ref(std::move(s_)); - s->append("\0\0", 2); - return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv); -} - - -Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath) -{ - return parseExprFromString(std::move(s), basePath, staticBaseEnv); -} - - -Expr * EvalState::parseStdin() -{ - //Activity act(*logger, lvlTalkative, "parsing standard input"); - auto buffer = drainFD(0); - // drainFD should have left some extra space for terminators - buffer.append("\0\0", 2); - auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); -} - - -SourcePath EvalState::findFile(const std::string_view path) -{ - return findFile(searchPath, path); -} - - -SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos) -{ - for (auto & i : searchPath.elements) { - auto suffixOpt = i.prefix.suffixIfPotentialMatch(path); - - if (!suffixOpt) continue; - auto suffix = *suffixOpt; - - auto rOpt = resolveSearchPathPath(i.path); - if (!rOpt) continue; - auto r = *rOpt; - - Path res = suffix == "" ? r : concatStrings(r, "/", suffix); - if (pathExists(res)) return rootPath(CanonPath(canonPath(res))); - } - - if (hasPrefix(path, "nix/")) - return {corepkgsFS, CanonPath(path.substr(3))}; - - debugThrow(ThrownError({ - .msg = hintfmt(evalSettings.pureEval - ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" - : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path), - .errPos = positions[pos] - }), 0, 0); -} - - -std::optional EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl) -{ - auto & value = value0.s; - auto i = searchPathResolved.find(value); - if (i != searchPathResolved.end()) return i->second; - - std::optional res; - - if (EvalSettings::isPseudoUrl(value)) { - try { - auto storePath = fetchers::downloadTarball( - store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath; - res = { store->toRealPath(storePath) }; - } catch (FileTransferError & e) { - logWarning({ - .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) - }); - } - } - - else if (hasPrefix(value, "flake:")) { - experimentalFeatureSettings.require(Xp::Flakes); - auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false); - debug("fetching flake search path element '%s''", value); - auto storePath = flakeRef.resolve(store).fetchTree(store).first; - res = { store->toRealPath(storePath) }; - } - - else { - auto path = absPath(value); - - /* Allow access to paths in the search path. 
*/ - if (initAccessControl) { - allowPath(path); - if (store->isInStore(path)) { - try { - StorePathSet closure; - store->computeFSClosure(store->toStorePath(path).first, closure); - for (auto & p : closure) - allowPath(p); - } catch (InvalidPath &) { } - } - } - - if (pathExists(path)) - res = { path }; - else { - logWarning({ - .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) - }); - res = std::nullopt; - } - } - - if (res) - debug("resolved search path element '%s' to '%s'", value, *res); - else - debug("failed to resolve search path element '%s'", value); - - searchPathResolved.emplace(value, res); - return res; -} - - } From b596cc9e7960b9256bcd557334d81e9d555be5a2 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 125/307] decouple parser and EvalState there's no reason the parser itself should be doing semantic analysis like bindVars. split this bit apart (retaining the previous name in EvalState) and have the parser really do *only* parsing, decoupled from EvalState. --- src/libexpr/eval.cc | 16 ++++++++++++++++ src/libexpr/lexer.l | 6 +++--- src/libexpr/parser-state.hh | 17 +++++++++-------- src/libexpr/parser.y | 36 +++++++++++++++++++++++++----------- 4 files changed, 53 insertions(+), 22 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 6eee7cdce..b05ccfc85 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -22,6 +22,7 @@ #include "fetch-to-store.hh" #include "tarball.hh" #include "flake/flakeref.hh" +#include "parser-tab.hh" #include #include @@ -2800,6 +2801,21 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa } +Expr * EvalState::parse( + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + std::shared_ptr & staticEnv) +{ + auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS); + + result->bindVars(*this, staticEnv); + + return result; +} + + std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index fae0e7a85..d7a0b5048 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -148,7 +148,7 @@ or { return OR_KW; } } catch (const boost::bad_lexical_cast &) { throw ParseError({ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); } return INT_LIT; @@ -158,7 +158,7 @@ or { return OR_KW; } if (errno != 0) throw ParseError({ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); return FLOAT_LIT; } @@ -287,7 +287,7 @@ or { return OR_KW; } <> { throw ParseError({ .msg = hintfmt("path has a trailing slash"), - .errPos = state->state.positions[CUR_POS], + .errPos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 167d3f4ae..6ab9fc962 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -37,11 +37,12 @@ struct ParserLocation { }; struct ParserState { - EvalState & state; SymbolTable & symbols; + PosTable & positions; Expr * result; SourcePath basePath; PosTable::Origin origin; + const ref rootFS; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); @@ -56,16 +57,16 @@ inline void 
ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", - showAttrPath(state.symbols, attrPath), state.positions[prevPos]), - .errPos = state.positions[pos] + showAttrPath(symbols, attrPath), positions[prevPos]), + .errPos = positions[pos] }); } inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]), - .errPos = state.positions[pos] + .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .errPos = positions[pos] }); } @@ -146,13 +147,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym if (duplicate) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = state.positions[duplicate->second] + .errPos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = state.positions[pos] + .errPos = positions[pos] }); return formals; @@ -256,7 +257,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, inline PosIdx ParserState::at(const ParserLocation & loc) { - return state.positions.add(origin, loc.first_line, loc.first_column); + return positions.add(origin, loc.first_line, loc.first_column); } } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 519d6b11f..faf5e897f 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -32,6 +32,19 @@ #define YY_DECL int yylex \ (YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state) +namespace nix { + +Expr * parseExprFromBuf( + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + SymbolTable & symbols, + PosTable & positions, + const ref rootFS); + +} + #endif } @@ -52,7 +65,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * { throw ParseError({ .msg = hintfmt(error), - .errPos = state->state.positions[state->at(*loc)] + .errPos = state->positions[state->at(*loc)] }); } @@ -141,7 +154,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = state->state.positions[CUR_POS] + .errPos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -231,7 +244,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = state->state.positions[CUR_POS] + .errPos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); } @@ -271,7 +284,7 @@ path_start /* add back in the trailing '/' to the first segment */ if ($1.p[$1.l-1] == '/' && $1.l > 1) path += "/"; - $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } | HPATH { if (evalSettings.pureEval) { @@ -281,7 +294,7 @@ path_start ); } Path path(getHome() + std::string($1.p + 1, $1.l - 1)); - $$ = new ExprPath(ref(state->state.rootFS), std::move(path)); + $$ = new ExprPath(ref(state->rootFS), std::move(path)); } ; @@ -327,7 +340,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->state.positions[state->at(@2)] + .errPos = state->positions[state->at(@2)] }); } | { $$ = new AttrPath; } @@ -394,19 +407,22 @@ formal 
namespace nix { -Expr * EvalState::parse( +Expr * parseExprFromBuf( char * text, size_t length, Pos::Origin origin, const SourcePath & basePath, - std::shared_ptr & staticEnv) + SymbolTable & symbols, + PosTable & positions, + const ref rootFS) { yyscan_t scanner; ParserState state { - .state = *this, .symbols = symbols, + .positions = positions, .basePath = basePath, .origin = {origin}, + .rootFS = rootFS, }; yylex_init(&scanner); @@ -415,8 +431,6 @@ Expr * EvalState::parse( yy_scan_buffer(text, length, scanner); yyparse(scanner, &state); - state.result->bindVars(*this, staticEnv); - return state.result; } From 09a1128d9e2ff0ae6176784938047350d6f8a782 Mon Sep 17 00:00:00 2001 From: pennae Date: Mon, 15 Jan 2024 16:52:18 +0100 Subject: [PATCH 126/307] don't repeatedly look up ast internal symbols these symbols are used a *lot*, so it makes sense to cache them. this mostly increases clarity of the code (however clear one may wish to call the parser desugaring here), but it also provides a small performance benefit. --- src/libexpr/eval.cc | 12 +++++++++++- src/libexpr/eval.hh | 2 ++ src/libexpr/nixexpr.hh | 5 +++++ src/libexpr/parser-state.hh | 1 + src/libexpr/parser.y | 31 +++++++++++++++++-------------- 5 files changed, 36 insertions(+), 15 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b05ccfc85..dc9167144 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -419,6 +419,16 @@ EvalState::EvalState( , sPath(symbols.create("path")) , sPrefix(symbols.create("prefix")) , sOutputSpecified(symbols.create("outputSpecified")) + , exprSymbols{ + .sub = symbols.create("__sub"), + .lessThan = symbols.create("__lessThan"), + .mul = symbols.create("__mul"), + .div = symbols.create("__div"), + .or_ = symbols.create("or"), + .findFile = symbols.create("__findFile"), + .nixPath = symbols.create("__nixPath"), + .body = symbols.create("body") + } , repair(NoRepair) , emptyBindings(0) , rootFS( @@ -2808,7 +2818,7 @@ Expr * EvalState::parse( const SourcePath & basePath, std::shared_ptr & staticEnv) { - auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS); + auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols); result->bindVars(*this, staticEnv); diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 9141156b1..2368187b1 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -207,6 +207,8 @@ public: sPrefix, sOutputSpecified; + const Expr::AstSymbols exprSymbols; + /** * If set, force copying files to the Nix store even if they * already exist there. 
diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index 3cd46ca27..b6189c2a9 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -140,6 +140,11 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath) struct Expr { + struct AstSymbols { + Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body; + }; + + static unsigned long nrExprs; Expr() { nrExprs++; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 6ab9fc962..a5b932ae8 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -43,6 +43,7 @@ struct ParserState { SourcePath basePath; PosTable::Origin origin; const ref rootFS; + const Expr::AstSymbols & s; void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos); void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index faf5e897f..e95da37f7 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -41,7 +41,8 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS); + const ref rootFS, + const Expr::AstSymbols & astSymbols); } @@ -168,13 +169,13 @@ expr_if expr_op : '!' expr_op %prec NOT { $$ = new ExprOpNot($2); } - | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->symbols.create("__sub")), {new ExprInt(0), $2}); } + | '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); } | expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); } | expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); } - | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3}); } - | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1})); } - | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$3, $1}); } - | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__lessThan")), {$1, $3})); } + | expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); } + | expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); } + | expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); } + | expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); } | expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); } | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } @@ -182,9 +183,9 @@ expr_op | expr_op '?' 
attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; } | expr_op '+' expr_op { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } - | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__sub")), {$1, $3}); } - | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__mul")), {$1, $3}); } - | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->symbols.create("__div")), {$1, $3}); } + | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); } + | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); } + | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); } | expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); } | expr_app ; @@ -208,7 +209,7 @@ expr_select | /* Backwards compatibility: because Nixpkgs has a rarely used function named ‘or’, allow stuff like ‘map or [...]’. */ expr_simple OR_KW - { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->symbols.create("or"))}); } + { $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); } | expr_simple ; @@ -235,8 +236,8 @@ expr_simple | SPATH { std::string path($1.p + 1, $1.l - 2); $$ = new ExprCall(CUR_POS, - new ExprVar(state->symbols.create("__findFile")), - {new ExprVar(state->symbols.create("__nixPath")), + new ExprVar(state->s.findFile), + {new ExprVar(state->s.nixPath), new ExprString(std::move(path))}); } | URI { @@ -252,7 +253,7 @@ expr_simple /* Let expressions `let {..., body = ...}' are just desugared into `(rec {..., body = ...}).body'. */ | LET '{' binds '}' - { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->symbols.create("body")); } + { $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); } | REC '{' binds '}' { $3->recursive = true; $$ = $3; } | '{' binds '}' @@ -414,7 +415,8 @@ Expr * parseExprFromBuf( const SourcePath & basePath, SymbolTable & symbols, PosTable & positions, - const ref rootFS) + const ref rootFS, + const Expr::AstSymbols & astSymbols) { yyscan_t scanner; ParserState state { @@ -423,6 +425,7 @@ Expr * parseExprFromBuf( .basePath = basePath, .origin = {origin}, .rootFS = rootFS, + .s = astSymbols, }; yylex_init(&scanner); From e0a76430861efbcfaf14c8b3691a091e6e72a8ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 22:35:12 +0000 Subject: [PATCH 127/307] Bump cachix/install-nix-action from 24 to 25 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 24 to 25. - [Release notes](https://github.com/cachix/install-nix-action/releases) - [Commits](https://github.com/cachix/install-nix-action/compare/v24...v25) --- updated-dependencies: - dependency-name: cachix/install-nix-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa2551424..8d88de4b1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" @@ -62,7 +62,7 @@ jobs: with: fetch-depth: 0 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - uses: cachix/cachix-action@v13 @@ -84,7 +84,7 @@ jobs: steps: - uses: actions/checkout@v4 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: '${{needs.installer.outputs.installerURL}}' install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve" @@ -114,7 +114,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v24 + - uses: cachix/install-nix-action@v25 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV From bf7754c0991c33146da9c339a71d661615afc93a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 22:35:15 +0000 Subject: [PATCH 128/307] Bump cachix/cachix-action from 13 to 14 Bumps [cachix/cachix-action](https://github.com/cachix/cachix-action) from 13 to 14. - [Release notes](https://github.com/cachix/cachix-action/releases) - [Commits](https://github.com/cachix/cachix-action/compare/v13...v14) --- updated-dependencies: - dependency-name: cachix/cachix-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index aa2551424..878720acc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,7 +25,7 @@ jobs: # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' @@ -65,7 +65,7 @@ jobs: - uses: cachix/install-nix-action@v24 with: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 with: name: '${{ env.CACHIX_NAME }}' signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}' @@ -119,7 +119,7 @@ jobs: install_url: https://releases.nixos.org/nix/nix-2.13.3/install - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV - - uses: cachix/cachix-action@v13 + - uses: cachix/cachix-action@v14 if: needs.check_secrets.outputs.cachix == 'true' with: name: '${{ env.CACHIX_NAME }}' From cbc319e9be3b29e3eb29a6e4cf08db1e0363d7bd Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 12:18:02 +0100 Subject: [PATCH 129/307] tests/functional/lang: Test substring with negative length --- tests/functional/lang/eval-okay-substring.exp | 2 +- tests/functional/lang/eval-okay-substring.nix | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/lang/eval-okay-substring.exp b/tests/functional/lang/eval-okay-substring.exp index 6aace04b0..f48b4623a 100644 --- a/tests/functional/lang/eval-okay-substring.exp +++ b/tests/functional/lang/eval-okay-substring.exp @@ -1 +1 @@ -"ooxfoobarybarzobaabbc" +"ooxfoobarybarzobaabbc_bad" diff --git a/tests/functional/lang/eval-okay-substring.nix b/tests/functional/lang/eval-okay-substring.nix index 424af00d9..54c97e162 100644 --- a/tests/functional/lang/eval-okay-substring.nix +++ b/tests/functional/lang/eval-okay-substring.nix @@ -19,3 +19,5 @@ substring 1 2 s + substring 3 1 s + "c" + substring 5 10 "perl" ++ "_" ++ substring 3 (-1) "tebbad" From baea5f42c602c0233c3ed9c2d668409f86f901b9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 14:50:53 +0100 Subject: [PATCH 130/307] doc/glossary: Simplify software package definition Co-authored-by: Valentin Gagarin --- doc/manual/src/glossary.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 5e3c0e024..3c0570a44 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -273,7 +273,7 @@ - [package]{#package} - 1. A software package; typically a collection of programs, files and data. + 1. A software package; a collection of files and other data. 2. A [package attribute set]. 
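For context on the substring test added in PATCH 129 above: `builtins.substring start len s` returns at most `len` characters of `s` starting at offset `start`, and a negative `len` is treated as unbounded, so the call yields the remainder of the string. A minimal illustration of the new test line (illustrative only, not part of any patch in this series; the repl rendering is assumed):

    nix-repl> builtins.substring 3 (-1) "tebbad"
    "bad"

which is why eval-okay-substring.exp gains the trailing "_bad".
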
From 0b1d93d2bae5fda9924f13246d7a667ce4392a4d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2024 15:23:22 +0100 Subject: [PATCH 131/307] Sleep a bit between attempts to connect to the root server --- src/libstore/gc.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index f60011f95..cb820e2d5 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -154,6 +154,7 @@ void LocalStore::addTempRoot(const StorePath & path) if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) { debug("GC socket connection refused: %s", e.msg()); fdRootsSocket->close(); + std::this_thread::sleep_for(std::chrono::milliseconds(100)); goto restart; } throw; From d005bade7f3339cc68bee12ce13d863d51d54dc4 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 16 Jan 2024 15:23:46 +0100 Subject: [PATCH 132/307] connect(): Propagate errno from the child process This is necessary on macOS since addTempRoot() relies on errno. --- src/libutil/unix-domain-socket.cc | 39 +++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 12 deletions(-) diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 8949461d2..05bbb5ba3 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -1,6 +1,7 @@ #include "file-system.hh" #include "processes.hh" #include "unix-domain-socket.hh" +#include "util.hh" #include #include @@ -75,21 +76,35 @@ void connect(int fd, const std::string & path) addr.sun_family = AF_UNIX; if (path.size() + 1 >= sizeof(addr.sun_path)) { + Pipe pipe; + pipe.create(); Pid pid = startProcess([&]() { - Path dir = dirOf(path); - if (chdir(dir.c_str()) == -1) - throw SysError("chdir to '%s' failed", dir); - std::string base(baseNameOf(path)); - if (base.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%s' is too long", base); - memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) - throw SysError("cannot connect to socket at '%s'", path); - _exit(0); + try { + pipe.readSide.close(); + Path dir = dirOf(path); + if (chdir(dir.c_str()) == -1) + throw SysError("chdir to '%s' failed", dir); + std::string base(baseNameOf(path)); + if (base.size() + 1 >= sizeof(addr.sun_path)) + throw Error("socket path '%s' is too long", base); + memcpy(addr.sun_path, base.c_str(), base.size() + 1); + if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + throw SysError("cannot connect to socket at '%s'", path); + writeFull(pipe.writeSide.get(), "0\n"); + } catch (SysError & e) { + writeFull(pipe.writeSide.get(), fmt("%d\n", e.errNo)); + } catch (...) 
{ + writeFull(pipe.writeSide.get(), "-1\n"); + } }); - int status = pid.wait(); - if (status != 0) + pipe.writeSide.close(); + auto errNo = string2Int(chomp(drainFD(pipe.readSide.get()))); + if (!errNo || *errNo == -1) throw Error("cannot connect to socket at '%s'", path); + else if (*errNo > 0) { + errno = *errNo; + throw SysError("cannot connect to socket at '%s'", path); + } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) From 65255edc9b0c2bbe8b0be50ac7b2671b50309ea8 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 15:25:04 +0100 Subject: [PATCH 133/307] DerivationInfo -> PackageInfo This does not yet resolve the coupling between packages and derivations, but it makes the code more consistent with the terminology, and it accentuates places where the coupling is obvious, such as auto drvPath = packageInfo.queryDrvPath(); if (!drvPath) throw Error("'%s' is not a derivation", what()); ... which isn't wrong, and in my opinion, doesn't even look wrong, because it just reflects the current logic. However, I do like that we can now start to see in the code that this coupling is perhaps a bit arbitrary. After this rename, we can bring the DerivingPath concept into type and start to lift this limitation. --- src/libcmd/installable-attr-path.cc | 10 +-- src/libcmd/installable-value.hh | 2 +- src/libcmd/installables.hh | 2 +- src/libcmd/repl.cc | 6 +- src/libexpr/get-drvs.cc | 48 ++++++------- src/libexpr/get-drvs.hh | 18 ++--- src/nix-build/nix-build.cc | 16 ++--- src/nix-env/nix-env.cc | 94 +++++++++++++------------- src/nix-env/user-env.cc | 8 +-- src/nix-env/user-env.hh | 4 +- src/nix-instantiate/nix-instantiate.cc | 2 +- src/nix/flake.cc | 6 +- src/nix/profile.cc | 6 +- 13 files changed, 111 insertions(+), 111 deletions(-) diff --git a/src/libcmd/installable-attr-path.cc b/src/libcmd/installable-attr-path.cc index 06e507872..3ec1c1614 100644 --- a/src/libcmd/installable-attr-path.cc +++ b/src/libcmd/installable-attr-path.cc @@ -58,22 +58,22 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths() Bindings & autoArgs = *cmd.getAutoArgs(*state); - DrvInfos drvInfos; - getDerivations(*state, *v, "", autoArgs, drvInfos, false); + PackageInfos packageInfos; + getDerivations(*state, *v, "", autoArgs, packageInfos, false); // Backward compatibility hack: group results by drvPath. This // helps keep .all output together. 
std::map byDrvPath; - for (auto & drvInfo : drvInfos) { - auto drvPath = drvInfo.queryDrvPath(); + for (auto & packageInfo : packageInfos) { + auto drvPath = packageInfo.queryDrvPath(); if (!drvPath) throw Error("'%s' is not a derivation", what()); auto newOutputs = std::visit(overloaded { [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec { std::set outputsToInstall; - for (auto & output : drvInfo.queryOutputs(false, true)) + for (auto & output : packageInfo.queryOutputs(false, true)) outputsToInstall.insert(output.first); return OutputsSpec::Names { std::move(outputsToInstall) }; }, diff --git a/src/libcmd/installable-value.hh b/src/libcmd/installable-value.hh index 3138ce8ec..f300d392b 100644 --- a/src/libcmd/installable-value.hh +++ b/src/libcmd/installable-value.hh @@ -6,7 +6,7 @@ namespace nix { -struct DrvInfo; +struct PackageInfo; struct SourceExprCommand; namespace eval_cache { class EvalCache; class AttrCursor; } diff --git a/src/libcmd/installables.hh b/src/libcmd/installables.hh index 95e8841ca..bf5759230 100644 --- a/src/libcmd/installables.hh +++ b/src/libcmd/installables.hh @@ -12,7 +12,7 @@ namespace nix { -struct DrvInfo; +struct PackageInfo; enum class Realise { /** diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 918b2e53a..d7d8f9819 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -450,10 +450,10 @@ static bool isVarName(std::string_view s) StorePath NixRepl::getDerivationPath(Value & v) { - auto drvInfo = getDerivation(*state, v, false); - if (!drvInfo) + auto packageInfo = getDerivation(*state, v, false); + if (!packageInfo) throw Error("expression does not evaluate to a derivation, so I can't build it"); - auto drvPath = drvInfo->queryDrvPath(); + auto drvPath = packageInfo->queryDrvPath(); if (!drvPath) throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)"); if (!state->store->isValidPath(*drvPath)) diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index a6441871c..51449ccb3 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -11,13 +11,13 @@ namespace nix { -DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs) +PackageInfo::PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs) : state(&state), attrs(attrs), attrPath(std::move(attrPath)) { } -DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) +PackageInfo::PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs) : state(&state), attrs(nullptr), attrPath("") { auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs); @@ -45,7 +45,7 @@ DrvInfo::DrvInfo(EvalState & state, ref store, const std::string & drvPat } -std::string DrvInfo::queryName() const +std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); @@ -56,7 +56,7 @@ std::string DrvInfo::queryName() const } -std::string DrvInfo::querySystem() const +std::string PackageInfo::querySystem() const { if (system == "" && attrs) { auto i = attrs->find(state->sSystem); @@ -66,7 +66,7 @@ std::string DrvInfo::querySystem() const } -std::optional DrvInfo::queryDrvPath() const +std::optional PackageInfo::queryDrvPath() const { if (!drvPath && attrs) { Bindings::iterator i = attrs->find(state->sDrvPath); @@ -80,7 +80,7 @@ std::optional DrvInfo::queryDrvPath() const } -StorePath DrvInfo::requireDrvPath() const +StorePath PackageInfo::requireDrvPath() const { if (auto drvPath = queryDrvPath()) return 
*drvPath; @@ -88,7 +88,7 @@ StorePath DrvInfo::requireDrvPath() const } -StorePath DrvInfo::queryOutPath() const +StorePath PackageInfo::queryOutPath() const { if (!outPath && attrs) { Bindings::iterator i = attrs->find(state->sOutPath); @@ -102,7 +102,7 @@ StorePath DrvInfo::queryOutPath() const } -DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall) +PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall) { if (outputs.empty()) { /* Get the ‘outputs’ list. */ @@ -164,7 +164,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall } -std::string DrvInfo::queryOutputName() const +std::string PackageInfo::queryOutputName() const { if (outputName == "" && attrs) { Bindings::iterator i = attrs->find(state->sOutputName); @@ -174,7 +174,7 @@ std::string DrvInfo::queryOutputName() const } -Bindings * DrvInfo::getMeta() +Bindings * PackageInfo::getMeta() { if (meta) return meta; if (!attrs) return 0; @@ -186,7 +186,7 @@ Bindings * DrvInfo::getMeta() } -StringSet DrvInfo::queryMetaNames() +StringSet PackageInfo::queryMetaNames() { StringSet res; if (!getMeta()) return res; @@ -196,7 +196,7 @@ StringSet DrvInfo::queryMetaNames() } -bool DrvInfo::checkMeta(Value & v) +bool PackageInfo::checkMeta(Value & v) { state->forceValue(v, v.determinePos(noPos)); if (v.type() == nList) { @@ -216,7 +216,7 @@ bool DrvInfo::checkMeta(Value & v) } -Value * DrvInfo::queryMeta(const std::string & name) +Value * PackageInfo::queryMeta(const std::string & name) { if (!getMeta()) return 0; Bindings::iterator a = meta->find(state->symbols.create(name)); @@ -225,7 +225,7 @@ Value * DrvInfo::queryMeta(const std::string & name) } -std::string DrvInfo::queryMetaString(const std::string & name) +std::string PackageInfo::queryMetaString(const std::string & name) { Value * v = queryMeta(name); if (!v || v->type() != nString) return ""; @@ -233,7 +233,7 @@ std::string DrvInfo::queryMetaString(const std::string & name) } -NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def) +NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) { Value * v = queryMeta(name); if (!v) return def; @@ -247,7 +247,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def) return def; } -NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def) +NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def) { Value * v = queryMeta(name); if (!v) return def; @@ -262,7 +262,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def) } -bool DrvInfo::queryMetaBool(const std::string & name, bool def) +bool PackageInfo::queryMetaBool(const std::string & name, bool def) { Value * v = queryMeta(name); if (!v) return def; @@ -277,7 +277,7 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def) } -void DrvInfo::setMeta(const std::string & name, Value * v) +void PackageInfo::setMeta(const std::string & name, Value * v) { getMeta(); auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0)); @@ -300,7 +300,7 @@ typedef std::set Done; The result boolean indicates whether it makes sense for the caller to recursively search for derivations in `v'. 
*/ static bool getDerivation(EvalState & state, Value & v, - const std::string & attrPath, DrvInfos & drvs, Done & done, + const std::string & attrPath, PackageInfos & drvs, Done & done, bool ignoreAssertionFailures) { try { @@ -311,7 +311,7 @@ static bool getDerivation(EvalState & state, Value & v, derivation {...}; y = x;}'. */ if (!done.insert(v.attrs).second) return false; - DrvInfo drv(state, attrPath, v.attrs); + PackageInfo drv(state, attrPath, v.attrs); drv.queryName(); @@ -326,11 +326,11 @@ static bool getDerivation(EvalState & state, Value & v, } -std::optional getDerivation(EvalState & state, Value & v, +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures) { Done done; - DrvInfos drvs; + PackageInfos drvs; getDerivation(state, v, "", drvs, done, ignoreAssertionFailures); if (drvs.size() != 1) return {}; return std::move(drvs.front()); @@ -348,7 +348,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*"); static void getDerivations(EvalState & state, Value & vIn, const std::string & pathPrefix, Bindings & autoArgs, - DrvInfos & drvs, Done & done, + PackageInfos & drvs, Done & done, bool ignoreAssertionFailures) { Value v; @@ -401,7 +401,7 @@ static void getDerivations(EvalState & state, Value & vIn, void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures) + Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures) { Done done; getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures); diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index 584d64ac1..b886581b6 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -11,7 +11,7 @@ namespace nix { -struct DrvInfo +struct PackageInfo { public: typedef std::map> Outputs; @@ -43,9 +43,9 @@ public: */ std::string attrPath; - DrvInfo(EvalState & state) : state(&state) { }; - DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs); - DrvInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs); + PackageInfo(EvalState & state) : state(&state) { }; + PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs); + PackageInfo(EvalState & state, ref store, const std::string & drvPathWithOutputs); std::string queryName() const; std::string querySystem() const; @@ -82,21 +82,21 @@ public: #if HAVE_BOEHMGC -typedef std::list> DrvInfos; +typedef std::list> PackageInfos; #else -typedef std::list DrvInfos; +typedef std::list PackageInfos; #endif /** - * If value `v` denotes a derivation, return a DrvInfo object + * If value `v` denotes a derivation, return a PackageInfo object * describing it. Otherwise return nothing. */ -std::optional getDerivation(EvalState & state, +std::optional getDerivation(EvalState & state, Value & v, bool ignoreAssertionFailures); void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix, - Bindings & autoArgs, DrvInfos & drvs, + Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures); diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 1ad4b387c..549adfbf7 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -289,7 +289,7 @@ static void main_nix_build(int argc, char * * argv) if (runEnv) setenv("IN_NIX_SHELL", pure ? "pure" : "impure", 1); - DrvInfos drvs; + PackageInfos drvs; /* Parse the expressions. 
*/ std::vector exprs; @@ -307,7 +307,7 @@ static void main_nix_build(int argc, char * * argv) } catch (Error & e) {}; auto [path, outputNames] = parsePathWithOutputs(absolute); if (evalStore->isStorePath(path) && hasSuffix(path, ".drv")) - drvs.push_back(DrvInfo(*state, evalStore, absolute)); + drvs.push_back(PackageInfo(*state, evalStore, absolute)); else /* If we're in a #! script, interpret filenames relative to the script. */ @@ -383,8 +383,8 @@ static void main_nix_build(int argc, char * * argv) if (drvs.size() != 1) throw UsageError("nix-shell requires a single derivation"); - auto & drvInfo = drvs.front(); - auto drv = evalStore->derivationFromPath(drvInfo.requireDrvPath()); + auto & packageInfo = drvs.front(); + auto drv = evalStore->derivationFromPath(packageInfo.requireDrvPath()); std::vector pathsToBuild; RealisedPath::Set pathsToCopy; @@ -527,7 +527,7 @@ static void main_nix_build(int argc, char * * argv) for (const auto & [inputDrv, inputNode] : drv.inputDrvs.map) accumInputClosure(inputDrv, inputNode); - ParsedDerivation parsedDrv(drvInfo.requireDrvPath(), drv); + ParsedDerivation parsedDrv(packageInfo.requireDrvPath(), drv); if (auto structAttrs = parsedDrv.prepareStructuredAttrs(*store, inputs)) { auto json = structAttrs.value(); @@ -620,10 +620,10 @@ static void main_nix_build(int argc, char * * argv) std::map> drvMap; - for (auto & drvInfo : drvs) { - auto drvPath = drvInfo.requireDrvPath(); + for (auto & packageInfo : drvs) { + auto drvPath = packageInfo.requireDrvPath(); - auto outputName = drvInfo.queryOutputName(); + auto outputName = packageInfo.queryOutputName(); if (outputName == "") throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath)); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index e2bbd9775..d5b46c57a 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -184,7 +184,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, std::string systemFilter, Bindings & autoArgs, - const std::string & pathPrefix, DrvInfos & elems) + const std::string & pathPrefix, PackageInfos & elems) { Value vRoot; loadSourceExpr(state, nixExprPath, vRoot); @@ -195,7 +195,7 @@ static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, /* Filter out all derivations not applicable to the current system. */ - for (DrvInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { + for (PackageInfos::iterator i = elems.begin(), j; i != elems.end(); i = j) { j = i; j++; if (systemFilter != "*" && i->querySystem() != systemFilter) elems.erase(i); @@ -203,13 +203,13 @@ static void loadDerivations(EvalState & state, const SourcePath & nixExprPath, } -static long getPriority(EvalState & state, DrvInfo & drv) +static long getPriority(EvalState & state, PackageInfo & drv) { return drv.queryMetaInt("priority", 0); } -static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) +static long comparePriorities(EvalState & state, PackageInfo & drv1, PackageInfo & drv2) { return getPriority(state, drv2) - getPriority(state, drv1); } @@ -217,7 +217,7 @@ static long comparePriorities(EvalState & state, DrvInfo & drv1, DrvInfo & drv2) // FIXME: this function is rather slow since it checks a single path // at a time. 
-static bool isPrebuilt(EvalState & state, DrvInfo & elem) +static bool isPrebuilt(EvalState & state, PackageInfo & elem) { auto path = elem.queryOutPath(); if (state.store->isValidPath(path)) return true; @@ -236,11 +236,11 @@ static void checkSelectorUse(DrvNames & selectors) namespace { -std::set searchByPrefix(const DrvInfos & allElems, std::string_view prefix) { +std::set searchByPrefix(const PackageInfos & allElems, std::string_view prefix) { constexpr std::size_t maxResults = 3; std::set result; - for (const auto & drvInfo : allElems) { - const auto drvName = DrvName { drvInfo.queryName() }; + for (const auto & packageInfo : allElems) { + const auto drvName = DrvName { packageInfo.queryName() }; if (hasPrefix(drvName.name, prefix)) { result.emplace(drvName.name); @@ -254,11 +254,11 @@ std::set searchByPrefix(const DrvInfos & allElems, std::string_view struct Match { - DrvInfo drvInfo; + PackageInfo packageInfo; std::size_t index; - Match(DrvInfo drvInfo_, std::size_t index_) - : drvInfo{std::move(drvInfo_)} + Match(PackageInfo packageInfo_, std::size_t index_) + : packageInfo{std::move(packageInfo_)} , index{index_} {} }; @@ -276,7 +276,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) StringSet multiple; for (auto & match : matches) { - auto & oneDrv = match.drvInfo; + auto & oneDrv = match.packageInfo; const auto drvName = DrvName { oneDrv.queryName() }; long comparison = 1; @@ -284,7 +284,7 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) const auto itOther = newest.find(drvName.name); if (itOther != newest.end()) { - auto & newestDrv = itOther->second.drvInfo; + auto & newestDrv = itOther->second.packageInfo; comparison = oneDrv.querySystem() == newestDrv.querySystem() ? 0 : @@ -319,23 +319,23 @@ std::vector pickNewestOnly(EvalState & state, std::vector matches) } // end namespace -static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, +static PackageInfos filterBySelector(EvalState & state, const PackageInfos & allElems, const Strings & args, bool newestOnly) { DrvNames selectors = drvNamesFromArgs(args); if (selectors.empty()) selectors.emplace_back("*"); - DrvInfos elems; + PackageInfos elems; std::set done; for (auto & selector : selectors) { std::vector matches; - for (const auto & [index, drvInfo] : enumerate(allElems)) { - const auto drvName = DrvName { drvInfo.queryName() }; + for (const auto & [index, packageInfo] : enumerate(allElems)) { + const auto drvName = DrvName { packageInfo.queryName() }; if (selector.matches(drvName)) { ++selector.hits; - matches.emplace_back(drvInfo, index); + matches.emplace_back(packageInfo, index); } } @@ -347,7 +347,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems, haven't inserted before. */ for (auto & match : matches) if (done.insert(match.index).second) - elems.push_back(match.drvInfo); + elems.push_back(match.packageInfo); if (selector.hits == 0 && selector.fullName != "*") { const auto prefixHits = searchByPrefix(allElems, selector.name); @@ -376,7 +376,7 @@ static bool isPath(std::string_view s) static void queryInstSources(EvalState & state, InstallSourceInfo & instSource, const Strings & args, - DrvInfos & elems, bool newestOnly) + PackageInfos & elems, bool newestOnly) { InstallSourceType type = instSource.type; if (type == srcUnknown && args.size() > 0 && isPath(args.front())) @@ -392,7 +392,7 @@ static void queryInstSources(EvalState & state, /* Load the derivations from the (default or specified) Nix expression. 
*/ - DrvInfos allElems; + PackageInfos allElems; loadDerivations(state, *instSource.nixExprPath, instSource.systemFilter, *instSource.autoArgs, "", allElems); @@ -433,7 +433,7 @@ static void queryInstSources(EvalState & state, std::string name(path.name()); - DrvInfo elem(state, "", nullptr); + PackageInfo elem(state, "", nullptr); elem.setName(name); if (path.isDerivation()) { @@ -476,7 +476,7 @@ static void queryInstSources(EvalState & state, } -static void printMissing(EvalState & state, DrvInfos & elems) +static void printMissing(EvalState & state, PackageInfos & elems) { std::vector targets; for (auto & i : elems) @@ -494,7 +494,7 @@ static void printMissing(EvalState & state, DrvInfos & elems) } -static bool keep(DrvInfo & drv) +static bool keep(PackageInfo & drv) { return drv.queryMetaBool("keep", false); } @@ -506,7 +506,7 @@ static void installDerivations(Globals & globals, debug("installing derivations"); /* Get the set of user environment elements to be installed. */ - DrvInfos newElems, newElemsTmp; + PackageInfos newElems, newElemsTmp; queryInstSources(*globals.state, globals.instSource, args, newElemsTmp, true); /* If --prebuilt-only is given, filter out source-only packages. */ @@ -529,12 +529,12 @@ static void installDerivations(Globals & globals, while (true) { auto lockToken = optimisticLockProfile(profile); - DrvInfos allElems(newElems); + PackageInfos allElems(newElems); /* Add in the already installed derivations, unless they have the same name as a to-be-installed element. */ if (!globals.removeAll) { - DrvInfos installedElems = queryInstalled(*globals.state, profile); + PackageInfos installedElems = queryInstalled(*globals.state, profile); for (auto & i : installedElems) { DrvName drvName(i.queryName()); @@ -592,14 +592,14 @@ static void upgradeDerivations(Globals & globals, while (true) { auto lockToken = optimisticLockProfile(globals.profile); - DrvInfos installedElems = queryInstalled(*globals.state, globals.profile); + PackageInfos installedElems = queryInstalled(*globals.state, globals.profile); /* Fetch all derivations from the input file. */ - DrvInfos availElems; + PackageInfos availElems; queryInstSources(*globals.state, globals.instSource, args, availElems, false); /* Go through all installed derivations. */ - DrvInfos newElems; + PackageInfos newElems; for (auto & i : installedElems) { DrvName drvName(i.queryName()); @@ -617,7 +617,7 @@ static void upgradeDerivations(Globals & globals, priority. If there are still multiple matches, take the one with the highest version. Do not upgrade if it would decrease the priority. */ - DrvInfos::iterator bestElem = availElems.end(); + PackageInfos::iterator bestElem = availElems.end(); std::string bestVersion; for (auto j = availElems.begin(); j != availElems.end(); ++j) { if (comparePriorities(*globals.state, i, *j) > 0) @@ -687,7 +687,7 @@ static void opUpgrade(Globals & globals, Strings opFlags, Strings opArgs) } -static void setMetaFlag(EvalState & state, DrvInfo & drv, +static void setMetaFlag(EvalState & state, PackageInfo & drv, const std::string & name, const std::string & value) { auto v = state.allocValue(); @@ -711,7 +711,7 @@ static void opSetFlag(Globals & globals, Strings opFlags, Strings opArgs) while (true) { std::string lockToken = optimisticLockProfile(globals.profile); - DrvInfos installedElems = queryInstalled(*globals.state, globals.profile); + PackageInfos installedElems = queryInstalled(*globals.state, globals.profile); /* Update all matching derivations. 
*/ for (auto & i : installedElems) { @@ -745,13 +745,13 @@ static void opSet(Globals & globals, Strings opFlags, Strings opArgs) else throw UsageError("unknown flag '%1%'", arg); } - DrvInfos elems; + PackageInfos elems; queryInstSources(*globals.state, globals.instSource, opArgs, elems, true); if (elems.size() != 1) throw Error("--set requires exactly one derivation"); - DrvInfo & drv(elems.front()); + PackageInfo & drv(elems.front()); if (globals.forceName != "") drv.setName(globals.forceName); @@ -786,10 +786,10 @@ static void uninstallDerivations(Globals & globals, Strings & selectors, while (true) { auto lockToken = optimisticLockProfile(profile); - DrvInfos workingElems = queryInstalled(*globals.state, profile); + PackageInfos workingElems = queryInstalled(*globals.state, profile); for (auto & selector : selectors) { - DrvInfos::iterator split = workingElems.begin(); + PackageInfos::iterator split = workingElems.begin(); if (isPath(selector)) { StorePath selectorStorePath = globals.state->store->followLinksToStorePath(selector); split = std::partition( @@ -838,7 +838,7 @@ static bool cmpChars(char a, char b) } -static bool cmpElemByName(const DrvInfo & a, const DrvInfo & b) +static bool cmpElemByName(const PackageInfo & a, const PackageInfo & b) { auto a_name = a.queryName(); auto b_name = b.queryName(); @@ -891,7 +891,7 @@ void printTable(Table & table) typedef enum { cvLess, cvEqual, cvGreater, cvUnavail } VersionDiff; static VersionDiff compareVersionAgainstSet( - const DrvInfo & elem, const DrvInfos & elems, std::string & version) + const PackageInfo & elem, const PackageInfos & elems, std::string & version) { DrvName name(elem.queryName()); @@ -922,7 +922,7 @@ static VersionDiff compareVersionAgainstSet( } -static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) +static void queryJSON(Globals & globals, std::vector & elems, bool printOutPath, bool printDrvPath, bool printMeta) { using nlohmann::json; json topObj = json::object(); @@ -942,7 +942,7 @@ static void queryJSON(Globals & globals, std::vector & elems, bool prin }; { - DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); + PackageInfo::Outputs outputs = i.queryOutputs(printOutPath); json &outputObj = pkgObj["outputs"]; outputObj = json::object(); for (auto & j : outputs) { @@ -1032,7 +1032,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) throw UsageError("--attr-path(-P) only works with --available"); /* Obtain derivation information from the specified source. */ - DrvInfos availElems, installedElems; + PackageInfos availElems, installedElems; if (source == sInstalled || compareVersions || printStatus) installedElems = queryInstalled(*globals.state, globals.profile); @@ -1042,16 +1042,16 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) globals.instSource.systemFilter, *globals.instSource.autoArgs, attrPath, availElems); - DrvInfos elems_ = filterBySelector(*globals.state, + PackageInfos elems_ = filterBySelector(*globals.state, source == sInstalled ? installedElems : availElems, opArgs, false); - DrvInfos & otherElems(source == sInstalled ? availElems : installedElems); + PackageInfos & otherElems(source == sInstalled ? availElems : installedElems); /* Sort them by name. */ /* !!! 
*/ - std::vector elems; + std::vector elems; for (auto & i : elems_) elems.push_back(i); sort(elems.begin(), elems.end(), cmpElemByName); @@ -1192,7 +1192,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) attrs["outputName"] = i.queryOutputName(); if (printOutPath && !xmlOutput) { - DrvInfo::Outputs outputs = i.queryOutputs(); + PackageInfo::Outputs outputs = i.queryOutputs(); std::string s; for (auto & j : outputs) { if (!s.empty()) s += ';'; @@ -1212,7 +1212,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (xmlOutput) { XMLOpenElement item(xml, "item", attrs); - DrvInfo::Outputs outputs = i.queryOutputs(printOutPath); + PackageInfo::Outputs outputs = i.queryOutputs(printOutPath); for (auto & j : outputs) { XMLAttrs attrs2; attrs2["name"] = j.first; diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc index 973b6ee2b..2f9c988d5 100644 --- a/src/nix-env/user-env.cc +++ b/src/nix-env/user-env.cc @@ -15,9 +15,9 @@ namespace nix { -DrvInfos queryInstalled(EvalState & state, const Path & userEnv) +PackageInfos queryInstalled(EvalState & state, const Path & userEnv) { - DrvInfos elems; + PackageInfos elems; if (pathExists(userEnv + "/manifest.json")) throw Error("profile '%s' is incompatible with 'nix-env'; please use 'nix profile' instead", userEnv); auto manifestFile = userEnv + "/manifest.nix"; @@ -31,7 +31,7 @@ DrvInfos queryInstalled(EvalState & state, const Path & userEnv) } -bool createUserEnv(EvalState & state, DrvInfos & elems, +bool createUserEnv(EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken) { @@ -57,7 +57,7 @@ bool createUserEnv(EvalState & state, DrvInfos & elems, output paths, and optionally the derivation path, as well as the meta attributes. */ std::optional drvPath = keepDerivations ? 
i.queryDrvPath() : std::nullopt; - DrvInfo::Outputs outputs = i.queryOutputs(true, true); + PackageInfo::Outputs outputs = i.queryOutputs(true, true); StringSet metaNames = i.queryMetaNames(); auto attrs = state.buildBindings(7 + outputs.size()); diff --git a/src/nix-env/user-env.hh b/src/nix-env/user-env.hh index af45d2d85..15da3fcb3 100644 --- a/src/nix-env/user-env.hh +++ b/src/nix-env/user-env.hh @@ -5,9 +5,9 @@ namespace nix { -DrvInfos queryInstalled(EvalState & state, const Path & userEnv); +PackageInfos queryInstalled(EvalState & state, const Path & userEnv); -bool createUserEnv(EvalState & state, DrvInfos & elems, +bool createUserEnv(EvalState & state, PackageInfos & elems, const Path & profile, bool keepDerivations, const std::string & lockToken); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index 87bc986e8..b9e626aed 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -62,7 +62,7 @@ void processExpr(EvalState & state, const Strings & attrPaths, std::cout << std::endl; } } else { - DrvInfos drvs; + PackageInfos drvs; getDerivations(state, v, "", autoArgs, drvs, false); for (auto & i : drvs) { auto drvPath = i.requireDrvPath(); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 2b6e56283..bebc62deb 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -395,11 +395,11 @@ struct CmdFlakeCheck : FlakeCommand auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { - auto drvInfo = getDerivation(*state, v, false); - if (!drvInfo) + auto packageInfo = getDerivation(*state, v, false); + if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); // FIXME: check meta attributes - return drvInfo->queryDrvPath(); + return packageInfo->queryDrvPath(); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); reportError(e); diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 616fe9512..812e703b4 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -168,11 +168,11 @@ struct ProfileManifest state.allowPath(state.store->followLinksToStore(profile)); state.allowPath(state.store->followLinksToStore(profile + "/manifest.nix")); - auto drvInfos = queryInstalled(state, state.store->followLinksToStore(profile)); + auto packageInfos = queryInstalled(state, state.store->followLinksToStore(profile)); - for (auto & drvInfo : drvInfos) { + for (auto & packageInfo : packageInfos) { ProfileElement element; - element.storePaths = {drvInfo.queryOutPath()}; + element.storePaths = {packageInfo.queryOutPath()}; addElement(std::move(element)); } } From ea6aa5ffd87b27ddd89cab541f4b98b3efcb7ea9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 16 Jan 2024 15:44:02 +0100 Subject: [PATCH 134/307] Package{,Info}: comments --- src/libexpr/get-drvs.hh | 4 +++- src/libstore/builtins/buildenv.hh | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh index b886581b6..e8c1190f7 100644 --- a/src/libexpr/get-drvs.hh +++ b/src/libexpr/get-drvs.hh @@ -10,7 +10,9 @@ namespace nix { - +/** + * A "parsed" package attribute set. 
+ */ struct PackageInfo { public: diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh index 8bebd390d..b24633e27 100644 --- a/src/libstore/builtins/buildenv.hh +++ b/src/libstore/builtins/buildenv.hh @@ -5,6 +5,9 @@ namespace nix { +/** + * Think of this as a "store level package attrset", but stripped down to no more than the needs of buildenv. + */ struct Package { Path path; bool active; From 0bc66e529fa34b84ae31301dd99f31cc16ccfd6c Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 10:13:55 +0100 Subject: [PATCH 135/307] Use npos member variables instead of full type --- src/libutil/file-system.cc | 6 +++--- src/libutil/util.cc | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index ab8d32275..14d496958 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -90,7 +90,7 @@ Path canonPath(PathView path, bool resolveSymlinks) /* Normal component; copy it. */ else { s += '/'; - if (const auto slash = path.find('/'); slash == std::string::npos) { + if (const auto slash = path.find('/'); slash == path.npos) { s += path; path = {}; } else { @@ -123,7 +123,7 @@ Path canonPath(PathView path, bool resolveSymlinks) Path dirOf(const PathView path) { Path::size_type pos = path.rfind('/'); - if (pos == std::string::npos) + if (pos == path.npos) return "."; return pos == 0 ? "/" : Path(path, 0, pos); } @@ -139,7 +139,7 @@ std::string_view baseNameOf(std::string_view path) last -= 1; auto pos = path.rfind('/', last); - if (pos == std::string::npos) + if (pos == path.npos) pos = 0; else pos += 1; diff --git a/src/libutil/util.cc b/src/libutil/util.cc index b23362b5c..6e47ce2a3 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -52,9 +52,9 @@ template C tokenizeString(std::string_view s, std::string_view separato { C result; auto pos = s.find_first_not_of(separators, 0); - while (pos != std::string_view::npos) { + while (pos != s.npos) { auto end = s.find_first_of(separators, pos + 1); - if (end == std::string_view::npos) end = s.size(); + if (end == s.npos) end = s.size(); result.insert(result.end(), std::string(s, pos, end - pos)); pos = s.find_first_not_of(separators, end); } @@ -69,7 +69,7 @@ template std::vector tokenizeString(std::string_view s, std::string std::string chomp(std::string_view s) { size_t i = s.find_last_not_of(" \n\r\t"); - return i == std::string_view::npos ? "" : std::string(s, 0, i + 1); + return i == s.npos ? 
"" : std::string(s, 0, i + 1); } @@ -89,7 +89,7 @@ std::string replaceStrings( { if (from.empty()) return res; size_t pos = 0; - while ((pos = res.find(from, pos)) != std::string::npos) { + while ((pos = res.find(from, pos)) != res.npos) { res.replace(pos, from.size(), to); pos += to.size(); } @@ -102,7 +102,7 @@ std::string rewriteStrings(std::string s, const StringMap & rewrites) for (auto & i : rewrites) { if (i.first == i.second) continue; size_t j = 0; - while ((j = s.find(i.first, j)) != std::string::npos) + while ((j = s.find(i.first, j)) != s.npos) s.replace(j, i.first.size(), i.second); } return s; From 1885d579db145d45f0aaf6130cd7e4db17b5e214 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:49:22 +0100 Subject: [PATCH 136/307] Improve String Handling --- src/libutil/file-system.cc | 6 +++++- src/libutil/util.cc | 9 ++++----- src/libutil/util.hh | 2 +- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index 14d496958..cf8a6d967 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -116,7 +116,11 @@ Path canonPath(PathView path, bool resolveSymlinks) } } - return s.empty() ? "/" : std::move(s); + if (s.empty()) { + s = "/"; + } + + return s; } diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 6e47ce2a3..8f310c6fe 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -122,12 +122,11 @@ bool hasSuffix(std::string_view s, std::string_view suffix) } -std::string toLower(const std::string & s) +std::string toLower(std::string s) { - std::string r(s); - for (auto & c : r) + for (auto & c : s) c = std::tolower(c); - return r; + return s; } @@ -135,7 +134,7 @@ std::string shellEscape(const std::string_view s) { std::string r; r.reserve(s.size() + 2); - r += "'"; + r += '\''; for (auto & i : s) if (i == '\'') r += "'\\''"; else r += i; r += '\''; diff --git a/src/libutil/util.hh b/src/libutil/util.hh index 27faa4d6d..11a0431da 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -180,7 +180,7 @@ bool hasSuffix(std::string_view s, std::string_view suffix); /** * Convert a string to lower case. */ -std::string toLower(const std::string & s); +std::string toLower(std::string s); /** From c924147c9d782e70e0ad421329252ced57f88d09 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:50:00 +0100 Subject: [PATCH 137/307] Drop parentheses from thunks --- src/libutil/file-descriptor.cc | 2 +- src/libutil/processes.cc | 12 ++++++------ src/libutil/unix-domain-socket.cc | 4 ++-- src/libutil/util.cc | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 692be3383..43e3cd979 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -96,7 +96,7 @@ void drainFD(int fd, Sink & sink, bool block) throw SysError("making file descriptor non-blocking"); } - Finally finally([&]() { + Finally finally([&] { if (!block) { if (fcntl(fd, F_SETFL, saved) == -1) throw SysError("making file descriptor blocking"); diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index 91a0ea66f..e1e60302b 100644 --- a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -131,7 +131,7 @@ void killUser(uid_t uid) users to which the current process can send signals. So we fork a process, switch to uid, and send a mass kill. 
*/ - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { if (setuid(uid) == -1) throw SysError("setting uid"); @@ -197,7 +197,7 @@ static int childEntry(void * arg) pid_t startProcess(std::function fun, const ProcessOptions & options) { - std::function wrapper = [&]() { + ChildWrapperFunction wrapper = [&] { if (!options.allowVfork) logger = makeSimpleLogger(); try { @@ -229,7 +229,7 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0); if (stack == MAP_FAILED) throw SysError("allocating stack"); - Finally freeStack([&]() { munmap(stack, stackSize); }); + Finally freeStack([&] { munmap(stack, stackSize); }); pid = clone(childEntry, stack + stackSize, options.cloneFlags | SIGCHLD, &wrapper); #else @@ -308,7 +308,7 @@ void runProgram2(const RunOptions & options) } /* Fork. */ - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { if (options.environment) replaceEnv(*options.environment); if (options.standardOut && dup2(out.writeSide.get(), STDOUT_FILENO) == -1) @@ -350,7 +350,7 @@ void runProgram2(const RunOptions & options) std::promise promise; - Finally doJoin([&]() { + Finally doJoin([&] { if (writerThread.joinable()) writerThread.join(); }); @@ -358,7 +358,7 @@ void runProgram2(const RunOptions & options) if (source) { in.readSide.close(); - writerThread = std::thread([&]() { + writerThread = std::thread([&] { try { std::vector buf(8 * 1024); while (true) { diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 05bbb5ba3..dc19daf9e 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -47,7 +47,7 @@ void bind(int fd, const std::string & path) addr.sun_family = AF_UNIX; if (path.size() + 1 >= sizeof(addr.sun_path)) { - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { Path dir = dirOf(path); if (chdir(dir.c_str()) == -1) throw SysError("chdir to '%s' failed", dir); @@ -78,7 +78,7 @@ void connect(int fd, const std::string & path) if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; pipe.create(); - Pid pid = startProcess([&]() { + Pid pid = startProcess([&] { try { pipe.readSide.close(); Path dir = dirOf(path); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 8f310c6fe..75bb31c9b 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -183,7 +183,7 @@ std::string base64Encode(std::string_view s) std::string base64Decode(std::string_view s) { constexpr char npos = -1; - constexpr std::array base64DecodeChars = [&]() { + constexpr std::array base64DecodeChars = [&] { std::array result{}; for (auto& c : result) c = npos; From d11d7849f7676eb8b2c771356b9be8d8bb756cc8 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:51:47 +0100 Subject: [PATCH 138/307] Use ChildWrapperFunction type and make casts more explicit --- src/libutil/processes.cc | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index e1e60302b..28f1adcf0 100644 --- a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -168,11 +168,12 @@ void killUser(uid_t uid) ////////////////////////////////////////////////////////////////////// +using ChildWrapperFunction = std::function; /* Wrapper around vfork to prevent the child process from clobbering the caller's stack frame in the parent. 
*/ -static pid_t doFork(bool allowVfork, std::function fun) __attribute__((noinline)); -static pid_t doFork(bool allowVfork, std::function fun) +static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) __attribute__((noinline)); +static pid_t doFork(bool allowVfork, ChildWrapperFunction & fun) { #ifdef __linux__ pid_t pid = allowVfork ? vfork() : fork(); @@ -188,8 +189,8 @@ static pid_t doFork(bool allowVfork, std::function fun) #if __linux__ static int childEntry(void * arg) { - auto main = (std::function *) arg; - (*main)(); + auto & fun = *reinterpret_cast(arg); + fun(); return 1; } #endif From 9d9f42cc38b06ddc3fe30f4c1695514774b5217e Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:52:38 +0100 Subject: [PATCH 139/307] Remove C-style casts --- src/libutil/file-descriptor.cc | 2 +- src/libutil/processes.cc | 4 ++-- src/libutil/unix-domain-socket.cc | 18 ++++++++++++++---- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/src/libutil/file-descriptor.cc b/src/libutil/file-descriptor.cc index 43e3cd979..55d57e29b 100644 --- a/src/libutil/file-descriptor.cc +++ b/src/libutil/file-descriptor.cc @@ -114,7 +114,7 @@ void drainFD(int fd, Sink & sink, bool block) throw SysError("reading from file"); } else if (rd == 0) break; - else sink({(char *) buf.data(), (size_t) rd}); + else sink({reinterpret_cast(buf.data()), size_t(rd)}); } } diff --git a/src/libutil/processes.cc b/src/libutil/processes.cc index 28f1adcf0..f5d584330 100644 --- a/src/libutil/processes.cc +++ b/src/libutil/processes.cc @@ -226,8 +226,8 @@ pid_t startProcess(std::function fun, const ProcessOptions & options) assert(!(options.cloneFlags & CLONE_VM)); size_t stackSize = 1 * 1024 * 1024; - auto stack = (char *) mmap(0, stackSize, - PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0); + auto stack = static_cast(mmap(0, stackSize, + PROT_WRITE | PROT_READ, MAP_PRIVATE | MAP_ANONYMOUS | MAP_STACK, -1, 0)); if (stack == MAP_FAILED) throw SysError("allocating stack"); Finally freeStack([&] { munmap(stack, stackSize); }); diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index dc19daf9e..3b6d54a2c 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -38,6 +38,14 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) return fdSocket; } +static struct sockaddr* safeSockAddrPointerCast(struct sockaddr_un *addr) { + // Casting between types like these legacy C library interfaces require + // is forbidden in C++. + // To maintain backwards compatibility, the implementation of the + // bind function contains some hints to the compiler that allow for this + // special case. 
+ return reinterpret_cast(addr); +} void bind(int fd, const std::string & path) { @@ -45,6 +53,7 @@ void bind(int fd, const std::string & path) struct sockaddr_un addr; addr.sun_family = AF_UNIX; + auto psaddr {safeSockAddrPointerCast(&addr)}; if (path.size() + 1 >= sizeof(addr.sun_path)) { Pid pid = startProcess([&] { @@ -55,7 +64,7 @@ void bind(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (bind(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot bind to socket '%s'", path); _exit(0); }); @@ -64,7 +73,7 @@ void bind(int fd, const std::string & path) throw Error("cannot bind to socket '%s'", path); } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (bind(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (bind(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot bind to socket '%s'", path); } } @@ -74,6 +83,7 @@ void connect(int fd, const std::string & path) { struct sockaddr_un addr; addr.sun_family = AF_UNIX; + auto psaddr {safeSockAddrPointerCast(&addr)}; if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; @@ -88,7 +98,7 @@ void connect(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (connect(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot connect to socket at '%s'", path); writeFull(pipe.writeSide.get(), "0\n"); } catch (SysError & e) { @@ -107,7 +117,7 @@ void connect(int fd, const std::string & path) } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (connect(fd, (struct sockaddr *) &addr, sizeof(addr)) == -1) + if (connect(fd, psaddr, sizeof(addr)) == -1) throw SysError("cannot connect to socket at '%s'", path); } } From 8ae3aeec9442e2b249abdb42a2853618b74a68a2 Mon Sep 17 00:00:00 2001 From: Jacek Galowicz Date: Thu, 2 Nov 2023 15:52:53 +0100 Subject: [PATCH 140/307] Don't use std::make_unique right before release --- src/libutil/signals.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/signals.cc b/src/libutil/signals.cc index 4632aa319..eaa4ea30e 100644 --- a/src/libutil/signals.cc +++ b/src/libutil/signals.cc @@ -179,7 +179,7 @@ std::unique_ptr createInterruptCallback(std::function auto token = interruptCallbacks->nextToken++; interruptCallbacks->callbacks.emplace(token, callback); - auto res = std::make_unique(); + std::unique_ptr res {new InterruptCallbackImpl{}}; res->token = token; return std::unique_ptr(res.release()); From 55da93942428d51ba3fa3577d3ff79cd739fb38e Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 02:57:11 +0100 Subject: [PATCH 141/307] fix typo --- doc/manual/src/contributing/documentation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/contributing/documentation.md b/doc/manual/src/contributing/documentation.md index 75226cd1a..1dddb207c 100644 --- a/doc/manual/src/contributing/documentation.md +++ b/doc/manual/src/contributing/documentation.md @@ -172,7 +172,7 @@ Please observe these guidelines to ease reviews: > ``` ```` - Highlight syntax definiions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation: + Highlight syntax definitions as such, using 
[EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation: ```` > **Syntax** From f134dbdffb81cea72a2e4abfb9a13904417b82aa Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 04:23:16 +0100 Subject: [PATCH 142/307] move section on make variables it should be after the general build instructions, as it goes into more detail. --- doc/manual/src/contributing/hacking.md | 42 +++++++++++++------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 0fa59e891..fbdc7b7f7 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -67,27 +67,6 @@ $ nix build You can also build Nix for one of the [supported platforms](#platforms). -## Makefile variables - -You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run -`make install`. - -You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment -variables to override `Makefile` variables. - -- `ENABLE_BUILD=yes` to enable building the C++ code. -- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). - - The docs can take a while to build, so you may want to disable this for local development. -- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. -- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. -- `OPTIMIZE=1` to enable optimizations. -- `libraries=libutil programs=` to only build a specific library (this will - fail in the linking phase if you don't have the other libraries built, but is - useful for checking types). -- `libraries= programs=nix` to only build a specific program (this will not, in - general, work, because the programs need the libraries). - ## Building Nix To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: @@ -132,6 +111,27 @@ $ nix-build You can also build Nix for one of the [supported platforms](#platforms). +## Makefile variables + +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run +`make install`. + +You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment +variables to override `Makefile` variables. + +- `ENABLE_BUILD=yes` to enable building the C++ code. +- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). + + The docs can take a while to build, so you may want to disable this for local development. +- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. +- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. +- `OPTIMIZE=1` to enable optimizations. +- `libraries=libutil programs=` to only build a specific library (this will + fail in the linking phase if you don't have the other libraries built, but is + useful for checking types). +- `libraries= programs=nix` to only build a specific program (this will not, in + general, work, because the programs need the libraries). 
+ ## Platforms Nix can be built for various platforms, as specified in [`flake.nix`]: From 28eb406834ed176d84e22898ccbcf4ecb963416c Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Wed, 17 Jan 2024 04:39:26 +0100 Subject: [PATCH 143/307] reword section on make variables - use one line per sentence - use imperative for instructions - add link to Make documentation --- doc/manual/src/contributing/hacking.md | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fbdc7b7f7..fe91787a3 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -113,11 +113,9 @@ You can also build Nix for one of the [supported platforms](#platforms). ## Makefile variables -You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run -`make install`. +You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`. -You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment -variables to override `Makefile` variables. +Run `make` with [`--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: - `ENABLE_BUILD=yes` to enable building the C++ code. - `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). @@ -126,11 +124,12 @@ variables to override `Makefile` variables. - `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests. - `ENABLE_UNIT_TESTS=yes` to enable building the unit tests. - `OPTIMIZE=1` to enable optimizations. -- `libraries=libutil programs=` to only build a specific library (this will - fail in the linking phase if you don't have the other libraries built, but is - useful for checking types). -- `libraries= programs=nix` to only build a specific program (this will not, in - general, work, because the programs need the libraries). +- `libraries=libutil programs=` to only build a specific library. + + This will fail in the linking phase if the other libraries haven't been built, but is useful for checking types. +- `libraries= programs=nix` to only build a specific program. + + This will not work in general, because the programs need the libraries. ## Platforms From d0a284284bc93014c98294292b7f4b95864f37ee Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 17 Jan 2024 16:54:45 +0100 Subject: [PATCH 144/307] refactor: Extract simply, awkwardly Store::queryPathInfoFromClientCache This is useful for determining quickly which substituters to query. An alternative would be for users to invoke the narinfo cache db directly, so why do we need this change? - It is easier to use. I believe Nix itself should also use it. - This way, the narinfo cache db remains an implementation detail. - Callers get to use the in-memory cache as well. 
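
As a sketch (not part of this change), a caller that only wants to
consult the client-side caches could wrap the new method roughly like
this, assuming the Callback<T> helper from libutil:

    std::optional<ref<const ValidPathInfo>> lookupCachedPathInfo(
        Store & store, const StorePath & path)
    {
        std::optional<ref<const ValidPathInfo>> info;
        Callback<ref<const ValidPathInfo>> callback([&](std::future<ref<const ValidPathInfo>> f) {
            info = f.get();
        });
        try {
            if (store.queryPathInfoFromClientCache(path, callback))
                return info; // known valid; the callback has already run
        } catch (InvalidPath &) {
            // negative cache entry: the path is known to be invalid
        }
        return std::nullopt; // unknown locally; fall back to queryPathInfo()
    }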
--- src/libstore/store-api.cc | 64 +++++++++++++++++++++++---------------- src/libstore/store-api.hh | 12 ++++++++ 2 files changed, 50 insertions(+), 26 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0c37ecd30..66bc95625 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,6 +685,42 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } +bool Store::queryPathInfoFromClientCache(const StorePath & storePath, + Callback> & callback) +{ + auto hashPart = std::string(storePath.hashPart()); + + { + auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); + if (res && res->isKnownNow()) { + stats.narInfoReadAverted++; + if (!res->didExist()) + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + callback(ref(res->value)); + return true; + } + } + + if (diskCache) { + auto res = diskCache->lookupNarInfo(getUri(), hashPart); + if (res.first != NarInfoDiskCache::oUnknown) { + stats.narInfoReadAverted++; + { + auto state_(state.lock()); + state_->pathInfoCache.upsert(std::string(storePath.to_string()), + res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); + if (res.first == NarInfoDiskCache::oInvalid || + !goodStorePath(storePath, res.second->path)) + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + } + callback(ref(res.second)); + return true; + } + } + + return false; +} + void Store::queryPathInfo(const StorePath & storePath, Callback> callback) noexcept @@ -692,32 +728,8 @@ void Store::queryPathInfo(const StorePath & storePath, auto hashPart = std::string(storePath.hashPart()); try { - { - auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); - if (res && res->isKnownNow()) { - stats.narInfoReadAverted++; - if (!res->didExist()) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - return callback(ref(res->value)); - } - } - - if (diskCache) { - auto res = diskCache->lookupNarInfo(getUri(), hashPart); - if (res.first != NarInfoDiskCache::oUnknown) { - stats.narInfoReadAverted++; - { - auto state_(state.lock()); - state_->pathInfoCache.upsert(std::string(storePath.to_string()), - res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); - if (res.first == NarInfoDiskCache::oInvalid || - !goodStorePath(storePath, res.second->path)) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - } - return callback(ref(res.second)); - } - } - + if (queryPathInfoFromClientCache(storePath, callback)) + return; } catch (...) { return callback.rethrow(); } auto callbackPtr = std::make_shared(std::move(callback)); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 9667b5e9e..2a1092d9e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -282,6 +282,18 @@ public: void queryPathInfo(const StorePath & path, Callback> callback) noexcept; + /** + * NOTE: this is not the final interface - to be modified in next commit. + * + * Asynchronous version that only queries the local narinfo cache and not + * the actual store. 
+ * + * @return true if the path was known and the callback invoked + * @return false if the path was not known and the callback not invoked + * @throw InvalidPathError if the path is known to be invalid + */ + bool queryPathInfoFromClientCache(const StorePath & path, Callback> & callback); + /** * Query the information about a realisation. */ From e938912cff5ccded14444377f9776e86c585f917 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 13:08:03 -0500 Subject: [PATCH 145/307] Fix indentation error in `flake.nix` --- flake.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 49f214e72..0309f84b3 100644 --- a/flake.nix +++ b/flake.nix @@ -197,12 +197,12 @@ perl-bindings = final.nix-perl-bindings; }; - nix-perl-bindings = final.callPackage ./perl { - inherit fileset stdenv; - }; - + nix-perl-bindings = final.callPackage ./perl { + inherit fileset stdenv; }; + }; + in { # A Nixpkgs overlay that overrides the 'nix' and # 'nix.perl-bindings' packages. From 1de8eed28a3cb1e449c10ecdb524aec27fe9dc35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 13:11:04 -0500 Subject: [PATCH 146/307] Move dependency patches from top level into subdir Good to not clutter the top-level directory. --- .../boehmgc-coroutine-sp-fallback.diff | 0 .../boehmgc-traceable_allocator-public.diff | 0 flake.nix | 4 ++-- package.nix | 1 - 4 files changed, 2 insertions(+), 3 deletions(-) rename boehmgc-coroutine-sp-fallback.diff => dep-patches/boehmgc-coroutine-sp-fallback.diff (100%) rename boehmgc-traceable_allocator-public.diff => dep-patches/boehmgc-traceable_allocator-public.diff (100%) diff --git a/boehmgc-coroutine-sp-fallback.diff b/dep-patches/boehmgc-coroutine-sp-fallback.diff similarity index 100% rename from boehmgc-coroutine-sp-fallback.diff rename to dep-patches/boehmgc-coroutine-sp-fallback.diff diff --git a/boehmgc-traceable_allocator-public.diff b/dep-patches/boehmgc-traceable_allocator-public.diff similarity index 100% rename from boehmgc-traceable_allocator-public.diff rename to dep-patches/boehmgc-traceable_allocator-public.diff diff --git a/flake.nix b/flake.nix index 49f214e72..2b9ce17e3 100644 --- a/flake.nix +++ b/flake.nix @@ -163,10 +163,10 @@ enableLargeConfig = true; }).overrideAttrs(o: { patches = (o.patches or []) ++ [ - ./boehmgc-coroutine-sp-fallback.diff + ./dep-patches/boehmgc-coroutine-sp-fallback.diff # https://github.com/ivmai/bdwgc/pull/586 - ./boehmgc-traceable_allocator-public.diff + ./dep-patches/boehmgc-traceable_allocator-public.diff ]; }); diff --git a/package.nix b/package.nix index a1188ba9c..192df90ab 100644 --- a/package.nix +++ b/package.nix @@ -174,7 +174,6 @@ in { ./mk (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts) ] ++ lib.optionals doBuild [ - ./boehmgc-coroutine-sp-fallback.diff ./doc ./misc ./precompiled-headers.h From 39ab50f9ee64f0455e37a8136638d9757252c226 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2024 21:41:37 +0100 Subject: [PATCH 147/307] Store::buildPaths(): Fix display of store paths This was broken in 7ac39ff05c8353c665174e8df61dd76a2b0b93db. 
--- src/libstore/build/entry-points.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 74eca63f3..7f0a05d5d 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -26,9 +26,9 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod } if (i->exitCode != Goal::ecSuccess) { if (auto i2 = dynamic_cast(i.get())) - failed.insert(std::string { i2->drvPath.to_string() }); + failed.insert(printStorePath(i2->drvPath)); else if (auto i2 = dynamic_cast(i.get())) - failed.insert(std::string { i2->storePath.to_string()}); + failed.insert(printStorePath(i2->storePath)); } } From a3cf27ca47328b11173147ca7180e0bae798bb2c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Wed, 17 Jan 2024 22:19:51 +0100 Subject: [PATCH 148/307] Print a more helpful message if the daemon crashes Instead of error: unexpected end-of-file you now get error: Nix daemon disconnected unexpectedly (maybe it crashed?) --- src/libstore/remote-store.cc | 1 + src/libutil/serialise.cc | 2 +- src/libutil/serialise.hh | 5 +++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 078b9fe00..ccf95beef 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -67,6 +67,7 @@ void RemoteStore::initConnection(Connection & conn) { /* Send the magic greeting, check for the reply. */ try { + conn.from.endOfFileError = "Nix daemon disconnected unexpectedly (maybe it crashed?)"; conn.to << WORKER_MAGIC_1; conn.to.flush(); StringSink saved; diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 316105603..afbf66b9d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -132,7 +132,7 @@ size_t FdSource::readUnbuffered(char * data, size_t len) n = ::read(fd, data, len); } while (n == -1 && errno == EINTR); if (n == -1) { _good = false; throw SysError("reading from file"); } - if (n == 0) { _good = false; throw EndOfFile("unexpected end-of-file"); } + if (n == 0) { _good = false; throw EndOfFile(endOfFileError); } read += n; return n; } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 3f57ce88b..689b2070b 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -153,12 +153,13 @@ struct FdSource : BufferedSource { int fd; size_t read = 0; + std::string endOfFileError{"unexpected end-of-file"}; FdSource() : fd(-1) { } FdSource(int fd) : fd(fd) { } - FdSource(FdSource&&) = default; + FdSource(FdSource &&) = default; - FdSource& operator=(FdSource && s) + FdSource & operator=(FdSource && s) { fd = s.fd; s.fd = -1; From 3016e67c21c8ea1f1c44528c7895fad1761406c3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 16 Jan 2024 10:35:16 -0500 Subject: [PATCH 149/307] `bind`: give same treatment as `connect` in #8544, dedup It is good to propagate the underlying error so whether or not we use a process to deal with path length issues is not observable. Also, as these wrapper functions got more and more complex, the code duplication got worse and worse. The new `bindConnectProcHelper` function deduplicates them. 
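
For background (not part of this change): the fork/chdir workaround
that both wrappers keep exists because sun_path in sockaddr_un is a
small fixed-size buffer, typically 108 bytes on Linux, so over-long
socket paths cannot be passed to bind()/connect() directly:

    // sketch of the relevant (platform-dependent) layout
    struct sockaddr_un {
        sa_family_t sun_family;    /* AF_UNIX */
        char        sun_path[108]; /* socket pathname */
    };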
--- src/libutil/unix-domain-socket.cc | 84 ++++++++++++------------------- 1 file changed, 33 insertions(+), 51 deletions(-) diff --git a/src/libutil/unix-domain-socket.cc b/src/libutil/unix-domain-socket.cc index 3b6d54a2c..0bcf9040d 100644 --- a/src/libutil/unix-domain-socket.cc +++ b/src/libutil/unix-domain-socket.cc @@ -38,52 +38,20 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode) return fdSocket; } -static struct sockaddr* safeSockAddrPointerCast(struct sockaddr_un *addr) { - // Casting between types like these legacy C library interfaces require - // is forbidden in C++. - // To maintain backwards compatibility, the implementation of the - // bind function contains some hints to the compiler that allow for this + +static void bindConnectProcHelper( + std::string_view operationName, auto && operation, + int fd, const std::string & path) +{ + struct sockaddr_un addr; + addr.sun_family = AF_UNIX; + + // Casting between types like these legacy C library interfaces + // require is forbidden in C++. To maintain backwards + // compatibility, the implementation of the bind/connect functions + // contains some hints to the compiler that allow for this // special case. - return reinterpret_cast(addr); -} - -void bind(int fd, const std::string & path) -{ - unlink(path.c_str()); - - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - auto psaddr {safeSockAddrPointerCast(&addr)}; - - if (path.size() + 1 >= sizeof(addr.sun_path)) { - Pid pid = startProcess([&] { - Path dir = dirOf(path); - if (chdir(dir.c_str()) == -1) - throw SysError("chdir to '%s' failed", dir); - std::string base(baseNameOf(path)); - if (base.size() + 1 >= sizeof(addr.sun_path)) - throw Error("socket path '%s' is too long", base); - memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (bind(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot bind to socket '%s'", path); - _exit(0); - }); - int status = pid.wait(); - if (status != 0) - throw Error("cannot bind to socket '%s'", path); - } else { - memcpy(addr.sun_path, path.c_str(), path.size() + 1); - if (bind(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot bind to socket '%s'", path); - } -} - - -void connect(int fd, const std::string & path) -{ - struct sockaddr_un addr; - addr.sun_family = AF_UNIX; - auto psaddr {safeSockAddrPointerCast(&addr)}; + auto * psaddr = reinterpret_cast(&addr); if (path.size() + 1 >= sizeof(addr.sun_path)) { Pipe pipe; @@ -98,8 +66,8 @@ void connect(int fd, const std::string & path) if (base.size() + 1 >= sizeof(addr.sun_path)) throw Error("socket path '%s' is too long", base); memcpy(addr.sun_path, base.c_str(), base.size() + 1); - if (connect(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot connect to socket at '%s'", path); + if (operation(fd, psaddr, sizeof(addr)) == -1) + throw SysError("cannot %s to socket at '%s'", operationName, path); writeFull(pipe.writeSide.get(), "0\n"); } catch (SysError & e) { writeFull(pipe.writeSide.get(), fmt("%d\n", e.errNo)); @@ -110,16 +78,30 @@ void connect(int fd, const std::string & path) pipe.writeSide.close(); auto errNo = string2Int(chomp(drainFD(pipe.readSide.get()))); if (!errNo || *errNo == -1) - throw Error("cannot connect to socket at '%s'", path); + throw Error("cannot %s to socket at '%s'", operationName, path); else if (*errNo > 0) { errno = *errNo; - throw SysError("cannot connect to socket at '%s'", path); + throw SysError("cannot %s to socket at '%s'", operationName, path); } } else { memcpy(addr.sun_path, path.c_str(), path.size() + 1); - 
if (connect(fd, psaddr, sizeof(addr)) == -1) - throw SysError("cannot connect to socket at '%s'", path); + if (operation(fd, psaddr, sizeof(addr)) == -1) + throw SysError("cannot %s to socket at '%s'", operationName, path); } } + +void bind(int fd, const std::string & path) +{ + unlink(path.c_str()); + + bindConnectProcHelper("bind", ::bind, fd, path); +} + + +void connect(int fd, const std::string & path) +{ + bindConnectProcHelper("connect", ::connect, fd, path); +} + } From 574db8350491d8da3f65625de1f91bc667e67360 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 17 Jan 2024 23:46:03 -0500 Subject: [PATCH 150/307] Push `addToStoreFromDump` `unsupported(...)` down `Store` class hierarchy Instead of having it be the default method in `Store` itself, have it be the implementation in `DummyStore` and `LegacySSHStore`. Then just the implementations which fail to provide the method pay the "penalty" of dealing with the icky `unimplemented` function for non-compliance. Picks up where #8217. Getting close to no `unsupported` in the `Store` interface itself! More progress on issue #5729. --- src/libstore/dummy-store.cc | 9 +++++++++ src/libstore/legacy-ssh-store.hh | 9 +++++++++ src/libstore/store-api.hh | 3 +-- 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index f52a309d1..e4f13b8f4 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -58,6 +58,15 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store RepairFlag repair, CheckSigsFlag checkSigs) override { unsupported("addToStore"); } + virtual StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override + { unsupported("addToStore"); } + void narFromPath(const StorePath & path, Sink & sink) override { unsupported("narFromPath"); } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index c5a3ce677..7cee31d66 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -69,6 +69,15 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor RepairFlag repair) override { unsupported("addToStore"); } + virtual StorePath addToStoreFromDump( + Source & dump, + std::string_view name, + ContentAddressMethod method = FileIngestionMethod::Recursive, + HashAlgorithm hashAlgo = HashAlgorithm::SHA256, + const StorePathSet & references = StorePathSet(), + RepairFlag repair = NoRepair) override + { unsupported("addToStore"); } + private: void putBuildSettings(Connection & conn); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 9667b5e9e..b3c935db1 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -466,8 +466,7 @@ public: ContentAddressMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = StorePathSet(), - RepairFlag repair = NoRepair) - { unsupported("addToStoreFromDump"); } + RepairFlag repair = NoRepair) = 0; /** * Add a mapping indicating that `deriver!outputName` maps to the output path From 78074bdea4fcb4403cc7b1e533d1fcbcf51e01a5 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 12:47:29 +0100 Subject: [PATCH 151/307] tests/nixos/fetch-git: Apply suggestions --- tests/nixos/fetch-git/default.nix 
| 2 +- tests/nixos/fetch-git/testsupport/setup.nix | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/nixos/fetch-git/default.nix b/tests/nixos/fetch-git/default.nix index 254fecaaf..1d6bcb637 100644 --- a/tests/nixos/fetch-git/default.nix +++ b/tests/nixos/fetch-git/default.nix @@ -24,7 +24,7 @@ testCases = map (testCaseName: {...}: { - imports = ["${./test-cases}/${testCaseName}"]; + imports = [ (./test-cases + "/${testCaseName}") ]; # ensures tests are named like their directories they are defined in name = testCaseName; }) diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index 2f74f51f8..8fc8e2e7c 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -39,9 +39,6 @@ in The name of the test case. A repository with that name will be set up on the gitea server and locally. - - This name can also be used to execute only a single test case via: - `nix build .#hydraJobs.fetch-git.{test-case-name}` ''; }; options.description = mkOption { From 1fe8f54bd30fead52d21ae472fb4f0f68a5c6fdd Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 15:27:57 +0100 Subject: [PATCH 152/307] Use BackedStringView --- src/libutil/serialise.cc | 2 +- src/libutil/serialise.hh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index afbf66b9d..7fc211491 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -132,7 +132,7 @@ size_t FdSource::readUnbuffered(char * data, size_t len) n = ::read(fd, data, len); } while (n == -1 && errno == EINTR); if (n == -1) { _good = false; throw SysError("reading from file"); } - if (n == 0) { _good = false; throw EndOfFile(endOfFileError); } + if (n == 0) { _good = false; throw EndOfFile(std::string(*endOfFileError)); } read += n; return n; } diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh index 689b2070b..d9522566f 100644 --- a/src/libutil/serialise.hh +++ b/src/libutil/serialise.hh @@ -153,7 +153,7 @@ struct FdSource : BufferedSource { int fd; size_t read = 0; - std::string endOfFileError{"unexpected end-of-file"}; + BackedStringView endOfFileError{"unexpected end-of-file"}; FdSource() : fd(-1) { } FdSource(int fd) : fd(fd) { } From ab786e22f16eed0d95123d5698eb71079c312584 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 15:29:54 +0100 Subject: [PATCH 153/307] Show what goal is waiting for a build slot --- src/libstore/build/worker.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc index 974a9f510..d57e22393 100644 --- a/src/libstore/build/worker.cc +++ b/src/libstore/build/worker.cc @@ -251,7 +251,7 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers) void Worker::waitForBuildSlot(GoalPtr goal) { - debug("wait for build slot"); + goal->trace("wait for build slot"); bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution; if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) || (isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs)) From a18d8d688a826ff535b3eeff289ef51db33a413b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 17:01:45 +0100 Subject: [PATCH 154/307] LocalStore::addToStore(): Ignore exceptions from parseDump() In the "discard" case (i.e. 
when the store path already exists locally), when we call parseDump() from a Finally and it throws an exception (e.g. if the download of the NAR fails), Nix crashes: terminate called after throwing an instance of 'nix::SubstituteGone' what(): error: file 'nar/06br3254rx4gz4cvjzxlv028jrx80zg5i4jr62vjmn416dqihgr7.nar.xz' does not exist in binary cache 'http://localhost' Aborted (core dumped) --- src/libstore/local-store.cc | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 5a399c8be..07068f8f8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1049,7 +1049,11 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, Finally cleanup = [&]() { if (!narRead) { NullParseSink sink; - parseDump(sink, source); + try { + parseDump(sink, source); + } catch (...) { + ignoreException(); + } } }; From dca0a802405be9798e12ad8be2ec6d227d9a2fa2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 18 Jan 2024 17:16:34 +0100 Subject: [PATCH 155/307] copyStorePath(): Bail out early if the store path already exists In rare cases (e.g. when using allowSubstitutes = false), it's possible that we simultaneously have a DerivationGoal *and* a SubstitutionGoal building the same path. So if a DerivationGoal already built the path while the SubstitutionGoal was waiting for a download slot, it saves us a superfluous download to exit early. --- src/libstore/store-api.cc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 0c37ecd30..9cb187e66 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -981,6 +981,11 @@ void copyStorePath( RepairFlag repair, CheckSigsFlag checkSigs) { + /* Bail out early (before starting a download from srcStore) if + dstStore already has this path. */ + if (!repair && dstStore.isValidPath(storePath)) + return; + auto srcUri = srcStore.getUri(); auto dstUri = dstStore.getUri(); auto storePathS = srcStore.printStorePath(storePath); From fd41979d7857f6984b4b7571706a45b16f9c0a5d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 13:12:59 +0100 Subject: [PATCH 156/307] tests/nixos/fetch-git: Factor out gitea repo module --- .../fetch-git/testsupport/gitea-repo.nix | 51 +++++++++++++++++++ tests/nixos/fetch-git/testsupport/gitea.nix | 2 + tests/nixos/fetch-git/testsupport/setup.nix | 44 +++++----------- 3 files changed, 65 insertions(+), 32 deletions(-) create mode 100644 tests/nixos/fetch-git/testsupport/gitea-repo.nix diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix new file mode 100644 index 000000000..916552bb2 --- /dev/null +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -0,0 +1,51 @@ +{ lib, ... }: +let + inherit (lib) mkOption types; + + testCaseExtension = { config, ... }: { + setupScript = '' + repo = Repo("${config.name}") + ''; + }; +in +{ + options = { + testCases = mkOption { + type = types.listOf (types.submodule testCaseExtension); + }; + }; + config = { + setupScript = '' + class Repo: + """ + A class to create a git repository on the gitea server and locally. 
+ """ + def __init__(self, name): + self.name = name + self.path = "/tmp/repos/" + name + self.remote = "http://gitea:3000/test/" + name + self.remote_ssh = "ssh://gitea/root/" + name + self.git = f"git -C {self.path}" + self.create() + + def create(self): + # create ssh remote repo + gitea.succeed(f""" + git init --bare -b main /root/{self.name} + """) + # create http remote repo + gitea.succeed(f""" + curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ + -H 'Accept: application/json' -H 'Content-Type: application/json' \ + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + """) + # setup git remotes on client + client.succeed(f""" + mkdir -p {self.path} \ + && git init -b main {self.path} \ + && {self.git} remote add origin {self.remote} \ + && {self.git} remote add origin-ssh root@gitea:{self.name} + """) + ''; + }; +} \ No newline at end of file diff --git a/tests/nixos/fetch-git/testsupport/gitea.nix b/tests/nixos/fetch-git/testsupport/gitea.nix index 2ea23961e..cf87bb466 100644 --- a/tests/nixos/fetch-git/testsupport/gitea.nix +++ b/tests/nixos/fetch-git/testsupport/gitea.nix @@ -15,6 +15,7 @@ in { imports = [ ../testsupport/setup.nix + ../testsupport/gitea-repo.nix ]; nodes = { gitea = { pkgs, ... }: { @@ -96,5 +97,6 @@ in { client.succeed(""" ssh root@gitea true """) + ''; } diff --git a/tests/nixos/fetch-git/testsupport/setup.nix b/tests/nixos/fetch-git/testsupport/setup.nix index 8fc8e2e7c..a81d5614b 100644 --- a/tests/nixos/fetch-git/testsupport/setup.nix +++ b/tests/nixos/fetch-git/testsupport/setup.nix @@ -12,7 +12,10 @@ let ### TEST ${testCase.name}: ${testCase.description} ### with subtest("${testCase.description}"): - repo = Repo("${testCase.name}") + # Setup + ${indent testCase.setupScript} + + # Test ${indent testCase.script} ''; in @@ -47,12 +50,19 @@ in A description of the test case. ''; }; + options.setupScript = mkOption { + type = types.lines; + description = '' + Python code that runs before the test case. + ''; + default = ""; + }; options.script = mkOption { type = types.lines; description = '' Python code that runs the test. - Variables defined by `setupScript` will be available here. + Variables defined by the global `setupScript`, as well as `testCases.*.setupScript` will be available here. ''; }; }); @@ -67,36 +77,6 @@ in nix.settings.experimental-features = ["nix-command" "flakes"]; }; setupScript = '' - class Repo: - """ - A class to create a git repository on the gitea server and locally. 
- """ - def __init__(self, name): - self.name = name - self.path = "/tmp/repos/" + name - self.remote = "http://gitea:3000/test/" + name - self.remote_ssh = "ssh://gitea/root/" + name - self.git = f"git -C {self.path}" - self.create() - - def create(self): - # create ssh remote repo - gitea.succeed(f""" - git init --bare -b main /root/{self.name} - """) - # create http remote repo - gitea.succeed(f""" - curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ - -H 'Accept: application/json' -H 'Content-Type: application/json' \ - -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} - """) - # setup git remotes on client - client.succeed(f""" - mkdir -p {self.path} \ - && git init -b main {self.path} \ - && {self.git} remote add origin {self.remote} \ - && {self.git} remote add origin-ssh root@gitea:{self.name} - """) ''; testScript = '' start_all(); From 94eba0ebbbadfa48a4c1253cb94070a41310fae2 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 13:42:41 +0100 Subject: [PATCH 157/307] tests/nixos/fetch-git: Memoize -> save Memoization is for thunk-like behavior whereas this is executed eagerly. --- tests/nixos/fetch-git/test-cases/http-simple/default.nix | 2 +- tests/nixos/fetch-git/test-cases/ssh-simple/default.nix | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix index 1bd5bbba2..333ee45fd 100644 --- a/tests/nixos/fetch-git/test-cases/http-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -8,7 +8,7 @@ && {repo.git} commit -m 'commit1' """) - # memoize the revision + # save the revision rev1 = client.succeed(f""" {repo.git} rev-parse HEAD """).strip() diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix index 0e4494ae0..f2deca141 100644 --- a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -8,7 +8,7 @@ && {repo.git} commit -m 'commit1' """) - # memoize the revision + # save the revision rev1 = client.succeed(f""" {repo.git} rev-parse HEAD """).strip() From 12541704052849d4160a13f7bbd873b40f19a3f9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 20:56:33 +0100 Subject: [PATCH 158/307] tests/nixos/fetch-git: Make the store paths unique --- tests/nixos/fetch-git/test-cases/http-simple/default.nix | 6 ++++-- tests/nixos/fetch-git/test-cases/ssh-simple/default.nix | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/tests/nixos/fetch-git/test-cases/http-simple/default.nix b/tests/nixos/fetch-git/test-cases/http-simple/default.nix index 333ee45fd..dcab8067e 100644 --- a/tests/nixos/fetch-git/test-cases/http-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/http-simple/default.nix @@ -1,10 +1,12 @@ +{ config, ... 
}: { description = "can fetch a git repo via http"; script = '' # add a file to the repo client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' """) diff --git a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix index f2deca141..f5fba1698 100644 --- a/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix +++ b/tests/nixos/fetch-git/test-cases/ssh-simple/default.nix @@ -1,10 +1,12 @@ +{ config, ... }: { description = "can fetch a git repo via ssh"; script = '' # add a file to the repo client.succeed(f""" - echo chiang-mai > {repo.path}/thailand \ - && {repo.git} add thailand \ + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add test-case thailand \ && {repo.git} commit -m 'commit1' """) From ed975e953c30c335f8403352acc785323a5a925c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 20:59:24 +0100 Subject: [PATCH 159/307] tests/nixos/fetch-git: Testsupport for private repos --- .../fetch-git/testsupport/gitea-repo.nix | 36 +++++++++++++++---- 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index 916552bb2..a3ad65ca4 100644 --- a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -1,11 +1,31 @@ { lib, ... }: let - inherit (lib) mkOption types; + inherit (lib) + mkIf + mkOption + types + ; + + boolPyLiteral = b: if b then "True" else "False"; testCaseExtension = { config, ... }: { - setupScript = '' - repo = Repo("${config.name}") - ''; + options = { + repo.enable = mkOption { + type = types.bool; + default = true; + description = "Whether to provide a repo variable - automatic repo creation."; + }; + repo.private = mkOption { + type = types.bool; + default = false; + description = "Whether the repo should be private."; + }; + }; + config = mkIf config.repo.enable { + setupScript = '' + repo = Repo("${config.name}", private=${boolPyLiteral config.repo.private}) + ''; + }; }; in { @@ -16,16 +36,20 @@ in }; config = { setupScript = '' + def boolToJSON(b): + return "true" if b else "false" + class Repo: """ A class to create a git repository on the gitea server and locally. 
""" - def __init__(self, name): + def __init__(self, name, private=False): self.name = name self.path = "/tmp/repos/" + name self.remote = "http://gitea:3000/test/" + name self.remote_ssh = "ssh://gitea/root/" + name self.git = f"git -C {self.path}" + self.private = private self.create() def create(self): @@ -37,7 +61,7 @@ in gitea.succeed(f""" curl --fail -X POST http://{gitea_admin}:{gitea_admin_password}@gitea:3000/api/v1/user/repos \ -H 'Accept: application/json' -H 'Content-Type: application/json' \ - -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main"}}' )} + -d {shlex.quote( f'{{"name":"{self.name}", "default_branch": "main", "private": {boolToJSON(self.private)}}}' )} """) # setup git remotes on client client.succeed(f""" From 76a50b3a69dd7202fa4c68ca8d12fde152e6341a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:25:30 +0100 Subject: [PATCH 160/307] doc: GitRepoImpl::path --- src/libfetchers/git-utils.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 6726407b5..f34329fab 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -139,6 +139,7 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type) struct GitRepoImpl : GitRepo, std::enable_shared_from_this { + /** Location of the repository on disk. */ CanonPath path; Repository repo; From 8d422c2fef4309b4b7de8e2f909957775a9ec3ef Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:26:24 +0100 Subject: [PATCH 161/307] Revert libgit2 fetching libgit2 is not capable of using git-credentials helpers yet. This prevents private repositories from being used. Based on code that was replaced in https://github.com/NixOS/nix/pull/9240 (Introduce libgit2); hence: Co-authored-by: Eelco Dolstra --- src/libfetchers/git-utils.cc | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index f34329fab..911c16c4b 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -383,27 +383,20 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this { Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url)); - Remote remote; + // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that) + // then use code that was removed in this commit (see blame) - if (git_remote_create_anonymous(Setter(remote), *this, url.c_str())) - throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message); + auto dir = this->path; - char * refspecs[] = {(char *) refspec.c_str()}; - git_strarray refspecs2 { - .strings = refspecs, - .count = 1 - }; - - git_fetch_options opts = GIT_FETCH_OPTIONS_INIT; - // FIXME: for some reason, shallow fetching over ssh barfs - // with "could not read from remote repository". - opts.depth = shallow && parseURL(url).scheme != "ssh" ? 1 : GIT_FETCH_DEPTH_FULL; - opts.callbacks.payload = &act; - opts.callbacks.sideband_progress = sidebandProgressCallback; - opts.callbacks.transfer_progress = transferProgressCallback; - - if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr)) - throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message); + runProgram(RunOptions { + .program = "git", + .searchPath = true, + // FIXME: git stderr messes up our progress indicator, so + // we're using --quiet for now. 
Should process its stderr. + .args = { "-C", path.abs(), "fetch", "--quiet", "--force", "--", url, refspec }, + .input = {}, + .isInteractive = true + }); } void verifyCommit( From 346d513d86491f2040735d22ba49cb0d701edb70 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 18 Jan 2024 22:34:38 +0100 Subject: [PATCH 162/307] tests/nixos/fetch-git: Add http-auth test --- .../test-cases/http-auth/default.nix | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 tests/nixos/fetch-git/test-cases/http-auth/default.nix diff --git a/tests/nixos/fetch-git/test-cases/http-auth/default.nix b/tests/nixos/fetch-git/test-cases/http-auth/default.nix new file mode 100644 index 000000000..d483d54fb --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/http-auth/default.nix @@ -0,0 +1,40 @@ +{ config, ... }: +{ + description = "can fetch a private git repo via http"; + repo.private = true; + script = '' + # add a file to the repo + client.succeed(f""" + echo ${config.name /* to make the git tree and store path unique */} > {repo.path}/test-case \ + && echo lutyabrook > {repo.path}/new-york-state \ + && {repo.git} add test-case new-york-state \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin main + """) + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).outPath" + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/new-york-state + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr "(builtins.fetchGit {repo.remote}).rev" + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + ''; +} From 14f470ec4e9d481698b97ea2dae101693fbaca95 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 19 Jan 2024 00:32:30 +0100 Subject: [PATCH 163/307] doc/hacking.md: Hint short option `make -e` Co-authored-by: Valentin Gagarin --- doc/manual/src/contributing/hacking.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index fe91787a3..9a7623dc9 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -115,7 +115,7 @@ You can also build Nix for one of the [supported platforms](#platforms). You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`. -Run `make` with [`--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: +Run `make` with [`-e` / `--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables: - `ENABLE_BUILD=yes` to enable building the C++ code. - `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.). From a34ec0bd123619277e5682b7f6f8da41166e3eab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 4 Nov 2023 20:10:55 -0400 Subject: [PATCH 164/307] Include store path exact spec in the docs This is niche, but deserves to be in the manual because it is describing behavior visible to the outside world, not mere implementation details. 
--- doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/protocols/store-path.md | 104 +++++++++++++++++++++++++ src/libstore/store-api.cc | 84 ++------------------ 3 files changed, 111 insertions(+), 78 deletions(-) create mode 100644 doc/manual/src/protocols/store-path.md diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index c67ddc6cb..e6390c60a 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,6 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) + - [Exact Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md new file mode 100644 index 000000000..d1c35b05e --- /dev/null +++ b/doc/manual/src/protocols/store-path.md @@ -0,0 +1,104 @@ +# Complete Store Path Calculation + +This is the complete specification for how store paths are calculated. + +Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. +But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. + +```bnf + ::= /- +``` +where + +- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
<fingerprint>`
+
+  This is the hash part of the store name.
+
+- `<fingerprint>` = the string `<type>:sha256:<inner-digest>:<store>:<name>`;
+
+  Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
+  (e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).
+
+- `<name>` = the name of the store object.
+
+- `<store>` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
+- `<type>` = one of:
+
+  - ```bnf
+    text:<r1>:<r2>:...<rN>
+    ```
+
+    for encoded derivations written to the store.
+    `<r1> ... <rN>` are the store paths referenced by this path.
+    Those are encoded in the form described by ``.
+
+  - ```bnf
+    source:<r1>:<r2>:...:<rN>:self
+    ```
+
+    For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
+    Just like in the text case, we can have the store objects referenced by their paths.
+    Additionally, we can have an optional `:self` label to denote self reference.
+
+  - ```bnf
+    output:<id>
+    ```
+
+    For either the outputs built from derivations,
+    or paths copied to the store that are hashed as a single (flat) file directly, or hashed with an algorithm other than [SHA-256][sha-256]
+    (for NAR + SHA-256, "source" is used instead; it's silly, but it's done that way for compatibility).
+
+    `<id>` is the name of the output (usually, "out").
+    For content-addressed store objects, `<id>` is always "out".
+
+- `<inner-digest>` = base-16 representation of a SHA-256 hash of `<inner-fingerprint>`
+
+- `<inner-fingerprint>` = one of the following based on `<type>`:
+
+  - if `<type>` = `text:...`:
+
+    the string written to the resulting store path.
+
+  - if `<type>` = `source:...`:
+
+    the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
+
+  - if `<type>` = `output:<id>`:
+
+    - For input-addressed derivation outputs:
+
+      the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.
+
+    - For content-addressed store paths:
+
+      the string `fixed:out:<rec>:<algo>:<hash>`, where
+
+      - `<rec>` = one of:
+
+        - `r:` for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
+
+        - `` (empty string) for hashes of the flat (single file) serialization
+
+      - `<algo>` = `md5`, `sha1` or `sha256`
+
+      - `<hash>` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+
+      Note that `<id>` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `<type>` = `source:...` case.
+
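+For illustration, assume a hypothetical store object named `hello-2.12` with no references, added to the store directory `/nix/store` from a NAR whose SHA-256 hash in base-16 is `1f6b...` (truncated here).
+Putting the pieces above together:
+
+```
+<fingerprint> = source:sha256:1f6b...:/nix/store:hello-2.12
+<digest>      = base-32 representation of the first 160 bits of the SHA-256 hash of <fingerprint>
+store path    = /nix/store/<digest>-hello-2.12
+```
+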
+[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
+[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
+
+## Historical Note
+
+The `<type>` = `source:...` and `<type>` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+
+The original reason for this way of computing names was to prevent name collisions (for security).
+For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
+The former would have an `<inner-fingerprint>` starting with `output:out:`, while the latter would have an `<inner-fingerprint>` starting with `source:`.
+
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs. manually-hashed content-addressed data like this was not useful.
+Now, data that is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
+It also removes the ambiguity from the grammar.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 0c37ecd30..dcfe5991d 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -65,85 +65,13 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 }
 
 
-/* Store paths have the following form:
+/*
+The exact specification of store paths is in `protocols/store-path.md`
+in the Nix manual. These few functions implement that specification.
 
-    = /-
-
-   where
-
-    = the location of the Nix store, usually /nix/store
-
-    = a human readable name for the path, typically obtained
-     from the name attribute of the derivation, or the name of the
-     source file from which the store path is created.  For derivation
-     outputs other than the default "out" output, the string "-"
-     is suffixed to .
-
-    = base-32 representation of the first 160 bits of a SHA-256
-     hash of ; the hash part of the store name
-
-    = the string ":sha256:

::"; - note that it includes the location of the store as well as the - name to make sure that changes to either of those are reflected - in the hash (e.g. you won't get /nix/store/-name1 and - /nix/store/-name2 with equal hash parts). - - = one of: - "text:::..." - for plain text files written to the store using - addTextToStore(); ... are the store paths referenced - by this path, in the form described by - "source:::...::self" - for paths copied to the store using addToStore() when recursive - = true and hashAlgo = "sha256". Just like in the text case, we - can have the store paths referenced by the path. - Additionally, we can have an optional :self label to denote self - reference. - "output:" - for either the outputs created by derivations, OR paths copied - to the store using addToStore() with recursive != true or - hashAlgo != "sha256" (in that case "source" is used; it's - silly, but it's done that way for compatibility). is the - name of the output (usually, "out"). - -

= base-16 representation of a SHA-256 hash of - - = - if = "text:...": - the string written to the resulting store path - if = "source:...": - the serialisation of the path from which this store path is - copied, as returned by hashPath() - if = "output:": - for non-fixed derivation outputs: - the derivation (see hashDerivationModulo() in - primops.cc) - for paths copied by addToStore() or produced by fixed-output - derivations: - the string "fixed:out:::", where - = "r:" for recursive (path) hashes, or "" for flat - (file) hashes - = "md5", "sha1" or "sha256" - = base-16 representation of the path or flat hash of - the contents of the path (or expected contents of the - path for fixed-output derivations) - - Note that since an output derivation has always type output, while - something added by addToStore can have type output or source depending - on the hash, this means that the same input can be hashed differently - if added to the store via addToStore or via a derivation, in the sha256 - recursive case. - - It would have been nicer to handle fixed-output derivations under - "source", e.g. have something like "source:", but we're - stuck with this for now... - - The main reason for this way of computing names is to prevent name - collisions (for security). For instance, it shouldn't be feasible - to come up with a derivation whose output path collides with the - path for a copied source. The former would have a starting with - "output:out:", while the latter would have a starting with - "source:". +If changes do these functions go behind mere implementation changes but +also update the user-visible behavior, please update the specification +to match. */ From 28d7db249ace91c10a9ad6cb6d11a6c2109929fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Fri, 19 Jan 2024 10:10:00 +0100 Subject: [PATCH 165/307] Remove a nonsensical shorthand flag in `nix store add` `-n` was an alias for `--mode`, but that seems to just be a copy-paste error as it doesn't make sense. `--mode` probably doesn't need a shorthand flag at all, so remove it. Noticed in https://github.com/NixOS/nix/pull/9809#issuecomment-1899890555 --- src/nix/add-to-store.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 64a43ecfa..171848002 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -38,7 +38,6 @@ struct CmdAddToStore : MixDryRun, StoreCommand addFlag({ .longName = "mode", - .shortName = 'n', .description = R"( How to compute the hash of the input. 
One of: From bc00fa46472c56ccfddc2d6e81453be537d2e051 Mon Sep 17 00:00:00 2001 From: DavHau Date: Fri, 19 Jan 2024 15:59:15 +0700 Subject: [PATCH 166/307] fetchTree/fetchGit: re-enable shallow fetching Add several tests for git fetching: - shallow-cache-separation: can fetch the same repo shallowly and non-shallowly - shallow-ignore-ref: ensure that ref gets ignored when shallow=true is set - ssh-shallow: can fetch a git repo via ssh using shallow=1 --- src/libfetchers/git-utils.cc | 9 ++- src/libfetchers/git.cc | 16 ++++-- .../shallow-cache-separation/default.nix | 57 +++++++++++++++++++ .../test-cases/shallow-ignore-ref/default.nix | 40 +++++++++++++ .../test-cases/ssh-shallow/default.nix | 52 +++++++++++++++++ .../fetch-git/testsupport/gitea-repo.nix | 2 +- 6 files changed, 168 insertions(+), 8 deletions(-) create mode 100644 tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix create mode 100644 tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 911c16c4b..382a363f0 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -387,13 +387,20 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this // then use code that was removed in this commit (see blame) auto dir = this->path; + Strings gitArgs; + if (shallow) { + gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; + } + else { + gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec }; + } runProgram(RunOptions { .program = "git", .searchPath = true, // FIXME: git stderr messes up our progress indicator, so // we're using --quiet for now. Should process its stderr. - .args = { "-C", path.abs(), "fetch", "--quiet", "--force", "--", url, refspec }, + .args = gitArgs, .input = {}, .isInteractive = true }); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 6ecb7a4ea..f9a1cb1bc 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -50,10 +50,12 @@ bool touchCacheFile(const Path & path, time_t touch_time) return lutimes(path.c_str(), times) == 0; } -Path getCachePath(std::string_view key) +Path getCachePath(std::string_view key, bool shallow) { - return getCacheDir() + "/nix/gitv3/" + - hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false); + return getCacheDir() + + "/nix/gitv3/" + + hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false) + + (shallow ? "-shallow" : ""); } // Returns the name of the HEAD branch. @@ -92,7 +94,8 @@ std::optional readHead(const Path & path) // Persist the HEAD ref from the remote repo in the local cached repo. bool storeCachedHead(const std::string & actualUrl, const std::string & headRef) { - Path cacheDir = getCachePath(actualUrl); + // set shallow=false as HEAD will never be queried for a shallow repo + Path cacheDir = getCachePath(actualUrl, false); try { runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef }); } catch (ExecError &e) { @@ -107,7 +110,8 @@ std::optional readHeadCached(const std::string & actualUrl) { // Create a cache path to store the branch of the HEAD ref. Append something // in front of the URL to prevent collision with the repository itself. 
- Path cacheDir = getCachePath(actualUrl); + // set shallow=false as HEAD will never be queried for a shallow repo + Path cacheDir = getCachePath(actualUrl, false); Path headRefFile = cacheDir + "/HEAD"; time_t now = time(0); @@ -508,7 +512,7 @@ struct GitInputScheme : InputScheme if (!input.getRev()) input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); } else { - Path cacheDir = getCachePath(repoInfo.url); + Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input)); repoDir = cacheDir; repoInfo.gitDir = "."; diff --git a/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix b/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix new file mode 100644 index 000000000..57561e74b --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/shallow-cache-separation/default.nix @@ -0,0 +1,57 @@ +{ + description = "can fetch the same repo shallowly and non-shallowly"; + script = '' + # create branch1 off of main + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' \ + \ + && {repo.git} push origin --all + """) + + # save the revision + mainRev = client.succeed(f""" + {repo.git} rev-parse main + """).strip() + + # fetch shallowly + revCountShallow = client.succeed(f""" + nix eval --impure --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = true; + }}).revCount + ' + """).strip() + # ensure the revCount is 0 + assert revCountShallow == "0", f"revCountShallow should be 0, but is {revCountShallow}" + + # fetch non-shallowly + revCountNonShallow = client.succeed(f""" + nix eval --impure --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = false; + }}).revCount + ' + """).strip() + # ensure the revCount is 1 + assert revCountNonShallow == "1", f"revCountNonShallow should be 1, but is {revCountNonShallow}" + + # fetch shallowly again + revCountShallow2 = client.succeed(f""" + nix eval --impure --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{mainRev}"; + shallow = true; + }}).revCount + ' + """).strip() + # ensure the revCount is 0 + assert revCountShallow2 == "0", f"revCountShallow2 should be 0, but is {revCountShallow2}" + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix b/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix new file mode 100644 index 000000000..456ee8341 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/shallow-ignore-ref/default.nix @@ -0,0 +1,40 @@ +{ + description = "ensure that ref gets ignored when shallow=true is set"; + script = '' + # create branch1 off of main + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' \ + \ + && {repo.git} checkout -b branch1 main \ + && echo bangkok > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit2' \ + \ + && {repo.git} push origin --all + """) + + # save the revisions + mainRev = client.succeed(f""" + {repo.git} rev-parse main + """).strip() + branch1Rev = client.succeed(f""" + {repo.git} rev-parse branch1 + """).strip() + + # Ensure that ref gets ignored when fetching shallowly. + # This would fail if the ref was respected, as branch1Rev is not on main. 
+ client.succeed(f""" + nix eval --impure --raw --expr ' + (builtins.fetchGit {{ + url = "{repo.remote}"; + rev = "{branch1Rev}"; + ref = "main"; + shallow = true; + }}) + ' + """) + + ''; +} diff --git a/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix b/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix new file mode 100644 index 000000000..979512af9 --- /dev/null +++ b/tests/nixos/fetch-git/test-cases/ssh-shallow/default.nix @@ -0,0 +1,52 @@ +{ + description = "can fetch a git repo via ssh using shallow=1"; + script = '' + # add a file to the repo + client.succeed(f""" + echo chiang-mai > {repo.path}/thailand \ + && {repo.git} add thailand \ + && {repo.git} commit -m 'commit1' + """) + + # memoize the revision + rev1 = client.succeed(f""" + {repo.git} rev-parse HEAD + """).strip() + + # push to the server + client.succeed(f""" + {repo.git} push origin-ssh main + """) + + fetchGit_expr = f""" + builtins.fetchGit {{ + url = "{repo.remote_ssh}"; + rev = "{rev1}"; + shallow = true; + }} + """ + + # fetch the repo via nix + fetched1 = client.succeed(f""" + nix eval --impure --raw --expr '({fetchGit_expr}).outPath' + """) + + # check if the committed file is there + client.succeed(f""" + test -f {fetched1}/thailand + """) + + # check if the revision is the same + rev1_fetched = client.succeed(f""" + nix eval --impure --raw --expr '({fetchGit_expr}).rev' + """).strip() + assert rev1 == rev1_fetched, f"rev1: {rev1} != rev1_fetched: {rev1_fetched}" + + # check if revCount is 1 + revCount1 = client.succeed(f""" + nix eval --impure --expr '({fetchGit_expr}).revCount' + """).strip() + print(f"revCount1: {revCount1}") + assert revCount1 == '0', f"rev count is not 0 but {revCount1}" + ''; +} diff --git a/tests/nixos/fetch-git/testsupport/gitea-repo.nix b/tests/nixos/fetch-git/testsupport/gitea-repo.nix index a3ad65ca4..e9f4adcc1 100644 --- a/tests/nixos/fetch-git/testsupport/gitea-repo.nix +++ b/tests/nixos/fetch-git/testsupport/gitea-repo.nix @@ -72,4 +72,4 @@ in """) ''; }; -} \ No newline at end of file +} From 75a6e6dd0eb60f3bcaaa3b33b085fb542638eb44 Mon Sep 17 00:00:00 2001 From: Yuxuan Shui Date: Thu, 18 Jan 2024 16:39:34 +0000 Subject: [PATCH 167/307] Add --unpack to nix store prefetch-file --- src/nix/prefetch.cc | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index b5d619006..84b79ea28 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -262,6 +262,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON { std::string url; bool executable = false; + bool unpack = false; std::optional name; HashAlgorithm hashAlgo = HashAlgorithm::SHA256; std::optional expectedHash; @@ -294,6 +295,14 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON .handler = {&executable, true}, }); + addFlag({ + .longName = "unpack", + .description = + "Unpack the archive (which must be a tarball or zip file) and add " + "the result to the Nix store.", + .handler = {&unpack, true}, + }); + expectArg("url", &url); } @@ -310,7 +319,7 @@ struct CmdStorePrefetchFile : StoreCommand, MixJSON } void run(ref store) override { - auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, false, executable); + auto [storePath, hash] = prefetchFile(store, url, name, hashAlgo, expectedHash, unpack, executable); if (json) { auto res = nlohmann::json::object(); From 8983ee8b2e0c10e6cac672a5a7ada4698235a62e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 17 Jan 2024 17:54:03 +0100 Subject: [PATCH 168/307] 
refactor: Un-callback transform Store::queryPathInfoFromClientCache This part of the code was not necessarily callback based. Removing CPS is always nice; particularly if there's no loss of functionality, like here. --- src/libstore/store-api.cc | 18 +++++++++--------- src/libstore/store-api.hh | 8 ++++---- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 66bc95625..f237578e5 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,8 +685,7 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } -bool Store::queryPathInfoFromClientCache(const StorePath & storePath, - Callback> & callback) +std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) { auto hashPart = std::string(storePath.hashPart()); @@ -696,8 +695,7 @@ bool Store::queryPathInfoFromClientCache(const StorePath & storePath, stats.narInfoReadAverted++; if (!res->didExist()) throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - callback(ref(res->value)); - return true; + return ref(res->value); } } @@ -713,12 +711,11 @@ bool Store::queryPathInfoFromClientCache(const StorePath & storePath, !goodStorePath(storePath, res.second->path)) throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } - callback(ref(res.second)); - return true; + return ref(res.second); } } - return false; + return std::nullopt; } @@ -728,8 +725,11 @@ void Store::queryPathInfo(const StorePath & storePath, auto hashPart = std::string(storePath.hashPart()); try { - if (queryPathInfoFromClientCache(storePath, callback)) - return; + auto r = queryPathInfoFromClientCache(storePath); + if (r.has_value()) { + ref & info = *r; + return callback(ref(info)); + } } catch (...) { return callback.rethrow(); } auto callbackPtr = std::make_shared(std::move(callback)); diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 2a1092d9e..e47f2c768 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -285,14 +285,14 @@ public: /** * NOTE: this is not the final interface - to be modified in next commit. * - * Asynchronous version that only queries the local narinfo cache and not + * Version of queryPathInfo() that only queries the local narinfo cache and not * the actual store. * - * @return true if the path was known and the callback invoked - * @return false if the path was not known and the callback not invoked + * @return `std::make_optional(vpi)` if the path is known + * @return `std::null_opt` if the path was not known to be valid or invalid * @throw InvalidPathError if the path is known to be invalid */ - bool queryPathInfoFromClientCache(const StorePath & path, Callback> & callback); + std::optional> queryPathInfoFromClientCache(const StorePath & path); /** * Query the information about a realisation. From d19627e8b4c3c09b0cc1329a9acaa8e5b070f26e Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 19 Jan 2024 17:00:39 +0100 Subject: [PATCH 169/307] refactor: Remove throw from queryPathInfoFromClientCache Return a value instead of throwing. Rather than the more trivial refactor of wrapping the return value in another std::optional, we retain the meaning of the outer optional: "we know at least something." 
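
A rough sketch of how a caller can use the new tri-state result, assuming the
`Store`, `StorePath` and `ValidPathInfo` declarations from `store-api.hh`
shown in the surrounding diffs:

```cpp
#include "store-api.hh"

using namespace nix;

void lookupExample(Store & store, const StorePath & path)
{
    // Tri-state result of the local narinfo cache lookup:
    //   std::nullopt           -> nothing is known about the path
    //   std::optional{nullptr} -> the path is known to be invalid
    //   std::optional{info}    -> the path is known valid; `info` describes it
    std::optional<std::shared_ptr<const ValidPathInfo>> cached =
        store.queryPathInfoFromClientCache(path);

    if (!cached) {
        // Not in the client cache; fall back to store.queryPathInfo(...).
    } else if (!*cached) {
        throw InvalidPath("path '%s' is not valid", store.printStorePath(path));
    } else {
        printInfo("cached narSize = %d", (*cached)->narSize);
    }
}
```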
So we have changed: return nullopt -> return nullopt throw InvalidPath -> return make_optional(nullptr) return vpi -> return make_optional(vpi) --- src/libstore/store-api.cc | 22 ++++++++++++++-------- src/libstore/store-api.hh | 10 ++++------ 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index f237578e5..2cd40d510 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -685,7 +685,8 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual) && (expected.name() == Store::MissingName || expected.name() == actual.name()); } -std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) + +std::optional> Store::queryPathInfoFromClientCache(const StorePath & storePath) { auto hashPart = std::string(storePath.hashPart()); @@ -693,9 +694,10 @@ std::optional> Store::queryPathInfoFromClientCache(cons auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string())); if (res && res->isKnownNow()) { stats.narInfoReadAverted++; - if (!res->didExist()) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); - return ref(res->value); + if (res->didExist()) + return std::make_optional(res->value); + else + return std::make_optional(nullptr); } } @@ -709,9 +711,10 @@ std::optional> Store::queryPathInfoFromClientCache(cons res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second }); if (res.first == NarInfoDiskCache::oInvalid || !goodStorePath(storePath, res.second->path)) - throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); + return std::make_optional(nullptr); } - return ref(res.second); + assert(res.second); + return std::make_optional(res.second); } } @@ -727,8 +730,11 @@ void Store::queryPathInfo(const StorePath & storePath, try { auto r = queryPathInfoFromClientCache(storePath); if (r.has_value()) { - ref & info = *r; - return callback(ref(info)); + std::shared_ptr & info = *r; + if (info) + return callback(ref(info)); + else + throw InvalidPath("path '%s' is not valid", printStorePath(storePath)); } } catch (...) { return callback.rethrow(); } diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index e47f2c768..2f8a9440e 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -283,16 +283,14 @@ public: Callback> callback) noexcept; /** - * NOTE: this is not the final interface - to be modified in next commit. - * * Version of queryPathInfo() that only queries the local narinfo cache and not * the actual store. * - * @return `std::make_optional(vpi)` if the path is known - * @return `std::null_opt` if the path was not known to be valid or invalid - * @throw InvalidPathError if the path is known to be invalid + * @return `std::nullopt` if nothing is known about the path in the local narinfo cache. + * @return `std::make_optional(nullptr)` if the path is known to not exist. + * @return `std::make_optional(validPathInfo)` if the path is known to exist. */ - std::optional> queryPathInfoFromClientCache(const StorePath & path); + std::optional> queryPathInfoFromClientCache(const StorePath & path); /** * Query the information about a realisation. 
From 356352c3709f69b6d11ed7f14ffa586219170908 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 01:07:26 -0500 Subject: [PATCH 170/307] Add missing `--hash-algo` flag to `nix store add` --- doc/manual/rl-next/nix-store-add.md | 7 +++++++ src/nix/add-to-store.cc | 7 +++++-- tests/functional/add.sh | 2 ++ 3 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 doc/manual/rl-next/nix-store-add.md diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md new file mode 100644 index 000000000..d55711569 --- /dev/null +++ b/doc/manual/rl-next/nix-store-add.md @@ -0,0 +1,7 @@ +--- +synopsis: Give `nix store add` a `--hash-algo` flag +prs: 9809 +--- + +Adds a missing feature that was present in the old CLI, and matches our +plans to have similar flags for `nix hash convert` and `hash hash path`. diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 171848002..f2dbe8a2c 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -22,6 +22,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand Path path; std::optional namePart; ContentAddressMethod caMethod = FileIngestionMethod::Recursive; + HashAlgorithm hashAlgo = HashAlgorithm::SHA256; CmdAddToStore() { @@ -51,6 +52,8 @@ struct CmdAddToStore : MixDryRun, StoreCommand this->caMethod = parseIngestionMethod(s); }}, }); + + addFlag(Flag::mkHashAlgoFlag("hash-algo", &hashAlgo)); } void run(ref store) override @@ -63,9 +66,9 @@ struct CmdAddToStore : MixDryRun, StoreCommand auto storePath = dryRun ? store->computeStorePath( - *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first + *namePart, accessor, path2, caMethod, hashAlgo, {}).first : store->addToStoreSlow( - *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path; + *namePart, accessor, path2, caMethod, hashAlgo, {}).path; logger->cout("%s", store->printStorePath(storePath)); } diff --git a/tests/functional/add.sh b/tests/functional/add.sh index d0fedcb25..762e01dbe 100644 --- a/tests/functional/add.sh +++ b/tests/functional/add.sh @@ -37,9 +37,11 @@ clearStore path3=$(nix store add-path ./dummy) [[ "$path1" == "$path2" ]] [[ "$path1" == "$path3" ]] + path4=$(nix store add --mode nar --hash-algo sha1 ./dummy) ) ( path1=$(nix store add --mode flat ./dummy) path2=$(nix store add-file ./dummy) [[ "$path1" == "$path2" ]] + path4=$(nix store add --mode flat --hash-algo sha1 ./dummy) ) From 49221493e243c4d10e69e7465a21be53902e16a8 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 31 Aug 2023 11:34:52 -0700 Subject: [PATCH 171/307] Log what `nix flake check` does There's still room for improvement, but this produces much more informative output with `-v`: ``` $ nix flake check -v evaluating flake... checking flake output 'checks'... checking derivation checks.aarch64-darwin.ghcid-ng-tests... checking derivation checks.aarch64-darwin.ghcid-ng-clippy... checking derivation checks.aarch64-darwin.ghcid-ng-doc... checking derivation checks.aarch64-darwin.ghcid-ng-fmt... checking derivation checks.aarch64-darwin.ghcid-ng-audit... checking flake output 'packages'... checking derivation packages.aarch64-darwin.ghcid-ng... checking derivation packages.aarch64-darwin.ghcid-ng-tests... checking derivation packages.aarch64-darwin.default... checking flake output 'apps'... checking flake output 'devShells'... checking derivation devShells.aarch64-darwin.default... running flake checks... 
warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux Use '--all-systems' to check all. ``` --- src/nix/flake.cc | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index bebc62deb..0103a9cd9 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -395,6 +395,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkDerivation = [&](const std::string & attrPath, Value & v, const PosIdx pos) -> std::optional { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking derivation %s", attrPath)); auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); @@ -427,6 +429,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking overlay %s", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); @@ -449,6 +453,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking NixOS module %s", attrPath)); state->forceValue(v, pos); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); @@ -460,6 +466,8 @@ struct CmdFlakeCheck : FlakeCommand checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking Hydra job %s", attrPath)); state->forceAttrs(v, pos, ""); if (state->isDerivation(v)) @@ -469,7 +477,7 @@ struct CmdFlakeCheck : FlakeCommand state->forceAttrs(*attr.value, attr.pos, ""); auto attrPath2 = concatStrings(attrPath, ".", state->symbols[attr.name]); if (state->isDerivation(*attr.value)) { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking Hydra job '%s'", attrPath2)); checkDerivation(attrPath2, *attr.value, attr.pos); } else @@ -484,7 +492,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkNixOSConfiguration = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking NixOS configuration '%s'", attrPath)); Bindings & bindings(*state->allocBindings(0)); auto vToplevel = findAlongAttrPath(*state, "config.system.build.toplevel", bindings, v).first; @@ -499,7 +507,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkTemplate = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking template '%s'", attrPath)); state->forceAttrs(v, pos, ""); @@ -533,6 +541,8 @@ struct CmdFlakeCheck : FlakeCommand auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { + Activity act(*logger, lvlInfo, actUnknown, + fmt("checking bundler %s", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); @@ -552,7 +562,7 @@ struct CmdFlakeCheck : FlakeCommand enumerateOutputs(*state, *vFlake, [&](const std::string & name, Value & vOutput, const PosIdx pos) { - Activity act(*logger, lvlChatty, actUnknown, + Activity act(*logger, lvlInfo, actUnknown, fmt("checking flake output '%s'", name)); try { 
From 9404ce36e4edd1df12892089bdab1ceb7d4d7a97 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 1 Sep 2023 13:09:01 -0700 Subject: [PATCH 172/307] Print derivation paths Also be more consistent with quotes around attribute paths --- src/nix/flake.cc | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0103a9cd9..850ea77da 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -400,8 +400,16 @@ struct CmdFlakeCheck : FlakeCommand auto packageInfo = getDerivation(*state, v, false); if (!packageInfo) throw Error("flake attribute '%s' is not a derivation", attrPath); - // FIXME: check meta attributes - return packageInfo->queryDrvPath(); + else { + // FIXME: check meta attributes + auto storePath = packageInfo->queryDrvPath(); + if (storePath) { + logger->log(lvlInfo, + fmt("derivation evaluated to %s", + store->printStorePath(storePath.value()))); + } + return storePath; + } } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); reportError(e); @@ -430,7 +438,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkOverlay = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking overlay %s", attrPath)); + fmt("checking overlay '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) { throw Error("overlay is not a function, but %s instead", showType(v)); @@ -454,7 +462,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkModule = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking NixOS module %s", attrPath)); + fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); @@ -466,8 +474,6 @@ struct CmdFlakeCheck : FlakeCommand checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { - Activity act(*logger, lvlInfo, actUnknown, - fmt("checking Hydra job %s", attrPath)); state->forceAttrs(v, pos, ""); if (state->isDerivation(v)) @@ -542,7 +548,7 @@ struct CmdFlakeCheck : FlakeCommand auto checkBundler = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { Activity act(*logger, lvlInfo, actUnknown, - fmt("checking bundler %s", attrPath)); + fmt("checking bundler '%s'", attrPath)); state->forceValue(v, pos); if (!v.isLambda()) throw Error("bundler must be a function"); From d75a5f427a385e56c821fdf49a70a150fe7fe6fd Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 1 Sep 2023 13:11:58 -0700 Subject: [PATCH 173/307] Print how many checks are run --- src/nix/flake.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 850ea77da..0e34bd76a 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -781,7 +781,8 @@ struct CmdFlakeCheck : FlakeCommand } if (build && !drvPaths.empty()) { - Activity act(*logger, lvlInfo, actUnknown, "running flake checks"); + Activity act(*logger, lvlInfo, actUnknown, + fmt("running %d flake checks", drvPaths.size())); store->buildPaths(drvPaths); } if (hasErrors) From 561a56cd13b4f12e3dfb6c5e3f42e5d8add04ecc Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 18 Dec 2023 13:53:40 -0800 Subject: [PATCH 174/307] Add release notes --- .../rl-next/nix-flake-check-logs-actions.md | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 
doc/manual/rl-next/nix-flake-check-logs-actions.md diff --git a/doc/manual/rl-next/nix-flake-check-logs-actions.md b/doc/manual/rl-next/nix-flake-check-logs-actions.md new file mode 100644 index 000000000..53a7b35eb --- /dev/null +++ b/doc/manual/rl-next/nix-flake-check-logs-actions.md @@ -0,0 +1,33 @@ +--- +synopsis: Some stack overflow segfaults are fixed +issues: 8882 +prs: 8893 +--- + +`nix flake check` now logs the checks it runs and the derivations it evaluates: + +``` +$ nix flake check -v +evaluating flake... +checking flake output 'checks'... +checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... +derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... +derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... +derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... +derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv +checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... +derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv +checking flake output 'packages'... +checking derivation 'packages.aarch64-darwin.default'... +derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv +checking flake output 'apps'... +checking flake output 'devShells'... +checking derivation 'devShells.aarch64-darwin.default'... +derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv +running 5 flake checks... +warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux +Use '--all-systems' to check all. +``` From edf3ecc497d9931f84d8a28679b51773c761fdd8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 22 Oct 2023 20:01:01 -0400 Subject: [PATCH 175/307] Document JSON formats Good to document these formats separately from commands that happen to use them. Eventually I would like this and `builtins.derivation` to refer to a store section on derivations that is authoritative, but that doesn't yet exist, and will take some time to make. So I think we're just best off merging this now as is. 
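
For reference, these are the formats produced by `nix derivation show` for
derivations and, for store object info, by commands such as `nix path-info
--json`. A quick way to eyeball both, assuming a flake-enabled Nix:

```console
$ nix derivation show nixpkgs#hello
$ nix path-info --json nixpkgs#hello
```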
Co-authored-by: Valentin Gagarin --- doc/manual/src/SUMMARY.md.in | 3 + doc/manual/src/glossary.md | 2 +- doc/manual/src/json/derivation.md | 71 +++++++++++++++++ doc/manual/src/json/store-object-info.md | 97 ++++++++++++++++++++++++ src/libstore/globals.hh | 2 +- src/nix/derivation-add.md | 7 +- src/nix/derivation-show.md | 60 +-------------- 7 files changed, 181 insertions(+), 61 deletions(-) create mode 100644 doc/manual/src/json/derivation.md create mode 100644 doc/manual/src/json/store-object-info.md diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index c67ddc6cb..10fe51fc9 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -104,6 +104,9 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) +- [JSON Formats](json/index.md) + - [Store Object Info](json/store-object-info.md) + - [Derivation](json/derivation.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 3c0570a44..124dc8d2e 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -127,7 +127,7 @@ non-[fixed-output](#gloss-fixed-output-derivation) derivation. -- [output-addressed store object]{#gloss-output-addressed-store-object} +- [content-addressed store object]{#gloss-content-addressed-store-object} A [store object] whose [store path] is determined by its contents. This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation). diff --git a/doc/manual/src/json/derivation.md b/doc/manual/src/json/derivation.md new file mode 100644 index 000000000..649d543cc --- /dev/null +++ b/doc/manual/src/json/derivation.md @@ -0,0 +1,71 @@ +# Derivation JSON Format + +> **Warning** +> +> This JSON format is currently +> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) +> and subject to change. + +The JSON serialization of a +[derivations](@docroot@/glossary.md#gloss-store-derivation) +is a JSON object with the following fields: + +* `name`: + The name of the derivation. + This is used when calculating the store paths of the derivation's outputs. + +* `outputs`: + Information about the output paths of the derivation. + This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields: + + * `path`: The output path. + + * `hashAlgo`: + For fixed-output derivations, the hashing algorithm (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a NAR hash rather than a flat file hash. + + * `hash`: + For fixed-output derivations, the expected content hash in base-16. + + > **Example** + > + > ```json + > "outputs": { + > "out": { + > "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source", + > "hashAlgo": "r:sha256", + > "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" + > } + > } + > ``` + +* `inputSrcs`: + A list of store paths on which this derivation depends. + +* `inputDrvs`: + A JSON object specifying the derivations on which this derivation depends, and what outputs of those derivations. 
+ + > **Example** + > + > ```json + > "inputDrvs": { + > "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], + > "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] + > } + > ``` + + specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. + +* `system`: + The system type on which this derivation is to be built + (e.g. `x86_64-linux`). + +* `builder`: + The absolute path of the program to be executed to run the build. + Typically this is the `bash` shell + (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). + +* `args`: + The command-line arguments passed to the `builder`. + +* `env`: + The environment passed to the `builder`. diff --git a/doc/manual/src/json/store-object-info.md b/doc/manual/src/json/store-object-info.md new file mode 100644 index 000000000..db43c2fa1 --- /dev/null +++ b/doc/manual/src/json/store-object-info.md @@ -0,0 +1,97 @@ +# Store object info JSON format + +> **Warning** +> +> This JSON format is currently +> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command) +> and subject to change. + +Info about a [store object]. + +* `path`: + + [Store path][store path] to the given store object. + +* `narHash`: + + Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + +* `narSize`: + + Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + +* `references`: + + An array of [store paths][store path], possibly including this one. + +* `ca` (optional): + + Content address of this store object's file system object, used to compute its store path. + +[store path]: @docroot@/glossary.md#gloss-store-path +[file system object]: @docroot@/store/file-system-object.md + +## Impure fields + +These are not intrinsic properties of the store object. +In other words, the same store object residing in different store could have different values for these properties. + +* `deriver` (optional): + + The path to the [derivation] from which this store object is produced. + + [derivation]: @docroot@/glossary.md#gloss-store-derivation + +* `registrationTime` (optional): + + When this derivation was added to the store. + +* `ultimate` (optional): + + Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere. + +* `signatures` (optional): + + Signatures claiming that this store object is what it claims to be. + Not relevant for [content-addressed] store objects, + but useful for [input-addressed] store objects. + + [content-addressed]: @docroot@/glossary.md#gloss-content-addressed-store-object + [input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object + +### `.narinfo` extra fields + +This meta data is specific to the "binary cache" family of Nix store types. +This information is not intrinsic to the store object, but about how it is stored. + +* `url`: + + Where to download a compressed archive of the file system objects of this store object. + +* `compression`: + + The compression format that the archive is in. + +* `fileHash`: + + A digest for the compressed archive itself, as opposed to the data contained within. + +* `fileSize`: + + The size of the compressed archive itself. + +## Computed closure fields + +These fields are not stored at all, but computed by traverising the other other fields across all the store objects in a [closure]. 
+ +* `closureSize`: + + The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure]. + +### `.narinfo` extra fields + +* `closureSize`: + + The total size of this store object and every other object in its [closure]. + +[closure]: @docroot@/glossary.md#gloss-closure diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh index 49a4c1f2a..3107c8aed 100644 --- a/src/libstore/globals.hh +++ b/src/libstore/globals.hh @@ -635,7 +635,7 @@ public: - the store object has been signed using a key in the trusted keys list - the [`require-sigs`](#conf-require-sigs) option has been set to `false` - - the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object) + - the store object is [content-addressed](@docroot@/glossary.md#gloss-content-addressed-store-object) )", {"binary-cache-public-keys"}}; diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index f116681ab..d9b8467df 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -9,10 +9,11 @@ Store derivations are used internally by Nix. They are store paths with extension `.drv` that represent the build-time dependency graph to which a Nix expression evaluates. -[store derivation]: ../../glossary.md#gloss-store-derivation -The JSON format is documented under the [`derivation show`] command. +[store derivation]: @docroot@/glossary.md#gloss-store-derivation -[`derivation show`]: ./nix3-derivation-show.md +`nix derivation add` takes a single derivation in the following format: + +{{#include ../../json/derivation.md}} )"" diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 1296e2885..884f1adc6 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -5,8 +5,6 @@ R""( * Show the [store derivation] that results from evaluating the Hello package: - [store derivation]: ../../glossary.md#gloss-store-derivation - ```console # nix derivation show nixpkgs#hello { @@ -48,62 +46,12 @@ a Nix expression evaluates. By default, this command only shows top-level derivations, but with `--recursive`, it also shows their dependencies. -The JSON output is a JSON object whose keys are the store paths of the -derivations, and whose values are a JSON object with the following -fields: +[store derivation]: @docroot@/glossary.md#gloss-store-derivation -* `name`: The name of the derivation. This is used when calculating the - store paths of the derivation's outputs. +`nix derivation show` outputs a JSON map of [store path]s to derivations in the following format: -* `outputs`: Information about the output paths of the - derivation. This is a JSON object with one member per output, where - the key is the output name and the value is a JSON object with these - fields: +[store path]: @docroot@/glossary.md#gloss-store-path - * `path`: The output path. - * `hashAlgo`: For fixed-output derivations, the hashing algorithm - (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a - NAR hash rather than a flat file hash. - * `hash`: For fixed-output derivations, the expected content hash in - base-16. - - Example: - - ```json - "outputs": { - "out": { - "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source", - "hashAlgo": "r:sha256", - "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62" - } - } - ``` - -* `inputSrcs`: A list of store paths on which this derivation depends. 
- -* `inputDrvs`: A JSON object specifying the derivations on which this - derivation depends, and what outputs of those derivations. For - example, - - ```json - "inputDrvs": { - "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], - "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] - } - ``` - - specifies that this derivation depends on the `dev` output of - `curl`, and the `out` output of `unzip`. - -* `system`: The system type on which this derivation is to be built - (e.g. `x86_64-linux`). - -* `builder`: The absolute path of the program to be executed to run - the build. Typically this is the `bash` shell - (e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`). - -* `args`: The command-line arguments passed to the `builder`. - -* `env`: The environment passed to the `builder`. +{{#include ../../json/derivation.md}} )"" From 65294fe5fe4fd5419ea374e73710e8a217ba8060 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 20 Jan 2024 17:07:21 -0500 Subject: [PATCH 176/307] Fix typo in upcomming release notes Thanks @cole-h for finding in https://github.com/NixOS/nix/pull/9815#discussion_r1460604130 --- doc/manual/rl-next/nix-store-add.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md index d55711569..5ef2913b4 100644 --- a/doc/manual/rl-next/nix-store-add.md +++ b/doc/manual/rl-next/nix-store-add.md @@ -4,4 +4,4 @@ prs: 9809 --- Adds a missing feature that was present in the old CLI, and matches our -plans to have similar flags for `nix hash convert` and `hash hash path`. +plans to have similar flags for `nix hash convert` and `nix hash path`. From 202c5e2afc14232b3c9ff32b014387d76c45b3d7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 20:14:48 -0500 Subject: [PATCH 177/307] Start standardizing hash algo flags Do this if we want to do `--hash-algo` everywhere, and not `--algo` for hash commands. The new `nix hash convert` is updated. Deprecated new CLI commands are left as-is (`nix hash path` needs to be redone and is also left as-is). --- doc/manual/rl-next/nix-hash-convert.md | 12 +++++----- src/libutil/args.hh | 6 +++++ src/nix/add-to-store.cc | 2 +- src/nix/hash.cc | 2 +- tests/functional/hash.sh | 32 +++++++++++++------------- 5 files changed, 30 insertions(+), 24 deletions(-) diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md index 2b718a66b..69db9508a 100644 --- a/doc/manual/rl-next/nix-hash-convert.md +++ b/doc/manual/rl-next/nix-hash-convert.md @@ -9,7 +9,7 @@ to stabilization! Examples: - Convert the hash to `nix32`. ```bash - $ nix hash convert --algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" vw46m23bizj4n8afrc0fj19wrp7mj3c0 ``` `nix32` is a base32 encoding with a nix-specific character set. @@ -17,23 +17,23 @@ to stabilization! Examples: hash. 
- Convert the hash to the `sri` format that includes an algorithm specification: ```bash - nix hash convert --algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" sha1-gA1Zz808BekAy04hS+SPa4hqCN8= ``` or with an explicit `-to` format: ```bash - nix hash convert --algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" sha1-gA1Zz808BekAy04hS+SPa4hqCN8= ``` - Assert the input format of the hash: ```bash - nix hash convert --algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= ``` -The `--to`/`--from`/`--algo` parameters have context-sensitive auto-completion. +The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. ## Related Deprecations diff --git a/src/libutil/args.hh b/src/libutil/args.hh index 18b0ae583..6c9c48065 100644 --- a/src/libutil/args.hh +++ b/src/libutil/args.hh @@ -177,7 +177,13 @@ protected: std::optional experimentalFeature; static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha); + static Flag mkHashAlgoFlag(HashAlgorithm * ha) { + return mkHashAlgoFlag("hash-algo", ha); + } static Flag mkHashAlgoOptFlag(std::string && longName, std::optional * oha); + static Flag mkHashAlgoOptFlag(std::optional * oha) { + return mkHashAlgoOptFlag("hash-algo", oha); + } static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf); static Flag mkHashFormatOptFlag(std::string && longName, std::optional * ohf); }; diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index f2dbe8a2c..7c534517d 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -53,7 +53,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand }}, }); - addFlag(Flag::mkHashAlgoFlag("hash-algo", &hashAlgo)); + addFlag(Flag::mkHashAlgoFlag(&hashAlgo)); } void run(ref store) override diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 83694306e..8ab89e433 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -141,7 +141,7 @@ struct CmdHashConvert : Command CmdHashConvert(): to(HashFormat::SRI) { addFlag(Args::Flag::mkHashFormatOptFlag("from", &from)); addFlag(Args::Flag::mkHashFormatFlagWithDefault("to", &to)); - addFlag(Args::Flag::mkHashAlgoOptFlag("algo", &algo)); + addFlag(Args::Flag::mkHashAlgoOptFlag(&algo)); expectArgs({ .label = "hashes", .handler = {&hashStrings}, diff --git a/tests/functional/hash.sh b/tests/functional/hash.sh index 47eed5178..ff270076e 100644 --- a/tests/functional/hash.sh +++ b/tests/functional/hash.sh @@ -87,7 +87,7 @@ try3() { # $2 = expected hash in base16 # $3 = expected hash in base32 # $4 = expected hash in base64 - h64=$(nix hash convert --algo "$1" --to base64 "$2") + h64=$(nix hash convert --hash-algo "$1" --to base64 "$2") [ "$h64" = "$4" ] h64=$(nix-hash --type "$1" --to-base64 "$2") [ "$h64" = "$4" ] @@ -95,13 +95,13 @@ try3() { h64=$(nix hash to-base64 --type "$1" "$2") [ "$h64" = "$4" ] - sri=$(nix hash convert --algo "$1" --to sri 
"$2") + sri=$(nix hash convert --hash-algo "$1" --to sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix-hash --type "$1" --to-sri "$2") [ "$sri" = "$1-$4" ] sri=$(nix hash to-sri --type "$1" "$2") [ "$sri" = "$1-$4" ] - h32=$(nix hash convert --algo "$1" --to base32 "$2") + h32=$(nix hash convert --hash-algo "$1" --to base32 "$2") [ "$h32" = "$3" ] h32=$(nix-hash --type "$1" --to-base32 "$2") [ "$h32" = "$3" ] @@ -110,7 +110,7 @@ try3() { h16=$(nix-hash --type "$1" --to-base16 "$h32") [ "$h16" = "$2" ] - h16=$(nix hash convert --algo "$1" --to base16 "$h64") + h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64") [ "$h16" = "$2" ] h16=$(nix hash to-base16 --type "$1" "$h64") [ "$h16" = "$2" ] @@ -143,40 +143,40 @@ try3() { # Auto-detecting the input from algo and length. # - sri=$(nix hash convert --algo "$1" "$2") + sri=$(nix hash convert --hash-algo "$1" "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$3") + sri=$(nix hash convert --hash-algo "$1" "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$4") + sri=$(nix hash convert --hash-algo "$1" "$4") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$2") + sri=$(nix hash convert --hash-algo "$1" "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$3") + sri=$(nix hash convert --hash-algo "$1" "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" "$4") + sri=$(nix hash convert --hash-algo "$1" "$4") [ "$sri" = "$1-$4" ] # # Asserting input format succeeds. # - sri=$(nix hash convert --algo "$1" --from base16 "$2") + sri=$(nix hash convert --hash-algo "$1" --from base16 "$2") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" --from nix32 "$3") + sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3") [ "$sri" = "$1-$4" ] - sri=$(nix hash convert --algo "$1" --from base64 "$4") + sri=$(nix hash convert --hash-algo "$1" --from base64 "$4") [ "$sri" = "$1-$4" ] # # Asserting input format fails. # - fail=$(nix hash convert --algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] - fail=$(nix hash convert --algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] - fail=$(nix hash convert --algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") + fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?") [[ "$fail" == *"error: input hash"*"exit: 1" ]] } From 0bcdb4f5f0830261ecbff1cbc805b215cac1abae Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 08:38:52 -0500 Subject: [PATCH 178/307] Elaborate what the monthly assignments status check entails Co-authored-by: Robert Hensing --- maintainers/README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/maintainers/README.md b/maintainers/README.md index 585e2b50a..fa321c7c0 100644 --- a/maintainers/README.md +++ b/maintainers/README.md @@ -44,7 +44,10 @@ The team meets twice a week: 1. Triage issues and pull requests from the [No Status](#no-status) column (30 min) 2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min). - Once a month, this slot is used to check the [Assigned](#assigned) column to make sure that nothing bitrots in it. 
+  Once a month, each team member checks the [Assigned](#assigned) column for prs/issues assigned to them, to either
+  - unblock it by providing input
+  - mark it as draft if it is blocked on the contributor
+  - escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again.

 - Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)

From 80b84710b8c676620ed1e8bf8ff3bb1d5bc19b80 Mon Sep 17 00:00:00 2001
From: pennae <82953136+pennae@users.noreply.github.com>
Date: Mon, 22 Jan 2024 15:15:53 +0100
Subject: [PATCH 179/307] Update src/libexpr/eval.cc

Co-authored-by: John Ericson
---
 src/libexpr/eval.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index dc9167144..2330102c3 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -427,7 +427,7 @@ EvalState::EvalState(
         .or_ = symbols.create("or"),
         .findFile = symbols.create("__findFile"),
         .nixPath = symbols.create("__nixPath"),
-        .body = symbols.create("body")
+        .body = symbols.create("body"),
     }
     , repair(NoRepair)
     , emptyBindings(0)

From 316e50cc7c0bad8448c9f475993e52f9d5dee7c0 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Mon, 22 Jan 2024 10:32:25 -0500
Subject: [PATCH 180/307] Fix `if`...`if`...`else` ambiguity

This can be parsed two ways. Add a pair of braces so it must be parsed
the intended way.
---
 src/libexpr/primops/fetchTree.cc | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc
index bc5a69720..d32c264f7 100644
--- a/src/libexpr/primops/fetchTree.cc
+++ b/src/libexpr/primops/fetchTree.cc
@@ -174,11 +174,12 @@ static void fetchTree(
     if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
         input = lookupInRegistries(state.store, input).first;

-    if (evalSettings.pureEval && !input.isLocked())
+    if (evalSettings.pureEval && !input.isLocked()) {
         if (params.isFetchGit)
             state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos]));
         else
             state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
+    }

     state.checkURI(input.toURLString());

From cb7fbd4d831de9d98b7dfd149d8a96939be31bb2 Mon Sep 17 00:00:00 2001
From: Rebecca Turner
Date: Wed, 6 Dec 2023 16:03:01 -0800
Subject: [PATCH 181/307] Print value on type error

Adds the failing value to `value is <actual type> while a <expected type>
is expected` error messages.
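
For example (output abbreviated), a primop that expects a list now reports
the offending value along with its type, matching the updated test
expectations below:

```console
$ nix-instantiate --eval --expr 'builtins.length 1'
error:
       … while evaluating the first argument passed to builtins.length

       error: expected a list but found an integer: 1
```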
--- .../rl-next/print-value-in-type-error.md | 23 ++ .../rl-next/source-positions-in-errors.md | 2 +- doc/manual/rl-next/with-error-reporting.md | 4 +- src/libexpr/eval-inline.hh | 11 +- src/libexpr/eval.cc | 38 ++- src/libexpr/primops.cc | 2 +- src/libexpr/print-ambiguous.cc | 1 + src/libexpr/print-options.hh | 12 + src/libexpr/print.cc | 7 + src/libexpr/print.hh | 21 +- src/libutil/error.cc | 4 +- src/nix/eval.cc | 2 +- tests/functional/dyn-drv/eval-outputOf.sh | 2 +- .../lang/eval-fail-attr-name-type.err.exp | 2 +- .../lang/eval-fail-call-primop.err.exp | 2 +- tests/functional/lang/eval-fail-list.err.exp | 2 +- .../lang/eval-fail-set-override.err.exp | 2 +- .../eval-fail-using-set-as-attr-name.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 224 +++++++++--------- 19 files changed, 227 insertions(+), 136 deletions(-) create mode 100644 doc/manual/rl-next/print-value-in-type-error.md diff --git a/doc/manual/rl-next/print-value-in-type-error.md b/doc/manual/rl-next/print-value-in-type-error.md new file mode 100644 index 000000000..aaae22756 --- /dev/null +++ b/doc/manual/rl-next/print-value-in-type-error.md @@ -0,0 +1,23 @@ +--- +synopsis: Type errors include the failing value +issues: #561 +prs: #9753 +--- + +In errors like `value is an integer while a list was expected`, the message now +includes the failing value. + +Before: + +``` + error: value is a set while a string was expected +``` + +After: + +``` + error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} +``` diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md index 5b210289d..b1a33d83b 100644 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ b/doc/manual/rl-next/source-positions-in-errors.md @@ -38,5 +38,5 @@ error: | ^ 5| - error: value is a set while a string was expected + error: expected a string but found a set ``` diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md index 10b020956..d9e07df52 100644 --- a/doc/manual/rl-next/with-error-reporting.md +++ b/doc/manual/rl-next/with-error-reporting.md @@ -8,7 +8,7 @@ prs: 9658 Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: ``` -nix-repl> with 1; a +nix-repl> with 1; a error: … @@ -27,5 +27,5 @@ error: 1| with 1; a | ^ - error: value is an integer while a set was expected + error: expected a set but found an integer ``` diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index f7710f819..42cb68bbe 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -1,6 +1,7 @@ #pragma once ///@file +#include "print.hh" #include "eval.hh" namespace nix { @@ -114,7 +115,10 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); + error("expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withTrace(pos, errorCtx).debugThrow(); } } @@ -124,7 +128,10 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e { forceValue(v, pos); if (!v.isList()) { - error("value is %1% 
while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow(); + error("expected a list but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withTrace(pos, errorCtx).debugThrow(); } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 0659a2173..71e956e10 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2,6 +2,7 @@ #include "eval-settings.hh" #include "hash.hh" #include "primops.hh" +#include "print-options.hh" #include "types.hh" #include "util.hh" #include "store-api.hh" @@ -29,9 +30,9 @@ #include #include #include -#include #include #include +#include #include #include @@ -1153,7 +1154,10 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri Value v; e->eval(*this, env, v); if (v.type() != nBool) - error("value is %1% while a Boolean was expected", showType(v)).withFrame(env, *e).debugThrow(); + error("expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withFrame(env, *e).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1167,7 +1171,10 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po try { e->eval(*this, env, v); if (v.type() != nAttrs) - error("value is %1% while a set was expected", showType(v)).withFrame(env, *e).debugThrow(); + error("expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .withFrame(env, *e).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2076,7 +2083,10 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt try { forceValue(v, pos); if (v.type() != nInt) - error("value is %1% while an integer was expected", showType(v)).debugThrow(); + error("expected an integer but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.integer; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2092,7 +2102,10 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err if (v.type() == nInt) return v.integer; else if (v.type() != nFloat) - error("value is %1% while a float was expected", showType(v)).debugThrow(); + error("expected a float but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.fpoint; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2106,7 +2119,10 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx try { forceValue(v, pos); if (v.type() != nBool) - error("value is %1% while a Boolean was expected", showType(v)).debugThrow(); + error("expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2126,7 +2142,10 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) - error("value is %1% while a function was expected", showType(v)).debugThrow(); + error("expected a function but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2139,7 +2158,10 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string try { forceValue(v, pos); if (v.type() != nString) - error("value is %1% while a 
string was expected", showType(v)).debugThrow(); + error("expected a string but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) + .debugThrow(); return v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index c08aea898..5032e95cc 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -997,7 +997,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu if (args[0]->type() == nString) printError("trace: %1%", args[0]->string_view()); else - printError("trace: %1%", printValue(state, *args[0])); + printError("trace: %1%", ValuePrinter(state, *args[0])); state.forceValue(*args[1], pos); v = *args[1]; } diff --git a/src/libexpr/print-ambiguous.cc b/src/libexpr/print-ambiguous.cc index 07c398dd2..521250cec 100644 --- a/src/libexpr/print-ambiguous.cc +++ b/src/libexpr/print-ambiguous.cc @@ -1,6 +1,7 @@ #include "print-ambiguous.hh" #include "print.hh" #include "signals.hh" +#include "eval.hh" namespace nix { diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index 11ff9ae87..aba2eaeae 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -49,4 +49,16 @@ struct PrintOptions size_t maxStringLength = std::numeric_limits::max(); }; +/** + * `PrintOptions` for unknown and therefore potentially large values in error messages, + * to avoid printing "too much" output. + */ +static PrintOptions errorPrintOptions = PrintOptions { + .ansiColors = true, + .maxDepth = 10, + .maxAttrs = 10, + .maxListItems = 10, + .maxStringLength = 1024 +}; + } diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index db26ed4c2..dad6dc9ad 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -7,6 +7,7 @@ #include "store-api.hh" #include "terminal.hh" #include "english.hh" +#include "eval.hh" namespace nix { @@ -501,4 +502,10 @@ void printValue(EvalState & state, std::ostream & output, Value & v, PrintOption Printer(output, state, options).print(v); } +std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) +{ + printValue(printer.state, output, printer.value, printer.options); + return output; +} + } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index 40207d777..a8300264a 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,11 +9,13 @@ #include -#include "eval.hh" #include "print-options.hh" namespace nix { +class EvalState; +struct Value; + /** * Print a string as a Nix string literal. * @@ -59,4 +61,21 @@ std::ostream & printIdentifier(std::ostream & o, std::string_view s); void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {}); +/** + * A partially-applied form of `printValue` which can be formatted using `<<` + * without allocating an intermediate string. 
+ */ +class ValuePrinter { + friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer); +private: + EvalState & state; + Value & value; + PrintOptions options; + +public: + ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {}) + : state(state), value(value), options(options) { } +}; + +std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); } diff --git a/src/libutil/error.cc b/src/libutil/error.cc index bd2f6b840..1f0cb08c9 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * try { * e->eval(*this, env, v); * if (v.type() != nAttrs) - * throwTypeError("value is %1% while a set was expected", v); + * throwTypeError("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * e->eval(*this, env, v); * try { * if (v.type() != nAttrs) - * throwTypeError("value is %1% while a set was expected", v); + * throwTypeError("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; diff --git a/src/nix/eval.cc b/src/nix/eval.cc index b34af34e0..a89fa7412 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -121,7 +121,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption else { state->forceValueDeep(*v); - logger->cout("%s", printValue(*state, *v)); + logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true })); } } }; diff --git a/tests/functional/dyn-drv/eval-outputOf.sh b/tests/functional/dyn-drv/eval-outputOf.sh index 9467feb8d..3681bd098 100644 --- a/tests/functional/dyn-drv/eval-outputOf.sh +++ b/tests/functional/dyn-drv/eval-outputOf.sh @@ -14,7 +14,7 @@ nix --experimental-features 'nix-command' eval --impure --expr \ # resolve first. Adding a test so we don't liberalise it by accident. 
expectStderr 1 nix --experimental-features 'nix-command dynamic-derivations' eval --impure --expr \ 'builtins.outputOf (import ../dependencies.nix {}) "out"' \ - | grepQuiet "value is a set while a string was expected" + | grepQuiet "expected a string but found a set" # Test that "DrvDeep" string contexts are not supported at this time # diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index 23cceb58a..c8d56ba7d 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -13,4 +13,4 @@ error: | ^ 8| - error: value is an integer while a string was expected + error: expected a string but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-call-primop.err.exp b/tests/functional/lang/eval-fail-call-primop.err.exp index ae5b55ed4..0c6f614e8 100644 --- a/tests/functional/lang/eval-fail-call-primop.err.exp +++ b/tests/functional/lang/eval-fail-call-primop.err.exp @@ -7,4 +7,4 @@ error: … while evaluating the first argument passed to builtins.length - error: value is an integer while a list was expected + error: expected a list but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp index 4320fc022..d492f8bd2 100644 --- a/tests/functional/lang/eval-fail-list.err.exp +++ b/tests/functional/lang/eval-fail-list.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: value is an integer while a list was expected + error: expected a list but found an integer: 8 diff --git a/tests/functional/lang/eval-fail-set-override.err.exp b/tests/functional/lang/eval-fail-set-override.err.exp index 71481683d..9006ca4e6 100644 --- a/tests/functional/lang/eval-fail-set-override.err.exp +++ b/tests/functional/lang/eval-fail-set-override.err.exp @@ -1,4 +1,4 @@ error: … while evaluating the `__overrides` attribute - error: value is an integer while a set was expected + error: expected a set but found an integer: 1 diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 0a4f56ac5..94784c651 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -6,4 +6,4 @@ error: | ^ 6| - error: value is a set while a string was expected + error: expected a string but found a set: { } diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 81498f65a..f0cad58bb 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -105,7 +105,7 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", @@ -115,22 +115,22 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("value is %s while a 
function was expected", "a Boolean"), + hintfmt("expected a function but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("value is %s while a list was expected", "a Boolean"), + hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("value is %s while a set was expected", "a Boolean"), + hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", @@ -145,7 +145,7 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("value is %s while a set was expected", "a Boolean"), + hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -154,12 +154,12 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", @@ -168,17 +168,17 @@ namespace nix { ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("value is %s while a string was expected", "a Boolean"), + hintfmt("expected a string but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -243,7 +243,7 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.ceil")); } @@ -252,7 +252,7 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", 
TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.floor")); } @@ -265,7 +265,7 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -286,7 +286,7 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -387,7 +387,7 @@ namespace nix { ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("value is %s while a function was expected", "a list"), + hintfmt("expected a function but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" @@ -399,7 +399,7 @@ namespace nix { // ASSERT_TRACE2("filterSource (_: _: 1) ./.", // TypeError, - // hintfmt("value is %s while a Boolean was expected", "an integer"), + // hintfmt("expected a Boolean but found %s: %s", "an integer", "1"), // hintfmt("while evaluating the return value of the path filter function")); } @@ -412,7 +412,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the argument passed to builtins.attrNames")); } @@ -421,7 +421,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the argument passed to builtins.attrValues")); } @@ -430,12 +430,12 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", @@ -453,12 +453,12 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -471,17 +471,17 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a 
set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -490,12 +490,12 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", @@ -505,7 +505,7 @@ namespace nix { ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", @@ -519,12 +519,12 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -533,22 +533,22 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("value is %s while a string was expected", "a list"), + hintfmt("expected a string but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("value is %s while a list was expected", "a set"), + hintfmt("expected a list but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating an element in the list 
passed as second argument to builtins.catAttrs")); } @@ -565,7 +565,7 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("value is %s while a set was expected", "a list"), + hintfmt("expected a set but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered @@ -590,12 +590,12 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("value is %s while a function was expected", "a list"), + hintfmt("expected a function but found %s: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("value is %s while a set was expected", "an integer"), + hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? @@ -622,7 +622,7 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", @@ -639,7 +639,7 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", @@ -652,7 +652,7 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", @@ -665,12 +665,12 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.map")); } @@ -679,17 +679,17 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was 
expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "5" ANSI_NORMAL), hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -698,7 +698,7 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.elem")); } @@ -707,17 +707,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -726,12 +726,12 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.length")); } @@ -740,12 +740,12 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("value is %s while a list was expected", "a Boolean"), + hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", @@ -754,7 +754,7 @@ namespace nix { ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("in the left operand of the AND (&&) operator")); } @@ -763,17 +763,17 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("value is 
%s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.any")); } @@ -782,17 +782,17 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.all")); } @@ -801,12 +801,12 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered @@ -825,12 +825,12 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", @@ -839,7 +839,7 @@ namespace nix { ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts @@ -857,17 +857,17 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.partition")); 
ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -876,17 +876,17 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -895,22 +895,22 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("value is %s while a function was expected", "an integer"), + hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("value is %s while a list was expected", "an integer"), + hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -919,12 +919,12 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the addition")); } @@ -933,12 +933,12 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { 
ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the subtraction")); } @@ -947,12 +947,12 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument of the multiplication")); } @@ -961,12 +961,12 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("value is %s while a float was expected", "a string"), + hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", @@ -979,12 +979,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -993,12 +993,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1007,12 +1007,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("value is %s while an integer was expected", "a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("value is %s while an integer was expected", 
"a float"), + hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), hintfmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1047,12 +1047,12 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("value is %s while an integer was expected", "a set"), + hintfmt("expected an integer but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("value is %s while an integer was expected", "a string"), + hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", @@ -1079,7 +1079,7 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", @@ -1088,7 +1088,7 @@ namespace nix { ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1097,12 +1097,12 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", @@ -1115,12 +1115,12 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", @@ -1133,12 +1133,12 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("value is %s while a list was expected", "a set"), + hintfmt("expected a list but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument (the list of strings to concat) passed to 
builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", @@ -1152,7 +1152,7 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1161,12 +1161,12 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("value is %s while a string was expected", "a set"), + hintfmt("expected a string but found %s: %s", "a set", "{ }"), hintfmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1175,7 +1175,7 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to builtins.splitVersion")); } @@ -1189,7 +1189,7 @@ namespace nix { TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, - hintfmt("value is %s while a set was expected", "a string"), + hintfmt("expected a set but found %s: %s", "a string", "\"\""), hintfmt("while evaluating the argument passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict {}", @@ -1199,7 +1199,7 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = 1; }", TypeError, - hintfmt("value is %s while a string was expected", "an integer"), + hintfmt("expected a string but found %s: %s", "an integer", "1"), hintfmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", @@ -1209,12 +1209,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), hintfmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "an integer"), + hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), hintfmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", @@ -1259,22 +1259,22 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; 
}", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("value is %s while a Boolean was expected", "a string"), + hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, - hintfmt("value is %s while a list was expected", "a string"), + hintfmt("expected a list but found %s: %s", "a string", "\"foo\""), hintfmt("while evaluating the attribute 'args' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", From e502d1cf945fb3cdd0ca1e1c16ec330ccab51c7b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:34:52 +0100 Subject: [PATCH 182/307] tests/nixos: Test remote build against older versions --- tests/nixos/default.nix | 100 +++++++++++++++++++++++++++ tests/nixos/remote-builds-ssh-ng.nix | 21 +++++- tests/nixos/remote-builds.nix | 21 +++++- 3 files changed, 136 insertions(+), 6 deletions(-) diff --git a/tests/nixos/default.nix b/tests/nixos/default.nix index 1a42f886c..8f4fa2621 100644 --- a/tests/nixos/default.nix +++ b/tests/nixos/default.nix @@ -28,6 +28,13 @@ let }; }; + # Checks that a NixOS configuration does not contain any references to our + # locally defined Nix version. + checkOverrideNixVersion = { pkgs, lib, ... }: { + # pkgs.nix: The new Nix in this repo + # We disallow it, to make sure we don't accidentally use it. + system.forbiddenDependenciesRegex = lib.strings.escapeRegex "nix-${pkgs.nix.version}"; + }; in { @@ -35,8 +42,101 @@ in remoteBuilds = runNixOSTestFor "x86_64-linux" ./remote-builds.nix; + # Test our Nix as a client against remotes that are older + + remoteBuilds_remote_2_3 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuilds_remote_2_3"; + imports = [ ./remote-builds.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }; + + remoteBuilds_remote_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_remote_2_13"; + imports = [ ./remote-builds.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + # TODO: (nixpkgs update) remoteBuilds_remote_2_18 = ... + + # Test our Nix as a builder for clients that are older + + remoteBuilds_local_2_3 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_local_2_3"; + imports = [ ./remote-builds.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + remoteBuilds_local_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuilds_local_2_13"; + imports = [ ./remote-builds.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }); + + # TODO: (nixpkgs update) remoteBuilds_local_2_18 = ... 
+ + # End remoteBuilds tests + remoteBuildsSshNg = runNixOSTestFor "x86_64-linux" ./remote-builds-ssh-ng.nix; + # Test our Nix as a client against remotes that are older + + remoteBuildsSshNg_remote_2_3 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuildsSshNg_remote_2_3"; + imports = [ ./remote-builds-ssh-ng.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }; + + remoteBuildsSshNg_remote_2_13 = runNixOSTestFor "x86_64-linux" { + name = "remoteBuildsSshNg_remote_2_13"; + imports = [ ./remote-builds-ssh-ng.nix ]; + builders.config = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }; + + # TODO: (nixpkgs update) remoteBuildsSshNg_remote_2_18 = ... + + # Test our Nix as a builder for clients that are older + + # FIXME: these tests don't work yet + /* + remoteBuildsSshNg_local_2_3 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuildsSshNg_local_2_3"; + imports = [ ./remote-builds-ssh-ng.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_3; + }; + }); + + remoteBuildsSshNg_local_2_13 = runNixOSTestFor "x86_64-linux" ({ lib, pkgs, ... }: { + name = "remoteBuildsSshNg_local_2_13"; + imports = [ ./remote-builds-ssh-ng.nix ]; + nodes.client = { lib, pkgs, ... }: { + imports = [ checkOverrideNixVersion ]; + nix.package = lib.mkForce pkgs.nixVersions.nix_2_13; + }; + }); + + # TODO: (nixpkgs update) remoteBuildsSshNg_local_2_18 = ... + */ + nix-copy-closure = runNixOSTestFor "x86_64-linux" ./nix-copy-closure.nix; nix-copy = runNixOSTestFor "x86_64-linux" ./nix-copy.nix; diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index 20a43803d..b9174a788 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -1,4 +1,4 @@ -{ config, lib, hostPkgs, ... }: +test@{ config, lib, hostPkgs, ... }: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -28,12 +28,27 @@ let in { - name = "remote-builds-ssh-ng"; + name = lib.mkDefault "remote-builds-ssh-ng"; + + # TODO expand module shorthand syntax instead of use imports + imports = [{ + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; + }; + }; + }]; nodes = { builder = { config, pkgs, ... }: - { services.openssh.enable = true; + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; nix.settings.sandbox = true; nix.settings.substituters = lib.mkForce [ ]; diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index ad7f509db..6f9b0ebf0 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -1,6 +1,6 @@ # Test Nix's remote build feature. -{ config, lib, hostPkgs, ... }: +test@{ config, lib, hostPkgs, ... }: let pkgs = config.nodes.client.nixpkgs.pkgs; @@ -8,7 +8,9 @@ let # The configuration of the remote builders. builder = { config, pkgs, ... 
}: - { services.openssh.enable = true; + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; nix.settings.sandbox = true; @@ -35,7 +37,20 @@ let in { - name = "remote-builds"; + name = lib.mkDefault "remote-builds"; + + # TODO expand module shorthand syntax instead of use imports + imports = [{ + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; + }; + }; + }]; nodes = { builder1 = builder; From ce2f714e6daa0250f30bc3a14967e4e3a7777d9f Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 20 Feb 2022 19:24:07 +0000 Subject: [PATCH 183/307] Start factoring out the serve protocol for Hydra to share Factor out `ServeProto::BasicClientConnection` for Hydra to share - `queryValidPaths`: Hydra uses the lock argument differently than Nix, so we un-hard-code it. - `buildDerivationRequest`: Just the request half, as Hydra does some things between requesting and responding. Co-authored-by: Robert Hensing --- src/libstore/legacy-ssh-store.cc | 65 ++++------------------------- src/libstore/legacy-ssh-store.hh | 4 -- src/libstore/serve-protocol-impl.cc | 38 +++++++++++++++++ src/libstore/serve-protocol-impl.hh | 54 ++++++++++++++++++++++++ src/libstore/serve-protocol.hh | 7 ++++ 5 files changed, 107 insertions(+), 61 deletions(-) create mode 100644 src/libstore/serve-protocol-impl.cc diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 06bef9d08..b89dd5fd9 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -22,45 +22,10 @@ std::string LegacySSHStoreConfig::doc() } -struct LegacySSHStore::Connection +struct LegacySSHStore::Connection : public ServeProto::BasicClientConnection { std::unique_ptr sshConn; - FdSink to; - FdSource from; - ServeProto::Version remoteVersion; bool good = true; - - /** - * Coercion to `ServeProto::ReadConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. - */ - operator ServeProto::ReadConn () - { - return ServeProto::ReadConn { - .from = from, - .version = remoteVersion, - }; - } - - /* - * Coercion to `ServeProto::WriteConn`. This makes it easy to use the - * factored out serve protocol searlizers with a - * `LegacySSHStore::Connection`. - * - * The serve protocol connection types are unidirectional, unlike - * this type. 
- */ - operator ServeProto::WriteConn () - { - return ServeProto::WriteConn { - .to = to, - .version = remoteVersion, - }; - } }; @@ -232,16 +197,16 @@ void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink) } -void LegacySSHStore::putBuildSettings(Connection & conn) +static ServeProto::BuildOptions buildSettings() { - ServeProto::write(*this, conn, ServeProto::BuildOptions { + return { .maxSilentTime = settings.maxSilentTime, .buildTimeout = settings.buildTimeout, .maxLogSize = settings.maxLogSize, .nrRepeats = 0, // buildRepeat hasn't worked for ages anyway .enforceDeterminism = 0, .keepFailed = settings.keepFailed, - }); + }; } @@ -250,14 +215,7 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas { auto conn(connections->get()); - conn->to - << ServeProto::Command::BuildDerivation - << printStorePath(drvPath); - writeDerivation(conn->to, *this, drv); - - putBuildSettings(*conn); - - conn->to.flush(); + conn->putBuildDerivationRequest(*this, drvPath, drv, buildSettings()); return ServeProto::Serialise::read(*this, *conn); } @@ -288,7 +246,7 @@ void LegacySSHStore::buildPaths(const std::vector & drvPaths, Build } conn->to << ss; - putBuildSettings(*conn); + ServeProto::write(*this, *conn, buildSettings()); conn->to.flush(); @@ -328,15 +286,8 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths, SubstituteFlag maybeSubstitute) { auto conn(connections->get()); - - conn->to - << ServeProto::Command::QueryValidPaths - << false // lock - << maybeSubstitute; - ServeProto::write(*this, *conn, paths); - conn->to.flush(); - - return ServeProto::Serialise::read(*this, *conn); + return conn->queryValidPaths(*this, + false, paths, maybeSubstitute); } diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index 7cee31d66..bdf79eab3 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -78,10 +78,6 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor RepairFlag repair = NoRepair) override { unsupported("addToStore"); } -private: - - void putBuildSettings(Connection & conn); - public: BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv, diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc new file mode 100644 index 000000000..e65de7650 --- /dev/null +++ b/src/libstore/serve-protocol-impl.cc @@ -0,0 +1,38 @@ +#include "serve-protocol-impl.hh" +#include "build-result.hh" +#include "derivations.hh" + +namespace nix { + +StorePathSet ServeProto::BasicClientConnection::queryValidPaths( + const Store & store, + bool lock, const StorePathSet & paths, + SubstituteFlag maybeSubstitute) +{ + to + << ServeProto::Command::QueryValidPaths + << lock + << maybeSubstitute; + write(store, *this, paths); + to.flush(); + + return Serialise::read(store, *this); +} + + +void ServeProto::BasicClientConnection::putBuildDerivationRequest( + const Store & store, + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options) +{ + to + << ServeProto::Command::BuildDerivation + << store.printStorePath(drvPath); + writeDerivation(to, store, drv); + + ServeProto::write(store, *this, options); + + to.flush(); +} + +} diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index 6f3b177ac..312f5d47a 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -10,6 +10,7 @@ #include "serve-protocol.hh" #include 
"length-prefixed-protocol-helper.hh" +#include "store-api.hh" namespace nix { @@ -56,4 +57,57 @@ struct ServeProto::Serialise /* protocol-specific templates */ +struct ServeProto::BasicClientConnection +{ + FdSink to; + FdSource from; + ServeProto::Version remoteVersion; + + /** + * Coercion to `ServeProto::ReadConn`. This makes it easy to use the + * factored out serve protocol serializers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type. + */ + operator ServeProto::ReadConn () + { + return ServeProto::ReadConn { + .from = from, + .version = remoteVersion, + }; + } + + /** + * Coercion to `ServeProto::WriteConn`. This makes it easy to use the + * factored out serve protocol serializers with a + * `LegacySSHStore::Connection`. + * + * The serve protocol connection types are unidirectional, unlike + * this type. + */ + operator ServeProto::WriteConn () + { + return ServeProto::WriteConn { + .to = to, + .version = remoteVersion, + }; + } + + StorePathSet queryValidPaths( + const Store & remoteStore, + bool lock, const StorePathSet & paths, + SubstituteFlag maybeSubstitute); + + /** + * Just the request half, because Hydra may do other things between + * issuing the request and reading the `BuildResult` response. + */ + void putBuildDerivationRequest( + const Store & store, + const StorePath & drvPath, const BasicDerivation & drv, + const ServeProto::BuildOptions & options); +}; + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 1665b935f..632c4b6bd 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -59,6 +59,13 @@ struct ServeProto Version version; }; + /** + * Stripped down serialization logic suitable for sharing with Hydra. + * + * @todo remove once Hydra uses Store abstraction consistently. + */ + struct BasicClientConnection; + /** * Data type for canonical pairs of serialisers for the serve protocol. * From 4580bed3e47eba844ec905d7a0e5fec79fb06b67 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 10:24:07 -0500 Subject: [PATCH 184/307] `LegacySSHStore::openConnection` move more logic inside catch block Broader error handling logic is more robust. --- src/libstore/legacy-ssh-store.cc | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index b89dd5fd9..058b1affd 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -61,28 +61,27 @@ ref LegacySSHStore::openConnection() conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); + StringSink saved; + TeeSource tee(conn->from, saved); try { conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; conn->to.flush(); - StringSink saved; - try { - TeeSource tee(conn->from, saved); - unsigned int magic = readInt(tee); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - } catch (SerialisationError & e) { - /* In case the other side is waiting for our input, - close it. 
*/ - conn->sshConn->in.close(); - auto msg = conn->from.drain(); - throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", - host, chomp(saved.s + msg)); - } + unsigned int magic = readInt(conn->from); + if (magic != SERVE_MAGIC_2) + throw Error("'nix-store --serve' protocol mismatch from '%s'", host); conn->remoteVersion = readInt(conn->from); if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); - + } catch (SerialisationError & e) { + // in.close(): Don't let the remote block on us not writing. + conn->sshConn->in.close(); + { + NullSink nullSink; + conn->from.drainInto(nullSink); + } + throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'", + host, chomp(saved.s)); } catch (EndOfFile & e) { throw Error("cannot connect to '%1%'", host); } From 4a5ca576da511fcc64039c2494f41f710d662478 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sun, 20 Feb 2022 19:24:07 +0000 Subject: [PATCH 185/307] Factor out `ServeProto::BasicClientConnection::handshake` Hydra to share --- src/libstore/legacy-ssh-store.cc | 11 ++--------- src/libstore/serve-protocol-impl.cc | 19 +++++++++++++++++++ src/libstore/serve-protocol-impl.hh | 22 ++++++++++++++++++++++ 3 files changed, 43 insertions(+), 9 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 058b1affd..4f020c452 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -64,15 +64,8 @@ ref LegacySSHStore::openConnection() StringSink saved; TeeSource tee(conn->from, saved); try { - conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION; - conn->to.flush(); - - unsigned int magic = readInt(conn->from); - if (magic != SERVE_MAGIC_2) - throw Error("'nix-store --serve' protocol mismatch from '%s'", host); - conn->remoteVersion = readInt(conn->from); - if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200) - throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host); + conn->remoteVersion = ServeProto::BasicClientConnection::handshake( + conn->to, tee, SERVE_PROTOCOL_VERSION, host); } catch (SerialisationError & e) { // in.close(): Don't let the remote block on us not writing. 
         conn->sshConn->in.close();
diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc
index e65de7650..6bf6c8cf6 100644
--- a/src/libstore/serve-protocol-impl.cc
+++ b/src/libstore/serve-protocol-impl.cc
@@ -4,6 +4,25 @@
 
 namespace nix {
 
+ServeProto::Version ServeProto::BasicClientConnection::handshake(
+    BufferedSink & to,
+    Source & from,
+    ServeProto::Version localVersion,
+    std::string_view host)
+{
+    to << SERVE_MAGIC_1 << localVersion;
+    to.flush();
+
+    unsigned int magic = readInt(from);
+    if (magic != SERVE_MAGIC_2)
+        throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
+    auto remoteVersion = readInt(from);
+    if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200)
+        throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
+    return remoteVersion;
+}
+
+
 StorePathSet ServeProto::BasicClientConnection::queryValidPaths(
     const Store & store,
     bool lock, const StorePathSet & paths,
diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh
index 312f5d47a..8cd241fd3 100644
--- a/src/libstore/serve-protocol-impl.hh
+++ b/src/libstore/serve-protocol-impl.hh
@@ -63,6 +63,28 @@ struct ServeProto::BasicClientConnection
     FdSource from;
     ServeProto::Version remoteVersion;
 
+    /**
+     * Establishes connection, negotiating version.
+     *
+     * @return the version provided by the other side of the
+     * connection.
+     *
+     * @param to Taken by reference to allow for various error handling
+     * mechanisms.
+     *
+     * @param from Taken by reference to allow for various error
+     * handling mechanisms.
+     *
+     * @param localVersion Our version which is sent over
+     *
+     * @param host Just used to add context to thrown exceptions.
+     */
+    static ServeProto::Version handshake(
+        BufferedSink & to,
+        Source & from,
+        ServeProto::Version localVersion,
+        std::string_view host);
+
     /**
      * Coercion to `ServeProto::ReadConn`. This makes it easy to use the
      * factored out serve protocol serializers with a

From e960b2823091f7c6685b55d5f1ad8d7612130009 Mon Sep 17 00:00:00 2001
From: John Ericson
Date: Fri, 19 Jan 2024 16:38:08 -0500
Subject: [PATCH 186/307] Factor out `ServeProto::BasicServerConnection::handshake`

We'll need this for unit testing.
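
As a rough illustration of the server side, an endpoint in the style of
`nix-store --serve` can then perform the greeting with just (sketch only;
`out` and `in` stand for the connection's `FdSink`/`FdSource`):

    ServeProto::Version clientVersion =
        ServeProto::BasicServerConnection::handshake(
            out, in, SERVE_PROTOCOL_VERSION);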
Co-authored-by: Robert Hensing --- src/libstore/serve-protocol-impl.cc | 12 ++++++++++++ src/libstore/serve-protocol-impl.hh | 22 ++++++++++++++++++++++ src/libstore/serve-protocol.hh | 1 + src/nix-store/nix-store.cc | 8 +++----- 4 files changed, 38 insertions(+), 5 deletions(-) diff --git a/src/libstore/serve-protocol-impl.cc b/src/libstore/serve-protocol-impl.cc index 6bf6c8cf6..b39212884 100644 --- a/src/libstore/serve-protocol-impl.cc +++ b/src/libstore/serve-protocol-impl.cc @@ -22,6 +22,18 @@ ServeProto::Version ServeProto::BasicClientConnection::handshake( return remoteVersion; } +ServeProto::Version ServeProto::BasicServerConnection::handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion) +{ + unsigned int magic = readInt(from); + if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch"); + to << SERVE_MAGIC_2 << localVersion; + to.flush(); + return readInt(from); +} + StorePathSet ServeProto::BasicClientConnection::queryValidPaths( const Store & store, diff --git a/src/libstore/serve-protocol-impl.hh b/src/libstore/serve-protocol-impl.hh index 8cd241fd3..fd8d94697 100644 --- a/src/libstore/serve-protocol-impl.hh +++ b/src/libstore/serve-protocol-impl.hh @@ -132,4 +132,26 @@ struct ServeProto::BasicClientConnection const ServeProto::BuildOptions & options); }; +struct ServeProto::BasicServerConnection +{ + /** + * Establishes connection, negotiating version. + * + * @return the version provided by the other side of the + * connection. + * + * @param to Taken by reference to allow for various error handling + * mechanisms. + * + * @param from Taken by reference to allow for various error + * handling mechanisms. + * + * @param localVersion Our version which is sent over + */ + static ServeProto::Version handshake( + BufferedSink & to, + Source & from, + ServeProto::Version localVersion); +}; + } diff --git a/src/libstore/serve-protocol.hh b/src/libstore/serve-protocol.hh index 632c4b6bd..8c112bb74 100644 --- a/src/libstore/serve-protocol.hh +++ b/src/libstore/serve-protocol.hh @@ -65,6 +65,7 @@ struct ServeProto * @todo remove once Hydra uses Store abstraction consistently. */ struct BasicClientConnection; + struct BasicServerConnection; /** * Data type for canonical pairs of serialisers for the serve protocol. diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 0a0a3ab1a..40378e123 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -828,11 +828,9 @@ static void opServe(Strings opFlags, Strings opArgs) FdSink out(STDOUT_FILENO); /* Exchange the greeting. 
*/ - unsigned int magic = readInt(in); - if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch"); - out << SERVE_MAGIC_2 << SERVE_PROTOCOL_VERSION; - out.flush(); - ServeProto::Version clientVersion = readInt(in); + ServeProto::Version clientVersion = + ServeProto::BasicServerConnection::handshake( + out, in, SERVE_PROTOCOL_VERSION); ServeProto::ReadConn rconn { .from = in, From 1fb25829692e5455c0edec96226af295957d99b4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 19 Jan 2024 18:42:27 -0500 Subject: [PATCH 187/307] Create unit tests for the serve proto handshake Co-authored-by: Robert Hensing --- .../serve-protocol/handshake-to-client.bin | Bin 0 -> 16 bytes tests/unit/libstore/serve-protocol.cc | 110 ++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 tests/unit/libstore/data/serve-protocol/handshake-to-client.bin diff --git a/tests/unit/libstore/data/serve-protocol/handshake-to-client.bin b/tests/unit/libstore/data/serve-protocol/handshake-to-client.bin new file mode 100644 index 0000000000000000000000000000000000000000..15ba4b5e3d96e388637107542f6eb9f7e94ac708 GIT binary patch literal 16 RcmX^8E+~Wn1em}i0{|m{0%8CF literal 0 HcmV?d00001 diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index 8f256d1e6..597c0b570 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -1,3 +1,4 @@ +#include #include #include @@ -6,6 +7,7 @@ #include "serve-protocol.hh" #include "serve-protocol-impl.hh" #include "build-result.hh" +#include "file-descriptor.hh" #include "tests/protocol.hh" #include "tests/characterization.hh" @@ -401,4 +403,112 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) +TEST_F(ServeProtoTest, handshake_log) +{ + CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + StringSink toClientLog; + + Pipe toClient, toServer; + toClient.create(); + toServer.create(); + + ServeProto::Version clientResult, serverResult; + + auto thread = std::thread([&]() { + FdSink out { toServer.writeSide.get() }; + FdSource in0 { toClient.readSide.get() }; + TeeSource in { in0, toClientLog }; + clientResult = ServeProto::BasicClientConnection::handshake( + out, in, defaultVersion, "blah"); + }); + + { + FdSink out { toClient.writeSide.get() }; + FdSource in { toServer.readSide.get() }; + serverResult = ServeProto::BasicServerConnection::handshake( + out, in, defaultVersion); + }; + + thread.join(); + + return std::move(toClientLog.s); + }); +} + +/// Has to be a `BufferedSink` for handshake. +struct NullBufferedSink : BufferedSink { + void writeUnbuffered(std::string_view data) override { } +}; + +TEST_F(ServeProtoTest, handshake_client_replay) +{ + CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + NullBufferedSink nullSink; + + StringSource in { toClientLog }; + auto clientResult = ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"); + + EXPECT_EQ(clientResult, defaultVersion); + }); +} + +TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) +{ + CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + for (size_t len = 0; len < toClientLog.size(); ++len) { + NullBufferedSink nullSink; + StringSource in { + // truncate + toClientLog.substr(0, len) + }; + if (len < 8) { + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + EndOfFile); + } else { + // Not sure why cannot keep on checking for `EndOfFile`. 
+ EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + Error); + } + } + }); +} + +TEST_F(ServeProtoTest, handshake_client_corrupted_throws) +{ + CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + for (size_t idx = 0; idx < toClientLog.size(); ++idx) { + // corrupt a copy + std::string toClientLogCorrupt = toClientLog; + toClientLogCorrupt[idx] *= 4; + ++toClientLogCorrupt[idx]; + + NullBufferedSink nullSink; + StringSource in { toClientLogCorrupt }; + + if (idx < 4 || idx == 9) { + // magic bytes don't match + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + Error); + } else if (idx < 8 || idx >= 12) { + // Number out of bounds + EXPECT_THROW( + ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"), + SerialisationError); + } else { + auto ver = ServeProto::BasicClientConnection::handshake( + nullSink, in, defaultVersion, "blah"); + EXPECT_NE(ver, defaultVersion); + } + } + }); +} + } From 5167351efbee5c5a7390510eb720c31c6976f4d9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:44:16 +0100 Subject: [PATCH 188/307] tests/nixos/remote-builds*: Inline module + format --- tests/nixos/remote-builds-ssh-ng.nix | 149 +++++++++++++------------- tests/nixos/remote-builds.nix | 151 +++++++++++++-------------- 2 files changed, 149 insertions(+), 151 deletions(-) diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index b9174a788..cca4066f3 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -28,96 +28,95 @@ let in { - name = lib.mkDefault "remote-builds-ssh-ng"; - - # TODO expand module shorthand syntax instead of use imports - imports = [{ - options = { - builders.config = lib.mkOption { - type = lib.types.deferredModule; - description = '' - Configuration to add to the builder nodes. - ''; - default = { }; - }; + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; }; - }]; + }; - nodes = - { builder = - { config, pkgs, ... }: - { - imports = [ test.config.builders.config ]; - services.openssh.enable = true; - virtualisation.writableStore = true; - nix.settings.sandbox = true; - nix.settings.substituters = lib.mkForce [ ]; - }; + config = { + name = lib.mkDefault "remote-builds-ssh-ng"; - client = - { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [ { hostName = "builder"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - protocol = "ssh-ng"; - } - ]; + nodes = + { builder = + { config, pkgs, ... }: + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.sandbox = true; nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; }; - }; - testScript = { nodes }: '' - # fmt: off - import subprocess + client = + { config, lib, pkgs, ... 
}: + { nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = + [ { hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; + }; - start_all() + testScript = { nodes }: '' + # fmt: off + import subprocess - # Create an SSH key on the client. - subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") + start_all() - # Install the SSH key on the builder. - client.wait_for_unit("network.target") - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") - # Perform a build - out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") + # Install the SSH key on the builder. + client.wait_for_unit("network.target") + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") - # Verify that the build was done on the builder - builder.succeed(f"test -e {out.strip()}") + # Perform a build + out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output") - # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix - buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") - print(buildOutput) + # Verify that the build was done on the builder + builder.succeed(f"test -e {out.strip()}") - # Make sure that we get the expected build output - client.succeed("grep -qF Hello build-output") + # Print the build log, prefix the log lines to avoid nix intercepting lines starting with @nix + buildOutput = client.succeed("sed -e 's/^/build-output:/' build-output") + print(buildOutput) - # We don't want phase reporting in the build output - client.fail("grep -qF '@nix' build-output") + # Make sure that we get the expected build output + client.succeed("grep -qF Hello build-output") - # Get the log file - client.succeed(f"nix-store --read-log {out.strip()} > log-output") - # Prefix the log lines to avoid nix intercepting lines starting with @nix - logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") - print(logOutput) + # We don't want phase reporting in the build output + client.fail("grep -qF '@nix' build-output") - # Check that we get phase reporting in the log file - client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") - ''; + # Get the log file + client.succeed(f"nix-store --read-log {out.strip()} > log-output") + # Prefix the log lines to 
avoid nix intercepting lines starting with @nix + logOutput = client.succeed("sed -e 's/^/log-file:/' log-output") + print(logOutput) + + # Check that we get phase reporting in the log file + client.succeed("grep -q '@nix {\"action\":\"setPhase\",\"phase\":\"buildPhase\"}' log-output") + ''; + }; } diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 6f9b0ebf0..423b9d171 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -37,90 +37,89 @@ let in { - name = lib.mkDefault "remote-builds"; + options = { + builders.config = lib.mkOption { + type = lib.types.deferredModule; + description = '' + Configuration to add to the builder nodes. + ''; + default = { }; + }; + }; - # TODO expand module shorthand syntax instead of use imports - imports = [{ - options = { - builders.config = lib.mkOption { - type = lib.types.deferredModule; - description = '' - Configuration to add to the builder nodes. - ''; - default = { }; + config = { + name = lib.mkDefault "remote-builds"; + + nodes = + { builder1 = builder; + builder2 = builder; + + client = + { config, lib, pkgs, ... }: + { nix.settings.max-jobs = 0; # force remote building + nix.distributedBuilds = true; + nix.buildMachines = + [ { hostName = "builder1"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + { hostName = "builder2"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + } + ]; + virtualisation.writableStore = true; + virtualisation.additionalPaths = [ config.system.build.extraUtils ]; + nix.settings.substituters = lib.mkForce [ ]; + programs.ssh.extraConfig = "ConnectTimeout 30"; + }; }; - }; - }]; - nodes = - { builder1 = builder; - builder2 = builder; + testScript = { nodes }: '' + # fmt: off + import subprocess - client = - { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building - nix.distributedBuilds = true; - nix.buildMachines = - [ { hostName = "builder1"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - { hostName = "builder2"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - } - ]; - virtualisation.writableStore = true; - virtualisation.additionalPaths = [ config.system.build.extraUtils ]; - nix.settings.substituters = lib.mkForce [ ]; - programs.ssh.extraConfig = "ConnectTimeout 30"; - }; - }; + start_all() - testScript = { nodes }: '' - # fmt: off - import subprocess + # Create an SSH key on the client. + subprocess.run([ + "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" + ], capture_output=True, check=True) + client.succeed("mkdir -p -m 700 /root/.ssh") + client.copy_from_host("key", "/root/.ssh/id_ed25519") + client.succeed("chmod 600 /root/.ssh/id_ed25519") - start_all() + # Install the SSH key on the builders. + client.wait_for_unit("network.target") + for builder in [builder1, builder2]: + builder.succeed("mkdir -p -m 700 /root/.ssh") + builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") + builder.wait_for_unit("sshd") + client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") - # Create an SSH key on the client. 
- subprocess.run([ - "${hostPkgs.openssh}/bin/ssh-keygen", "-t", "ed25519", "-f", "key", "-N", "" - ], capture_output=True, check=True) - client.succeed("mkdir -p -m 700 /root/.ssh") - client.copy_from_host("key", "/root/.ssh/id_ed25519") - client.succeed("chmod 600 /root/.ssh/id_ed25519") + # Perform a build and check that it was performed on the builder. + out = client.succeed( + "nix-build ${expr nodes.client 1} 2> build-output", + "grep -q Hello build-output" + ) + builder1.succeed(f"test -e {out}") - # Install the SSH key on the builders. - client.wait_for_unit("network.target") - for builder in [builder1, builder2]: - builder.succeed("mkdir -p -m 700 /root/.ssh") - builder.copy_from_host("key.pub", "/root/.ssh/authorized_keys") - builder.wait_for_unit("sshd") - client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'") + # And a parallel build. + paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') + out1, out2 = paths.split() + builder1.succeed(f"test -e {out1} -o -e {out2}") + builder2.succeed(f"test -e {out1} -o -e {out2}") - # Perform a build and check that it was performed on the builder. - out = client.succeed( - "nix-build ${expr nodes.client 1} 2> build-output", - "grep -q Hello build-output" - ) - builder1.succeed(f"test -e {out}") + # And a failing build. + client.fail("nix-build ${expr nodes.client 5}") - # And a parallel build. - paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out') - out1, out2 = paths.split() - builder1.succeed(f"test -e {out1} -o -e {out2}") - builder2.succeed(f"test -e {out1} -o -e {out2}") - - # And a failing build. - client.fail("nix-build ${expr nodes.client 5}") - - # Test whether the build hook automatically skips unavailable builders. - builder1.block() - client.succeed("nix-build ${expr nodes.client 4}") - ''; + # Test whether the build hook automatically skips unavailable builders. + builder1.block() + client.succeed("nix-build ${expr nodes.client 4}") + ''; + }; } From c4d7c4a8485cb74f57045d1fa14c1d5f9fa28310 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Jan 2024 18:47:59 +0100 Subject: [PATCH 189/307] nixos/tests/remote-builds*: Format nixpkgs-fmt --- tests/nixos/remote-builds-ssh-ng.nix | 38 +++++++++++++++------------- tests/nixos/remote-builds.nix | 13 +++++++--- 2 files changed, 29 insertions(+), 22 deletions(-) diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix index cca4066f3..926ec00fe 100644 --- a/tests/nixos/remote-builds-ssh-ng.nix +++ b/tests/nixos/remote-builds-ssh-ng.nix @@ -42,29 +42,31 @@ in name = lib.mkDefault "remote-builds-ssh-ng"; nodes = - { builder = - { config, pkgs, ... }: - { - imports = [ test.config.builders.config ]; - services.openssh.enable = true; - virtualisation.writableStore = true; - nix.settings.sandbox = true; - nix.settings.substituters = lib.mkForce [ ]; - }; + { + builder = + { config, pkgs, ... }: + { + imports = [ test.config.builders.config ]; + services.openssh.enable = true; + virtualisation.writableStore = true; + nix.settings.sandbox = true; + nix.settings.substituters = lib.mkForce [ ]; + }; client = { config, lib, pkgs, ... 
}: - { nix.settings.max-jobs = 0; # force remote building + { + nix.settings.max-jobs = 0; # force remote building nix.distributedBuilds = true; nix.buildMachines = - [ { hostName = "builder"; - sshUser = "root"; - sshKey = "/root/.ssh/id_ed25519"; - system = "i686-linux"; - maxJobs = 1; - protocol = "ssh-ng"; - } - ]; + [{ + hostName = "builder"; + sshUser = "root"; + sshKey = "/root/.ssh/id_ed25519"; + system = "i686-linux"; + maxJobs = 1; + protocol = "ssh-ng"; + }]; virtualisation.writableStore = true; virtualisation.additionalPaths = [ config.system.build.extraUtils ]; nix.settings.substituters = lib.mkForce [ ]; diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix index 423b9d171..1661203ec 100644 --- a/tests/nixos/remote-builds.nix +++ b/tests/nixos/remote-builds.nix @@ -51,21 +51,26 @@ in name = lib.mkDefault "remote-builds"; nodes = - { builder1 = builder; + { + builder1 = builder; builder2 = builder; client = { config, lib, pkgs, ... }: - { nix.settings.max-jobs = 0; # force remote building + { + nix.settings.max-jobs = 0; # force remote building nix.distributedBuilds = true; nix.buildMachines = - [ { hostName = "builder1"; + [ + { + hostName = "builder1"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; maxJobs = 1; } - { hostName = "builder2"; + { + hostName = "builder2"; sshUser = "root"; sshKey = "/root/.ssh/id_ed25519"; system = "i686-linux"; From 81499a0b93a136f889f3799d7110dcc479a4cbe1 Mon Sep 17 00:00:00 2001 From: Maximilian Bosch Date: Sat, 20 Jan 2024 16:05:30 +0100 Subject: [PATCH 190/307] libexpr: print value of what is attempted to be called as function MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Low-hanging fruit in the spirit of #9753 and #9754 (means 9999years did all the hard work already). This basically prints out what was attempted to be called as function, i.e. 
map (import {}) [ 1 2 3 ] now gives the following error message: error: … while calling the 'map' builtin at «string»:1:1: 1| map (import {}) [ 1 2 3 ] | ^ … while evaluating the first argument passed to builtins.map error: expected a function but found a set: { _type = "pkgs"; AAAAAASomeThingsFailToEvaluate = «thunk»; AMB-plugins = «thunk»; ArchiSteamFarm = «thunk»; BeatSaberModManager = «thunk»; CHOWTapeModel = «thunk»; ChowCentaur = «thunk»; ChowKick = «thunk»; ChowPhaser = «thunk»; CoinMP = «thunk»; «18783 attributes elided»} --- src/libexpr/eval.cc | 6 +++++- tests/unit/libexpr/error_traces.cc | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71e956e10..ce410162e 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1692,7 +1692,11 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & } else - error("attempt to call something which is not a function but %1%", showType(vCur)).atPos(pos).debugThrow(); + error("attempt to call something which is not a function but %1%: %2%", + showType(vCur), + ValuePrinter(*this, vCur, errorPrintOptions)) + .atPos(pos) + .debugThrow(); } vRes = vCur; diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index f0cad58bb..f99aafd74 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -750,7 +750,7 @@ namespace nix { ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s", "an integer")); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, @@ -835,7 +835,7 @@ namespace nix { ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s", "an integer")); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, From 9a51209309891f8bf7edf65673682df13d4beb90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 22:40:01 +0000 Subject: [PATCH 191/307] build(deps): bump zeebe-io/backport-action from 2.3.0 to 2.4.0 Bumps [zeebe-io/backport-action](https://github.com/zeebe-io/backport-action) from 2.3.0 to 2.4.0. - [Release notes](https://github.com/zeebe-io/backport-action/releases) - [Commits](https://github.com/zeebe-io/backport-action/compare/v2.3.0...v2.4.0) --- updated-dependencies: - dependency-name: zeebe-io/backport-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index f003114ba..46a4529c1 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.3.0 + uses: zeebe-io/backport-action@v2.4.0 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} From b71673109c2172cb1f933cc8a97c26b4352ac239 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 15:50:00 -0500 Subject: [PATCH 192/307] Make `SSHMaster::startCommand` work on an args list This avoids split-on-whitespace errors: - No more `bash -c` needed - No more `shellEscape` needed - `remote-program` ssh store setting also cleanly supports args (e.g. `nix daemon`) - `ssh` uses `--` to separate args for SSH from args for the command to run. and will help with Hydra dedup. Some code taken from #6628. Co-Authored-By: Alexander Bantyev --- src/libstore/legacy-ssh-store.cc | 11 ++++++++--- src/libstore/legacy-ssh-store.hh | 2 +- src/libstore/ssh-store.cc | 19 ++++++++++--------- src/libstore/ssh.cc | 12 +++++++----- src/libstore/ssh.hh | 11 ++++++++++- 5 files changed, 36 insertions(+), 19 deletions(-) diff --git a/src/libstore/legacy-ssh-store.cc b/src/libstore/legacy-ssh-store.cc index 4f020c452..e422adeec 100644 --- a/src/libstore/legacy-ssh-store.cc +++ b/src/libstore/legacy-ssh-store.cc @@ -55,9 +55,14 @@ LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & h ref LegacySSHStore::openConnection() { auto conn = make_ref(); - conn->sshConn = master.startCommand( - fmt("%s --serve --write", remoteProgram) - + (remoteStore.get() == "" ? 
"" : " --store " + shellEscape(remoteStore.get()))); + Strings command = remoteProgram.get(); + command.push_back("--serve"); + command.push_back("--write"); + if (remoteStore.get() != "") { + command.push_back("--store"); + command.push_back(remoteStore.get()); + } + conn->sshConn = master.startCommand(std::move(command)); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh index bdf79eab3..ae890177b 100644 --- a/src/libstore/legacy-ssh-store.hh +++ b/src/libstore/legacy-ssh-store.hh @@ -13,7 +13,7 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig { using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{this, "nix-store", "remote-program", + const Setting remoteProgram{this, {"nix-store"}, "remote-program", "Path to the `nix-store` executable on the remote machine."}; const Setting maxConnections{this, 1, "max-connections", diff --git a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index d4c8ab5b2..0cf92b114 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -17,7 +17,7 @@ struct SSHStoreConfig : virtual RemoteStoreConfig, virtual CommonSSHStoreConfig using RemoteStoreConfig::RemoteStoreConfig; using CommonSSHStoreConfig::CommonSSHStoreConfig; - const Setting remoteProgram{this, "nix-daemon", "remote-program", + const Setting remoteProgram{this, {"nix-daemon"}, "remote-program", "Path to the `nix-daemon` executable on the remote machine."}; const std::string name() override { return "Experimental SSH Store"; } @@ -212,14 +212,15 @@ public: ref SSHStore::openConnection() { auto conn = make_ref(); - - std::string command = remoteProgram + " --stdio"; - if (remoteStore.get() != "") - command += " --store " + shellEscape(remoteStore.get()); - for (auto & arg : extraRemoteProgramArgs) - command += " " + shellEscape(arg); - - conn->sshConn = master.startCommand(command); + Strings command = remoteProgram.get(); + command.push_back("--stdio"); + if (remoteStore.get() != "") { + command.push_back("--store"); + command.push_back(remoteStore.get()); + } + command.insert(command.end(), + extraRemoteProgramArgs.begin(), extraRemoteProgramArgs.end()); + conn->sshConn = master.startCommand(std::move(command)); conn->to = FdSink(conn->sshConn->in.get()); conn->from = FdSource(conn->sshConn->out.get()); return conn; diff --git a/src/libstore/ssh.cc b/src/libstore/ssh.cc index 5c8d6a504..30fe73adb 100644 --- a/src/libstore/ssh.cc +++ b/src/libstore/ssh.cc @@ -52,7 +52,8 @@ bool SSHMaster::isMasterRunning() { return res.first == 0; } -std::unique_ptr SSHMaster::startCommand(const std::string & command) +std::unique_ptr SSHMaster::startCommand( + Strings && command, Strings && extraSshArgs) { Path socketPath = startMaster(); @@ -84,18 +85,19 @@ std::unique_ptr SSHMaster::startCommand(const std::string Strings args; - if (fakeSSH) { - args = { "bash", "-c" }; - } else { + if (!fakeSSH) { args = { "ssh", host.c_str(), "-x" }; addCommonSSHOpts(args); if (socketPath != "") args.insert(args.end(), {"-S", socketPath}); if (verbosity >= lvlChatty) args.push_back("-v"); + args.splice(args.end(), std::move(extraSshArgs)); + args.push_back("--"); } - args.push_back(command); + args.splice(args.end(), std::move(command)); + execvp(args.begin()->c_str(), stringsToCharPtrs(args).data()); // could not exec ssh/bash diff --git a/src/libstore/ssh.hh b/src/libstore/ssh.hh index bfcd6f21c..08bb43dfa 100644 --- a/src/libstore/ssh.hh +++ 
b/src/libstore/ssh.hh @@ -41,7 +41,16 @@ public: AutoCloseFD out, in; }; - std::unique_ptr startCommand(const std::string & command); + /** + * @param command The command (arg vector) to execute. + * + * @param extraSShArgs Extra args to pass to SSH (not the command to + * execute). Will not be used when "fake SSHing" to the local + * machine. + */ + std::unique_ptr startCommand( + Strings && command, + Strings && extraSshArgs = {}); Path startMaster(); }; From 966d6fcd01cfd33e9954e5df262b8bf64a5fd311 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 22 Jan 2024 17:59:34 -0500 Subject: [PATCH 193/307] `ParseSink` -> `FileSystemObjectSink` Co-authored-by: Robert Hensing --- src/libstore/daemon.cc | 4 ++-- src/libstore/export-import.cc | 2 +- src/libstore/local-store.cc | 2 +- src/libstore/nar-accessor.cc | 2 +- src/libstore/store-api.cc | 8 ++++---- src/libutil/archive.cc | 8 ++++---- src/libutil/archive.hh | 2 +- src/libutil/file-content-address.hh | 2 +- src/libutil/fs-sink.cc | 2 +- src/libutil/fs-sink.hh | 10 +++++----- src/libutil/git.cc | 4 ++-- src/libutil/git.hh | 4 ++-- src/libutil/memory-source-accessor.hh | 2 +- tests/unit/libutil/git.cc | 2 +- 14 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 923ea6447..27ad14ed4 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -441,7 +441,7 @@ static void performOp(TunnelLogger * logger, ref store, eagerly consume the entire stream it's given, past the length of the Nar. */ TeeSource savedNARSource(from, saved); - NullParseSink sink; /* just parse the NAR */ + NullFileSystemObjectSink sink; /* just parse the NAR */ parseDump(sink, savedNARSource); } else { /* Incrementally parse the NAR file, stripping the @@ -913,7 +913,7 @@ static void performOp(TunnelLogger * logger, ref store, source = std::make_unique(from, to); else { TeeSource tee { from, saved }; - NullParseSink ether; + NullFileSystemObjectSink ether; parseDump(ether, tee); source = std::make_unique(saved.s); } diff --git a/src/libstore/export-import.cc b/src/libstore/export-import.cc index d57b25bd7..cb36c0c1b 100644 --- a/src/libstore/export-import.cc +++ b/src/libstore/export-import.cc @@ -65,7 +65,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs) /* Extract the NAR from the source. */ StringSink saved; TeeSource tee { source, saved }; - NullParseSink ether; + NullFileSystemObjectSink ether; parseDump(ether, tee); uint32_t magic = readInt(source); diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 07068f8f8..2c22bfe31 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1048,7 +1048,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, bool narRead = false; Finally cleanup = [&]() { if (!narRead) { - NullParseSink sink; + NullFileSystemObjectSink sink; try { parseDump(sink, source); } catch (...) 
{ diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 15b05fe25..4bc68a5ae 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -27,7 +27,7 @@ struct NarAccessor : public SourceAccessor NarMember root; - struct NarIndexer : ParseSink, Source + struct NarIndexer : FileSystemObjectSink, Source { NarAccessor & acc; Source & source; diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c913a97dc..439c9530c 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -424,12 +424,12 @@ ValidPathInfo Store::addToStoreSlow( information to narSink. */ TeeSource tapped { *fileSource, narSink }; - NullParseSink blank; + NullFileSystemObjectSink blank; auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat - ? (ParseSink &) fileSink + ? (FileSystemObjectSink &) fileSink : method.getFileIngestionMethod() == FileIngestionMethod::Recursive - ? (ParseSink &) blank - : (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases + ? (FileSystemObjectSink &) blank + : (abort(), (FileSystemObjectSink &)*(FileSystemObjectSink *)nullptr); // handled both cases /* The information that flows from tapped (besides being replicated in narSink), is now put in parseSink. */ diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 712ea51c7..17886dd19 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -133,7 +133,7 @@ static SerialisationError badArchive(const std::string & s) } -static void parseContents(ParseSink & sink, Source & source, const Path & path) +static void parseContents(FileSystemObjectSink & sink, Source & source, const Path & path) { uint64_t size = readLongLong(source); @@ -164,7 +164,7 @@ struct CaseInsensitiveCompare }; -static void parse(ParseSink & sink, Source & source, const Path & path) +static void parse(FileSystemObjectSink & sink, Source & source, const Path & path) { std::string s; @@ -266,7 +266,7 @@ static void parse(ParseSink & sink, Source & source, const Path & path) } -void parseDump(ParseSink & sink, Source & source) +void parseDump(FileSystemObjectSink & sink, Source & source) { std::string version; try { @@ -294,7 +294,7 @@ void copyNAR(Source & source, Sink & sink) // FIXME: if 'source' is the output of dumpPath() followed by EOF, // we should just forward all data directly without parsing. - NullParseSink parseSink; /* just parse the NAR */ + NullFileSystemObjectSink parseSink; /* just parse the NAR */ TeeSource wrapper { source, sink }; diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 2cf8ee891..28c63bb85 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -73,7 +73,7 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink, */ void dumpString(std::string_view s, Sink & sink); -void parseDump(ParseSink & sink, Source & source); +void parseDump(FileSystemObjectSink & sink, Source & source); void restorePath(const Path & path, Source & source); diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh index 8e93f5847..7f7544e41 100644 --- a/src/libutil/file-content-address.hh +++ b/src/libutil/file-content-address.hh @@ -35,7 +35,7 @@ void dumpPath( /** * Restore a serialization of the given file system object. * - * @TODO use an arbitrary `ParseSink`. + * @TODO use an arbitrary `FileSystemObjectSink`. 
*/ void restorePath( const Path & path, diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index 925e6f05d..bf44de92d 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -7,7 +7,7 @@ namespace nix { void copyRecursive( SourceAccessor & accessor, const CanonPath & from, - ParseSink & sink, const Path & to) + FileSystemObjectSink & sink, const Path & to) { auto stat = accessor.lstat(from); diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index bf54b7301..f4c4e92f1 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -11,7 +11,7 @@ namespace nix { /** * \todo Fix this API, it sucks. */ -struct ParseSink +struct FileSystemObjectSink { virtual void createDirectory(const Path & path) = 0; @@ -33,12 +33,12 @@ struct ParseSink */ void copyRecursive( SourceAccessor & accessor, const CanonPath & sourcePath, - ParseSink & sink, const Path & destPath); + FileSystemObjectSink & sink, const Path & destPath); /** * Ignore everything and do nothing */ -struct NullParseSink : ParseSink +struct NullFileSystemObjectSink : FileSystemObjectSink { void createDirectory(const Path & path) override { } void receiveContents(std::string_view data) override { } @@ -51,7 +51,7 @@ struct NullParseSink : ParseSink /** * Write files at the given path */ -struct RestoreSink : ParseSink +struct RestoreSink : FileSystemObjectSink { Path dstPath; @@ -75,7 +75,7 @@ private: * `receiveContents` to the underlying `Sink`. For anything but a single * file, set `regular = true` so the caller can fail accordingly. */ -struct RegularFileSink : ParseSink +struct RegularFileSink : FileSystemObjectSink { bool regular = true; Sink & sink; diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 296b75628..058384db0 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -54,7 +54,7 @@ static std::string getString(Source & source, int n) void parse( - ParseSink & sink, + FileSystemObjectSink & sink, const Path & sinkPath, Source & source, std::function hook, @@ -133,7 +133,7 @@ std::optional convertMode(SourceAccessor::Type type) } -void restore(ParseSink & sink, Source & source, std::function hook) +void restore(FileSystemObjectSink & sink, Source & source, std::function hook) { parse(sink, "", source, [&](Path name, TreeEntry entry) { auto [accessor, from] = hook(entry.hash); diff --git a/src/libutil/git.hh b/src/libutil/git.hh index b24b25dd3..e2fe20509 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -60,7 +60,7 @@ using Tree = std::map; using SinkHook = void(const Path & name, TreeEntry entry); void parse( - ParseSink & sink, const Path & sinkPath, + FileSystemObjectSink & sink, const Path & sinkPath, Source & source, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); @@ -81,7 +81,7 @@ using RestoreHook = std::pair(Hash); /** * Wrapper around `parse` and `RestoreSink` */ -void restore(ParseSink & sink, Source & source, std::function hook); +void restore(FileSystemObjectSink & sink, Source & source, std::function hook); /** * Dumps a single file to a sink diff --git a/src/libutil/memory-source-accessor.hh b/src/libutil/memory-source-accessor.hh index b908f3713..b46c61e54 100644 --- a/src/libutil/memory-source-accessor.hh +++ b/src/libutil/memory-source-accessor.hh @@ -75,7 +75,7 @@ struct MemorySourceAccessor : virtual SourceAccessor /** * Write to a `MemorySourceAccessor` at the given path */ -struct MemorySink : ParseSink +struct MemorySink : FileSystemObjectSink { MemorySourceAccessor & dst; diff --git 
a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 141a55816..6bbcd161b 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -119,7 +119,7 @@ const static Tree tree = { TEST_F(GitTest, tree_read) { readTest("tree.bin", [&](const auto & encoded) { StringSource in { encoded }; - NullParseSink out; + NullFileSystemObjectSink out; Tree got; parse(out, "", in, [&](auto & name, auto entry) { auto name2 = name; From 6365bbfa8120007719156b45482568aca6c74f26 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 20 Dec 2023 14:47:05 -0500 Subject: [PATCH 194/307] Improve the `FileSystemObjectSink` interface More invariants are enforced in the type, and less state needs to be stored in the main sink itself. The method here is roughly that known as "session types". Co-authored-by: Robert Hensing --- src/libstore/nar-accessor.cc | 60 +++++++---- src/libutil/archive.cc | 144 ++++++++++++++------------ src/libutil/fs-sink.cc | 72 +++++++++---- src/libutil/fs-sink.hh | 61 ++++++----- src/libutil/git.cc | 130 +++++++++++++++-------- src/libutil/git.hh | 34 +++++- src/libutil/memory-source-accessor.cc | 39 ++++--- src/libutil/memory-source-accessor.hh | 12 +-- tests/unit/libutil/git.cc | 24 +++-- 9 files changed, 357 insertions(+), 219 deletions(-) diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 4bc68a5ae..b13e4c52c 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -19,6 +19,35 @@ struct NarMember std::map children; }; +struct NarMemberConstructor : CreateRegularFileSink +{ +private: + + NarMember & narMember; + + uint64_t & pos; + +public: + + NarMemberConstructor(NarMember & nm, uint64_t & pos) + : narMember(nm), pos(pos) + { } + + void isExecutable() override + { + narMember.stat.isExecutable = true; + } + + void preallocateContents(uint64_t size) override + { + narMember.stat.fileSize = size; + narMember.stat.narOffset = pos; + } + + void operator () (std::string_view data) override + { } +}; + struct NarAccessor : public SourceAccessor { std::optional nar; @@ -42,7 +71,7 @@ struct NarAccessor : public SourceAccessor : acc(acc), source(source) { } - void createMember(const Path & path, NarMember member) + NarMember & createMember(const Path & path, NarMember member) { size_t level = std::count(path.begin(), path.end(), '/'); while (parents.size() > level) parents.pop(); @@ -50,11 +79,14 @@ struct NarAccessor : public SourceAccessor if (parents.empty()) { acc.root = std::move(member); parents.push(&acc.root); + return acc.root; } else { if (parents.top()->stat.type != Type::tDirectory) throw Error("NAR file missing parent directory of path '%s'", path); auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member)); - parents.push(&result.first->second); + auto & ref = result.first->second; + parents.push(&ref); + return ref; } } @@ -68,34 +100,18 @@ struct NarAccessor : public SourceAccessor } }); } - void createRegularFile(const Path & path) override + void createRegularFile(const Path & path, std::function func) override { - createMember(path, NarMember{ .stat = { + auto & nm = createMember(path, NarMember{ .stat = { .type = Type::tRegular, .fileSize = 0, .isExecutable = false, .narOffset = 0 } }); + NarMemberConstructor nmc { nm, pos }; + func(nmc); } - void closeRegularFile() override - { } - - void isExecutable() override - { - parents.top()->stat.isExecutable = true; - } - - void preallocateContents(uint64_t size) override - { - auto & st = parents.top()->stat; - st.fileSize = size; - 
st.narOffset = pos; - } - - void receiveContents(std::string_view data) override - { } - void createSymlink(const Path & path, const std::string & target) override { createMember(path, diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 17886dd19..6062392cd 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -133,7 +133,7 @@ static SerialisationError badArchive(const std::string & s) } -static void parseContents(FileSystemObjectSink & sink, Source & source, const Path & path) +static void parseContents(CreateRegularFileSink & sink, Source & source) { uint64_t size = readLongLong(source); @@ -147,7 +147,7 @@ static void parseContents(FileSystemObjectSink & sink, Source & source, const Pa auto n = buf.size(); if ((uint64_t)n > left) n = left; source(buf.data(), n); - sink.receiveContents({buf.data(), n}); + sink({buf.data(), n}); left -= n; } @@ -171,95 +171,107 @@ static void parse(FileSystemObjectSink & sink, Source & source, const Path & pat s = readString(source); if (s != "(") throw badArchive("expected open tag"); - enum { tpUnknown, tpRegular, tpDirectory, tpSymlink } type = tpUnknown; - std::map names; - while (1) { + auto getString = [&]() { checkInterrupt(); + return readString(source); + }; - s = readString(source); + // For first iteration + s = getString(); + + while (1) { if (s == ")") { break; } else if (s == "type") { - if (type != tpUnknown) - throw badArchive("multiple type fields"); - std::string t = readString(source); + std::string t = getString(); if (t == "regular") { - type = tpRegular; - sink.createRegularFile(path); + sink.createRegularFile(path, [&](auto & crf) { + while (1) { + s = getString(); + + if (s == "contents") { + parseContents(crf, source); + } + + else if (s == "executable") { + auto s2 = getString(); + if (s2 != "") throw badArchive("executable marker has non-empty value"); + crf.isExecutable(); + } + + else break; + } + }); } else if (t == "directory") { sink.createDirectory(path); - type = tpDirectory; + + while (1) { + s = getString(); + + if (s == "entry") { + std::string name, prevName; + + s = getString(); + if (s != "(") throw badArchive("expected open tag"); + + while (1) { + s = getString(); + + if (s == ")") { + break; + } else if (s == "name") { + name = getString(); + if (name.empty() || name == "." || name == ".." 
|| name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) + throw Error("NAR contains invalid file name '%1%'", name); + if (name <= prevName) + throw Error("NAR directory is not sorted"); + prevName = name; + if (archiveSettings.useCaseHack) { + auto i = names.find(name); + if (i != names.end()) { + debug("case collision between '%1%' and '%2%'", i->first, name); + name += caseHackSuffix; + name += std::to_string(++i->second); + } else + names[name] = 0; + } + } else if (s == "node") { + if (name.empty()) throw badArchive("entry name missing"); + parse(sink, source, path + "/" + name); + } else + throw badArchive("unknown field " + s); + } + } + + else break; + } } else if (t == "symlink") { - type = tpSymlink; + s = getString(); + + if (s != "target") + throw badArchive("expected 'target' got " + s); + + std::string target = getString(); + sink.createSymlink(path, target); + + // for the next iteration + s = getString(); } else throw badArchive("unknown file type " + t); } - else if (s == "contents" && type == tpRegular) { - parseContents(sink, source, path); - sink.closeRegularFile(); - } - - else if (s == "executable" && type == tpRegular) { - auto s = readString(source); - if (s != "") throw badArchive("executable marker has non-empty value"); - sink.isExecutable(); - } - - else if (s == "entry" && type == tpDirectory) { - std::string name, prevName; - - s = readString(source); - if (s != "(") throw badArchive("expected open tag"); - - while (1) { - checkInterrupt(); - - s = readString(source); - - if (s == ")") { - break; - } else if (s == "name") { - name = readString(source); - if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos) - throw Error("NAR contains invalid file name '%1%'", name); - if (name <= prevName) - throw Error("NAR directory is not sorted"); - prevName = name; - if (archiveSettings.useCaseHack) { - auto i = names.find(name); - if (i != names.end()) { - debug("case collision between '%1%' and '%2%'", i->first, name); - name += caseHackSuffix; - name += std::to_string(++i->second); - } else - names[name] = 0; - } - } else if (s == "node") { - if (name.empty()) throw badArchive("entry name missing"); - parse(sink, source, path + "/" + name); - } else - throw badArchive("unknown field " + s); - } - } - - else if (s == "target" && type == tpSymlink) { - std::string target = readString(source); - sink.createSymlink(path, target); - } - else throw badArchive("unknown field " + s); } diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index bf44de92d..b6f8db592 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -19,16 +19,12 @@ void copyRecursive( case SourceAccessor::tRegular: { - sink.createRegularFile(to); - if (stat.isExecutable) - sink.isExecutable(); - LambdaSink sink2 { - [&](auto d) { - sink.receiveContents(d); - } - }; - accessor.readFile(from, sink2, [&](uint64_t size) { - sink.preallocateContents(size); + sink.createRegularFile(to, [&](CreateRegularFileSink & crf) { + if (stat.isExecutable) + crf.isExecutable(); + accessor.readFile(from, crf, [&](uint64_t size) { + crf.preallocateContents(size); + }); }); break; } @@ -71,20 +67,24 @@ void RestoreSink::createDirectory(const Path & path) throw SysError("creating directory '%1%'", p); }; -void RestoreSink::createRegularFile(const Path & path) +struct RestoreRegularFile : CreateRegularFileSink { + AutoCloseFD fd; + + void operator () (std::string_view data) override; + void 
isExecutable() override; + void preallocateContents(uint64_t size) override; +}; + +void RestoreSink::createRegularFile(const Path & path, std::function func) { Path p = dstPath + path; - fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); - if (!fd) throw SysError("creating file '%1%'", p); + RestoreRegularFile crf; + crf.fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666); + if (!crf.fd) throw SysError("creating file '%1%'", p); + func(crf); } -void RestoreSink::closeRegularFile() -{ - /* Call close explicitly to make sure the error is checked */ - fd.close(); -} - -void RestoreSink::isExecutable() +void RestoreRegularFile::isExecutable() { struct stat st; if (fstat(fd.get(), &st) == -1) @@ -93,7 +93,7 @@ void RestoreSink::isExecutable() throw SysError("fchmod"); } -void RestoreSink::preallocateContents(uint64_t len) +void RestoreRegularFile::preallocateContents(uint64_t len) { if (!restoreSinkSettings.preallocateContents) return; @@ -111,7 +111,7 @@ void RestoreSink::preallocateContents(uint64_t len) #endif } -void RestoreSink::receiveContents(std::string_view data) +void RestoreRegularFile::operator () (std::string_view data) { writeFull(fd.get(), data); } @@ -122,4 +122,32 @@ void RestoreSink::createSymlink(const Path & path, const std::string & target) nix::createSymlink(target, p); } + +void RegularFileSink::createRegularFile(const Path & path, std::function func) +{ + struct CRF : CreateRegularFileSink { + RegularFileSink & back; + CRF(RegularFileSink & back) : back(back) {} + void operator () (std::string_view data) override + { + back.sink(data); + } + void isExecutable() override {} + } crf { *this }; + func(crf); +} + + +void NullFileSystemObjectSink::createRegularFile(const Path & path, std::function func) +{ + struct : CreateRegularFileSink { + void operator () (std::string_view data) override {} + void isExecutable() override {} + } crf; + // Even though `NullFileSystemObjectSink` doesn't do anything, it's important + // that we call the function, to e.g. advance the parser using this + // sink. + func(crf); +} + } diff --git a/src/libutil/fs-sink.hh b/src/libutil/fs-sink.hh index f4c4e92f1..4dfb5b329 100644 --- a/src/libutil/fs-sink.hh +++ b/src/libutil/fs-sink.hh @@ -9,18 +9,13 @@ namespace nix { /** - * \todo Fix this API, it sucks. + * Actions on an open regular file in the process of creating it. + * + * See `FileSystemObjectSink::createRegularFile`. */ -struct FileSystemObjectSink +struct CreateRegularFileSink : Sink { - virtual void createDirectory(const Path & path) = 0; - - virtual void createRegularFile(const Path & path) = 0; - virtual void receiveContents(std::string_view data) = 0; virtual void isExecutable() = 0; - virtual void closeRegularFile() = 0; - - virtual void createSymlink(const Path & path, const std::string & target) = 0; /** * An optimization. By default, do nothing. @@ -28,8 +23,24 @@ struct FileSystemObjectSink virtual void preallocateContents(uint64_t size) { }; }; + +struct FileSystemObjectSink +{ + virtual void createDirectory(const Path & path) = 0; + + /** + * This function in general is no re-entrant. Only one file can be + * written at a time. + */ + virtual void createRegularFile( + const Path & path, + std::function) = 0; + + virtual void createSymlink(const Path & path, const std::string & target) = 0; +}; + /** - * Recusively copy file system objects from the source into the sink. + * Recursively copy file system objects from the source into the sink. 
*/ void copyRecursive( SourceAccessor & accessor, const CanonPath & sourcePath, @@ -41,11 +52,10 @@ void copyRecursive( struct NullFileSystemObjectSink : FileSystemObjectSink { void createDirectory(const Path & path) override { } - void receiveContents(std::string_view data) override { } void createSymlink(const Path & path, const std::string & target) override { } - void createRegularFile(const Path & path) override { } - void closeRegularFile() override { } - void isExecutable() override { } + void createRegularFile( + const Path & path, + std::function) override; }; /** @@ -57,17 +67,11 @@ struct RestoreSink : FileSystemObjectSink void createDirectory(const Path & path) override; - void createRegularFile(const Path & path) override; - void receiveContents(std::string_view data) override; - void isExecutable() override; - void closeRegularFile() override; + void createRegularFile( + const Path & path, + std::function) override; void createSymlink(const Path & path, const std::string & target) override; - - void preallocateContents(uint64_t size) override; - -private: - AutoCloseFD fd; }; /** @@ -87,19 +91,14 @@ struct RegularFileSink : FileSystemObjectSink regular = false; } - void receiveContents(std::string_view data) override - { - sink(data); - } - void createSymlink(const Path & path, const std::string & target) override { regular = false; } - void createRegularFile(const Path & path) override { } - void closeRegularFile() override { } - void isExecutable() override { } + void createRegularFile( + const Path & path, + std::function) override; }; } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 058384db0..3b8c3ebac 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -52,24 +52,22 @@ static std::string getString(Source & source, int n) return v; } - -void parse( +void parseBlob( FileSystemObjectSink & sink, const Path & sinkPath, Source & source, - std::function hook, + bool executable, const ExperimentalFeatureSettings & xpSettings) { xpSettings.require(Xp::GitHashing); - auto type = getString(source, 5); - - if (type == "blob ") { - sink.createRegularFile(sinkPath); + sink.createRegularFile(sinkPath, [&](auto & crf) { + if (executable) + crf.isExecutable(); unsigned long long size = std::stoi(getStringUntil(source, 0)); - sink.preallocateContents(size); + crf.preallocateContents(size); unsigned long long left = size; std::string buf; @@ -79,47 +77,91 @@ void parse( checkInterrupt(); buf.resize(std::min((unsigned long long)buf.capacity(), left)); source(buf); - sink.receiveContents(buf); + crf(buf); left -= buf.size(); } + }); +} + +void parseTree( + FileSystemObjectSink & sink, + const Path & sinkPath, + Source & source, + std::function hook, + const ExperimentalFeatureSettings & xpSettings) +{ + unsigned long long size = std::stoi(getStringUntil(source, 0)); + unsigned long long left = size; + + sink.createDirectory(sinkPath); + + while (left) { + std::string perms = getStringUntil(source, ' '); + left -= perms.size(); + left -= 1; + + RawMode rawMode = std::stoi(perms, 0, 8); + auto modeOpt = decodeMode(rawMode); + if (!modeOpt) + throw Error("Unknown Git permission: %o", perms); + auto mode = std::move(*modeOpt); + + std::string name = getStringUntil(source, '\0'); + left -= name.size(); + left -= 1; + + std::string hashs = getString(source, 20); + left -= 20; + + Hash hash(HashAlgorithm::SHA1); + std::copy(hashs.begin(), hashs.end(), hash.hash); + + hook(name, TreeEntry { + .mode = mode, + .hash = hash, + }); + } +} + +ObjectType parseObjectType( + Source & 
source, + const ExperimentalFeatureSettings & xpSettings) +{ + xpSettings.require(Xp::GitHashing); + + auto type = getString(source, 5); + + if (type == "blob ") { + return ObjectType::Blob; } else if (type == "tree ") { - unsigned long long size = std::stoi(getStringUntil(source, 0)); - unsigned long long left = size; - - sink.createDirectory(sinkPath); - - while (left) { - std::string perms = getStringUntil(source, ' '); - left -= perms.size(); - left -= 1; - - RawMode rawMode = std::stoi(perms, 0, 8); - auto modeOpt = decodeMode(rawMode); - if (!modeOpt) - throw Error("Unknown Git permission: %o", perms); - auto mode = std::move(*modeOpt); - - std::string name = getStringUntil(source, '\0'); - left -= name.size(); - left -= 1; - - std::string hashs = getString(source, 20); - left -= 20; - - Hash hash(HashAlgorithm::SHA1); - std::copy(hashs.begin(), hashs.end(), hash.hash); - - hook(name, TreeEntry { - .mode = mode, - .hash = hash, - }); - - if (mode == Mode::Executable) - sink.isExecutable(); - } + return ObjectType::Tree; } else throw Error("input doesn't look like a Git object"); } +void parse( + FileSystemObjectSink & sink, + const Path & sinkPath, + Source & source, + bool executable, + std::function hook, + const ExperimentalFeatureSettings & xpSettings) +{ + xpSettings.require(Xp::GitHashing); + + auto type = parseObjectType(source, xpSettings); + + switch (type) { + case ObjectType::Blob: + parseBlob(sink, sinkPath, source, executable, xpSettings); + break; + case ObjectType::Tree: + parseTree(sink, sinkPath, source, hook, xpSettings); + break; + default: + assert(false); + }; +} + std::optional convertMode(SourceAccessor::Type type) { @@ -135,7 +177,7 @@ std::optional convertMode(SourceAccessor::Type type) void restore(FileSystemObjectSink & sink, Source & source, std::function hook) { - parse(sink, "", source, [&](Path name, TreeEntry entry) { + parse(sink, "", source, false, [&](Path name, TreeEntry entry) { auto [accessor, from] = hook(entry.hash); auto stat = accessor->lstat(from); auto gotOpt = convertMode(stat.type); diff --git a/src/libutil/git.hh b/src/libutil/git.hh index e2fe20509..d9eb138e1 100644 --- a/src/libutil/git.hh +++ b/src/libutil/git.hh @@ -13,12 +13,19 @@ namespace nix::git { +enum struct ObjectType { + Blob, + Tree, + //Commit, + //Tag, +}; + using RawMode = uint32_t; enum struct Mode : RawMode { Directory = 0040000, - Executable = 0100755, Regular = 0100644, + Executable = 0100755, Symlink = 0120000, }; @@ -59,9 +66,34 @@ using Tree = std::map; */ using SinkHook = void(const Path & name, TreeEntry entry); +/** + * Parse the "blob " or "tree " prefix. + * + * @throws if prefix not recognized + */ +ObjectType parseObjectType( + Source & source, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +void parseBlob( + FileSystemObjectSink & sink, const Path & sinkPath, + Source & source, + bool executable, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +void parseTree( + FileSystemObjectSink & sink, const Path & sinkPath, + Source & source, + std::function hook, + const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); + +/** + * Helper putting the previous three `parse*` functions together. 
+ */ void parse( FileSystemObjectSink & sink, const Path & sinkPath, Source & source, + bool executable, std::function hook, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings); diff --git a/src/libutil/memory-source-accessor.cc b/src/libutil/memory-source-accessor.cc index 78a4dd298..880fa61b7 100644 --- a/src/libutil/memory-source-accessor.cc +++ b/src/libutil/memory-source-accessor.cc @@ -134,36 +134,43 @@ void MemorySink::createDirectory(const Path & path) throw Error("file '%s' is not a directory", path); }; -void MemorySink::createRegularFile(const Path & path) +struct CreateMemoryRegularFile : CreateRegularFileSink { + File::Regular & regularFile; + + CreateMemoryRegularFile(File::Regular & r) + : regularFile(r) + { } + + void operator () (std::string_view data) override; + void isExecutable() override; + void preallocateContents(uint64_t size) override; +}; + +void MemorySink::createRegularFile(const Path & path, std::function func) { auto * f = dst.open(CanonPath{path}, File { File::Regular {} }); if (!f) throw Error("file '%s' cannot be made because some parent file is not a directory", path); - if (!(r = std::get_if(&f->raw))) + if (auto * rp = std::get_if(&f->raw)) { + CreateMemoryRegularFile crf { *rp }; + func(crf); + } else throw Error("file '%s' is not a regular file", path); } -void MemorySink::closeRegularFile() +void CreateMemoryRegularFile::isExecutable() { - r = nullptr; + regularFile.executable = true; } -void MemorySink::isExecutable() +void CreateMemoryRegularFile::preallocateContents(uint64_t len) { - assert(r); - r->executable = true; + regularFile.contents.reserve(len); } -void MemorySink::preallocateContents(uint64_t len) +void CreateMemoryRegularFile::operator () (std::string_view data) { - assert(r); - r->contents.reserve(len); -} - -void MemorySink::receiveContents(std::string_view data) -{ - assert(r); - r->contents += data; + regularFile.contents += data; } void MemorySink::createSymlink(const Path & path, const std::string & target) diff --git a/src/libutil/memory-source-accessor.hh b/src/libutil/memory-source-accessor.hh index b46c61e54..7a1990d2f 100644 --- a/src/libutil/memory-source-accessor.hh +++ b/src/libutil/memory-source-accessor.hh @@ -83,17 +83,11 @@ struct MemorySink : FileSystemObjectSink void createDirectory(const Path & path) override; - void createRegularFile(const Path & path) override; - void receiveContents(std::string_view data) override; - void isExecutable() override; - void closeRegularFile() override; + void createRegularFile( + const Path & path, + std::function) override; void createSymlink(const Path & path, const std::string & target) override; - - void preallocateContents(uint64_t size) override; - -private: - MemorySourceAccessor::File::Regular * r; }; } diff --git a/tests/unit/libutil/git.cc b/tests/unit/libutil/git.cc index 6bbcd161b..76ef86bcf 100644 --- a/tests/unit/libutil/git.cc +++ b/tests/unit/libutil/git.cc @@ -66,7 +66,8 @@ TEST_F(GitTest, blob_read) { StringSource in { encoded }; StringSink out; RegularFileSink out2 { out }; - parse(out2, "", in, [](auto &, auto) {}, mockXpSettings); + ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob); + parseBlob(out2, "", in, false, mockXpSettings); auto expected = readFile(goldenMaster("hello-world.bin")); @@ -121,7 +122,8 @@ TEST_F(GitTest, tree_read) { StringSource in { encoded }; NullFileSystemObjectSink out; Tree got; - parse(out, "", in, [&](auto & name, auto entry) { + ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree); 
+ parseTree(out, "", in, [&](auto & name, auto entry) { auto name2 = name; if (entry.mode == Mode::Directory) name2 += '/'; @@ -193,15 +195,21 @@ TEST_F(GitTest, both_roundrip) { MemorySink sinkFiles2 { files2 }; - std::function mkSinkHook; - mkSinkHook = [&](const Path prefix, const Hash & hash) { + std::function mkSinkHook; + mkSinkHook = [&](auto prefix, auto & hash, auto executable) { StringSource in { cas[hash] }; - parse(sinkFiles2, prefix, in, [&](const Path & name, const auto & entry) { - mkSinkHook(prefix + "/" + name, entry.hash); - }, mockXpSettings); + parse( + sinkFiles2, prefix, in, executable, + [&](const Path & name, const auto & entry) { + mkSinkHook( + prefix + "/" + name, + entry.hash, + entry.mode == Mode::Executable); + }, + mockXpSettings); }; - mkSinkHook("", root.hash); + mkSinkHook("", root.hash, false); ASSERT_EQ(files, files2); } From 739032762addcb3d88490040b388ff63b155bb16 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:30:26 -0500 Subject: [PATCH 195/307] Make `Machine::systemTypes` a set not vector This is more conceptually correct (the order does not matter), and also matches what Hydra already does. (Nix and Hydra matching is needed for dedup https://github.com/NixOS/hydra/issues/1164) --- src/build-remote/build-remote.cc | 6 ++---- src/libstore/machines.cc | 2 +- src/libstore/machines.hh | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index d69d3a0c2..b6704152a 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -139,9 +139,7 @@ static int main_build_remote(int argc, char * * argv) if (m.enabled && (neededSystem == "builtin" - || std::find(m.systemTypes.begin(), - m.systemTypes.end(), - neededSystem) != m.systemTypes.end()) && + || m.systemTypes.count(neededSystem) > 0) && m.allSupported(requiredFeatures) && m.mandatoryMet(requiredFeatures)) { @@ -214,7 +212,7 @@ static int main_build_remote(int argc, char * * argv) for (auto & m : machines) error - % concatStringsSep>(", ", m.systemTypes) + % concatStringsSep(", ", m.systemTypes) % m.maxJobs % concatStringsSep(", ", m.supportedFeatures) % concatStringsSep(", ", m.mandatoryFeatures); diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 512115893..8a1da84cd 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -145,7 +145,7 @@ static Machine parseBuilderLine(const std::string & line) return { tokens[0], - isSet(1) ? tokenizeString>(tokens[1], ",") : std::vector{settings.thisSystem}, + isSet(1) ? tokenizeString>(tokens[1], ",") : std::set{settings.thisSystem}, isSet(2) ? tokens[2] : "", isSet(3) ? parseUnsignedIntField(3) : 1U, isSet(4) ? 
parseUnsignedIntField(4) : 1U, diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 1adeaf1f0..d25fdf1b3 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -10,7 +10,7 @@ class Store; struct Machine { const std::string storeUri; - const std::vector systemTypes; + const std::set systemTypes; const std::string sshKey; const unsigned int maxJobs; const unsigned int speedFactor; From 870acc2892661d1d2c9f9f39c43d79cb4bbaacb0 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:50:48 -0500 Subject: [PATCH 196/307] Add API docs to `Machine` methods --- src/libstore/machines.hh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index d25fdf1b3..7dd812cf0 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -19,8 +19,15 @@ struct Machine { const std::string sshPublicHostKey; bool enabled = true; + /** + * @return Whether `features` is a subset of the union of `supportedFeatures` and + * `mandatoryFeatures` + */ bool allSupported(const std::set & features) const; + /** + * @return @Whether `mandatoryFeatures` is a subset of `features` + */ bool mandatoryMet(const std::set & features) const; Machine(decltype(storeUri) storeUri, From 0aa85088dee30615adcc7a2933fb94ea8767ec35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 12:52:54 -0500 Subject: [PATCH 197/307] Factor out `Machine::systemSupported` There's just enough logic (the `"builtin"` special case) that makes this worthy of its own method. --- src/build-remote/build-remote.cc | 5 ++--- src/libstore/machines.cc | 5 +++++ src/libstore/machines.hh | 6 ++++++ 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index b6704152a..519e03242 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -137,9 +137,8 @@ static int main_build_remote(int argc, char * * argv) for (auto & m : machines) { debug("considering building on remote machine '%s'", m.storeUri); - if (m.enabled - && (neededSystem == "builtin" - || m.systemTypes.count(neededSystem) > 0) && + if (m.enabled && + m.systemSupported(neededSystem) && m.allSupported(requiredFeatures) && m.mandatoryMet(requiredFeatures)) { diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 8a1da84cd..561d8d557 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -38,6 +38,11 @@ Machine::Machine(decltype(storeUri) storeUri, sshPublicHostKey(sshPublicHostKey) {} +bool Machine::systemSupported(const std::string & system) const +{ + return system == "builtin" || (systemTypes.count(system) > 0); +} + bool Machine::allSupported(const std::set & features) const { return std::all_of(features.begin(), features.end(), diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 7dd812cf0..1bca74c28 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -19,6 +19,12 @@ struct Machine { const std::string sshPublicHostKey; bool enabled = true; + /** + * @return Whether `system` is either `"builtin"` or in + * `systemTypes`. 
+ */ + bool systemSupported(const std::string & system) const; + /** * @return Whether `features` is a subset of the union of `supportedFeatures` and * `mandatoryFeatures` From 83bb494a30a9e659a53eb757242fa0113aeae556 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Wed, 6 Dec 2023 12:42:53 -0800 Subject: [PATCH 198/307] Print the value in `error: cannot coerce` messages This extends the `error: cannot coerce a TYPE to a string` message to print the value that could not be coerced. This helps with debugging by making it easier to track down where the value is being produced from, especially in errors with deep or unhelpful stack traces. --- .../rl-next/print-value-in-coercion-error.md | 24 ++++++++++++++++ .../src/language/string-interpolation.md | 2 +- src/libexpr/eval.cc | 10 +++++-- src/libexpr/print-options.hh | 8 +++++- src/libexpr/print.cc | 11 +++++--- ...al-fail-bad-string-interpolation-1.err.exp | 2 +- ...al-fail-bad-string-interpolation-3.err.exp | 2 +- ...al-fail-bad-string-interpolation-4.err.exp | 2 +- tests/unit/libexpr/error_traces.cc | 28 +++++++++---------- tests/unit/libexpr/value/print.cc | 10 +++---- 10 files changed, 68 insertions(+), 31 deletions(-) create mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md new file mode 100644 index 000000000..046e4e3cf --- /dev/null +++ b/doc/manual/rl-next/print-value-in-coercion-error.md @@ -0,0 +1,24 @@ +--- +synopsis: Coercion errors include the failing value +issues: #561 +prs: #9754 +--- + +The `error: cannot coerce a to a string` message now includes the value +which caused the error. + +Before: + +``` + error: cannot coerce a set to a string +``` + +After: + +``` + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} +``` diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index e999b287b..6e28d2664 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -189,7 +189,7 @@ If neither is present, an error is thrown. 
> "${a}" > ``` > -> error: cannot coerce a set to a string +> error: cannot coerce a set to a string: { } > > at «string»:4:2: > diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71e956e10..437a6b7bf 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2255,7 +2255,9 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2301,7 +2303,9 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string", showType(v)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions)) .withTrace(pos, errorCtx) .debugThrow(); } @@ -2661,7 +2665,7 @@ void EvalState::printStatistics() std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string", showType()) + .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this) }); } diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index aba2eaeae..e03746ece 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -36,11 +36,17 @@ struct PrintOptions */ size_t maxDepth = std::numeric_limits::max(); /** - * Maximum number of attributes in an attribute set to print. + * Maximum number of attributes in attribute sets to print. + * + * Note that this is a limit for the entire print invocation, not for each + * attribute set encountered. */ size_t maxAttrs = std::numeric_limits::max(); /** * Maximum number of list items to print. + * + * Note that this is a limit for the entire print invocation, not for each + * list encountered. 
*/ size_t maxListItems = std::numeric_limits::max(); /** diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index dad6dc9ad..702e4bfe8 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -20,7 +20,7 @@ void printElided( { if (ansiColors) output << ANSI_FAINT; - output << " «"; + output << "«"; pluralize(output, value, single, plural); output << " elided»"; if (ansiColors) @@ -37,7 +37,7 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max str << "\""; for (auto i = string.begin(); i != string.end(); ++i) { if (charsPrinted >= maxLength) { - str << "\""; + str << "\" "; printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors); return str; } @@ -161,6 +161,8 @@ private: EvalState & state; PrintOptions options; std::optional seen; + size_t attrsPrinted = 0; + size_t listItemsPrinted = 0; void printRepeated() { @@ -279,7 +281,6 @@ private: else std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); - size_t attrsPrinted = 0; for (auto & i : sorted) { if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); @@ -307,7 +308,6 @@ private: output << "[ "; if (depth < options.maxDepth) { - size_t listItemsPrinted = 0; for (auto elem : v.listItems()) { if (listItemsPrinted >= options.maxListItems) { printElided(v.listSize() - listItemsPrinted, "item", "items"); @@ -486,6 +486,9 @@ public: void print(Value & v) { + attrsPrinted = 0; + listItemsPrinted = 0; + if (options.trackRepeated) { seen.emplace(); } else { diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp index b461b2e02..5ae53034d 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:4» diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp index 95f4c2460..170a3d132 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| - error: cannot coerce a function to a string + error: cannot coerce a function to a string: «lambda @ /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:5» diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 4950f8ddb..5119238d7 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -6,4 +6,4 @@ error: | ^ 10| - error: cannot coerce a set to a string + error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided»}; «4294967294 attributes elided»}; «4294967293 attributes elided»} diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index f0cad58bb..b6fbf02fe 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", 
TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string", "a Boolean"), + hintfmt("cannot coerce %s to a string: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string", "a list"), + hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }"), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1057,7 +1057,7 @@ namespace nix { ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1143,7 +1143,7 @@ namespace nix { ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string", "an integer"), + hintfmt("cannot coerce %s to a string: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1229,12 +1229,12 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", 
"{ }"), hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", @@ -1279,17 +1279,17 @@ namespace nix { ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - hintfmt("cannot coerce %s to a string", "a set"), + hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index 98131112e..c4264a38d 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -370,7 +370,7 @@ TEST_F(ValuePrintingTests, ansiColorsStringElided) v.mkString("puppy"); test(v, - ANSI_MAGENTA "\"pup\"" ANSI_FAINT " «2 bytes elided»" ANSI_NORMAL, + ANSI_MAGENTA "\"pup\" " ANSI_FAINT "«2 bytes elided»" ANSI_NORMAL, PrintOptions { .ansiColors = true, .maxStringLength = 3 @@ -756,7 +756,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «1 attribute elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL "}", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -769,7 +769,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT " «2 attributes elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL "}", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -793,7 +793,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 2; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «1 item elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL "]", PrintOptions { .ansiColors = true, .maxListItems = 1 @@ -806,7 +806,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 3; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT " «2 items elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL "]", PrintOptions { .ansiColors = true, .maxListItems = 1 From 1e24db6f9a7a36ddba1a591da8ddf5f5c9ec3f83 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 24 Jan 2024 01:03:07 -0500 Subject: [PATCH 199/307] Convert `Machine::speedFactor` from a non-neg int to a non-neg float The short motivation is to match Hydra, so we can de-dup. The long version is layed out in https://github.com/NixOS/nix/issues/9840. 
--- src/libstore/machines.cc | 17 ++++++++++++++--- src/libstore/machines.hh | 2 +- tests/unit/libstore/machines.cc | 3 ++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/libstore/machines.cc b/src/libstore/machines.cc index 561d8d557..2d461c63a 100644 --- a/src/libstore/machines.cc +++ b/src/libstore/machines.cc @@ -32,11 +32,14 @@ Machine::Machine(decltype(storeUri) storeUri, systemTypes(systemTypes), sshKey(sshKey), maxJobs(maxJobs), - speedFactor(std::max(1U, speedFactor)), + speedFactor(speedFactor == 0.0f ? 1.0f : std::move(speedFactor)), supportedFeatures(supportedFeatures), mandatoryFeatures(mandatoryFeatures), sshPublicHostKey(sshPublicHostKey) -{} +{ + if (speedFactor < 0.0) + throw UsageError("speed factor must be >= 0"); +} bool Machine::systemSupported(const std::string & system) const { @@ -135,6 +138,14 @@ static Machine parseBuilderLine(const std::string & line) return result.value(); }; + auto parseFloatField = [&](size_t fieldIndex) { + const auto result = string2Int(tokens[fieldIndex]); + if (!result) { + throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line); + } + return result.value(); + }; + auto ensureBase64 = [&](size_t fieldIndex) { const auto & str = tokens[fieldIndex]; try { @@ -153,7 +164,7 @@ static Machine parseBuilderLine(const std::string & line) isSet(1) ? tokenizeString>(tokens[1], ",") : std::set{settings.thisSystem}, isSet(2) ? tokens[2] : "", isSet(3) ? parseUnsignedIntField(3) : 1U, - isSet(4) ? parseUnsignedIntField(4) : 1U, + isSet(4) ? parseFloatField(4) : 1.0f, isSet(5) ? tokenizeString>(tokens[5], ",") : std::set{}, isSet(6) ? tokenizeString>(tokens[6], ",") : std::set{}, isSet(7) ? ensureBase64(7) : "" diff --git a/src/libstore/machines.hh b/src/libstore/machines.hh index 1bca74c28..8516409d4 100644 --- a/src/libstore/machines.hh +++ b/src/libstore/machines.hh @@ -13,7 +13,7 @@ struct Machine { const std::set systemTypes; const std::string sshKey; const unsigned int maxJobs; - const unsigned int speedFactor; + const float speedFactor; const std::set supportedFeatures; const std::set mandatoryFeatures; const std::string sshPublicHostKey; diff --git a/tests/unit/libstore/machines.cc b/tests/unit/libstore/machines.cc index 5b66e5a5b..9fd7fda54 100644 --- a/tests/unit/libstore/machines.cc +++ b/tests/unit/libstore/machines.cc @@ -14,6 +14,7 @@ using testing::SizeIs; using nix::absPath; using nix::FormatError; +using nix::UsageError; using nix::getMachines; using nix::Machine; using nix::Machines; @@ -133,7 +134,7 @@ TEST(machines, getMachinesWithIncorrectFormat) { settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 three"; EXPECT_THROW(getMachines(), FormatError); settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 -3"; - EXPECT_THROW(getMachines(), FormatError); + EXPECT_THROW(getMachines(), UsageError); settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64"; EXPECT_THROW(getMachines(), FormatError); } From 6532dd50fc4f2de79f6a187145a3d554b5a6f03a Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 24 Jan 2024 13:19:02 +0100 Subject: [PATCH 200/307] tests/functional/fetchGit.sh: Test fetchGit/fetchTree error message Follow-up for https://github.com/NixOS/nix/pull/9626 176dcd5c617367dbff6d5455856a25518326f79d --- tests/functional/fetchGit.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index 46532c025..c6a482035 100644 --- a/tests/functional/fetchGit.sh +++ 
b/tests/functional/fetchGit.sh @@ -66,6 +66,9 @@ path2=$(nix eval --raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" # In pure eval mode, fetchGit with a revision should succeed. [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] +# But without a hash, it fails +expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' requires a locked input" + # Fetch again. This should be cached. mv $repo ${repo}-tmp path2=$(nix eval --impure --raw --expr "(builtins.fetchGit file://$repo).outPath") @@ -205,6 +208,8 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] +expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' requires a locked input" + # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") path8=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; }).outPath") From c81730541133d271c040df92600333cf188dc5a4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 23 Jan 2024 15:37:15 -0500 Subject: [PATCH 201/307] Link both gmock and gtest, not just gtest GMock is not entirely header-only, we're finding. --- configure.ac | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/configure.ac b/configure.ac index f46cff732..8c29c1e62 100644 --- a/configure.ac +++ b/configure.ac @@ -351,7 +351,7 @@ fi AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[ # Look for gtest. -PKG_CHECK_MODULES([GTEST], [gtest_main]) +PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main]) # Look for rapidcheck. PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest]) From a9e10a1dbdbc673614c1f27e889a7a0f7e470462 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 24 Jan 2024 21:32:29 -0500 Subject: [PATCH 202/307] Make `StoreConfig::getDefaultSystemFeatures` a static method This makes something in Hydra bit simpler. If someday the default depends on the other config options, we can always change it back. --- src/libstore/store-api.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh index 876ebf384..5163070b2 100644 --- a/src/libstore/store-api.hh +++ b/src/libstore/store-api.hh @@ -108,7 +108,7 @@ struct StoreConfig : public StoreDirConfig StoreConfig() = delete; - StringSet getDefaultSystemFeatures(); + static StringSet getDefaultSystemFeatures(); virtual ~StoreConfig() { } From 08f38a3a4030e765f63e6b02e0094d33083c401b Mon Sep 17 00:00:00 2001 From: lexi Date: Thu, 25 Jan 2024 15:30:51 +0100 Subject: [PATCH 203/307] Fix typo in primops.cc (and therefore Nix docs) This also fixes the typo in the Nix docs at https://nixos.org/manual/nix/unstable/language/builtins. 
--- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5032e95cc..993ecceb2 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1878,7 +1878,7 @@ static RegisterPrimOp primop_outputOf({ For instance, ```nix builtins.outputOf - (builtins.outputOf myDrv "out) + (builtins.outputOf myDrv "out") "out" ``` will return a placeholder for the output of the output of `myDrv`. From 30bdee5c3b6beb88dae48771191de5d0620db6ba Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 26 Jan 2024 18:26:08 +0100 Subject: [PATCH 204/307] update docs on `fetchGit` shallow clone behavior (#9704) --- src/libexpr/primops/fetchTree.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index d32c264f7..a943095bb 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -614,8 +614,7 @@ static RegisterPrimOp primop_fetchGit({ - `shallow` (default: `false`) - A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed. - This still performs a full clone of what is available on the remote. + Make a shallow clone when fetching the Git tree. - `allRefs` From 3a124d1e88c8cbac6fbaf4709b8b4ee92f58ff30 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 26 Jan 2024 09:40:41 -0800 Subject: [PATCH 205/307] Increase stack size on macOS as well as Linux The code works fine on macOS, but the default stack size we attempt to set is larger than what my system will allow (Nix attempts to set the stack size to 67108864, but the maximum allowed is 67092480), so I've instead used the requested stack size or the maximum allowed, whichever is smaller. I've also added an error message if setting the stack size fails. 
It looks like this: > Failed to increase stack size from 8372224 to 67108864 (maximum > allowed stack size: 67092480): Invalid argument --- src/libutil/current-process.cc | 26 +++++++++++++++++--------- src/libutil/current-process.hh | 2 +- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 352a6a0fb..01f64f211 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -1,3 +1,6 @@ +#include +#include + #include "current-process.hh" #include "namespaces.hh" #include "util.hh" @@ -49,20 +52,27 @@ unsigned int getMaxCPU() ////////////////////////////////////////////////////////////////////// -#if __linux__ rlim_t savedStackSize = 0; -#endif -void setStackSize(size_t stackSize) +void setStackSize(rlim_t stackSize) { - #if __linux__ struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) { savedStackSize = limit.rlim_cur; - limit.rlim_cur = stackSize; - setrlimit(RLIMIT_STACK, &limit); + limit.rlim_cur = std::min(stackSize, limit.rlim_max); + if (setrlimit(RLIMIT_STACK, &limit) != 0) { + logger->log( + lvlError, + hintfmt( + "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", + savedStackSize, + stackSize, + limit.rlim_max, + std::strerror(errno) + ).str() + ); + } } - #endif } void restoreProcessContext(bool restoreMounts) @@ -72,7 +82,6 @@ void restoreProcessContext(bool restoreMounts) restoreMountNamespace(); } - #if __linux__ if (savedStackSize) { struct rlimit limit; if (getrlimit(RLIMIT_STACK, &limit) == 0) { @@ -80,7 +89,6 @@ void restoreProcessContext(bool restoreMounts) setrlimit(RLIMIT_STACK, &limit); } } - #endif } diff --git a/src/libutil/current-process.hh b/src/libutil/current-process.hh index 826d6fe20..97ea70bf4 100644 --- a/src/libutil/current-process.hh +++ b/src/libutil/current-process.hh @@ -16,7 +16,7 @@ unsigned int getMaxCPU(); /** * Change the stack size. */ -void setStackSize(size_t stackSize); +void setStackSize(rlim_t stackSize); /** * Restore the original inherited Unix process context (such as signal From 772897a1cd46fc3875f0ffa54cf2661d9ef17494 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 26 Jan 2024 10:08:56 -0800 Subject: [PATCH 206/307] Color `diff` output in `tests/functional/lang` tests Use `diff --color=always` to print colored output for language test failures. I've also flipped the arguments so that expected lines missing from the actual output will be marked with a red `-` and additional lines found in the actual output will be marked with a green `+`. Previously it was the other way around, which was very confusing. --- tests/functional/lang/framework.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/lang/framework.sh b/tests/functional/lang/framework.sh index 516bff8ad..9b886e983 100644 --- a/tests/functional/lang/framework.sh +++ b/tests/functional/lang/framework.sh @@ -16,7 +16,7 @@ function diffAndAcceptInner() { fi # Diff so we get a nice message - if ! diff --unified "$got" "$expectedOrEmpty"; then + if ! 
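As a concrete illustration (a sketch, not part of this patch): the renamed
predicate returns true exactly for derivations marked with `__impure = true`,
which requires the `impure-derivations` experimental feature. The builder and
command below are only illustrative:

```nix
derivation {
  name = "impure-example";
  system = builtins.currentSystem;
  builder = "/bin/sh";            # illustrative builder
  args = [ "-c" "date > $out" ];
  __impure = true;                # this is what makes drv.type().isImpure() return true
}
```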
diff --color=always --unified "$expectedOrEmpty" "$got"; then echo "FAIL: evaluation result of $testName not as expected" badDiff=1 fi From 1aec7771d4560d91ef97c18d9b5cdb29dde132a7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 22:34:31 -0500 Subject: [PATCH 207/307] Add missing `#include` for `rlim_t` My local build in the shell was failing while CI was fine; not sure why that is but having the include here is definitely more correct. Per the POSIX spec, this is where it is supposed to be gotten https://pubs.opengroup.org/onlinepubs/009695399/basedefs/sys/resource.h.html --- src/libutil/current-process.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libutil/current-process.hh b/src/libutil/current-process.hh index 97ea70bf4..444c717d1 100644 --- a/src/libutil/current-process.hh +++ b/src/libutil/current-process.hh @@ -2,6 +2,7 @@ ///@file #include +#include #include "types.hh" From 365b831e6f290c733da6879dae871dada343a1eb Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 23:11:31 -0500 Subject: [PATCH 208/307] Minor formatting tweaks --- src/libexpr/parser-state.hh | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index a5b932ae8..0a9f076dc 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -1,19 +1,25 @@ #pragma once +///@file #include "eval.hh" namespace nix { -// using C a struct allows us to avoid having to define the special -// members that using string_view here would implicitly delete. -struct StringToken { - const char * p; - size_t l; - bool hasIndentation; - operator std::string_view() const { return {p, l}; } +/** + * @note Storing a C-style `char *` and `size_t` allows us to avoid + * having to define the special members that using string_view here + * would implicitly delete. + */ +struct StringToken +{ + const char * p; + size_t l; + bool hasIndentation; + operator std::string_view() const { return {p, l}; } }; -struct ParserLocation { +struct ParserLocation +{ int first_line, first_column; int last_line, last_column; @@ -36,7 +42,8 @@ struct ParserLocation { } }; -struct ParserState { +struct ParserState +{ SymbolTable & symbols; PosTable & positions; Expr * result; From 49b25ea85c9695a0668f65bff5839aa3feccd263 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 15 Jan 2024 08:17:42 +0100 Subject: [PATCH 209/307] refactor: Impure derivation type isPure -> isImpure To quote the method doc: Non-impure derivations can still behave impurely, to the degree permitted by the sandbox. Hence why this method isn't `isPure`: impure derivations are not the negation of pure derivations. Purity can not be ascertained except by rather heavy tools. 
--- src/libstore/build/derivation-goal.cc | 18 +++++++++--------- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/derivations.cc | 10 +++++----- src/libstore/derivations.hh | 13 +++++++++---- 4 files changed, 24 insertions(+), 19 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index f8728ed4a..00cbf4228 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -223,7 +223,7 @@ void DerivationGoal::haveDerivation() if (!drv->type().hasKnownOutputPaths()) experimentalFeatureSettings.require(Xp::CaDerivations); - if (!drv->type().isPure()) { + if (drv->type().isImpure()) { experimentalFeatureSettings.require(Xp::ImpureDerivations); for (auto & [outputName, output] : drv->outputs) { @@ -304,7 +304,7 @@ void DerivationGoal::outputsSubstitutionTried() { trace("all outputs substituted (maybe)"); - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) { done(BuildResult::TransientFailure, {}, @@ -397,9 +397,9 @@ void DerivationGoal::gaveUpOnSubstitution() for (const auto & [inputDrvPath, inputNode] : dynamic_cast(drv.get())->inputDrvs.map) { /* Ensure that pure, non-fixed-output derivations don't depend on impure derivations. */ - if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) { + if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() && !drv->type().isFixed()) { auto inputDrv = worker.evalStore.readDerivation(inputDrvPath); - if (!inputDrv.type().isPure()) + if (inputDrv.type().isImpure()) throw Error("pure derivation '%s' depends on impure derivation '%s'", worker.store.printStorePath(drvPath), worker.store.printStorePath(inputDrvPath)); @@ -439,7 +439,7 @@ void DerivationGoal::gaveUpOnSubstitution() void DerivationGoal::repairClosure() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); /* If we're repairing, we now know that our own outputs are valid. 
Now check whether the other paths in the outputs closure are @@ -1100,7 +1100,7 @@ void DerivationGoal::resolvedFinished() worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName); }(); - if (drv->type().isPure()) { + if (!drv->type().isImpure()) { auto newRealisation = realisation; newRealisation.id = DrvOutput { initialOutput->outputHash, outputName }; newRealisation.signatures.clear(); @@ -1395,7 +1395,7 @@ void DerivationGoal::flushLine() std::map> DerivationGoal::queryPartialDerivationOutputMap() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (!useDerivation || drv->type().hasKnownOutputPaths()) { std::map> res; for (auto & [name, output] : drv->outputs) @@ -1411,7 +1411,7 @@ std::map> DerivationGoal::queryPartialDeri OutputPathMap DerivationGoal::queryDerivationOutputMap() { - assert(drv->type().isPure()); + assert(!drv->type().isImpure()); if (!useDerivation || drv->type().hasKnownOutputPaths()) { OutputPathMap res; for (auto & [name, output] : drv->outputsAndOptPaths(worker.store)) @@ -1428,7 +1428,7 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap() std::pair DerivationGoal::checkPathValidity() { - if (!drv->type().isPure()) return { false, {} }; + if (drv->type().isImpure()) return { false, {} }; bool checkHash = buildMode == bmRepair; auto wantedOutputsLeft = std::visit(overloaded { diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index f85301950..2ba8be7d6 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2724,7 +2724,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs() .outPath = newInfo.path }; if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations) - && drv->type().isPure()) + && !drv->type().isImpure()) { signRealisation(thisRealisation); worker.store.registerDrvOutput(thisRealisation); diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 2fafcb8e7..393806652 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -110,17 +110,17 @@ bool DerivationType::isSandboxed() const } -bool DerivationType::isPure() const +bool DerivationType::isImpure() const { return std::visit(overloaded { [](const InputAddressed & ia) { - return true; + return false; }, [](const ContentAddressed & ca) { - return true; + return false; }, [](const Impure &) { - return false; + return true; }, }, raw); } @@ -840,7 +840,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut }; } - if (!type.isPure()) { + if (type.isImpure()) { std::map outputHashes; for (const auto & [outputName, _] : drv.outputs) outputHashes.insert_or_assign(outputName, impureOutputHash); diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh index 2a326b578..522523e45 100644 --- a/src/libstore/derivations.hh +++ b/src/libstore/derivations.hh @@ -253,12 +253,17 @@ struct DerivationType { bool isSandboxed() const; /** - * Whether the derivation is expected to produce the same result - * every time, and therefore it only needs to be built once. This is - * only false for derivations that have the attribute '__impure = + * Whether the derivation is expected to produce a different result + * every time, and therefore it needs to be rebuilt every time. This is + * only true for derivations that have the attribute '__impure = * true'. + * + * Non-impure derivations can still behave impurely, to the degree permitted + * by the sandbox. 
Hence why this method isn't `isPure`: impure derivations + * are not the negation of pure derivations. Purity can not be ascertained + * except by rather heavy tools. */ - bool isPure() const; + bool isImpure() const; /** * Does the derivation knows its own output paths? From 6a99c18c304cd199950bf32d9b9cb07c0276f0b7 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 15 Jan 2024 08:18:53 +0100 Subject: [PATCH 210/307] doc/glossary: Define impure derivation --- doc/manual/src/glossary.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 4507d8bf3..46cc5926c 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -156,6 +156,11 @@ builder can rely on external inputs such as the network or the system time) but the Nix model assumes it. +- [impure derivation]{#gloss-impure-derivation} + + [An experimental feature](#@docroot@/contributing/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, + so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them. + - [Nix database]{#gloss-nix-database} An SQlite database to track [reference]s between [store object]s. From 9ddd0f2af8fd95e1380027a70d0aa650ea2fd5e4 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 27 Jan 2024 11:18:03 +0100 Subject: [PATCH 211/307] Revert "StorePath: reject names starting with '.'" This reverts commit 24bda0c7b381e1a017023c6f7cb9661fae8560bd. --- src/libstore/path-regex.hh | 2 +- src/libstore/path.cc | 2 -- tests/unit/libstore-support/tests/path.cc | 8 ++------ tests/unit/libstore/path.cc | 1 - 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh index a44e6a2eb..4f8dc4c1f 100644 --- a/src/libstore/path-regex.hh +++ b/src/libstore/path-regex.hh @@ -3,6 +3,6 @@ namespace nix { -static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)"; +static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)"; } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index a15a78545..4361b3194 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -9,8 +9,6 @@ static void checkName(std::string_view path, std::string_view name) if (name.size() > StorePath::MaxPathLen) throw BadStorePath("store path '%s' has a name longer than %d characters", path, StorePath::MaxPathLen); - if (name[0] == '.') - throw BadStorePath("store path '%s' starts with illegal character '.'", path); // See nameRegexStr for the definition for (auto c : name) if (!((c >= '0' && c <= '9') diff --git a/tests/unit/libstore-support/tests/path.cc b/tests/unit/libstore-support/tests/path.cc index e5f169e94..bbe43bad4 100644 --- a/tests/unit/libstore-support/tests/path.cc +++ b/tests/unit/libstore-support/tests/path.cc @@ -46,12 +46,8 @@ Gen Arbitrary::arbitrary() pre += '-'; break; case 64: - // names aren't permitted to start with a period, - // so just fall through to the next case here - if (c != 0) { - pre += '.'; - break; - } + pre += '.'; + break; case 65: pre += '_'; break; diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index 30631b5fd..5485ab8bb 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -39,7 +39,6 @@ TEST_DONT_PARSE(double_star, "**") TEST_DONT_PARSE(star_first, "*,foo") TEST_DONT_PARSE(star_second, "foo,*") TEST_DONT_PARSE(bang, "foo!o") -TEST_DONT_PARSE(dotfile, ".gitignore") #undef 
TEST_DONT_PARSE From 44a0d044832050cc419d844e73b8e021b0643357 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Mon, 29 Jan 2024 05:56:19 +0100 Subject: [PATCH 212/307] add missing link (#9869) --- doc/manual/src/glossary.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md index 46cc5926c..13b2906f7 100644 --- a/doc/manual/src/glossary.md +++ b/doc/manual/src/glossary.md @@ -285,7 +285,7 @@ - [package attribute set]{#package-attribute-set} - An [attribute set] containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as + An [attribute set](@docroot@/language/values.md#attribute-set) containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as - attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output), - attributes that declare something about how the package is supposed to be installed or used, - other metadata or arbitrary attributes. @@ -310,4 +310,4 @@ See the contribution guide on the [purpose and lifecycle of experimental feaures](@docroot@/contributing/experimental-features.md). -[Nix language]: ./language/index.md \ No newline at end of file +[Nix language]: ./language/index.md From f6719032cf7d867fe85da5916793d263670dbd8b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 15:22:44 +0100 Subject: [PATCH 213/307] Shut up a gcc warning --- tests/unit/libstore/serve-protocol.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/libstore/serve-protocol.cc b/tests/unit/libstore/serve-protocol.cc index 597c0b570..b2fd0fb82 100644 --- a/tests/unit/libstore/serve-protocol.cc +++ b/tests/unit/libstore/serve-protocol.cc @@ -412,7 +412,7 @@ TEST_F(ServeProtoTest, handshake_log) toClient.create(); toServer.create(); - ServeProto::Version clientResult, serverResult; + ServeProto::Version clientResult; auto thread = std::thread([&]() { FdSink out { toServer.writeSide.get() }; @@ -425,7 +425,7 @@ TEST_F(ServeProtoTest, handshake_log) { FdSink out { toClient.writeSide.get() }; FdSource in { toServer.readSide.get() }; - serverResult = ServeProto::BasicServerConnection::handshake( + ServeProto::BasicServerConnection::handshake( out, in, defaultVersion); }; From baff34d728844870e62deea7847bbe1e97dfe157 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 16:30:29 +0100 Subject: [PATCH 214/307] Don't include store docs in every manpage --- doc/manual/generate-manpage.nix | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/manual/generate-manpage.nix b/doc/manual/generate-manpage.nix index ae31b2a1f..ba5667a43 100644 --- a/doc/manual/generate-manpage.nix +++ b/doc/manual/generate-manpage.nix @@ -93,9 +93,6 @@ let maybeProse = # FIXME: this is a horrible hack to keep `nix help-stores` working. - # the correct answer to this is to remove that command and replace it - # by statically generated manpages or the output of something like `nix - # store info `. let help-stores = '' ${index} @@ -121,7 +118,7 @@ let }; in optionalString (details ? 
doc) ( - if match "@store-types@" details.doc != [ ] + if match ".*@store-types@.*" details.doc != null then help-stores else details.doc ); From 1ef6bbb16d61067bcfdd30f1c8910afe498cc164 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 16:50:55 +0100 Subject: [PATCH 215/307] Update release-process.md --- maintainers/release-process.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/maintainers/release-process.md b/maintainers/release-process.md index db8b064a5..da6886ea9 100644 --- a/maintainers/release-process.md +++ b/maintainers/release-process.md @@ -27,8 +27,9 @@ release: * Compile the release notes by running ```console + $ export VERSION=X.YY $ git checkout -b release-notes - $ VERSION=X.YY ./maintainers/release-notes + $ ./maintainers/release-notes ``` where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~. From 007040080977f1a06786fd4cfa7b4b95b18c5713 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:10:42 +0100 Subject: [PATCH 216/307] maintainers/release-notes: Include changelog-d Otherwise it quietly generates an empty rl-.md --- doc/manual/src/contributing/hacking.md | 1 - flake.nix | 3 +-- maintainers/release-notes | 6 ++---- package.nix | 12 ------------ 4 files changed, 3 insertions(+), 19 deletions(-) diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md index 9a7623dc9..9e2470859 100644 --- a/doc/manual/src/contributing/hacking.md +++ b/doc/manual/src/contributing/hacking.md @@ -304,7 +304,6 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master ### Build process Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`. -Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly. ## Branches diff --git a/flake.nix b/flake.nix index a48e36a2f..0bc70768e 100644 --- a/flake.nix +++ b/flake.nix @@ -190,7 +190,6 @@ boehmgc = final.boehmgc-nix; libgit2 = final.libgit2-nix; busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell; - changelog-d = final.changelog-d-nix; } // { # this is a proper separate downstream package, but put # here also for back compat reasons. @@ -363,7 +362,7 @@ }); packages = forAllSystems (system: rec { - inherit (nixpkgsFor.${system}.native) nix; + inherit (nixpkgsFor.${system}.native) nix changelog-d-nix; default = nix; } // (lib.optionalAttrs (builtins.elem system linux64BitSystems) { nix-static = nixpkgsFor.${system}.static.nix; diff --git a/maintainers/release-notes b/maintainers/release-notes index 34cd85a56..2d84485c1 100755 --- a/maintainers/release-notes +++ b/maintainers/release-notes @@ -1,7 +1,5 @@ -#!/usr/bin/env nix-shell -#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small -# ^^^^^^^ -# Only used for bash. shell.nix goes to the flake. +#!/usr/bin/env nix +#!nix shell .#changelog-d-nix --command bash # --- CONFIGURATION --- diff --git a/package.nix b/package.nix index 192df90ab..d1d14d10e 100644 --- a/package.nix +++ b/package.nix @@ -10,7 +10,6 @@ , boost , brotli , bzip2 -, changelog-d , curl , editline , readline @@ -88,11 +87,6 @@ # - readline , readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline" -# Whether to compile `rl-next.md`, the release notes for the next -# not-yet-released version of Nix in the manul, from the individual -# change log entries in the directory. -, buildUnreleasedNotes ? 
false - # Whether to build the internal API docs, can be done separately from # everything else. , enableInternalAPIDocs ? false @@ -218,9 +212,6 @@ in { ] ++ lib.optionals (doInstallCheck || enableManual) [ jq # Also for custom mdBook preprocessor. ] ++ lib.optional stdenv.hostPlatform.isLinux util-linux - # Official releases don't have rl-next, so we don't need to compile a - # changelog - ++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d ++ lib.optional enableInternalAPIDocs doxygen ; @@ -378,9 +369,6 @@ in { # Nix proper (which they depend on). (installUnitTests -> doBuild) (doCheck -> doBuild) - # We have to build the manual to build unreleased notes, as those - # are part of the manual - (buildUnreleasedNotes -> enableManual) # The build process for the manual currently requires extracting # data from the Nix executable we are trying to document. (enableManual -> doBuild) From 3089bce41b020fafd3e31034cf9f5dcf33a0b65c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:14:17 +0100 Subject: [PATCH 217/307] release notes: 2.20.0 --- ...llowed-uris-can-now-match-whole-schemes.md | 7 - doc/manual/rl-next/cgroup-stats.md | 8 - doc/manual/rl-next/drv-string-parse-hang.md | 6 - doc/manual/rl-next/empty-search-regex.md | 8 - doc/manual/rl-next/env-size-reduction.md | 7 - doc/manual/rl-next/eval-system.md | 12 - doc/manual/rl-next/git-fetcher.md | 18 - doc/manual/rl-next/hash-format-nix32.md | 23 -- doc/manual/rl-next/ifd-eval-store.md | 8 - doc/manual/rl-next/mounted-ssh-store.md | 8 - doc/manual/rl-next/nix-config-show.md | 7 - doc/manual/rl-next/nix-env-json-drv-path.md | 6 - .../rl-next/nix-flake-check-logs-actions.md | 33 -- doc/manual/rl-next/nix-hash-convert.md | 47 --- doc/manual/rl-next/nix-profile-names.md | 8 - doc/manual/rl-next/nix-store-add.md | 7 - .../rl-next/print-value-in-coercion-error.md | 24 -- .../rl-next/print-value-in-type-error.md | 23 -- .../rl-next/source-positions-in-errors.md | 42 --- .../rl-next/stack-overflow-segfaults.md | 32 -- doc/manual/rl-next/with-error-reporting.md | 31 -- doc/manual/src/SUMMARY.md.in | 1 + doc/manual/src/release-notes/rl-2.20.md | 334 ++++++++++++++++++ 23 files changed, 335 insertions(+), 365 deletions(-) delete mode 100644 doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md delete mode 100644 doc/manual/rl-next/cgroup-stats.md delete mode 100644 doc/manual/rl-next/drv-string-parse-hang.md delete mode 100644 doc/manual/rl-next/empty-search-regex.md delete mode 100644 doc/manual/rl-next/env-size-reduction.md delete mode 100644 doc/manual/rl-next/eval-system.md delete mode 100644 doc/manual/rl-next/git-fetcher.md delete mode 100644 doc/manual/rl-next/hash-format-nix32.md delete mode 100644 doc/manual/rl-next/ifd-eval-store.md delete mode 100644 doc/manual/rl-next/mounted-ssh-store.md delete mode 100644 doc/manual/rl-next/nix-config-show.md delete mode 100644 doc/manual/rl-next/nix-env-json-drv-path.md delete mode 100644 doc/manual/rl-next/nix-flake-check-logs-actions.md delete mode 100644 doc/manual/rl-next/nix-hash-convert.md delete mode 100644 doc/manual/rl-next/nix-profile-names.md delete mode 100644 doc/manual/rl-next/nix-store-add.md delete mode 100644 doc/manual/rl-next/print-value-in-coercion-error.md delete mode 100644 doc/manual/rl-next/print-value-in-type-error.md delete mode 100644 doc/manual/rl-next/source-positions-in-errors.md delete mode 100644 doc/manual/rl-next/stack-overflow-segfaults.md delete mode 100644 doc/manual/rl-next/with-error-reporting.md create mode 100644 
doc/manual/src/release-notes/rl-2.20.md diff --git a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md b/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md deleted file mode 100644 index 3cf75a612..000000000 --- a/doc/manual/rl-next/allowed-uris-can-now-match-whole-schemes.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Option `allowed-uris` can now match whole schemes in URIs without slashes -prs: 9547 ---- - -If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. -Previously this only worked for schemes whose URIs used the `://` syntax. diff --git a/doc/manual/rl-next/cgroup-stats.md b/doc/manual/rl-next/cgroup-stats.md deleted file mode 100644 index 00853a0f8..000000000 --- a/doc/manual/rl-next/cgroup-stats.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: Include cgroup stats when building through the daemon -prs: 9598 ---- - -Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, -if both sides of the connection are this version of Nix or newer. - diff --git a/doc/manual/rl-next/drv-string-parse-hang.md b/doc/manual/rl-next/drv-string-parse-hang.md deleted file mode 100644 index 1e041d3e9..000000000 --- a/doc/manual/rl-next/drv-string-parse-hang.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -synopsis: Fix handling of truncated `.drv` files. -prs: 9673 ---- - -Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. diff --git a/doc/manual/rl-next/empty-search-regex.md b/doc/manual/rl-next/empty-search-regex.md deleted file mode 100644 index b193f9456..000000000 --- a/doc/manual/rl-next/empty-search-regex.md +++ /dev/null @@ -1,8 +0,0 @@ -synopsis: Disallow empty search regex in `nix search` -prs: #9481 -description: { - -[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. - -} - diff --git a/doc/manual/rl-next/env-size-reduction.md b/doc/manual/rl-next/env-size-reduction.md deleted file mode 100644 index 40a58bc28..000000000 --- a/doc/manual/rl-next/env-size-reduction.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Reduce eval memory usage and wall time -prs: 9658 ---- - -Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. -This reduces memory usage during eval by around 2% and wall time by around 3%. diff --git a/doc/manual/rl-next/eval-system.md b/doc/manual/rl-next/eval-system.md deleted file mode 100644 index a4696a56c..000000000 --- a/doc/manual/rl-next/eval-system.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -synopsis: Add new `eval-system` setting -prs: 4093 ---- - -Add a new `eval-system` option. -Unlike `system`, it just overrides the value of `builtins.currentSystem`. -This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. -In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. - -`eval-system` only takes effect if it is non-empty. -If empty (the default) `system` is used as before, so there is no breakage. 
diff --git a/doc/manual/rl-next/git-fetcher.md b/doc/manual/rl-next/git-fetcher.md deleted file mode 100644 index 54c0d216d..000000000 --- a/doc/manual/rl-next/git-fetcher.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -synopsis: "Nix now uses `libgit2` for Git fetching" -prs: - - 9240 - - 9241 - - 9258 - - 9480 -issues: - - 5313 ---- - -Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. -The existing implementation based on the Git CLI had issues regarding reproducibility and performance. - -Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. - -Known issues: -- The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. diff --git a/doc/manual/rl-next/hash-format-nix32.md b/doc/manual/rl-next/hash-format-nix32.md deleted file mode 100644 index 73e6fbb24..000000000 --- a/doc/manual/rl-next/hash-format-nix32.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -synopsis: Rename hash format `base32` to `nix32` -prs: 9452 ---- - -Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for -[Base32](https://en.wikipedia.org/wiki/Base32). - -## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - -For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` -parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value -remains as a deprecated alias for `"base32"`. Please convert your code from: - -```nix -builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} -``` - -to - -```nix -builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} -``` \ No newline at end of file diff --git a/doc/manual/rl-next/ifd-eval-store.md b/doc/manual/rl-next/ifd-eval-store.md deleted file mode 100644 index 835e7e7a3..000000000 --- a/doc/manual/rl-next/ifd-eval-store.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: import-from-derivation builds the derivation in the build store -prs: 9661 ---- - -When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. - -Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. diff --git a/doc/manual/rl-next/mounted-ssh-store.md b/doc/manual/rl-next/mounted-ssh-store.md deleted file mode 100644 index 6df44dbb6..000000000 --- a/doc/manual/rl-next/mounted-ssh-store.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: Mounted SSH Store -issues: 7890 -prs: 7912 ---- - -Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). -This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. 
diff --git a/doc/manual/rl-next/nix-config-show.md b/doc/manual/rl-next/nix-config-show.md deleted file mode 100644 index 26b961b76..000000000 --- a/doc/manual/rl-next/nix-config-show.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Rename to `nix config show` -issues: 7672 -prs: 9477 ---- - -`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. diff --git a/doc/manual/rl-next/nix-env-json-drv-path.md b/doc/manual/rl-next/nix-env-json-drv-path.md deleted file mode 100644 index 734cefd1b..000000000 --- a/doc/manual/rl-next/nix-env-json-drv-path.md +++ /dev/null @@ -1,6 +0,0 @@ ---- -synopsis: Fix `nix-env --query --drv-path --json` -prs: 9257 ---- - -Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. diff --git a/doc/manual/rl-next/nix-flake-check-logs-actions.md b/doc/manual/rl-next/nix-flake-check-logs-actions.md deleted file mode 100644 index 53a7b35eb..000000000 --- a/doc/manual/rl-next/nix-flake-check-logs-actions.md +++ /dev/null @@ -1,33 +0,0 @@ ---- -synopsis: Some stack overflow segfaults are fixed -issues: 8882 -prs: 8893 ---- - -`nix flake check` now logs the checks it runs and the derivations it evaluates: - -``` -$ nix flake check -v -evaluating flake... -checking flake output 'checks'... -checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... -derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... -derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... -derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... -derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv -checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... -derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv -checking flake output 'packages'... -checking derivation 'packages.aarch64-darwin.default'... -derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv -checking flake output 'apps'... -checking flake output 'devShells'... -checking derivation 'devShells.aarch64-darwin.default'... -derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv -running 5 flake checks... -warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux -Use '--all-systems' to check all. -``` diff --git a/doc/manual/rl-next/nix-hash-convert.md b/doc/manual/rl-next/nix-hash-convert.md deleted file mode 100644 index 69db9508a..000000000 --- a/doc/manual/rl-next/nix-hash-convert.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -synopsis: Add `nix hash convert` -prs: 9452 ---- - -New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track -to stabilization! Examples: - -- Convert the hash to `nix32`. - - ```bash - $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" - vw46m23bizj4n8afrc0fj19wrp7mj3c0 - ``` - `nix32` is a base32 encoding with a nix-specific character set. - Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input - hash. 
-- Convert the hash to the `sri` format that includes an algorithm specification: - ```bash - nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - or with an explicit `-to` format: - ```bash - nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` -- Assert the input format of the hash: - ```bash - nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" - error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" - sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= - ``` - -The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. - -## Related Deprecations - -The following commands are still available but will emit a deprecation warning. Please convert your code to -`nix hash convert`: - -- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. -- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. -- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. -- `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` - or even just `nix hash convert $hash1 $hash2` instead. diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md deleted file mode 100644 index b7ad4b5d7..000000000 --- a/doc/manual/rl-next/nix-profile-names.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -synopsis: "`nix profile` now allows referring to elements by human-readable name" -prs: 8678 ---- - -[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. - -**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. diff --git a/doc/manual/rl-next/nix-store-add.md b/doc/manual/rl-next/nix-store-add.md deleted file mode 100644 index 5ef2913b4..000000000 --- a/doc/manual/rl-next/nix-store-add.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -synopsis: Give `nix store add` a `--hash-algo` flag -prs: 9809 ---- - -Adds a missing feature that was present in the old CLI, and matches our -plans to have similar flags for `nix hash convert` and `nix hash path`. diff --git a/doc/manual/rl-next/print-value-in-coercion-error.md b/doc/manual/rl-next/print-value-in-coercion-error.md deleted file mode 100644 index 046e4e3cf..000000000 --- a/doc/manual/rl-next/print-value-in-coercion-error.md +++ /dev/null @@ -1,24 +0,0 @@ ---- -synopsis: Coercion errors include the failing value -issues: #561 -prs: #9754 ---- - -The `error: cannot coerce a to a string` message now includes the value -which caused the error. 
- -Before: - -``` - error: cannot coerce a set to a string -``` - -After: - -``` - error: cannot coerce a set to a string: { aesSupport = «thunk»; - avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; - canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion - = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 - attributes elided»} -``` diff --git a/doc/manual/rl-next/print-value-in-type-error.md b/doc/manual/rl-next/print-value-in-type-error.md deleted file mode 100644 index aaae22756..000000000 --- a/doc/manual/rl-next/print-value-in-type-error.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -synopsis: Type errors include the failing value -issues: #561 -prs: #9753 ---- - -In errors like `value is an integer while a list was expected`, the message now -includes the failing value. - -Before: - -``` - error: value is a set while a string was expected -``` - -After: - -``` - error: expected a string but found a set: { ghc810 = «thunk»; - ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; - ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; - ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} -``` diff --git a/doc/manual/rl-next/source-positions-in-errors.md b/doc/manual/rl-next/source-positions-in-errors.md deleted file mode 100644 index b1a33d83b..000000000 --- a/doc/manual/rl-next/source-positions-in-errors.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -synopsis: Source locations are printed more consistently in errors -issues: 561 -prs: 9555 ---- - -Source location information is now included in error messages more -consistently. Given this code: - -```nix -let - attr = {foo = "bar";}; - key = {}; -in - attr.${key} -``` - -Previously, Nix would show this unhelpful message when attempting to evaluate -it: - -``` -error: - … while evaluating an attribute name - - error: value is a set while a string was expected -``` - -Now, the error message displays where the problematic value was found: - -``` -error: - … while evaluating an attribute name - - at bad.nix:4:11: - - 3| key = {}; - 4| in attr.${key} - | ^ - 5| - - error: expected a string but found a set -``` diff --git a/doc/manual/rl-next/stack-overflow-segfaults.md b/doc/manual/rl-next/stack-overflow-segfaults.md deleted file mode 100644 index 3d9753248..000000000 --- a/doc/manual/rl-next/stack-overflow-segfaults.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -synopsis: Some stack overflow segfaults are fixed -issues: 9616 -prs: 9617 ---- - -The number of nested function calls has been restricted, to detect and report -infinite function call recursions. The default maximum call depth is 10,000 and -can be set with [the `max-call-depth` -option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). 
- -This fixes segfaults or the following unhelpful error message in many cases: - - error: stack overflow (possible infinite recursion) - -Before: - -``` -$ nix-instantiate --eval --expr '(x: x x) (x: x x)' -Segmentation fault: 11 -``` - -After: - -``` -$ nix-instantiate --eval --expr '(x: x x) (x: x x)' -error: stack overflow - - at «string»:1:14: - 1| (x: x x) (x: x x) - | ^ -``` diff --git a/doc/manual/rl-next/with-error-reporting.md b/doc/manual/rl-next/with-error-reporting.md deleted file mode 100644 index d9e07df52..000000000 --- a/doc/manual/rl-next/with-error-reporting.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -synopsis: Better error reporting for `with` expressions -prs: 9658 ---- - -`with` expressions using non-attrset values to resolve variables are now reported with proper positions. - -Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: - -``` -nix-repl> with 1; a -error: - … - - at «none»:0: (source not available) - - error: value is an integer while a set was expected -``` - -Now position information is preserved and reported as with most other errors: - -``` -nix-repl> with 1; a -error: - … while evaluating the first subexpression of a with expression - at «string»:1:1: - 1| with 1; a - | ^ - - error: expected a set but found an integer -``` diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 10fe51fc9..695d63dfc 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -120,6 +120,7 @@ - [C++ style guide](contributing/cxx.md) - [Release Notes](release-notes/index.md) {{#include ./SUMMARY-rl-next.md}} + - [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md) - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md) - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md) - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md new file mode 100644 index 000000000..8c9267486 --- /dev/null +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -0,0 +1,334 @@ +# Release 2.20.0 (2024-01-29) + +- Option `allowed-uris` can now match whole schemes in URIs without slashes [#9547](https://github.com/NixOS/nix/pull/9547) + + If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. + Previously this only worked for schemes whose URIs used the `://` syntax. + +- Make `nix store gc` use the auto-GC policy [#7851](https://github.com/NixOS/nix/pull/7851) + + + +- Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598) + + Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, + if both sides of the connection are this version of Nix or newer. + +- Fix handling of truncated `.drv` files. [#9673](https://github.com/NixOS/nix/pull/9673) + + Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. + +- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481) + + [`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. 
+ +- Reduce eval memory usage and wall time [#9658](https://github.com/NixOS/nix/pull/9658) + + Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. + This reduces memory usage during eval by around 2% and wall time by around 3%. + +- Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093) + + Add a new `eval-system` option. + Unlike `system`, it just overrides the value of `builtins.currentSystem`. + This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. + In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. + + `eval-system` only takes effect if it is non-empty. + If empty (the default) `system` is used as before, so there is no breakage. + +- Nix now uses `libgit2` for Git fetching [#5313](https://github.com/NixOS/nix/issues/5313) [#9240](https://github.com/NixOS/nix/pull/9240) [#9241](https://github.com/NixOS/nix/pull/9241) [#9258](https://github.com/NixOS/nix/pull/9258) [#9480](https://github.com/NixOS/nix/pull/9480) + + Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. + The existing implementation based on the Git CLI had issues regarding reproducibility and performance. + + Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. + + Known issues: + - The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. + +- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) + + Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + + ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + + For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` + parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value + remains as a deprecated alias for `"base32"`. Please convert your code from: + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} + ``` + + to + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} + ``` + +- import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) + + When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. + + Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. + +- Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912) + + Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). + This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. 
+ +- Rename to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) + + `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. + +- Fix `nix-env --query --drv-path --json` [#9257](https://github.com/NixOS/nix/pull/9257) + + Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. + +- Some stack overflow segfaults are fixed [#8882](https://github.com/NixOS/nix/issues/8882) [#8893](https://github.com/NixOS/nix/pull/8893) + + `nix flake check` now logs the checks it runs and the derivations it evaluates: + + ``` + $ nix flake check -v + evaluating flake... + checking flake output 'checks'... + checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... + derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... + derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... + derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... + derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv + checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... + derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv + checking flake output 'packages'... + checking derivation 'packages.aarch64-darwin.default'... + derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv + checking flake output 'apps'... + checking flake output 'devShells'... + checking derivation 'devShells.aarch64-darwin.default'... + derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv + running 5 flake checks... + warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux + Use '--all-systems' to check all. + ``` + +- Add `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) + + New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track + to stabilization! Examples: + + - Convert the hash to `nix32`. + + ```bash + $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + `nix32` is a base32 encoding with a nix-specific character set. + Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input + hash. 
+ - Convert the hash to the `sri` format that includes an algorithm specification: + ```bash + nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + or with an explicit `-to` format: + ```bash + nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + - Assert the input format of the hash: + ```bash + nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + + The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. + + ## Related Deprecations + + The following commands are still available but will emit a deprecation warning. Please convert your code to + `nix hash convert`: + + - `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. + - `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. + - `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. + - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` + or even just `nix hash convert $hash1 $hash2` instead. + +- `nix profile` now allows referring to elements by human-readable name [#8678](https://github.com/NixOS/nix/pull/8678) + + [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. + + **Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. + +- Rename hash format `base32` to `nix32` [#8678](https://github.com/NixOS/nix/pull/8678) + + Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + + ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` + + For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` + parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value + remains as a deprecated alias for `"base32"`. Please convert your code from: + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} + ``` + + to + + ```nix + builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} + ``` + +- Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809) + + Adds a missing feature that was present in the old CLI, and matches our + plans to have similar flags for `nix hash convert` and `nix hash path`. 
+ +- Coercion errors include the failing value + + The `error: cannot coerce a to a string` message now includes the value + which caused the error. + + Before: + + ``` + error: cannot coerce a set to a string + ``` + + After: + + ``` + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} + ``` + +- Type errors include the failing value + + In errors like `value is an integer while a list was expected`, the message now + includes the failing value. + + Before: + + ``` + error: value is a set while a string was expected + ``` + + After: + + ``` + error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} + ``` + +- Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555) + + Source location information is now included in error messages more + consistently. Given this code: + + ```nix + let + attr = {foo = "bar";}; + key = {}; + in + attr.${key} + ``` + + Previously, Nix would show this unhelpful message when attempting to evaluate + it: + + ``` + error: + … while evaluating an attribute name + + error: value is a set while a string was expected + ``` + + Now, the error message displays where the problematic value was found: + + ``` + error: + … while evaluating an attribute name + + at bad.nix:4:11: + + 3| key = {}; + 4| in attr.${key} + | ^ + 5| + + error: expected a string but found a set + ``` + +- Some stack overflow segfaults are fixed [#9616](https://github.com/NixOS/nix/issues/9616) [#9617](https://github.com/NixOS/nix/pull/9617) + + The number of nested function calls has been restricted, to detect and report + infinite function call recursions. The default maximum call depth is 10,000 and + can be set with [the `max-call-depth` + option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). + + This fixes segfaults or the following unhelpful error message in many cases: + + error: stack overflow (possible infinite recursion) + + Before: + + ``` + $ nix-instantiate --eval --expr '(x: x x) (x: x x)' + Segmentation fault: 11 + ``` + + After: + + ``` + $ nix-instantiate --eval --expr '(x: x x) (x: x x)' + error: stack overflow + + at «string»:1:14: + 1| (x: x x) (x: x x) + | ^ + ``` + +- Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658) + + `with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
+ + Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: + + ``` + nix-repl> with 1; a + error: + … + + at «none»:0: (source not available) + + error: value is an integer while a set was expected + ``` + + Now position information is preserved and reported as with most other errors: + + ``` + nix-repl> with 1; a + error: + … while evaluating the first subexpression of a with expression + at «string»:1:1: + 1| with 1; a + | ^ + + error: expected a set but found an integer + ``` + From 6f86f87043971eb9414a6d63013a1e06af397f3a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:50:25 +0100 Subject: [PATCH 218/307] Fix formatting of hash args --- src/libutil/args.cc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libutil/args.cc b/src/libutil/args.cc index 5187e7396..8996cbe5b 100644 --- a/src/libutil/args.cc +++ b/src/libutil/args.cc @@ -557,7 +557,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF assert(*hf == nix::HashFormat::SRI); return Flag{ .longName = std::move(longName), - .description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'", + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.", .labels = {"hash-format"}, .handler = {[hf](std::string s) { *hf = parseHashFormat(s); @@ -569,7 +569,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional * ohf) { return Flag{ .longName = std::move(longName), - .description = "hash format ('base16', 'nix32', 'base64', 'sri').", + .description = "Hash format (`base16`, `nix32`, `base64`, `sri`).", .labels = {"hash-format"}, .handler = {[ohf](std::string s) { *ohf = std::optional{parseHashFormat(s)}; @@ -589,7 +589,7 @@ Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * h { return Flag{ .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')", + .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).", .labels = {"hash-algo"}, .handler = {[ha](std::string s) { *ha = parseHashAlgo(s); @@ -602,7 +602,7 @@ Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional< { return Flag{ .longName = std::move(longName), - .description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.", + .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.", .labels = {"hash-algo"}, .handler = {[oha](std::string s) { *oha = std::optional{parseHashAlgo(s)}; From 9465c8cca133a149c003e9ef4d7e97d513716155 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:51:01 +0100 Subject: [PATCH 219/307] nix hash convert: Add manpage --- src/nix/hash-convert.md | 40 ++++++++++++++++++++++++++++++++++++++++ src/nix/hash.cc | 15 +++++++-------- 2 files changed, 47 insertions(+), 8 deletions(-) create mode 100644 src/nix/hash-convert.md diff --git a/src/nix/hash-convert.md b/src/nix/hash-convert.md new file mode 100644 index 000000000..dfb215443 --- /dev/null +++ b/src/nix/hash-convert.md @@ -0,0 +1,40 @@ +R""( + +# Examples + +* Convert a hash to `nix32` (a base-32 encoding with a Nix-specific character set). 
+ + ```console + $ nix hash convert --hash-algo sha1 --to nix32 800d59cfcd3c05e900cb4e214be48f6b886a08df + vw46m23bizj4n8afrc0fj19wrp7mj3c0 + ``` + +* Convert a hash to [the `sri` format](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) that includes an algorithm specification: + + ```console + # nix hash convert --hash-algo sha1 800d59cfcd3c05e900cb4e214be48f6b886a08df + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + + or with an explicit `--to` format: + + ```console + # nix hash convert --hash-algo sha1 --to sri 800d59cfcd3c05e900cb4e214be48f6b886a08df + sha1-gA1Zz808BekAy04hS+SPa4hqCN8= + ``` + +* Assert the input format of the hash: + + ```console + # nix hash convert --hash-algo sha256 --from nix32 ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' + + # nix hash convert --hash-algo sha256 --from nix32 1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s + sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= + ``` + +# Description + +`nix hash convert` converts hashes from one encoding to another. + +)"" diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 8ab89e433..4837891c6 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -150,15 +150,14 @@ struct CmdHashConvert : Command std::string description() override { - std::string descr( "convert between different hash formats. Choose from: "); - auto iter = hashFormats.begin(); - assert(iter != hashFormats.end()); - descr += *iter++; - while (iter != hashFormats.end()) { - descr += ", " + *iter++; - } + return "convert between hash formats"; + } - return descr; + std::string doc() override + { + return + #include "hash-convert.md" + ; } Category category() override { return catUtility; } From 652f334f879153b1357f92504999d9b0fb951a2b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 17:51:21 +0100 Subject: [PATCH 220/307] Edit release notes --- doc/manual/src/release-notes/rl-2.20.md | 223 +++--------------------- 1 file changed, 29 insertions(+), 194 deletions(-) diff --git a/doc/manual/src/release-notes/rl-2.20.md b/doc/manual/src/release-notes/rl-2.20.md index 8c9267486..26869e90a 100644 --- a/doc/manual/src/release-notes/rl-2.20.md +++ b/doc/manual/src/release-notes/rl-2.20.md @@ -5,190 +5,60 @@ If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed. Previously this only worked for schemes whose URIs used the `://` syntax. -- Make `nix store gc` use the auto-GC policy [#7851](https://github.com/NixOS/nix/pull/7851) - - - - Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598) - Nix now also reports cgroup statistics when building through the nix daemon and when doing remote builds using ssh-ng, - if both sides of the connection are this version of Nix or newer. - -- Fix handling of truncated `.drv` files. [#9673](https://github.com/NixOS/nix/pull/9673) - - Previously a `.drv` that was truncated in the middle of a string would case nix to enter an infinite loop, eventually exhausting all memory and crashing. + Nix now also reports cgroup statistics when building through the Nix daemon and when doing remote builds using `ssh-ng`, + if both sides of the connection are using Nix 2.20 or newer. 
- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481) [`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`. -- Reduce eval memory usage and wall time [#9658](https://github.com/NixOS/nix/pull/9658) - - Reduce the size of the `Env` struct used in the evaluator by a pointer, or 8 bytes on most modern machines. - This reduces memory usage during eval by around 2% and wall time by around 3%. - - Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093) Add a new `eval-system` option. Unlike `system`, it just overrides the value of `builtins.currentSystem`. This is more useful than overriding `system`, because you can build these derivations on remote builders which can work on the given system. - In contrast, `system` also effects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. + In contrast, `system` also affects scheduling which will cause Nix to build those derivations locally even if that doesn't make sense. `eval-system` only takes effect if it is non-empty. If empty (the default) `system` is used as before, so there is no breakage. -- Nix now uses `libgit2` for Git fetching [#5313](https://github.com/NixOS/nix/issues/5313) [#9240](https://github.com/NixOS/nix/pull/9240) [#9241](https://github.com/NixOS/nix/pull/9241) [#9258](https://github.com/NixOS/nix/pull/9258) [#9480](https://github.com/NixOS/nix/pull/9480) - - Nix has built-in support for fetching sources from Git, during evaluation and locking; outside the sandbox. - The existing implementation based on the Git CLI had issues regarding reproducibility and performance. - - Most of the original `fetchGit` behavior has been implemented using the `libgit2` library, which gives the fetcher fine-grained control. - - Known issues: - - The `export-subst` behavior has not been reimplemented. [Partial](https://github.com/NixOS/nix/pull/9391#issuecomment-1872503447) support for this Git feature is feasible, but it did not make the release window. - -- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) - - Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for - [Base32](https://en.wikipedia.org/wiki/Base32). - - ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - - For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` - parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value - remains as a deprecated alias for `"base32"`. Please convert your code from: - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} - ``` - - to - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} - ``` - -- import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) +- Import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661) When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option. - Because the resulting Nix expression must be copied back to the eval store in order to be imported, this requires the eval store to trust the build store's signatures. 
+ Because the resulting Nix expression must be copied back to the evaluation store in order to be imported, this requires the evaluation store to trust the build store's signatures. - Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912) Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md). This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem. -- Rename to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) +- Rename `nix show-config` to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477) - `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface. + `nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command line interface. -- Fix `nix-env --query --drv-path --json` [#9257](https://github.com/NixOS/nix/pull/9257) +- Add command `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) - Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set. - -- Some stack overflow segfaults are fixed [#8882](https://github.com/NixOS/nix/issues/8882) [#8893](https://github.com/NixOS/nix/pull/8893) - - `nix flake check` now logs the checks it runs and the derivations it evaluates: - - ``` - $ nix flake check -v - evaluating flake... - checking flake output 'checks'... - checking derivation 'checks.aarch64-darwin.ghciwatch-tests'... - derivation evaluated to /nix/store/nh7dlvsrhds4cxl91mvgj4h5cbq6skmq-ghciwatch-test-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-clippy'... - derivation evaluated to /nix/store/9cb5a6wmp6kf6hidqw9wphidvb8bshym-ghciwatch-clippy-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-doc'... - derivation evaluated to /nix/store/8brdd3jbawfszpbs7vdpsrhy80as1il8-ghciwatch-doc-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-fmt'... - derivation evaluated to /nix/store/wjhs0l1njl5pyji53xlmfjrlya0wmz8p-ghciwatch-fmt-0.3.0.drv - checking derivation 'checks.aarch64-darwin.ghciwatch-audit'... - derivation evaluated to /nix/store/z0mps8dyj2ds7c0fn0819y5h5611033z-ghciwatch-audit-0.3.0.drv - checking flake output 'packages'... - checking derivation 'packages.aarch64-darwin.default'... - derivation evaluated to /nix/store/41abbdyglw5x9vcsvd89xan3ydjf8d7r-ghciwatch-0.3.0.drv - checking flake output 'apps'... - checking flake output 'devShells'... - checking derivation 'devShells.aarch64-darwin.default'... - derivation evaluated to /nix/store/bc935gz7dylzmcpdb5cczr8gngv8pmdb-nix-shell.drv - running 5 flake checks... - warning: The check omitted these incompatible systems: aarch64-linux, x86_64-darwin, x86_64-linux - Use '--all-systems' to check all. - ``` - -- Add `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452) - - New [`nix hash convert`](https://github.com/NixOS/nix/issues/8876) sub command with a fast track - to stabilization! Examples: - - - Convert the hash to `nix32`. 
- - ```bash - $ nix hash convert --hash-algo "sha1" --to nix32 "800d59cfcd3c05e900cb4e214be48f6b886a08df" - vw46m23bizj4n8afrc0fj19wrp7mj3c0 - ``` - `nix32` is a base32 encoding with a nix-specific character set. - Explicitly specify the hashing algorithm (optional with SRI hashes) but detect hash format by the length of the input - hash. - - Convert the hash to the `sri` format that includes an algorithm specification: - ```bash - nix hash convert --hash-algo "sha1" "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - or with an explicit `-to` format: - ```bash - nix hash convert --hash-algo "sha1" --to sri "800d59cfcd3c05e900cb4e214be48f6b886a08df" - sha1-gA1Zz808BekAy04hS+SPa4hqCN8= - ``` - - Assert the input format of the hash: - ```bash - nix hash convert --hash-algo "sha256" --from nix32 "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=" - error: input hash 'ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=' does not have the expected format '--from nix32' - nix hash convert --hash-algo "sha256" --from nix32 "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" - sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0= - ``` - - The `--to`/`--from`/`--hash-algo` parameters have context-sensitive auto-completion. - - ## Related Deprecations - - The following commands are still available but will emit a deprecation warning. Please convert your code to - `nix hash convert`: + This replaces the old `nix hash to-*` commands, which are still available but will emit a deprecation warning. Please convert as follows: - `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead. - `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead. - `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead. - - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` - or even just `nix hash convert $hash1 $hash2` instead. + - `nix hash to-sri $hash1 $hash2`: : Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead. -- `nix profile` now allows referring to elements by human-readable name [#8678](https://github.com/NixOS/nix/pull/8678) +- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452) + + Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for + [Base32](https://en.wikipedia.org/wiki/Base32). + +- `nix profile` now allows referring to elements by human-readable names [#8678](https://github.com/NixOS/nix/pull/8678) [`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed. **Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix. 
-- Rename hash format `base32` to `nix32` [#8678](https://github.com/NixOS/nix/pull/8678) - - Hash format `base32` was renamed to `nix32` since it used a special nix-specific character set for - [Base32](https://en.wikipedia.org/wiki/Base32). - - ## Deprecation: Use `nix32` instead of `base32` as `toHashFormat` - - For the builtin `convertHash`, the `toHashFormat` parameter now accepts the same hash formats as the `--to`/`--from` - parameters of the `nix hash conert` command: `"base16"`, `"nix32"`, `"base64"`, and `"sri"`. The former `"base32"` value - remains as a deprecated alias for `"base32"`. Please convert your code from: - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "base32";} - ``` - - to - - ```nix - builtins.convertHash { inherit hash hashAlgo; toHashFormat = "nix32";} - ``` - - Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809) Adds a missing feature that was present in the old CLI, and matches our @@ -202,17 +72,17 @@ Before: ``` - error: cannot coerce a set to a string + error: cannot coerce a set to a string ``` After: ``` - error: cannot coerce a set to a string: { aesSupport = «thunk»; - avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; - canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion - = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 - attributes elided»} + error: cannot coerce a set to a string: { aesSupport = «thunk»; + avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»; + canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion + = «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84 + attributes elided»} ``` - Type errors include the failing value @@ -223,16 +93,16 @@ Before: ``` - error: value is a set while a string was expected + error: value is a set while a string was expected ``` After: ``` - error: expected a string but found a set: { ghc810 = «thunk»; - ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; - ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; - ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} + error: expected a string but found a set: { ghc810 = «thunk»; + ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»; + ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»; + ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»} ``` - Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555) @@ -281,45 +151,11 @@ can be set with [the `max-call-depth` option](@docroot@/command-ref/conf-file.md#conf-max-call-depth). - This fixes segfaults or the following unhelpful error message in many cases: - - error: stack overflow (possible infinite recursion) - - Before: - - ``` - $ nix-instantiate --eval --expr '(x: x x) (x: x x)' - Segmentation fault: 11 - ``` - - After: - - ``` - $ nix-instantiate --eval --expr '(x: x x) (x: x x)' - error: stack overflow - - at «string»:1:14: - 1| (x: x x) (x: x x) - | ^ - ``` + This replaces the `stack overflow (possible infinite recursion)` message. - Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658) - `with` expressions using non-attrset values to resolve variables are now reported with proper positions. 
- - Previously an incorrect `with` expression would report no position at all, making it hard to determine where the error originated: - - ``` - nix-repl> with 1; a - error: - … - - at «none»:0: (source not available) - - error: value is an integer while a set was expected - ``` - - Now position information is preserved and reported as with most other errors: + `with` expressions using non-attrset values to resolve variables are now reported with proper positions, e.g. ``` nix-repl> with 1; a @@ -331,4 +167,3 @@ error: expected a set but found an integer ``` - From 2f3fb6c12e91907b91be88e69a5a430ee3d86642 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 29 Jan 2024 22:57:25 +0100 Subject: [PATCH 221/307] Bump version --- .version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.version b/.version index 7329e21c3..db65e2167 100644 --- a/.version +++ b/.version @@ -1 +1 @@ -2.20.0 +2.21.0 From a3aae7beefb675ea8c27f07284995d4f06f9952c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 22:14:10 +0000 Subject: [PATCH 222/307] build(deps): bump zeebe-io/backport-action from 2.4.0 to 2.4.1 Bumps [zeebe-io/backport-action](https://github.com/zeebe-io/backport-action) from 2.4.0 to 2.4.1. - [Release notes](https://github.com/zeebe-io/backport-action/releases) - [Commits](https://github.com/zeebe-io/backport-action/compare/v2.4.0...v2.4.1) --- updated-dependencies: - dependency-name: zeebe-io/backport-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/backport.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 46a4529c1..5b75704b5 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -21,7 +21,7 @@ jobs: fetch-depth: 0 - name: Create backport PRs # should be kept in sync with `version` - uses: zeebe-io/backport-action@v2.4.0 + uses: zeebe-io/backport-action@v2.4.1 with: # Config README: https://github.com/zeebe-io/backport-action#backport-action github_token: ${{ secrets.GITHUB_TOKEN }} From b36ff47e7c38de2eebe4934c27f5594babcebe1b Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 30 Jan 2024 15:00:18 +0100 Subject: [PATCH 223/307] Resolve symlinks in a few more places Fixes #9882. --- src/libexpr/eval.cc | 2 +- src/libexpr/primops.cc | 2 +- tests/functional/nix-channel.sh | 5 ++++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index b60cdcf55..91fd3ddf8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2338,7 +2338,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat auto dstPath = i != srcToStore.end() ? 
i->second : [&]() { - auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); + auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair); allowPath(dstPath); srcToStore.insert_or_assign(path, dstPath); printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath)); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 993ecceb2..cdd9a3a09 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2241,7 +2241,7 @@ static void addPath( }); if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { - auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair); + auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); state.allowAndSetStorePathString(dstPath, v); diff --git a/tests/functional/nix-channel.sh b/tests/functional/nix-channel.sh index b5d935004..ca5df3bdd 100644 --- a/tests/functional/nix-channel.sh +++ b/tests/functional/nix-channel.sh @@ -29,7 +29,8 @@ unset NIX_CONFIG # Create a channel. rm -rf $TEST_ROOT/foo mkdir -p $TEST_ROOT/foo -nix copy --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r $(nix-instantiate dependencies.nix)) +drvPath=$(nix-instantiate dependencies.nix) +nix copy --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r "$drvPath") rm -rf $TEST_ROOT/nixexprs mkdir -p $TEST_ROOT/nixexprs cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/ @@ -64,3 +65,5 @@ grepQuiet 'item.*attrPath="foo".*name="dependencies-top"' $TEST_ROOT/meta.xml nix-env -i dependencies-top [ -e $TEST_HOME/.nix-profile/foobar ] +# Test evaluation through a channel symlink (#9882). +nix-instantiate '' From caea7dcb7e8fe75ef94635e15f49283668e60965 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 31 Jan 2024 11:43:27 -0500 Subject: [PATCH 224/307] Change an `allowPath` call to take a store path again This looks like a revert of #5844, but is not. That one was needed because https://github.com/NixOS/nix/commit/d90f9d4b9994dc1f15b9d664ae313f06261d6058#diff-0f59bb6f197822ef9f19ceae9624989499d170c84dfdc1f486a8959bb4588cafR85 changed the type of the argument to `allowPath` from a `StorePath` to a `Path`. But since https://github.com/NixOS/nix/commit/caabc4f64889d5a4c47d6102b3aa1d3c80bbc107#diff-0f59bb6f197822ef9f19ceae9624989499d170c84dfdc1f486a8959bb4588cafL100-R92, it is a `StorePath` again. I think this is worth changing because we want to be very careful about `toRealPath` and the evaluator --- ideally the choice of real path does not affect evaluation at all. So using it fewer times is better. --- src/libexpr/primops.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index cdd9a3a09..1197b6e13 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -112,7 +112,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) for (auto & outputPath : outputsToCopyAndAllow) { /* Add the output of this derivations to the allowed paths. 
*/ - allowPath(store->toRealPath(outputPath)); + allowPath(outputPath); } return res; From b13e6a76b4f289c6db69ffaa7bd35b7e44f2a391 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sat, 27 Jan 2024 11:19:05 +0100 Subject: [PATCH 225/307] parseStorePath: Support leading period --- doc/manual/rl-next/leading-period.md | 10 ++++++++++ tests/unit/libstore/path.cc | 1 + 2 files changed, 11 insertions(+) create mode 100644 doc/manual/rl-next/leading-period.md diff --git a/doc/manual/rl-next/leading-period.md b/doc/manual/rl-next/leading-period.md new file mode 100644 index 000000000..e9a32a74a --- /dev/null +++ b/doc/manual/rl-next/leading-period.md @@ -0,0 +1,10 @@ +--- +synopsis: Store paths are allowed to start with `.` +issues: 912 +prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224 +--- + +Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties. +From now on, leading periods are officially, definitively supported. + +Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286). diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index 5485ab8bb..f7b69d5f9 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -62,6 +62,7 @@ TEST_DO_PARSE(underscore, "foo_bar") TEST_DO_PARSE(period, "foo.txt") TEST_DO_PARSE(question_mark, "foo?why") TEST_DO_PARSE(equals_sign, "foo=foo") +TEST_DO_PARSE(dotfile, ".gitignore") #undef TEST_DO_PARSE From 69bbd5852af9b2f0b794162bd1debcdf64fc6648 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:18:27 +0100 Subject: [PATCH 226/307] test: Generate distinct path names Gen::just is the constant generator. Don't just return that! --- tests/unit/libstore-support/tests/path.cc | 72 ++++++++++++----------- 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/tests/unit/libstore-support/tests/path.cc b/tests/unit/libstore-support/tests/path.cc index bbe43bad4..8ddda8027 100644 --- a/tests/unit/libstore-support/tests/path.cc +++ b/tests/unit/libstore-support/tests/path.cc @@ -1,3 +1,4 @@ +#include #include #include @@ -20,59 +21,60 @@ void showValue(const StorePath & p, std::ostream & os) namespace rc { using namespace nix; -Gen Arbitrary::arbitrary() +Gen storePathChar() { - auto len = *gen::inRange( - 1, - StorePath::MaxPathLen - StorePath::HashLen); - - std::string pre; - pre.reserve(len); - - for (size_t c = 0; c < len; ++c) { - switch (auto i = *gen::inRange(0, 10 + 2 * 26 + 6)) { + return rc::gen::apply([](uint8_t i) -> char { + switch (i) { case 0 ... 9: - pre += '0' + i; + return '0' + i; case 10 ... 35: - pre += 'A' + (i - 10); - break; + return 'A' + (i - 10); case 36 ... 61: - pre += 'a' + (i - 36); - break; + return 'a' + (i - 36); case 62: - pre += '+'; - break; + return '+'; case 63: - pre += '-'; - break; + return '-'; case 64: - pre += '.'; - break; + return '.'; case 65: - pre += '_'; - break; + return '_'; case 66: - pre += '?'; - break; + return '?'; case 67: - pre += '='; - break; + return '='; default: assert(false); } - } + }, + gen::inRange(0, 10 + 2 * 26 + 6)); +} - return gen::just(StorePathName { - .name = std::move(pre), - }); +Gen Arbitrary::arbitrary() +{ + return gen::construct( + gen::suchThat( + gen::container(storePathChar()), + [](const std::string & s) { + return + !( s == "" + || s == "." + || s == ".." 
+ || s.starts_with(".-") + || s.starts_with("..-") + ); + } + ) + ); } Gen Arbitrary::arbitrary() { - return gen::just(StorePath { - *gen::arbitrary(), - (*gen::arbitrary()).name, - }); + return + gen::construct( + gen::arbitrary(), + gen::apply([](StorePathName n){ return n.name; }, gen::arbitrary()) + ); } } // namespace rc From 8406da28773f050e00a006e4812e3ecbf919a2a9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:31:28 +0100 Subject: [PATCH 227/307] test: Generate distinct hashes Gen::just is the constant generator. Don't just return that! --- tests/unit/libutil-support/tests/hash.cc | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/unit/libutil-support/tests/hash.cc b/tests/unit/libutil-support/tests/hash.cc index 50889cd33..51b9663b4 100644 --- a/tests/unit/libutil-support/tests/hash.cc +++ b/tests/unit/libutil-support/tests/hash.cc @@ -11,10 +11,17 @@ using namespace nix; Gen Arbitrary::arbitrary() { - Hash hash(HashAlgorithm::SHA1); - for (size_t i = 0; i < hash.hashSize; ++i) - hash.hash[i] = *gen::arbitrary(); - return gen::just(hash); + Hash prototype(HashAlgorithm::SHA1); + return + gen::apply( + [](const std::vector & v) { + Hash hash(HashAlgorithm::SHA1); + assert(v.size() == hash.hashSize); + std::copy(v.begin(), v.end(), hash.hash); + return hash; + }, + gen::container>(prototype.hashSize, gen::arbitrary()) + ); } } From f1b4663805a9dbcb1ace64ec110092d17c9155e0 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 30 Jan 2024 18:37:23 +0100 Subject: [PATCH 228/307] Disallow store path names that are . or .. (plus opt. -) As discussed in the maintainer meeting on 2024-01-29. Mainly this is to avoid a situation where the name is parsed and treated as a file name, mostly to protect users. .-* and ..-* are also considered invalid because they might strip on that separator to remove versions. Doesn't really work, but that's what we decided, and I won't argue with it, because .-* probably doesn't seem to have a real world application anyway. We do still permit a 1-character name that's just "-", which still poses a similar risk in such a situation. We can't start disallowing trailing -, because a non-zero number of users will need it and we've seen how annoying and painful such a change is. What matters most is preventing a situation where . or .. can be injected, and to just get this done. --- doc/manual/rl-next/leading-period.md | 2 +- src/libstore/path-regex.hh | 7 ++- src/libstore/path.cc | 13 ++++++ tests/unit/libstore/path.cc | 68 ++++++++++++++++++++++++++++ 4 files changed, 88 insertions(+), 2 deletions(-) diff --git a/doc/manual/rl-next/leading-period.md b/doc/manual/rl-next/leading-period.md index e9a32a74a..ef7c2326f 100644 --- a/doc/manual/rl-next/leading-period.md +++ b/doc/manual/rl-next/leading-period.md @@ -5,6 +5,6 @@ prs: 9867 9091 9095 9120 9121 9122 9130 9219 9224 --- Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties. -From now on, leading periods are officially, definitively supported. +From now on, leading periods are officially, definitively supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`. Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286). 
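
As a concrete illustration of the rule described in the release note above, the following hypothetical snippet (not part of this patch) checks a few candidate names against the same pattern that `nameRegexStr` is changed to below; the unit tests added to `tests/unit/libstore/path.cc` exercise the same cases:

    #include <cassert>
    #include <regex>

    int main()
    {
        // Same pattern as the updated nameRegexStr in path-regex.hh.
        std::regex nameRegex(R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)");

        assert(std::regex_match(".gitignore", nameRegex)); // leading period is accepted
        assert(std::regex_match("...", nameRegex));        // as is a name made only of dots
        assert(!std::regex_match(".", nameRegex));         // "." is rejected
        assert(!std::regex_match("..", nameRegex));        // ".." is rejected
        assert(!std::regex_match(".-1", nameRegex));       // first dash-separated component must not be "."
        assert(!std::regex_match("..-1", nameRegex));      // ... nor ".."
    }
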
diff --git a/src/libstore/path-regex.hh b/src/libstore/path-regex.hh index 4f8dc4c1f..56c2cfc1d 100644 --- a/src/libstore/path-regex.hh +++ b/src/libstore/path-regex.hh @@ -3,6 +3,11 @@ namespace nix { -static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-\._\?=]+)"; + +static constexpr std::string_view nameRegexStr = + // This uses a negative lookahead: (?!\.\.?(-|$)) + // - deny ".", "..", or those strings followed by '-' + // - when it's not those, start again at the start of the input and apply the next regex, which is [0-9a-zA-Z\+\-\._\?=]+ + R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)"; } diff --git a/src/libstore/path.cc b/src/libstore/path.cc index 4361b3194..5db4b974c 100644 --- a/src/libstore/path.cc +++ b/src/libstore/path.cc @@ -10,6 +10,19 @@ static void checkName(std::string_view path, std::string_view name) throw BadStorePath("store path '%s' has a name longer than %d characters", path, StorePath::MaxPathLen); // See nameRegexStr for the definition + if (name[0] == '.') { + // check against "." and "..", followed by end or dash + if (name.size() == 1) + throw BadStorePath("store path '%s' has invalid name '%s'", path, name); + if (name[1] == '-') + throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "."); + if (name[1] == '.') { + if (name.size() == 2) + throw BadStorePath("store path '%s' has invalid name '%s'", path, name); + if (name[2] == '-') + throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".."); + } + } for (auto c : name) if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'z') diff --git a/tests/unit/libstore/path.cc b/tests/unit/libstore/path.cc index f7b69d5f9..213b6e95f 100644 --- a/tests/unit/libstore/path.cc +++ b/tests/unit/libstore/path.cc @@ -39,6 +39,12 @@ TEST_DONT_PARSE(double_star, "**") TEST_DONT_PARSE(star_first, "*,foo") TEST_DONT_PARSE(star_second, "foo,*") TEST_DONT_PARSE(bang, "foo!o") +TEST_DONT_PARSE(dot, ".") +TEST_DONT_PARSE(dot_dot, "..") +TEST_DONT_PARSE(dot_dot_dash, "..-1") +TEST_DONT_PARSE(dot_dash, ".-1") +TEST_DONT_PARSE(dot_dot_dash_a, "..-a") +TEST_DONT_PARSE(dot_dash_a, ".-a") #undef TEST_DONT_PARSE @@ -63,6 +69,10 @@ TEST_DO_PARSE(period, "foo.txt") TEST_DO_PARSE(question_mark, "foo?why") TEST_DO_PARSE(equals_sign, "foo=foo") TEST_DO_PARSE(dotfile, ".gitignore") +TEST_DO_PARSE(triple_dot_a, "...a") +TEST_DO_PARSE(triple_dot_1, "...1") +TEST_DO_PARSE(triple_dot_dash, "...-") +TEST_DO_PARSE(triple_dot, "...") #undef TEST_DO_PARSE @@ -84,6 +94,64 @@ RC_GTEST_FIXTURE_PROP( RC_ASSERT(p == store->parseStorePath(store->printStorePath(p))); } + +RC_GTEST_FIXTURE_PROP( + StorePathTest, + prop_check_regex_eq_parse, + ()) +{ + static auto nameFuzzer = + rc::gen::container( + rc::gen::oneOf( + // alphanum, repeated to weigh heavier + rc::gen::oneOf( + rc::gen::inRange('0', '9'), + rc::gen::inRange('a', 'z'), + rc::gen::inRange('A', 'Z') + ), + // valid symbols + rc::gen::oneOf( + rc::gen::just('+'), + rc::gen::just('-'), + rc::gen::just('.'), + rc::gen::just('_'), + rc::gen::just('?'), + rc::gen::just('=') + ), + // symbols for scary .- and ..- cases, repeated for weight + rc::gen::just('.'), rc::gen::just('.'), + rc::gen::just('.'), rc::gen::just('.'), + rc::gen::just('-'), rc::gen::just('-'), + // ascii symbol ranges + rc::gen::oneOf( + rc::gen::inRange(' ', '/'), + rc::gen::inRange(':', '@'), + rc::gen::inRange('[', '`'), + rc::gen::inRange('{', '~') + ), + // typical whitespace + 
rc::gen::oneOf( + rc::gen::just(' '), + rc::gen::just('\t'), + rc::gen::just('\n'), + rc::gen::just('\r') + ), + // some chance of control codes, non-ascii or other garbage we missed + rc::gen::inRange('\0', '\xff') + )); + + auto name = *nameFuzzer; + + std::string path = store->storeDir + "/575s52sh487i0ylmbs9pvi606ljdszr0-" + name; + bool parsed = false; + try { + store->parseStorePath(path); + parsed = true; + } catch (const BadStorePath &) { + } + RC_ASSERT(parsed == std::regex_match(std::string { name }, nameRegex)); +} + #endif } From 0f2e9e6bd2b62b15babe608fbd18eccfc0215d06 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 1 Feb 2024 01:01:04 +0100 Subject: [PATCH 229/307] Typo --- src/libstore/build/worker.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/worker.hh b/src/libstore/build/worker.hh index 23ad87914..ced013ddd 100644 --- a/src/libstore/build/worker.hh +++ b/src/libstore/build/worker.hh @@ -116,7 +116,7 @@ private: WeakGoals waitingForAWhile; /** - * Last time the goals in `waitingForAWhile` where woken up. + * Last time the goals in `waitingForAWhile` were woken up. */ steady_time_point lastWokenUp; From 58c26dd0f0090bfd1460f138f9ba17eda8a8ab5b Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 1 Feb 2024 01:01:39 +0100 Subject: [PATCH 230/307] Add .clang-tidy --- .clang-tidy | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .clang-tidy diff --git a/.clang-tidy b/.clang-tidy new file mode 100644 index 000000000..0887b8670 --- /dev/null +++ b/.clang-tidy @@ -0,0 +1,3 @@ +# We use pointers to aggregates in a couple of places, intentionally. +# void * would look weird. +Checks: '-bugprone-sizeof-expression' From 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 21:46:01 +0100 Subject: [PATCH 231/307] builtin:fetchurl: Ensure a fixed-output derivation Previously we didn't check that the derivation was fixed-output, so you could use builtin:fetchurl to impurely fetch a file. --- src/libstore/builtins/fetchurl.cc | 3 +++ tests/functional/fetchurl.sh | 3 +++ 2 files changed, 6 insertions(+) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index 2086bd0b9..cf7b2770f 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,6 +16,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } + if (!drv.type().isFixed()) + throw Error("'builtin:fetchurl' must be a fixed-output derivation"); + auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); if (i == drv.env.end()) throw Error("attribute '%s' missing", name); diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 8cd40c09f..578f5a34c 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -78,3 +78,6 @@ outPath=$(nix-build -vvvvv --expr 'import ' --argstr url file: test -x $outPath/fetchurl.sh test -L $outPath/symlink + +# Make sure that *not* passing a outputHash fails. 
+expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From b8b739e484078863c10c48d031fa8459081ba8b3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 1 Feb 2024 22:01:02 +0100 Subject: [PATCH 232/307] builtin:fetchurl: Get output hash info from the drv --- src/libstore/builtins/fetchurl.cc | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index cf7b2770f..a9f2e748e 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -16,7 +16,12 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) writeFile(settings.netrcFile, netrcData, 0600); } - if (!drv.type().isFixed()) + auto out = get(drv.outputs, "out"); + if (!out) + throw Error("'builtin:fetchurl' requires an 'out' output"); + + auto dof = std::get_if(&out->raw); + if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); auto getAttr = [&](const std::string & name) { @@ -62,13 +67,11 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) }; /* Try the hashed mirrors first. */ - if (getAttr("outputHashMode") == "flat") + if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat) for (auto hashedMirror : settings.hashedMirrors.get()) try { if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/'; - std::optional ht = parseHashAlgoOpt(getAttr("outputHashAlgo")); - Hash h = newHashAllowEmpty(getAttr("outputHash"), ht); - fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false)); + fetch(hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + dof->ca.hash.to_string(HashFormat::Base16, false)); return; } catch (Error & e) { debug(e.what()); From c62c21e29af20f1c14a59ab37d7a25dd0b70f69e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:07:45 -0800 Subject: [PATCH 233/307] Move `PodIdx` to `pos-idx.hh` and `PosTable` to `pos-table.hh` --- src/libexpr/nixexpr.hh | 86 +--------------------------------------- src/libexpr/pos-idx.hh | 48 ++++++++++++++++++++++ src/libexpr/pos-table.hh | 83 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 133 insertions(+), 84 deletions(-) create mode 100644 src/libexpr/pos-idx.hh create mode 100644 src/libexpr/pos-table.hh diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index b6189c2a9..da0ec6e9d 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,6 +9,8 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "pos-idx.hh" +#include "pos-table.hh" namespace nix { @@ -29,90 +31,6 @@ public: using EvalError::EvalError; }; -class PosIdx { - friend class PosTable; - -private: - uint32_t id; - - explicit PosIdx(uint32_t id): id(id) {} - -public: - PosIdx() : id(0) {} - - explicit operator bool() const { return id > 0; } - - bool operator <(const PosIdx other) const { return id < other.id; } - - bool operator ==(const PosIdx other) const { return id == other.id; } - - bool operator !=(const PosIdx other) const { return id != other.id; } -}; - -class PosTable -{ -public: - class Origin { - friend PosTable; - private: - // must always be invalid by default, add() replaces this with the actual value. 
- // subsequent add() calls use this index as a token to quickly check whether the - // current origins.back() can be reused or not. - mutable uint32_t idx = std::numeric_limits::max(); - - // Used for searching in PosTable::[]. - explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {} - - public: - const Pos::Origin origin; - - Origin(Pos::Origin origin): origin(origin) {} - }; - - struct Offset { - uint32_t line, column; - }; - -private: - std::vector origins; - ChunkedVector offsets; - -public: - PosTable(): offsets(1024) - { - origins.reserve(1024); - } - - PosIdx add(const Origin & origin, uint32_t line, uint32_t column) - { - const auto idx = offsets.add({line, column}).second; - if (origins.empty() || origins.back().idx != origin.idx) { - origin.idx = idx; - origins.push_back(origin); - } - return PosIdx(idx + 1); - } - - Pos operator[](PosIdx p) const - { - if (p.id == 0 || p.id > offsets.size()) - return {}; - const auto idx = p.id - 1; - /* we want the last key <= idx, so we'll take prev(first key > idx). - this is guaranteed to never rewind origin.begin because the first - key is always 0. */ - const auto pastOrigin = std::upper_bound( - origins.begin(), origins.end(), Origin(idx), - [] (const auto & a, const auto & b) { return a.idx < b.idx; }); - const auto origin = *std::prev(pastOrigin); - const auto offset = offsets[idx]; - return {offset.line, offset.column, origin.origin}; - } -}; - -inline PosIdx noPos = {}; - - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/pos-idx.hh b/src/libexpr/pos-idx.hh new file mode 100644 index 000000000..9949f1dc5 --- /dev/null +++ b/src/libexpr/pos-idx.hh @@ -0,0 +1,48 @@ +#pragma once + +#include + +namespace nix { + +class PosIdx +{ + friend class PosTable; + +private: + uint32_t id; + + explicit PosIdx(uint32_t id) + : id(id) + { + } + +public: + PosIdx() + : id(0) + { + } + + explicit operator bool() const + { + return id > 0; + } + + bool operator<(const PosIdx other) const + { + return id < other.id; + } + + bool operator==(const PosIdx other) const + { + return id == other.id; + } + + bool operator!=(const PosIdx other) const + { + return id != other.id; + } +}; + +inline PosIdx noPos = {}; + +} diff --git a/src/libexpr/pos-table.hh b/src/libexpr/pos-table.hh new file mode 100644 index 000000000..1decf3c85 --- /dev/null +++ b/src/libexpr/pos-table.hh @@ -0,0 +1,83 @@ +#pragma once + +#include +#include +#include + +#include "chunked-vector.hh" +#include "pos-idx.hh" +#include "position.hh" + +namespace nix { + +class PosTable +{ +public: + class Origin + { + friend PosTable; + private: + // must always be invalid by default, add() replaces this with the actual value. + // subsequent add() calls use this index as a token to quickly check whether the + // current origins.back() can be reused or not. + mutable uint32_t idx = std::numeric_limits::max(); + + // Used for searching in PosTable::[]. 
+        explicit Origin(uint32_t idx)
+            : idx(idx)
+            , origin{std::monostate()}
+        {
+        }
+
+    public:
+        const Pos::Origin origin;
+
+        Origin(Pos::Origin origin)
+            : origin(origin)
+        {
+        }
+    };
+
+    struct Offset
+    {
+        uint32_t line, column;
+    };
+
+private:
+    std::vector<Origin> origins;
+    ChunkedVector<Offset, 1024> offsets;
+
+public:
+    PosTable()
+        : offsets(1024)
+    {
+        origins.reserve(1024);
+    }
+
+    PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
+    {
+        const auto idx = offsets.add({line, column}).second;
+        if (origins.empty() || origins.back().idx != origin.idx) {
+            origin.idx = idx;
+            origins.push_back(origin);
+        }
+        return PosIdx(idx + 1);
+    }
+
+    Pos operator[](PosIdx p) const
+    {
+        if (p.id == 0 || p.id > offsets.size())
+            return {};
+        const auto idx = p.id - 1;
+        /* we want the last key <= idx, so we'll take prev(first key > idx).
+           this is guaranteed to never rewind origin.begin because the first
+           key is always 0. */
+        const auto pastOrigin = std::upper_bound(
+            origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
+        const auto origin = *std::prev(pastOrigin);
+        const auto offset = offsets[idx];
+        return {offset.line, offset.column, origin.origin};
+    }
+};
+
+}

From c6a89c1a1659b31694c0fbcd21d78a6dd521c732 Mon Sep 17 00:00:00 2001
From: Rebecca Turner
Date: Mon, 22 Jan 2024 17:08:29 -0800
Subject: [PATCH 234/307] libexpr: Support structured error classes

While preparing PRs like #9753, I've had to change error messages in
dozens of code paths. It would be nice if instead of

    EvalError("expected 'boolean' but found '%1%'", showType(v))

we could write

    TypeError(v, "boolean")

or similar. Then, changing the error message could be a mechanical
refactor with the compiler pointing out places the constructor needs to
be changed, rather than the error-prone process of grepping through the
codebase. Structured errors would also help prevent the "same" error
from having multiple slightly different messages, and could be a first
step towards error codes / an error index.

This PR reworks the exception infrastructure in `libexpr` to
support exception types with different constructor signatures than
`BaseError`. Actually refactoring the exceptions to use structured data
will come in a future PR (this one is big enough already, as it has to
touch every exception in `libexpr`).

The core design is in `eval-error.hh`. Generally, errors like this:

    state.error("'%s' is not a string", getAttrPathStr())
        .debugThrow<TypeError>()

are transformed like this:

    state.error<TypeError>("'%s' is not a string", getAttrPathStr())
        .debugThrow()

The type annotation has moved from `ErrorBuilder::debugThrow` to
`EvalState::error`.
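
To make the new calling convention concrete, here is a small
illustrative sketch of a call site; the helper itself is hypothetical,
but `TypeError`, `showType`, and the builder methods `atPos` and
`debugThrow` are the ones declared in `eval-error.hh` below:

    // Purely illustrative helper, not part of this patch.
    static void requireString(EvalState & state, Value & v, const PosIdx pos)
    {
        state.forceValue(v, pos);
        if (v.type() != nString)
            state.error<TypeError>("expected a string but found %1%", showType(v))
                .atPos(pos)
                .debugThrow();
    }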
--- src/libcmd/repl.cc | 2 - src/libexpr/attr-path.cc | 8 +- src/libexpr/eval-cache.cc | 30 +-- src/libexpr/eval-error.cc | 113 ++++++++ src/libexpr/eval-error.hh | 118 +++++++++ src/libexpr/eval-inline.hh | 19 +- src/libexpr/eval.cc | 217 +++++++--------- src/libexpr/eval.hh | 91 +------ src/libexpr/flake/flake.cc | 16 +- src/libexpr/get-drvs.cc | 5 +- src/libexpr/json-to-value.cc | 4 +- src/libexpr/json-to-value.hh | 7 +- src/libexpr/lexer.l | 12 +- src/libexpr/nixexpr.cc | 8 +- src/libexpr/nixexpr.hh | 17 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 244 ++++++++---------- src/libexpr/primops/context.cc | 50 ++-- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/primops/fetchMercurial.cc | 10 +- src/libexpr/primops/fetchTree.cc | 68 ++--- src/libexpr/primops/fromTOML.cc | 5 +- src/libexpr/value-to-json.cc | 18 +- src/libexpr/value.hh | 2 +- src/libmain/shared.cc | 2 +- src/libstore/build/entry-points.cc | 4 +- src/libstore/daemon.cc | 2 +- src/libutil/error.cc | 6 +- src/libutil/error.hh | 27 +- src/libutil/logging.cc | 2 +- src/nix-store/nix-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 6 +- tests/functional/fetchGit.sh | 4 +- .../lang/eval-fail-attr-name-type.err.exp | 5 + .../eval-fail-fromTOML-timestamps.err.exp | 2 +- .../functional/lang/eval-fail-toJSON.err.exp | 5 + .../eval-fail-using-set-as-attr-name.err.exp | 5 + tests/unit/libexpr/error_traces.cc | 20 +- 40 files changed, 653 insertions(+), 545 deletions(-) create mode 100644 src/libexpr/eval-error.cc create mode 100644 src/libexpr/eval-error.hh diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..714d3adb5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -422,8 +422,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix) // Quietly ignore parse errors. } catch (EvalError & e) { // Quietly ignore evaluation errors. - } catch (UndefinedVarError & e) { - // Quietly ignore undefined variable errors. } catch (BadURL & e) { // Quietly ignore BadURL flake-related errors. 
} diff --git a/src/libexpr/attr-path.cc b/src/libexpr/attr-path.cc index 7481a2232..d6befd362 100644 --- a/src/libexpr/attr-path.cc +++ b/src/libexpr/attr-path.cc @@ -65,10 +65,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin if (!attrIndex) { if (v->type() != nAttrs) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a set but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (attr.empty()) throw Error("empty attribute name in selection path '%1%'", attrPath); @@ -88,10 +88,10 @@ std::pair findAlongAttrPath(EvalState & state, const std::strin else { if (!v->isList()) - throw TypeError( + state.error( "the expression selected by the selection path '%1%' should be a list but is %2%", attrPath, - showType(*v)); + showType(*v)).debugThrow(); if (*attrIndex >= v->listSize()) throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath); diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 5808d58b6..2fc69e796 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -491,7 +491,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (forceErrors) debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name)); else - throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name)); + throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name)); } else return std::make_shared(root, std::make_pair(shared_from_this(), name), nullptr, std::move(attr)); @@ -500,7 +500,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro // evaluate to see whether 'name' exists } else return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } @@ -508,7 +508,7 @@ std::shared_ptr AttrCursor::maybeGetAttr(Symbol name, bool forceErro if (v.type() != nAttrs) return nullptr; - //throw TypeError("'%s' is not an attribute set", getAttrPathStr()); + //error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); auto attr = v.attrs->get(name); @@ -574,14 +574,14 @@ std::string AttrCursor::getString() debug("using cached string attribute '%s'", getAttrPathStr()); return s->first; } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nString && v.type() != nPath) - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); return v.type() == nString ? 
v.c_str() : v.path().to_string(); } @@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext() return *s; } } else - root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow(); } } @@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext() else if (v.type() == nPath) return {v.path().to_string(), {}}; else - root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow(); } bool AttrCursor::getBool() @@ -643,14 +643,14 @@ bool AttrCursor::getBool() debug("using cached Boolean attribute '%s'", getAttrPathStr()); return *b; } else - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nBool) - root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow(); return v.boolean; } @@ -665,14 +665,14 @@ NixInt AttrCursor::getInt() debug("using cached integer attribute '%s'", getAttrPathStr()); return i->x; } else - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nInt) - throw TypeError("'%s' is not an integer", getAttrPathStr()); + root->state.error("'%s' is not an integer", getAttrPathStr()).debugThrow(); return v.integer; } @@ -687,7 +687,7 @@ std::vector AttrCursor::getListOfStrings() debug("using cached list of strings attribute '%s'", getAttrPathStr()); return *l; } else - throw TypeError("'%s' is not a list of strings", getAttrPathStr()); + root->state.error("'%s' is not a list of strings", getAttrPathStr()).debugThrow(); } } @@ -697,7 +697,7 @@ std::vector AttrCursor::getListOfStrings() root->state.forceValue(v, noPos); if (v.type() != nList) - throw TypeError("'%s' is not a list", getAttrPathStr()); + root->state.error("'%s' is not a list", getAttrPathStr()).debugThrow(); std::vector res; @@ -720,14 +720,14 @@ std::vector AttrCursor::getAttrs() debug("using cached attrset attribute '%s'", getAttrPathStr()); return *attrs; } else - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); } } auto & v = forceValue(); if (v.type() != nAttrs) - root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); + root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow(); std::vector attrs; for (auto & attr : *getValue().attrs) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc new file mode 100644 index 000000000..b9411cbf4 --- /dev/null +++ b/src/libexpr/eval-error.cc @@ -0,0 +1,113 @@ +#include "eval-error.hh" +#include "eval.hh" +#include "value.hh" + +namespace nix { + +template +EvalErrorBuilder & EvalErrorBuilder::withExitStatus(unsigned int exitStatus) +{ + error.withExitStatus(exitStatus); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(PosIdx pos) +{ + error.err.pos = error.state.positions[pos]; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::atPos(Value & value, PosIdx fallback) +{ + return atPos(value.determinePos(fallback)); +} + +template +EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const 
std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) +{ + error.err.traces.push_front( + Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withSuggestions(Suggestions & s) +{ + error.err.suggestions = s; + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr & expr) +{ + // NOTE: This is abusing side-effects. + // TODO: check compatibility with nested debugger calls. + // TODO: What side-effects?? + error.state.debugTraces.push_front(DebugTrace{ + .pos = error.state.positions[expr.getPos()], + .expr = expr, + .env = env, + .hint = hintformat("Fake frame for debugging purposes"), + .isError = true}); + return *this; +} + +template +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, hintformat hint, bool frame) +{ + error.addTrace(error.state.positions[pos], hint, frame); + return *this; +} + +template +template +EvalErrorBuilder & +EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) +{ + + addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + return *this; +} + +template +void EvalErrorBuilder::debugThrow() +{ + if (error.state.debugRepl && !error.state.debugTraces.empty()) { + const DebugTrace & last = error.state.debugTraces.front(); + const Env * env = &last.env; + const Expr * expr = &last.expr; + error.state.runDebugRepl(&error, *env, *expr); + } + + // `EvalState` is the only class that can construct an `EvalErrorBuilder`, + // and it does so in dynamic storage. This is the final method called on + // any such instancve and must delete itself before throwing the underlying + // error. + auto error = std::move(this->error); + delete this; + + throw error; +} + +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; +template class EvalErrorBuilder; + +} diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh new file mode 100644 index 000000000..ee69dce64 --- /dev/null +++ b/src/libexpr/eval-error.hh @@ -0,0 +1,118 @@ +#pragma once + +#include + +#include "error.hh" +#include "pos-idx.hh" + +namespace nix { + +struct Env; +struct Expr; +struct Value; + +class EvalState; +template +class EvalErrorBuilder; + +class EvalError : public Error +{ + template + friend class EvalErrorBuilder; +public: + EvalState & state; + + EvalError(EvalState & state, ErrorInfo && errorInfo) + : Error(errorInfo) + , state(state) + { + } + + template + explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs) + : Error(formatString, formatArgs...) 
+ , state(state) + { + } +}; + +MakeError(ParseError, Error); +MakeError(AssertionError, EvalError); +MakeError(ThrownError, AssertionError); +MakeError(Abort, EvalError); +MakeError(TypeError, EvalError); +MakeError(UndefinedVarError, EvalError); +MakeError(MissingArgumentError, EvalError); +MakeError(CachedEvalError, EvalError); +MakeError(InfiniteRecursionError, EvalError); + +struct InvalidPathError : public EvalError +{ +public: + Path path; + InvalidPathError(EvalState & state, const Path & path) + : EvalError(state, "path '%s' is not valid", path) + { + } +}; + +template +class EvalErrorBuilder final +{ + friend class EvalState; + + template + explicit EvalErrorBuilder(EvalState & state, const Args &... args) + : error(T(state, args...)) + { + } + +public: + T error; + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withExitStatus(unsigned int exitStatus); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(PosIdx pos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & atPos(Value & value, PosIdx fallback = noPos); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withSuggestions(Suggestions & s); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); + + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + + template + [[nodiscard, gnu::noinline]] EvalErrorBuilder & + addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + + [[gnu::noinline, gnu::noreturn]] void debugThrow(); +}; + +/** + * The size needed to allocate any `EvalErrorBuilder`. + * + * The list of classes here needs to be kept in sync with the list of `template + * class` declarations in `eval-error.cc`. + * + * This is used by `EvalState` to preallocate a buffer of sufficient size for + * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. 
+ */ +constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), + sizeof(EvalErrorBuilder), +}); + +} diff --git a/src/libexpr/eval-inline.hh b/src/libexpr/eval-inline.hh index 42cb68bbe..03320c7c9 100644 --- a/src/libexpr/eval-inline.hh +++ b/src/libexpr/eval-inline.hh @@ -3,6 +3,7 @@ #include "print.hh" #include "eval.hh" +#include "eval-error.hh" namespace nix { @@ -115,10 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e PosIdx pos = getPos(); forceValue(v, pos); if (v.type() != nAttrs) { - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } @@ -128,10 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e { forceValue(v, pos); if (!v.isList()) { - error("expected a list but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withTrace(pos, errorCtx).debugThrow(); + error( + "expected a list but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withTrace(pos, errorCtx).debugThrow(); } } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..ded4415cc 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -339,46 +339,6 @@ void initGC() gcInitialised = true; } - -ErrorBuilder & ErrorBuilder::atPos(PosIdx pos) -{ - info.errPos = state.positions[pos]; - return *this; -} - -ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = false }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) -{ - info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true }); - return *this; -} - -ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s) -{ - info.suggestions = s; - return *this; -} - -ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr) -{ - // NOTE: This is abusing side-effects. - // TODO: check compatibility with nested debugger calls. - state.debugTraces.push_front(DebugTrace { - .pos = nullptr, - .expr = expr, - .env = env, - .hint = hintformat("Fake frame for debugging purposes"), - .isError = true - }); - return *this; -} - - EvalState::EvalState( const SearchPath & _searchPath, ref store, @@ -811,7 +771,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & ? std::make_unique( *this, DebugTrace { - .pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()], + .pos = error->info().pos ? 
error->info().pos : positions[expr.getPos()], .expr = expr, .env = env, .hint = error->info().msg, @@ -930,7 +890,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) return j->value; } if (!fromWith->parentWith) - error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); + error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow(); for (size_t l = fromWith->prevWith; l; --l, env = env->up) ; fromWith = fromWith->parentWith; } @@ -1136,7 +1096,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial) // computation. if (mustBeTrivial && !(dynamic_cast(e))) - error("file '%s' must be an attribute set", path).debugThrow(); + error("file '%s' must be an attribute set", path).debugThrow(); eval(e, v); } catch (Error & e) { addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string()); @@ -1167,10 +1127,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri Value v; e->eval(*this, env, v); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).withFrame(env, *e).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -1184,10 +1145,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po try { e->eval(*this, env, v); if (v.type() != nAttrs) - error("expected a set but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .withFrame(env, *e).debugThrow(); + error( + "expected a set but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).withFrame(env, *e).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -1296,7 +1258,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v) auto nameSym = state.symbols.create(nameVal.string_view()); Bindings::iterator j = v.attrs->find(nameSym); if (j != v.attrs->end()) - state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); + state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow(); i.valueExpr->setName(nameSym); /* Keep sorted order so find can catch duplicates */ @@ -1408,8 +1370,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v) for (auto & attr : *vAttrs->attrs) allAttrNames.insert(state.symbols[attr.name]); auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]); - state.error("attribute '%1%' missing", state.symbols[name]) - .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); + state.error("attribute '%1%' missing", state.symbols[name]) + .atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow(); } } vAttrs = j->value; @@ -1482,7 +1444,7 @@ public: void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos) { if (callDepth > evalSettings.maxCallDepth) - error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow(); + error("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow(); CallDepth 
_level(callDepth); auto trace = evalSettings.traceFunctionCalls @@ -1540,13 +1502,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & auto j = args[0]->attrs->get(i.name); if (!j) { if (!i.def) { - error("function '%1%' called without required argument '%2%'", + error("function '%1%' called without required argument '%2%'", (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } env2.values[displ++] = i.def->maybeThunk(*this, env2); } else { @@ -1566,14 +1528,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & for (auto & formal : lambda.formals->formals) formalNames.insert(symbols[formal.name]); auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]); - error("function '%1%' called with unexpected argument '%2%'", + error("function '%1%' called with unexpected argument '%2%'", (lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"), symbols[i.name]) .atPos(lambda.pos) .withTrace(pos, "from call site") .withSuggestions(suggestions) .withFrame(*fun.lambda.env, lambda) - .debugThrow(); + .debugThrow(); } abort(); // can't happen } @@ -1705,11 +1667,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & } else - error("attempt to call something which is not a function but %1%: %2%", + error( + "attempt to call something which is not a function but %1%: %2%", showType(vCur), ValuePrinter(*this, vCur, errorPrintOptions)) .atPos(pos) - .debugThrow(); + .debugThrow(); } vRes = vCur; @@ -1779,12 +1742,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res) if (j != args.end()) { attrs.insert(*j); } else if (!i.def) { - error(R"(cannot evaluate a function that has an argument without a value ('%1%') + error(R"(cannot evaluate a function that has an argument without a value ('%1%') Nix attempted to evaluate a function as a top level expression; in this case it must have its arguments supplied either by default values, or passed explicitly with '--arg' or '--argstr'. 
See https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name]) - .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); + .atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow(); } } } @@ -1815,7 +1778,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v) if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) { std::ostringstream out; cond->show(state.symbols, out); - state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); + state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow(); } body->eval(state, env, v); } @@ -1993,14 +1956,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) nf = n; nf += vTmp.fpoint; } else - state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else if (firstType == nFloat) { if (vTmp.type() == nInt) { nf += vTmp.integer; } else if (vTmp.type() == nFloat) { nf += vTmp.fpoint; } else - state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); + state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow(); } else { if (s.empty()) s.reserve(es->size()); /* skip canonization of first path, which would only be not @@ -2022,7 +1985,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) v.mkFloat(nf); else if (firstType == nPath) { if (!context.empty()) - state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); + state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow(); v.mkPath(state.rootPath(CanonPath(canonPath(str())))); } else v.mkStringMove(c_str(), context); @@ -2037,8 +2000,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v) void ExprBlackHole::eval(EvalState & state, Env & env, Value & v) { - state.error("infinite recursion encountered") - .debugThrow(); + state.error("infinite recursion encountered") + .atPos(v.determinePos(noPos)) + .debugThrow(); } // always force this to be separate, otherwise forceValue may inline it and take @@ -2052,7 +2016,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos) try { std::rethrow_exception(e); } catch (InfiniteRecursionError & e) { - e.err.errPos = positions[pos]; + e.atPos(positions[pos]); } catch (...) 
{ } } @@ -2100,15 +2064,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt try { forceValue(v, pos); if (v.type() != nInt) - error("expected an integer but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected an integer but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.integer; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.integer; } @@ -2119,10 +2086,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err if (v.type() == nInt) return v.integer; else if (v.type() != nFloat) - error("expected a float but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a float but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.fpoint; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2136,15 +2104,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx try { forceValue(v, pos); if (v.type() != nBool) - error("expected a Boolean but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a Boolean but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.boolean; } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; } + + return v.boolean; } @@ -2159,10 +2130,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro try { forceValue(v, pos); if (v.type() != nFunction && !isFunctor(v)) - error("expected a function but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a function but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); throw; @@ -2175,10 +2147,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string try { forceValue(v, pos); if (v.type() != nString) - error("expected a string but found %1%: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) - .debugThrow(); + error( + "expected a string but found %1%: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ).atPos(pos).debugThrow(); return v.string_view(); } catch (Error & e) { e.addTrace(positions[pos], errorCtx); @@ -2207,7 +2180,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s { auto s = forceString(v, pos, errorCtx); if (v.context()) { - error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); + error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow(); } return s; } @@ -2272,11 +2245,13 @@ BackedStringView EvalState::coerceToString( return std::move(*maybeString); auto i = v.attrs->find(sOutPath); if (i == v.attrs->end()) { - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) + error( + "cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } return coerceToString(pos, 
*i->value, context, errorCtx, coerceMore, copyToStore, canonicalizePath); @@ -2284,7 +2259,7 @@ BackedStringView EvalState::coerceToString( if (v.type() == nExternal) { try { - return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore); + return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore); } catch (Error & e) { e.addTrace(nullptr, errorCtx); throw; @@ -2320,18 +2295,19 @@ BackedStringView EvalState::coerceToString( } } - error("cannot coerce %1% to a string: %2%", - showType(v), - ValuePrinter(*this, v, errorPrintOptions)) + error("cannot coerce %1% to a string: %2%", + showType(v), + ValuePrinter(*this, v, errorPrintOptions) + ) .withTrace(pos, errorCtx) - .debugThrow(); + .debugThrow(); } StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path) { if (nix::isDerivation(path.path.abs())) - error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); + error("file names are not allowed to end in '%1%'", drvExtension).debugThrow(); auto i = srcToStore.find(path); @@ -2380,7 +2356,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext relative to the root filesystem. */ auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (path == "" || path[0] != '/') - error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); + error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow(); return rootPath(CanonPath(path)); } @@ -2390,7 +2366,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned(); if (auto storePath = store->maybeParseStorePath(path)) return *storePath; - error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); + error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow(); } @@ -2400,18 +2376,18 @@ std::pair EvalState::coerceToSingleDerivedP auto s = forceString(v, context, pos, errorCtx); auto csize = context.size(); if (csize != 1) - error( + error( "string '%s' has %d entries in its context. It should only have exactly one entry", s, csize) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); auto derivedPath = std::visit(overloaded { [&](NixStringContextElem::Opaque && o) -> SingleDerivedPath { return std::move(o); }, [&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath { - error( + error( "string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time", - s).withTrace(pos, errorCtx).debugThrow(); + s).withTrace(pos, errorCtx).debugThrow(); }, [&](NixStringContextElem::Built && b) -> SingleDerivedPath { return std::move(b); @@ -2434,16 +2410,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value & error message. */ std::visit(overloaded { [&](const SingleDerivedPath::Opaque & o) { - error( + error( "path string '%s' has context with the different path '%s'", s, sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); }, [&](const SingleDerivedPath::Built & b) { - error( + error( "string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. 
It should be '%s'", s, b.output, b.drvPath->to_string(*store), sExpected) - .withTrace(pos, errorCtx).debugThrow(); + .withTrace(pos, errorCtx).debugThrow(); } }, derivedPath.raw()); } @@ -2528,7 +2504,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v case nThunk: // Must not be left by forceValue default: - error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); + error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow(); } } @@ -2767,13 +2743,12 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_ if (hasPrefix(path, "nix/")) return {corepkgsFS, CanonPath(path.substr(3))}; - debugThrow(ThrownError({ - .msg = hintfmt(evalSettings.pureEval + error( + evalSettings.pureEval ? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)" : "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)", - path), - .errPos = positions[pos] - }), 0, 0); + path + ).atPos(pos).debugThrow(); } @@ -2856,11 +2831,11 @@ Expr * EvalState::parse( } -std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const +std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const { - throw TypeError({ - .msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this) - }); + state.error( + "cannot coerce %1% to a string: %2%", showType(), *this + ).atPos(pos).debugThrow(); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..afe89cd30 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -2,6 +2,7 @@ ///@file #include "attr-set.hh" +#include "eval-error.hh" #include "types.hh" #include "value.hh" #include "nixexpr.hh" @@ -151,45 +152,6 @@ struct DebugTrace { bool isError; }; -void debugError(Error * e, Env & env, Expr & expr); - -class ErrorBuilder -{ - private: - EvalState & state; - ErrorInfo info; - - ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { } - - public: - template - [[nodiscard, gnu::noinline]] - static ErrorBuilder * create(EvalState & s, const Args & ... args) - { - return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) 
}); - } - - [[nodiscard, gnu::noinline]] - ErrorBuilder & atPos(PosIdx pos); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withSuggestions(Suggestions & s); - - [[nodiscard, gnu::noinline]] - ErrorBuilder & withFrame(const Env & e, const Expr & ex); - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(); -}; - - class EvalState : public std::enable_shared_from_this { public: @@ -274,39 +236,10 @@ public: void runDebugRepl(const Error * error, const Env & env, const Expr & expr); - template - [[gnu::noinline, gnu::noreturn]] - void debugThrowLastTrace(E && error) - { - debugThrow(error, nullptr, nullptr); - } - - template - [[gnu::noinline, gnu::noreturn]] - void debugThrow(E && error, const Env * env, const Expr * expr) - { - if (debugRepl && ((env && expr) || !debugTraces.empty())) { - if (!env || !expr) { - const DebugTrace & last = debugTraces.front(); - env = &last.env; - expr = &last.expr; - } - runDebugRepl(&error, *env, *expr); - } - - throw std::move(error); - } - - // This is dangerous, but gets in line with the idea that error creation and - // throwing should not allocate on the stack of hot functions. - // as long as errors are immediately thrown, it works. - ErrorBuilder * errorBuilder; - - template + template [[nodiscard, gnu::noinline]] - ErrorBuilder & error(const Args & ... args) { - errorBuilder = ErrorBuilder::create(*this, args...); - return *errorBuilder; + EvalErrorBuilder & error(const Args & ... args) { + return *new EvalErrorBuilder(*this, args...); } private: @@ -845,22 +778,6 @@ SourcePath resolveExprPath(SourcePath path); */ bool isAllowedURI(std::string_view uri, const Strings & allowedPaths); -struct InvalidPathError : EvalError -{ - Path path; - InvalidPathError(const Path & path); -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~InvalidPathError() throw () { }; -#endif -}; - -template -void ErrorBuilder::debugThrow() -{ - // NOTE: We always use the -LastTrace version as we push the new trace in withFrame() - state.debugThrowLastTrace(ErrorType(info)); -} - } #include "eval-inline.hh" diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index fee58792b..3396b0219 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -147,8 +147,8 @@ static FlakeInput parseFlakeInput(EvalState & state, NixStringContext emptyContext = {}; attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump()); } else - throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", - state.symbols[attr.name], showType(*attr.value)); + state.error("flake input attribute '%s' is %s while a string, Boolean, or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } #pragma GCC diagnostic pop } @@ -295,15 +295,15 @@ static Flake getFlake( std::vector ss; for (auto elem : setting.value->listItems()) { if (elem->type() != nString) - throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected", - state.symbols[setting.name], showType(*setting.value)); + state.error("list element in flake configuration setting '%s' is %s while a string is expected", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, "")); } 
flake.config.settings.emplace(state.symbols[setting.name], ss); } else - throw TypeError("flake configuration setting '%s' is %s", - state.symbols[setting.name], showType(*setting.value)); + state.error("flake configuration setting '%s' is %s", + state.symbols[setting.name], showType(*setting.value)).debugThrow(); } } @@ -865,11 +865,11 @@ static void prim_flakeRefToString( attrs.emplace(state.symbols[attr.name], std::string(attr.value->string_view())); } else { - state.error( + state.error( "flake reference attribute sets may only contain integers, Booleans, " "and strings, but attribute '%s' is %s", state.symbols[attr.name], - showType(*attr.value)).debugThrow(); + showType(*attr.value)).debugThrow(); } } auto flakeRef = FlakeRef::fromAttrs(attrs); diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 51449ccb3..e9ed1ef08 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const { if (name == "" && attrs) { auto i = attrs->find(state->sName); - if (i == attrs->end()) throw TypeError("derivation name missing"); + if (i == attrs->end()) state->error("derivation name missing").debugThrow(); name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation"); } return name; @@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn, } } - else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)"); + else + state.error("expression does not evaluate to a derivation (or a set or list of those)").debugThrow(); } diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc index 99a475ff9..2d12c47c5 100644 --- a/src/libexpr/json-to-value.cc +++ b/src/libexpr/json-to-value.cc @@ -1,4 +1,6 @@ #include "json-to-value.hh" +#include "value.hh" +#include "eval.hh" #include #include @@ -159,7 +161,7 @@ public: } bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) { - throw JSONParseError(ex.what()); + throw JSONParseError("%s", ex.what()); } }; diff --git a/src/libexpr/json-to-value.hh b/src/libexpr/json-to-value.hh index 3b8ec000f..3c8fa5cc0 100644 --- a/src/libexpr/json-to-value.hh +++ b/src/libexpr/json-to-value.hh @@ -1,13 +1,16 @@ #pragma once ///@file -#include "eval.hh" +#include "error.hh" #include namespace nix { -MakeError(JSONParseError, EvalError); +class EvalState; +struct Value; + +MakeError(JSONParseError, Error); void parseJSON(EvalState & state, const std::string_view & s, Value & v); diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index d7a0b5048..af67e847d 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -146,9 +146,9 @@ or { return OR_KW; } try { yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid integer '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); } return INT_LIT; @@ -156,9 +156,9 @@ or { return OR_KW; } {FLOAT} { errno = 0; yylval->nf = strtod(yytext, 0); if (errno != 0) - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("invalid float '%1%'", yytext), - .errPos = state->positions[CUR_POS], + .pos = state->positions[CUR_POS], }); return FLOAT_LIT; } @@ -285,9 +285,9 @@ or { return OR_KW; } {ANY} | <> { - throw ParseError({ + throw ParseError(ErrorInfo{ .msg = hintfmt("path has a trailing slash"), - .errPos = state->positions[CUR_POS], + .pos = 
state->positions[CUR_POS], }); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..6b8f33c42 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -296,10 +296,10 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr & enclosing `with'. If there is no `with', then we can issue an "undefined variable" error now. */ if (withLevel == -1) - throw UndefinedVarError({ - .msg = hintfmt("undefined variable '%1%'", es.symbols[name]), - .errPos = es.positions[pos] - }); + es.error( + "undefined variable '%1%'", + es.symbols[name] + ).atPos(pos).debugThrow(); for (auto * e = env.get(); e && !fromWith; e = e->up) fromWith = e->isWith; this->level = withLevel; diff --git a/src/libexpr/nixexpr.hh b/src/libexpr/nixexpr.hh index da0ec6e9d..1f944f10b 100644 --- a/src/libexpr/nixexpr.hh +++ b/src/libexpr/nixexpr.hh @@ -9,28 +9,13 @@ #include "error.hh" #include "chunked-vector.hh" #include "position.hh" +#include "eval-error.hh" #include "pos-idx.hh" #include "pos-table.hh" namespace nix { -MakeError(EvalError, Error); -MakeError(ParseError, Error); -MakeError(AssertionError, EvalError); -MakeError(ThrownError, AssertionError); -MakeError(Abort, EvalError); -MakeError(TypeError, EvalError); -MakeError(UndefinedVarError, Error); -MakeError(MissingArgumentError, EvalError); - -class InfiniteRecursionError : public EvalError -{ - friend class EvalState; -public: - using EvalError::EvalError; -}; - struct Env; struct Value; class EvalState; diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index 0a9f076dc..bdd5bbabe 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -66,7 +66,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -74,7 +74,7 @@ inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx pre { throw ParseError({ .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), - .errPos = positions[pos] + .pos = positions[pos] }); } @@ -155,13 +155,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym if (duplicate) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), - .errPos = positions[duplicate->second] + .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), - .errPos = positions[pos] + .pos = positions[pos] }); return formals; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index e95da37f7..95f45c80a 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -66,7 +66,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * { throw ParseError({ .msg = hintfmt(error), - .errPos = state->positions[state->at(*loc)] + .pos = state->positions[state->at(*loc)] }); } @@ -155,7 +155,7 @@ expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in let"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); } @@ -245,7 +245,7 @@ expr_simple if (noURLLiterals) throw ParseError({ .msg = hintfmt("URL literals are disabled"), - .errPos = state->positions[CUR_POS] + .pos = state->positions[CUR_POS] }); $$ = 
new ExprString(std::string($1)); } @@ -341,7 +341,7 @@ attrs } else throw ParseError({ .msg = hintfmt("dynamic attributes not allowed in inherit"), - .errPos = state->positions[state->at(@2)] + .pos = state->positions[state->at(@2)] }); } | { $$ = new AttrPath; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..1eec6f961 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -39,10 +39,6 @@ namespace nix { * Miscellaneous *************************************************************/ - -InvalidPathError::InvalidPathError(const Path & path) : - EvalError("path '%s' is not valid", path), path(path) {} - StringMap EvalState::realiseContext(const NixStringContext & context) { std::vector drvs; @@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context) for (auto & c : context) { auto ensureValid = [&](const StorePath & p) { if (!store->isValidPath(p)) - debugThrowLastTrace(InvalidPathError(store->printStorePath(p))); + error(store->printStorePath(p)).debugThrow(); }; std::visit(overloaded { [&](const NixStringContextElem::Built & b) { @@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context) if (drvs.empty()) return {}; if (!evalSettings.enableImportFromDerivation) - debugThrowLastTrace(Error( + error( "cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled", - drvs.begin()->to_string(*store))); + drvs.begin()->to_string(*store) + ).debugThrow(); /* Build/substitute the context. */ std::vector buildReqs; @@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL); if (!handle) - state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror())); + state.error("could not open '%1%': %2%", path, dlerror()).debugThrow(); dlerror(); ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str()); if(!func) { char *message = dlerror(); if (message) - state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message)); + state.error("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow(); else - state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path)); + state.error("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow(); } (func)(state, v); @@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) auto elems = args[0]->listElems(); auto count = args[0]->listSize(); if (count == 0) - state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); + state.error("at least one argument to 'exec' required").atPos(pos).debugThrow(); NixStringContext context; auto program = state.coerceToString(pos, *elems[0], context, "while evaluating the first element of the argument passed to builtins.exec", @@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v) try { auto _ = state.realiseContext(context); // FIXME: Handle CA derivations } catch (InvalidPathError & e) { - state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); + state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow(); } auto output = runProgram(program, true, commandArgs); @@ -582,7 +579,7 @@ struct 
CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer < v2->fpoint; if (v1->type() != v2->type()) - state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); // Allow selecting a subset of enum values #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" @@ -610,7 +607,7 @@ struct CompareValues } } default: - state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); + state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow(); #pragma GCC diagnostic pop } } catch (Error & e) { @@ -637,7 +634,7 @@ static Bindings::iterator getAttr( { Bindings::iterator value = attrSet->find(attrSym); if (value == attrSet->end()) { - state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); + state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow(); } return value; } @@ -758,7 +755,7 @@ static RegisterPrimOp primop_break({ auto error = Error(ErrorInfo { .level = lvlInfo, .msg = hintfmt("breakpoint reached"), - .errPos = state.positions[pos], + .pos = state.positions[pos], }); auto & dt = state.debugTraces.front(); @@ -769,7 +766,7 @@ static RegisterPrimOp primop_break({ throw Error(ErrorInfo{ .level = lvlInfo, .msg = hintfmt("quit the debugger"), - .errPos = nullptr, + .pos = nullptr, }); } } @@ -790,7 +787,7 @@ static RegisterPrimOp primop_abort({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.abort").toOwned(); - state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s)); + state.error("evaluation aborted with the following error message: '%1%'", s).debugThrow(); } }); @@ -809,7 +806,7 @@ static RegisterPrimOp primop_throw({ NixStringContext context; auto s = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtin.throw").toOwned(); - state.debugThrowLastTrace(ThrownError(s)); + state.error(s).debugThrow(); } }); @@ -1128,37 +1125,33 @@ drvName, Bindings * attrs, Value & v) experimentalFeatureSettings.require(Xp::DynamicDerivations); ingestionMethod = TextIngestionMethod {}; } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s), - .errPos = state.positions[noPos] - })); + state.error( + "invalid value '%s' for 'outputHashMode' attribute", s + ).atPos(v).debugThrow(); }; auto handleOutputs = [&](const Strings & ss) { outputs.clear(); for (auto & j : ss) { if (outputs.find(j) != outputs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("duplicate derivation output '%1%'", j), - .errPos = state.positions[noPos] - })); + state.error("duplicate derivation output '%1%'", j) + .atPos(v) + .debugThrow(); /* !!! Check whether j is a valid attribute name. */ /* Derivations cannot be named ‘drv’, because then we'd have an attribute ‘drvPath’ in the resulting set. 
*/ if (j == "drv") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid derivation output name 'drv'" ), - .errPos = state.positions[noPos] - })); + state.error("invalid derivation output name 'drv'") + .atPos(v) + .debugThrow(); outputs.insert(j); } if (outputs.empty()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation cannot have an empty set of outputs"), - .errPos = state.positions[noPos] - })); + state.error("derivation cannot have an empty set of outputs") + .atPos(v) + .debugThrow(); }; try { @@ -1281,16 +1274,14 @@ drvName, Bindings * attrs, Value & v) /* Do we have all required attributes? */ if (drv.builder == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'builder' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'builder' missing") + .atPos(v) + .debugThrow(); if (drv.platform == "") - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("required attribute 'system' missing"), - .errPos = state.positions[noPos] - })); + state.error("required attribute 'system' missing") + .atPos(v) + .debugThrow(); /* Check whether the derivation name is valid. */ if (isDerivation(drvName) && @@ -1298,10 +1289,10 @@ drvName, Bindings * attrs, Value & v) outputs.size() == 1 && *(outputs.begin()) == "out")) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension), - .errPos = state.positions[noPos] - })); + state.error( + "derivation names are allowed to end in '%s' only if they produce a single derivation file", + drvExtension + ).atPos(v).debugThrow(); } if (outputHash) { @@ -1310,10 +1301,9 @@ drvName, Bindings * attrs, Value & v) Ignore `__contentAddressed` because fixed output derivations are already content addressed. 
*/ if (outputs.size() != 1 || *(outputs.begin()) != "out") - state.debugThrowLastTrace(Error({ - .msg = hintfmt("multiple outputs are not supported in fixed-output derivations"), - .errPos = state.positions[noPos] - })); + state.error( + "multiple outputs are not supported in fixed-output derivations" + ).atPos(v).debugThrow(); auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo)); @@ -1332,10 +1322,8 @@ drvName, Bindings * attrs, Value & v) else if (contentAddressed || isImpure) { if (contentAddressed && isImpure) - throw EvalError({ - .msg = hintfmt("derivation cannot be both content-addressed and impure"), - .errPos = state.positions[noPos] - }); + state.error("derivation cannot be both content-addressed and impure") + .atPos(v).debugThrow(); auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256); auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive); @@ -1376,10 +1364,10 @@ drvName, Bindings * attrs, Value & v) for (auto & i : outputs) { auto h = get(hashModulo.hashes, i); if (!h) - throw AssertionError({ - .msg = hintfmt("derivation produced no hash for output '%s'", i), - .errPos = state.positions[noPos], - }); + state.error( + "derivation produced no hash for output '%s'", + i + ).atPos(v).debugThrow(); auto outPath = state.store->makeOutputPath(i, *h, drvName); drv.env[i] = state.store->printStorePath(outPath); drv.outputs.insert_or_assign( @@ -1485,10 +1473,10 @@ static RegisterPrimOp primop_toPath({ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v) { if (evalSettings.pureEval) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"), - .errPos = state.positions[pos] - })); + state.error( + "'%s' is not allowed in pure evaluation mode", + "builtins.storePath" + ).atPos(pos).debugThrow(); NixStringContext context; auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path; @@ -1498,10 +1486,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, if (!state.store->isStorePath(path.abs())) path = CanonPath(canonPath(path.abs(), true)); if (!state.store->isInStore(path.abs())) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("path '%1%' is not in the Nix store", path), - .errPos = state.positions[pos] - })); + state.error("path '%1%' is not in the Nix store", path) + .atPos(pos).debugThrow(); auto path2 = state.store->toStorePath(path.abs()).first; if (!settings.readOnlyMode) state.store->ensurePath(path2); @@ -1616,7 +1602,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V auto path = realisePath(state, pos, *args[0]); auto s = path.readFile(); if (s.find((char) 0) != std::string::npos) - state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path)); + state.error( + "the contents of the file '%1%' cannot be represented as a Nix string", + path + ).atPos(pos).debugThrow(); StorePathSet refs; if (state.store->isInStore(path.path.abs())) { try { @@ -1673,10 +1662,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V auto rewrites = state.realiseContext(context); path = rewriteStrings(path, rewrites); } catch (InvalidPathError & e) { - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path), - .errPos = state.positions[pos] - })); + 
state.error( + "cannot find '%1%', since path '%2%' is not valid", + path, + e.path + ).atPos(pos).debugThrow(); } searchPath.elements.emplace_back(SearchPath::Elem { @@ -1745,10 +1735,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile"); std::optional ha = parseHashAlgo(algo); if (!ha) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); auto path = realisePath(state, pos, *args[1]); @@ -2068,13 +2055,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val if (auto p = std::get_if(&c.raw)) refs.insert(p->path); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt( - "in 'toFile': the file named '%1%' must not contain a reference " - "to a derivation but contains (%2%)", - name, c.to_string()), - .errPos = state.positions[pos] - })); + state.error( + "files created by %1% may not reference derivations, but %2% references %3%", + "builtins.toFile", + name, + c.to_string() + ).atPos(pos).debugThrow(); } auto storePath = settings.readOnlyMode @@ -2243,7 +2229,10 @@ static void addPath( if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) { auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair); if (expectedHash && expectedStorePath != dstPath) - state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path)); + state.error( + "store path mismatch in (possibly filtered) path added from '%s'", + path + ).atPos(pos).debugThrow(); state.allowAndSetStorePathString(dstPath, v); } else state.allowAndSetStorePathString(*expectedStorePath, v); @@ -2343,16 +2332,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value else if (n == "sha256") expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - })); + state.error( + "unsupported argument '%1%' to 'addPath'", + state.symbols[attr.name] + ).atPos(attr.pos).debugThrow(); } if (!path) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"), - .errPos = state.positions[pos] - })); + state.error( + "missing required 'path' attribute in the first argument to builtins.path" + ).atPos(pos).debugThrow(); if (name.empty()) name = path->baseName(); @@ -2770,10 +2758,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg return; } if (!args[0]->isLambda()) - state.debugThrowLastTrace(TypeError({ - .msg = hintfmt("'functionArgs' requires a function"), - .errPos = state.positions[pos] - })); + state.error("'functionArgs' requires a function").atPos(pos).debugThrow(); if (!args[0]->lambda.fun->hasFormals()) { v.mkAttrs(&state.emptyBindings); @@ -2943,10 +2928,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val { state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt"); if (n < 0 || (unsigned int) n >= list.listSize()) - 
state.debugThrowLastTrace(Error({ - .msg = hintfmt("list index %1% is out of bounds", n), - .errPos = state.positions[pos] - })); + state.error( + "list index %1% is out of bounds", + n + ).atPos(pos).debugThrow(); state.forceValue(*list.listElems()[n], pos); v = *list.listElems()[n]; } @@ -2991,10 +2976,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value { state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail"); if (args[0]->listSize() == 0) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("'tail' called on an empty list"), - .errPos = state.positions[pos] - })); + state.error("'tail' called on an empty list").atPos(pos).debugThrow(); state.mkList(v, args[0]->listSize() - 1); for (unsigned int n = 0; n < v.listSize(); ++n) @@ -3251,7 +3233,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList"); if (len < 0) - state.error("cannot create list of size %1%", len).debugThrow(); + state.error("cannot create list of size %1%", len).atPos(pos).debugThrow(); // More strict than striclty (!) necessary, but acceptable // as evaluating map without accessing any values makes little sense. @@ -3568,10 +3550,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division"); if (f2 == 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("division by zero"), - .errPos = state.positions[pos] - })); + state.error("division by zero").atPos(pos).debugThrow(); if (args[0]->type() == nFloat || args[1]->type() == nFloat) { v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2); @@ -3580,10 +3559,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division"); /* Avoid division overflow as it might raise SIGFPE. 
*/ if (i1 == std::numeric_limits::min() && i2 == -1) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("overflow in integer division"), - .errPos = state.positions[pos] - })); + state.error("overflow in integer division").atPos(pos).debugThrow(); v.mkInt(i1 / i2); } @@ -3714,10 +3690,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring"); if (start < 0) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("negative start position in 'substring'"), - .errPos = state.positions[pos] - })); + state.error("negative start position in 'substring'").atPos(pos).debugThrow(); int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring"); @@ -3782,10 +3755,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString"); std::optional ha = parseHashAlgo(algo); if (!ha) - state.debugThrowLastTrace(Error({ - .msg = hintfmt("unknown hash algo '%1%'", algo), - .errPos = state.positions[pos] - })); + state.error("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow(); NixStringContext context; // discarded auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString"); @@ -3951,15 +3921,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4055,15 +4023,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v) } catch (std::regex_error & e) { if (e.code() == std::regex_constants::error_space) { // limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++ - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("memory limit exceeded by regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("memory limit exceeded by regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("invalid regular expression '%s'", re), - .errPos = state.positions[pos] - })); + state.error("invalid regular expression '%s'", re) + .atPos(pos) + .debugThrow(); } } @@ -4139,7 +4105,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings"); state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings"); if (args[0]->listSize() != args[1]->listSize()) - state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow(); + state.error( + "'from' and 'to' arguments passed to builtins.replaceStrings have different 
lengths" + ).atPos(pos).debugThrow(); std::vector from; from.reserve(args[0]->listSize()); diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index db940f277..1eec8b316 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V auto contextSize = context.size(); if (contextSize != 1) { - throw EvalError({ - .msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize), - .errPos = state.positions[pos] - }); + state.error( + "context of string '%s' must have exactly one element, but has %d", + *s, + contextSize + ).atPos(pos).debugThrow(); } NixStringContext context2 { (NixStringContextElem { std::visit(overloaded { [&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep { if (!c.path.isDerivation()) { - throw EvalError({ - .msg = hintfmt("path '%s' is not a derivation", - state.store->printStorePath(c.path)), - .errPos = state.positions[pos], - }); + state.error( + "path '%s' is not a derivation", + state.store->printStorePath(c.path) + ).atPos(pos).debugThrow(); } return NixStringContextElem::DrvDeep { .drvPath = c.path, }; }, [&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep { - throw EvalError({ - .msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output), - .errPos = state.positions[pos], - }); + state.error( + "`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", + c.output + ).atPos(pos).debugThrow(); }, [&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep { /* Reuse original item because we want this to be idempotent. 
*/ @@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar for (auto & i : *args[1]->attrs) { const auto & name = state.symbols[i.name]; if (!state.store->isStorePath(name)) - throw EvalError({ - .msg = hintfmt("context key '%s' is not a store path", name), - .errPos = state.positions[i.pos] - }); + state.error( + "context key '%s' is not a store path", + name + ).atPos(i.pos).debugThrow(); auto namePath = state.store->parseStorePath(name); if (!settings.readOnlyMode) state.store->ensurePath(namePath); @@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) { if (!isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add all-outputs context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } context.emplace(NixStringContextElem::DrvDeep { .drvPath = namePath, @@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar if (iter != i.value->attrs->end()) { state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context"); if (iter->value->listSize() && !isDerivation(name)) { - throw EvalError({ - .msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name), - .errPos = state.positions[i.pos] - }); + state.error( + "tried to add derivation output context of %s, which is not a derivation, to a string", + name + ).atPos(i.pos).debugThrow(); } for (auto elem : iter->value->listItems()) { auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context"); diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 27147a5d1..5806b3ff9 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -27,7 +27,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!toPathMaybe) throw Error({ @@ -36,7 +36,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -54,7 +54,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -80,7 +80,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos "to the 'fetchClosure' arguments.\n\n" "Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. 
This is not needed for content-addressed paths.", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -103,7 +103,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } @@ -154,14 +154,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromPath) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); bool inputAddressed = inputAddressedMaybe.value_or(false); @@ -172,14 +172,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", "inputAddressed", "toPath"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); } if (!fromStoreUrl) throw Error({ .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto parsedURL = parseURL(*fromStoreUrl); @@ -189,13 +189,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), - .errPos = state.positions[pos] + .pos = state.positions[pos] }); auto fromStore = openStore(parsedURL.to_string()); diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index 58fe6f173..bb029b5b3 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial"); else - throw EvalError({ - .msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]), - .errPos = state.positions[attr.pos] - }); + state.error("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow(); } if (url.empty()) - throw EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - }); + state.error("'url' argument required").atPos(pos).debugThrow(); } else url = state.coerceToString(pos, *args[0], context, diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index a943095bb..1997d5513 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -100,16 +100,14 @@ static void fetchTree( if (auto aType = args[0]->attrs->get(state.sType)) { if (type) - 
state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unexpected attribute 'type'"), - .errPos = state.positions[pos] - })); + state.error( + "unexpected attribute 'type'" + ).atPos(pos).debugThrow(); type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree"); } else if (!type) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'type' is missing in call to 'fetchTree'" + ).atPos(pos).debugThrow(); attrs.emplace("type", type.value()); @@ -132,8 +130,8 @@ static void fetchTree( attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump()); } else - state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", - state.symbols[attr.name], showType(*attr.value))); + state.error("fetchTree argument '%s' is %s while a string, Boolean or integer is expected", + state.symbols[attr.name], showType(*attr.value)).debugThrow(); } if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) { @@ -142,10 +140,9 @@ static void fetchTree( if (!params.allowNameArgument) if (auto nameIter = attrs.find("name"); nameIter != attrs.end()) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"), - .errPos = state.positions[pos] - })); + state.error( + "attribute 'name' isn’t supported in call to 'fetchTree'" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromAttrs(std::move(attrs)); } else { @@ -163,10 +160,9 @@ static void fetchTree( input = fetchers::Input::fromAttrs(std::move(attrs)); } else { if (!experimentalFeatureSettings.isEnabled(Xp::Flakes)) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"), - .errPos = state.positions[pos] - })); + state.error( + "passing a string argument to 'fetchTree' requires the 'flakes' experimental feature" + ).atPos(pos).debugThrow(); input = fetchers::Input::fromURL(url); } } @@ -175,10 +171,14 @@ static void fetchTree( input = lookupInRegistries(state.store, input).first; if (evalSettings.pureEval && !input.isLocked()) { + auto fetcher = "fetchTree"; if (params.isFetchGit) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos])); - else - state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos])); + fetcher = "fetchGit"; + + state.error( + "in pure evaluation mode, %s requires a locked input", + fetcher + ).atPos(pos).debugThrow(); } state.checkURI(input.toURLString()); @@ -432,17 +432,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v else if (n == "name") name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch"); else - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("unsupported argument '%s' to '%s'", n, who), - .errPos = state.positions[attr.pos] - })); + state.error("unsupported argument '%s' to '%s'", n, who) + .atPos(pos).debugThrow(); } if (!url) - state.debugThrowLastTrace(EvalError({ - .msg = hintfmt("'url' argument required"), - .errPos = state.positions[pos] - })); + state.error( + "'url' 
argument required").atPos(pos).debugThrow(); } else url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch"); @@ -455,7 +451,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v name = baseNameOf(*url); if (evalSettings.pureEval && !expectedHash) - state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who)); + state.error("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow(); // early exit if pinned and already in the store if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) { @@ -484,9 +480,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v auto hash = unpack ? state.store->queryPathInfo(storePath)->narHash : hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath)); - if (hash != *expectedHash) - state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", - *url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true))); + if (hash != *expectedHash) { + state.error( + "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s", + *url, + expectedHash->to_string(HashFormat::Nix32, true), + hash.to_string(HashFormat::Nix32, true) + ).withExitStatus(102) + .debugThrow(); + } } state.allowAndSetStorePathString(storePath, v); diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 2f4d4022e..94be7960a 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V try { visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */)); } catch (std::exception & e) { // TODO: toml::syntax_error - throw EvalError({ - .msg = hintfmt("while parsing a TOML string: %s", e.what()), - .errPos = state.positions[pos] - }); + state.error("while parsing TOML: %s", e.what()).atPos(pos).debugThrow(); } } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 74b3ebf13..b2f116390 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -80,7 +80,7 @@ json printValueAsJSON(EvalState & state, bool strict, try { out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { - e.addTrace({}, + e.addTrace(state.positions[pos], hintfmt("while evaluating list element at index %1%", i)); throw; } @@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict, case nThunk: case nFunction: - auto e = TypeError({ - .msg = hintfmt("cannot convert %1% to JSON", showType(v)), - .errPos = state.positions[v.determinePos(pos)] - }); - e.addTrace(state.positions[pos], hintfmt("message for the trace")); - state.debugThrowLastTrace(e); - throw e; + state.error( + "cannot convert %1% to JSON", + showType(v) + ) + .atPos(v.determinePos(pos)) + .debugThrow(); } return out; } @@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict, json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict, NixStringContext & context, bool copyToStore) const { - state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType())); + state.error("cannot convert %1% to JSON", showType()) + .debugThrow(); } diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh index 214d52271..e7aea4949 100644 --- a/src/libexpr/value.hh +++ b/src/libexpr/value.hh @@ 
-105,7 +105,7 @@ class ExternalValueBase * Coerce the value to a string. Defaults to uncoercable, i.e. throws an * error. */ - virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; + virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const; /** * Compare to another value of the same type. Defaults to uncomparable, diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 862ef355b..7b9b3c5b5 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -340,7 +340,7 @@ int handleExceptions(const std::string & programName, std::function fun) return 1; } catch (BaseError & e) { logError(e.info()); - return e.status; + return e.info().status; } catch (std::bad_alloc & e) { printError(error + "out of memory"); return 1; diff --git a/src/libstore/build/entry-points.cc b/src/libstore/build/entry-points.cc index 7f0a05d5d..d4bead28e 100644 --- a/src/libstore/build/entry-points.cc +++ b/src/libstore/build/entry-points.cc @@ -33,7 +33,7 @@ void Store::buildPaths(const std::vector & reqs, BuildMode buildMod } if (failed.size() == 1 && ex) { - ex->status = worker.failingExitStatus(); + ex->withExitStatus(worker.failingExitStatus()); throw std::move(*ex); } else if (!failed.empty()) { if (ex) logError(ex->info()); @@ -104,7 +104,7 @@ void Store::ensurePath(const StorePath & path) if (goal->exitCode != Goal::ecSuccess) { if (goal->ex) { - goal->ex->status = worker.failingExitStatus(); + goal->ex->withExitStatus(worker.failingExitStatus()); throw std::move(*goal->ex); } else throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path)); diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index 27ad14ed4..8db93fa39 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -119,7 +119,7 @@ struct TunnelLogger : public Logger if (GET_PROTOCOL_MINOR(clientVersion) >= 26) { to << STDERR_ERROR << *ex; } else { - to << STDERR_ERROR << ex->what() << ex->status; + to << STDERR_ERROR << ex->what() << ex->info().status; } } } diff --git a/src/libutil/error.cc b/src/libutil/error.cc index 1f0cb08c9..e4e50d73b 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * try { * e->eval(*this, env, v); * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s * e->eval(*this, env, v); * try { * if (v.type() != nAttrs) - * throwTypeError("expected a set but found %1%", v); + * error("expected a set but found %1%", v); * } catch (Error & e) { * e.addTrace(pos, errorCtx); * throw; @@ -411,7 +411,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s oss << einfo.msg << "\n"; - printPosMaybe(oss, "", einfo.errPos); + printPosMaybe(oss, "", einfo.pos); auto suggestions = einfo.suggestions.trim(); if (!suggestions.suggestions.empty()) { diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 764fac1ce..9f9302020 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -84,9 +84,14 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; hintformat msg; - std::shared_ptr errPos; + 
std::shared_ptr pos; std::list traces; + /** + * Exit status. + */ + unsigned int status = 1; + Suggestions suggestions; static std::optional programName; @@ -103,18 +108,21 @@ class BaseError : public std::exception protected: mutable ErrorInfo err; + /** + * Cached formatted contents of `err.msg`. + */ mutable std::optional what_; + /** + * Format `err.msg` and set `what_` to the resulting value. + */ const std::string & calcWhat() const; public: - unsigned int status = 1; // exit status - BaseError(const BaseError &) = default; template BaseError(unsigned int status, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...) } - , status(status) + : err { .level = lvlError, .msg = hintfmt(args...), .status = status } { } template @@ -149,6 +157,15 @@ public: const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } + void withExitStatus(unsigned int status) + { + err.status = status; + } + + void atPos(std::shared_ptr pos) { + err.pos = pos; + } + void pushTrace(Trace trace) { err.traces.push_front(trace); diff --git a/src/libutil/logging.cc b/src/libutil/logging.cc index d68ddacc0..89fbd194a 100644 --- a/src/libutil/logging.cc +++ b/src/libutil/logging.cc @@ -199,7 +199,7 @@ struct JSONLogger : Logger { json["level"] = ei.level; json["msg"] = oss.str(); json["raw_msg"] = ei.msg.str(); - to_json(json, ei.errPos); + to_json(json, ei.pos); if (loggerSettings.showTrace.get() && !ei.traces.empty()) { nlohmann::json traces = nlohmann::json::array(); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..017818ed5 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -950,8 +950,8 @@ static void opServe(Strings opFlags, Strings opArgs) store->buildPaths(toDerivedPaths(paths)); out << 0; } catch (Error & e) { - assert(e.status); - out << e.status << e.msg(); + assert(e.info().status); + out << e.info().status << e.msg(); } break; } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..2e0837c8e 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -104,7 +104,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } } else - throw TypeError("value at '%s' is not a string or an attribute set", state->positions[pos]); + state->error("value at '%s' is not a string or an attribute set", state->positions[pos]).debugThrow(); }; recurse(*v, pos, *writeTo); diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 0e34bd76a..646e4c831 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -848,10 +848,10 @@ struct CmdFlakeInitCommon : virtual Args, EvalCommand auto templateDir = templateDirAttr->getString(); if (!store->isInStore(templateDir)) - throw TypeError( + evalState->error( "'%s' was not found in the Nix store\n" "If you've set '%s' to a string, try using a path instead.", - templateDir, templateDirAttr->getAttrPathStr()); + templateDir, templateDirAttr->getAttrPathStr()).debugThrow(); std::vector changedFiles; std::vector conflictedFiles; @@ -1321,7 +1321,7 @@ struct CmdFlakeShow : FlakeCommand, MixJSON { auto aType = visitor.maybeGetAttr("type"); if (!aType || aType->getString() != "app") - throw EvalError("not an app definition"); + state->error("not an app definition").debugThrow(); if (json) { j.emplace("type", "app"); } else { diff --git a/tests/functional/fetchGit.sh b/tests/functional/fetchGit.sh index c6a482035..ea90f8ebe 100644 --- a/tests/functional/fetchGit.sh +++ b/tests/functional/fetchGit.sh @@ -67,7 +67,7 @@ path2=$(nix eval 
--raw --expr "(builtins.fetchGit { url = file://$repo; rev = \" [[ $(nix eval --raw --expr "builtins.readFile (fetchGit { url = file://$repo; rev = \"$rev2\"; } + \"/hello\")") = world ]] # But without a hash, it fails -expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "'fetchGit' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchGit "file:///foo"' | grepQuiet "fetchGit requires a locked input" # Fetch again. This should be cached. mv $repo ${repo}-tmp @@ -208,7 +208,7 @@ path6=$(nix eval --impure --raw --expr "(builtins.fetchTree { type = \"git\"; ur [[ $path3 = $path6 ]] [[ $(nix eval --impure --expr "(builtins.fetchTree { type = \"git\"; url = \"file://$TEST_ROOT/shallow\"; ref = \"dev\"; shallow = true; }).revCount or 123") == 123 ]] -expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "'fetchTree' requires a locked input" +expectStderr 1 nix eval --expr 'builtins.fetchTree { type = "git"; url = "file:///foo"; }' | grepQuiet "fetchTree requires a locked input" # Explicit ref = "HEAD" should work, and produce the same outPath as without ref path7=$(nix eval --impure --raw --expr "(builtins.fetchGit { url = \"file://$repo\"; ref = \"HEAD\"; }).outPath") diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp index c8d56ba7d..6848a35ed 100644 --- a/tests/functional/lang/eval-fail-attr-name-type.err.exp +++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp @@ -14,3 +14,8 @@ error: 8| error: expected a string but found an integer: 1 + at /pwd/lang/eval-fail-attr-name-type.nix:7:17: + 6| in + 7| attrs.puppy.${key} + | ^ + 8| diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp index 73f9df8cc..9bbb251e1 100644 --- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp +++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp @@ -5,4 +5,4 @@ error: | ^ 2| key = "value" - error: while parsing a TOML string: Dates and times are not supported + error: while parsing TOML: Dates and times are not supported diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp index 4f6003437..ad267711b 100644 --- a/tests/functional/lang/eval-fail-toJSON.err.exp +++ b/tests/functional/lang/eval-fail-toJSON.err.exp @@ -20,6 +20,11 @@ error: 3| true … while evaluating list element at index 3 + at /pwd/lang/eval-fail-toJSON.nix:2:3: + 1| builtins.toJSON { + 2| a.b = [ + | ^ + 3| true … while evaluating attribute 'c' at /pwd/lang/eval-fail-toJSON.nix:7:7: diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp index 94784c651..4326c9650 100644 --- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp +++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp @@ -7,3 +7,8 @@ error: 6| error: expected a string but found a set: { } + at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10: + 4| in + 5| attr.${key} + | ^ + 6| diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..d0d7ca79c 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -12,33 +12,33 @@ namespace nix { TEST_F(ErrorTraceTest, TraceBuilder) { ASSERT_THROW( - state.error("Not much").debugThrow(), + state.error("puppy").debugThrow(), EvalError ); 
ASSERT_THROW( - state.error("Not much").withTrace(noPos, "No more").debugThrow(), + state.error("puppy").withTrace(noPos, "doggy").debugThrow(), EvalError ); ASSERT_THROW( try { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans", ""); throw; } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("Not much"))); + PrintToString(hintfmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("No more"))); + PrintToString(hintfmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("Something"))); + PrintToString(hintfmt("beans"))); throw; } , EvalError @@ -47,12 +47,12 @@ namespace nix { TEST_F(ErrorTraceTest, NestedThrows) { try { - state.error("Not much").withTrace(noPos, "No more").debugThrow(); + state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (BaseError & e) { try { - state.error("Not much more").debugThrow(); + state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "Something", ""); + e.addTrace(state.positions[noPos], "beans2", ""); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); From 87dc4bc7d139a7eccb257e71558314a0d99e8d6a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:06 -0800 Subject: [PATCH 235/307] Attach positions to errors in `derivationStrict` --- src/libexpr/primops.cc | 33 +++++++++++++++++---------------- 1 file changed, 17 insertions(+), 16 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1eec6f961..69f89e0e0 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1085,9 +1085,10 @@ drvName, Bindings * attrs, Value & v) /* Check whether attributes should be passed as a JSON file. */ using nlohmann::json; std::optional jsonObject; + auto pos = v.determinePos(noPos); auto attr = attrs->find(state.sStructuredAttrs); if (attr != attrs->end() && - state.forceBool(*attr->value, noPos, + state.forceBool(*attr->value, pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) jsonObject = json::object(); @@ -1096,7 +1097,7 @@ drvName, Bindings * attrs, Value & v) bool ignoreNulls = false; attr = attrs->find(state.sIgnoreNulls); if (attr != attrs->end()) - ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); + ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict"); /* Build the derivation expression by processing the attributes. 
*/ Derivation drv; @@ -1160,16 +1161,16 @@ drvName, Bindings * attrs, Value & v) const std::string_view context_below(""); if (ignoreNulls) { - state.forceValue(*i->value, noPos); + state.forceValue(*i->value, pos); if (i->value->type() == nNull) continue; } - if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) { + if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) { contentAddressed = true; experimentalFeatureSettings.require(Xp::CaDerivations); } - else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) { + else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) { isImpure = true; experimentalFeatureSettings.require(Xp::ImpureDerivations); } @@ -1177,9 +1178,9 @@ drvName, Bindings * attrs, Value & v) /* The `args' attribute is special: it supplies the command-line arguments to the builder. */ else if (i->name == state.sArgs) { - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); for (auto elem : i->value->listItems()) { - auto s = state.coerceToString(noPos, *elem, context, + auto s = state.coerceToString(pos, *elem, context, "while evaluating an element of the argument list", true).toOwned(); drv.args.push_back(s); @@ -1194,29 +1195,29 @@ drvName, Bindings * attrs, Value & v) if (i->name == state.sStructuredAttrs) continue; - (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context); + (*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context); if (i->name == state.sBuilder) - drv.builder = state.forceString(*i->value, context, noPos, context_below); + drv.builder = state.forceString(*i->value, context, pos, context_below); else if (i->name == state.sSystem) - drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below); + drv.platform = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHash) - outputHash = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHash = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashAlgo) - outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below); + outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below); else if (i->name == state.sOutputHashMode) - handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below)); + handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below)); else if (i->name == state.sOutputs) { /* Require ‘outputs’ to be a list of strings. 
*/ - state.forceList(*i->value, noPos, context_below); + state.forceList(*i->value, pos, context_below); Strings ss; for (auto elem : i->value->listItems()) - ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below)); + ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below)); handleOutputs(ss); } } else { - auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned(); + auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned(); drv.env.emplace(key, s); if (i->name == state.sBuilder) drv.builder = std::move(s); else if (i->name == state.sSystem) drv.platform = std::move(s); From faaccecbc82d98288582bdc8ca96991796561371 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 1 Feb 2024 13:08:19 -0800 Subject: [PATCH 236/307] Remove `EXCEPTION_NEEDS_THROW_SPEC` We're on C++ 20 now, we don't need this --- src/libutil/error.hh | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 9f9302020..4fb822843 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -31,15 +31,6 @@ #include #include -/* Before 4.7, gcc's std::exception uses empty throw() specifiers for - * its (virtual) destructor and what() in c++11 mode, in violation of spec - */ -#ifdef __GNUC__ -#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7) -#define EXCEPTION_NEEDS_THROW_SPEC -#endif -#endif - namespace nix { @@ -147,13 +138,7 @@ public: : err(e) { } -#ifdef EXCEPTION_NEEDS_THROW_SPEC - ~BaseError() throw () { }; - const char * what() const throw () { return calcWhat().c_str(); } -#else const char * what() const noexcept override { return calcWhat().c_str(); } -#endif - const std::string & msg() const { return calcWhat(); } const ErrorInfo & info() const { calcWhat(); return err; } From 05535be03a1526061ea3a3ad25459c032e1f8f8c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:07:08 +0100 Subject: [PATCH 237/307] Fix test --- tests/functional/fetchurl.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 578f5a34c..5259dd60e 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,4 +80,6 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. 
-expectStderr 100 nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' +expected=100 +if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly +expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From e67458e5b821e0a3a6839f4637eb96ff873f64ed Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 2 Feb 2024 13:22:18 +0100 Subject: [PATCH 238/307] Better test fix --- tests/functional/fetchurl.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh index 5259dd60e..5a05cc5e1 100644 --- a/tests/functional/fetchurl.sh +++ b/tests/functional/fetchurl.sh @@ -80,6 +80,7 @@ test -x $outPath/fetchurl.sh test -L $outPath/symlink # Make sure that *not* passing a outputHash fails. +requireDaemonNewerThan "2.20" expected=100 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation' From 7d7483cafce258edf405756c0dd42a34afe231b9 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 17:38:46 -0800 Subject: [PATCH 239/307] Print positions in `--debugger`, instead of pointers --- src/libcmd/repl.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..d7af15153 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -232,7 +232,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]; if (pos) { - out << pos; + out << *pos; if (auto loc = pos->getCodeLines()) { out << "\n"; printCodeLines(out, "", *pos, *loc); From 0127d54d5e86db9039e6322d482d26e66af8bd8a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:14:22 -0800 Subject: [PATCH 240/307] Enter debugger more reliably in let expressions and calls --- src/libexpr/eval.cc | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..df40b18b8 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -846,20 +846,20 @@ void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const e.addTrace(positions[pos], hintfmt(s, s2), frame); } +template static std::unique_ptr makeDebugTraceStacker( EvalState & state, Expr & expr, Env & env, std::shared_ptr && pos, - const char * s, - const std::string & s2) + const Args & ... formatArgs) { return std::make_unique(state, DebugTrace { .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(s, s2), + .hint = hintfmt(formatArgs...), .isError = false }); } @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? 
env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } @@ -1718,6 +1731,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & void ExprCall::eval(EvalState & state, Env & env, Value & v) { + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while calling a function" + ) + : nullptr; + Value vFun; fun->eval(state, env, vFun); From 36dfac75601b246dc22a6a27ee793dd9ef0b8c0e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:31:18 -0800 Subject: [PATCH 241/307] Expose locals from `let` expressions to the debugger --- src/libexpr/eval.cc | 13 +++++++++++++ src/libexpr/nixexpr.cc | 9 +++------ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..4241dca6a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1322,6 +1322,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v) for (auto & i : attrs->attrs) env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2); + auto dts = state.debugRepl + ? makeDebugTraceStacker( + state, + *this, + env2, + getPos() + ? std::make_shared(state.positions[getPos()]) + : nullptr, + "while evaluating a '%1%' expression", + "let" + ) + : nullptr; + body->eval(state, env2, v); } diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 6fe4ba81b..492e131d0 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -409,9 +409,6 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr & void ExprLet::bindVars(EvalState & es, const std::shared_ptr & env) { - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - auto newEnv = std::make_shared(nullptr, env.get(), attrs->attrs.size()); Displacement displ = 0; @@ -423,6 +420,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr & for (auto & i : attrs->attrs) i.second.e->bindVars(es, i.second.inherited ? env : newEnv); + if (es.debugRepl) + es.exprEnvs.insert(std::make_pair(this, newEnv)); + body->bindVars(es, newEnv); } @@ -447,9 +447,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr & break; } - if (es.debugRepl) - es.exprEnvs.insert(std::make_pair(this, env)); - attrs->bindVars(es, env); auto newEnv = std::make_shared(this, env.get()); body->bindVars(es, newEnv); From 6414cd259e7f271e0e7141866cbc79da7f589c93 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Fri, 2 Feb 2024 19:58:35 -0800 Subject: [PATCH 242/307] Reduce visual clutter in the debugger --- src/libcmd/repl.cc | 15 +++++++++++++-- src/libexpr/eval.cc | 4 +--- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7d8f9819..5b4d3f9d5 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -243,10 +243,21 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi return out; } +static bool isFirstRepl = true; + void NixRepl::mainLoop() { - std::string error = ANSI_RED "error:" ANSI_NORMAL " "; - notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n"); + if (isFirstRepl) { + std::string_view debuggerNotice = ""; + if (state->debugRepl) { + debuggerNotice = " debugger"; + } + notice("Nix %1%%2%\nType :? 
for help.", nixVersion, debuggerNotice); + } + + if (isFirstRepl) { + isFirstRepl = false; + } loadFiles(); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..dc2579dfa 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -821,12 +821,10 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & if (error) { - printError("%s\n\n", error->what()); + printError("%s\n", error->what()); if (trylevel > 0 && error->info().level != lvlInfo) printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n"); - - printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL); } auto se = getStaticEnv(expr); From ec5cc1026db61d4c43c89ffdd8a71ed62cfb842d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=B6ren=20Tempel?= Date: Sun, 4 Feb 2024 00:47:47 +0100 Subject: [PATCH 243/307] absPath: Explicitly check if path is empty before accessing it It is entirely possible for the path to be an empty string and many unit tests actually pass it as an empty string (e.g. both_roundrip or turnsEmptyPathIntoCWD). In this case, without this patch, absPath will perform a one-byte out-of-bounds access. This was discovered while enabling the nix test suite on Alpine where we compile all software with `-D_GLIBCXX_ASSERTIONS=1`, thus resulting in a test failure on Alpine. --- src/libutil/file-system.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/file-system.cc b/src/libutil/file-system.cc index cf8a6d967..9fa1f62df 100644 --- a/src/libutil/file-system.cc +++ b/src/libutil/file-system.cc @@ -25,7 +25,7 @@ Path absPath(PathView path, std::optional dir, bool resolveSymlinks) { std::string scratch; - if (path[0] != '/') { + if (path.empty() || path[0] != '/') { // In this case we need to call `canonPath` on a newly-created // string. We set `scratch` to that string first, and then set // `path` to `scratch`. This ensures the newly-created string From a7939a6c2aad1bec454996d553148d2ba351586c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 244/307] Rename `yellowtxt` -> `magentatxt` `yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. Now the name is updated. --- src/libstore/build/derivation-goal.cc | 6 +++--- src/libstore/build/local-derivation-goal.cc | 2 +- src/libutil/fmt.hh | 12 +++++------- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 00cbf4228..454c35763 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. 
*/ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - yellowtxt(worker.store.printStorePath(drvPath)), + magentatxt(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2ba8be7d6..ce8943efe 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", yellowtxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index ac72e47fb..6430c7707 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -63,19 +63,17 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// ----------------------------------------------------------------------------- // format function for hints in errors. same as fmt, except templated values -// are always in yellow. - +// are always in magenta. template -struct yellowtxt +struct magentatxt { - yellowtxt(const T &s) : value(s) {} + magentatxt(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const yellowtxt & y) +std::ostream & operator<<(std::ostream & out, const magentatxt & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } @@ -114,7 +112,7 @@ public: template hintformat & operator%(const T & value) { - fmt % yellowtxt(value); + fmt % magentatxt(value); return *this; } From a7927abdc165c0ed6c55565b333fd4fadcdf3417 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:18:42 -0800 Subject: [PATCH 245/307] Catch `Error`, not `BaseError` in `ValuePrinter` `BaseError` includes `Interrupt`. We probably don't want the value printer to tell you you pressed Ctrl-C while it was working. 
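A minimal, self-contained sketch of why the narrower catch matters (the class names below are illustrative stand-ins, not the real Nix declarations): the interrupt exception derives from `BaseError` but not from `Error`, so catching `Error` renders ordinary evaluation failures inline while letting an interrupt keep unwinding to the top-level handler.

```c++
#include <exception>
#include <iostream>

// Illustrative stand-ins for the hierarchy described above (not the actual
// Nix definitions): Interrupt derives from BaseError, but not from Error.
struct BaseError : std::exception { };
struct Error : BaseError { };      // ordinary evaluation errors
struct Interrupt : BaseError { };  // raised when the user presses Ctrl-C

void printValue(bool interrupted)
{
    try {
        if (interrupted) throw Interrupt{};
        throw Error{};
    } catch (Error &) {
        // Only evaluation errors are rendered inline by the printer.
        std::cout << "«error: ...»\n";
    }
    // An Interrupt is not caught here; it keeps unwinding.
}

int main()
{
    printValue(false);                // prints the inline error marker
    try {
        printValue(true);             // the interrupt escapes the printer...
    } catch (BaseError &) {
        std::cout << "interrupted\n"; // ...and reaches the top-level handler.
    }
}
```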
--- src/libexpr/print.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..e1cb3f0cb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -255,7 +255,7 @@ private: output << "»"; if (options.ansiColors) output << ANSI_NORMAL; - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); } } @@ -405,7 +405,7 @@ private: output << ANSI_NORMAL; } - void printError_(BaseError & e) + void printError_(Error & e) { if (options.ansiColors) output << ANSI_RED; @@ -422,7 +422,7 @@ private: if (options.force) { try { state.forceValue(v, v.determinePos(noPos)); - } catch (BaseError & e) { + } catch (Error & e) { printError_(e); return; } From c5d525cd8430f31e38128acb3b483cbf17f2f977 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:19:23 -0800 Subject: [PATCH 246/307] Print error messages but not traces MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This makes output of values that include errors much cleaner. Before: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: … while calling the 'throw' builtin at «string»:1:9: 1| { err = builtins.throw "uh oh!"; } | ^ error: uh oh!»; } ``` After: ``` nix-repl> { err = builtins.throw "uh oh!"; } { err = «error: uh oh!»; } ``` But if the whole expression throws an error, source locations and (if applicable) a stack trace are printed, like you'd expect: ``` nix-repl> builtins.throw "uh oh!" error: … while calling the 'throw' builtin at «string»:1:1: 1| builtins.throw "uh oh!" | ^ error: uh oh! ``` --- src/libexpr/print.cc | 2 +- tests/unit/libexpr/value/print.cc | 44 +++---------------------------- 2 files changed, 5 insertions(+), 41 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..f4b13019e 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -409,7 +409,7 @@ private: { if (options.ansiColors) output << ANSI_RED; - output << "«" << e.msg() << "»"; + output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»"; if (options.ansiColors) output << ANSI_NORMAL; } diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..c1de3a6a9 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -460,19 +460,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) test(vError, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -501,19 +489,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, "{ drvPath = " ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL "; type = " ANSI_MAGENTA @@ -527,19 +503,7 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) test(vAttrs, ANSI_RED - "«" - ANSI_RED - "error:" - ANSI_NORMAL - "\n … while calling the '" - ANSI_MAGENTA - "throw" - ANSI_NORMAL - "' builtin\n\n " - ANSI_RED - "error:" - ANSI_NORMAL - " uh oh!»" + "«error: uh oh!»" ANSI_NORMAL, PrintOptions { .ansiColors = true, @@ -560,7 +524,7 @@ TEST_F(ValuePrintingTests, ansiColorsAssert) state.mkThunk_(v, &expr); test(v, - ANSI_RED "«" ANSI_RED "error:" ANSI_NORMAL " assertion '" ANSI_MAGENTA 
"false" ANSI_NORMAL "' failed»" ANSI_NORMAL, + ANSI_RED "«error: assertion 'false' failed»" ANSI_NORMAL, PrintOptions { .ansiColors = true, .force = true From 9646d62b0c3b1313565124a304ddc4057700ab13 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:21:20 -0800 Subject: [PATCH 247/307] Don't print values in magenta This fixes the opening bracket of lists/attrsets being printed in magenta, unlike the closing bracket. https://github.com/NixOS/nix/pull/9753#issuecomment-1904616088 --- src/libexpr/print.cc | 7 + src/libexpr/print.hh | 10 ++ tests/unit/libexpr/error_traces.cc | 228 ++++++++++++++--------------- 3 files changed, 131 insertions(+), 114 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..277c454d7 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -511,4 +511,11 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) return output; } +template<> +hintformat & hintformat::operator%(const ValuePrinter & value) +{ + fmt % value; + return *this; +} + } diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a8300264a..a542bc7b1 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -9,6 +9,7 @@ #include +#include "fmt.hh" #include "print-options.hh" namespace nix { @@ -78,4 +79,13 @@ public: }; std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); + + +/** + * `ValuePrinter` does its own ANSI formatting, so we don't color it + * magenta. + */ +template<> +hintformat & hintformat::operator%(const ValuePrinter & value); + } diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 5fca79304..2f4c9e60d 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -105,7 +105,7 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", @@ -115,22 +115,22 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" 
ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", @@ -145,7 +145,7 @@ namespace nix { ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -154,12 +154,12 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "0" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", @@ -168,17 +168,17 @@ namespace nix { ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -243,7 +243,7 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.ceil")); } @@ -252,7 +252,7 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.floor")); } @@ -265,7 +265,7 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found 
%s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -286,7 +286,7 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -295,7 +295,7 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", @@ -309,7 +309,7 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -318,7 +318,7 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", @@ -332,7 +332,7 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -377,7 +377,7 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", "[ ]"), + hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", @@ -387,7 +387,7 @@ namespace nix { ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" @@ -412,7 +412,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrNames")); } @@ -421,7 +421,7 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the argument passed to builtins.attrValues")); } @@ -430,12 +430,12 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while 
evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", @@ -453,12 +453,12 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -471,17 +471,17 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", ANSI_MAGENTA "\"\"" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -490,12 +490,12 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", @@ -505,7 +505,7 @@ namespace nix { ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", @@ -519,12 +519,12 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); 
ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -533,22 +533,22 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", "[ ]"), + hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -565,7 +565,7 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", "[ ]"), + hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered @@ -590,12 +590,12 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", "[ ]"), + hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? 
@@ -622,7 +622,7 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", @@ -639,7 +639,7 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", @@ -652,7 +652,7 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", @@ -665,12 +665,12 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.map")); } @@ -679,17 +679,17 @@ namespace nix { TEST_F(ErrorTraceTest, filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "5" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -698,7 +698,7 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.elem")); } @@ -707,17 +707,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + 
hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -726,12 +726,12 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.length")); } @@ -740,21 +740,21 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", ANSI_CYAN "true" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("in the left operand of the AND (&&) operator")); } @@ -763,17 +763,17 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second 
argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.any")); } @@ -782,17 +782,17 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.all")); } @@ -801,12 +801,12 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered @@ -825,21 +825,21 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL)); + hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts @@ 
-857,17 +857,17 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -876,17 +876,17 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -895,22 +895,22 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the return value of the function 
passed to builtins.concatMap")); } @@ -919,12 +919,12 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the addition")); } @@ -933,12 +933,12 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the subtraction")); } @@ -947,12 +947,12 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument of the multiplication")); } @@ -961,12 +961,12 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", @@ -979,12 +979,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), 
hintfmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -993,12 +993,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1007,12 +1007,12 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "1.1" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", ANSI_CYAN "2.2" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), hintfmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1038,7 +1038,7 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), hintfmt("while evaluating the first argument passed to builtins.toString")); } @@ -1047,17 +1047,17 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", "{ }"), + hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", ANSI_MAGENTA "\"foo\"" ANSI_NORMAL), + hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", @@ -1070,7 +1070,7 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), + hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1079,7 +1079,7 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an 
integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", @@ -1088,7 +1088,7 @@ namespace nix { ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1097,12 +1097,12 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", @@ -1115,12 +1115,12 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", @@ -1133,17 +1133,17 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", "{ }"), + hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1152,7 +1152,7 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1161,12 +1161,12 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: 
%s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", "{ }"), + hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), hintfmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1175,7 +1175,7 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", ANSI_CYAN "1" ANSI_NORMAL), + hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), hintfmt("while evaluating the first argument passed to builtins.splitVersion")); } From 770d2bc779d39c041293011892e80f5fcb6b76df Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:17:22 -0800 Subject: [PATCH 248/307] Key repeated values on attribute binding pointers, not value pointers Closes #8672 --- src/libexpr/print.cc | 4 ++-- tests/functional/repl.sh | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 702e4bfe8..915e8489a 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -152,7 +152,7 @@ struct ImportantFirstAttrNameCmp } }; -typedef std::set ValuesSeen; +typedef std::set ValuesSeen; class Printer { @@ -262,7 +262,7 @@ private: void printAttrs(Value & v, size_t depth) { - if (seen && !seen->insert(&v).second) { + if (seen && !seen->insert(v.attrs).second) { printRepeated(); return; } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 1b779c1f5..5f399aa44 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -156,7 +156,7 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { ... }; y = { ... }; }' +' '{ x = «repeated»; y = { ... }; }' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' @@ -171,4 +171,4 @@ testReplResponseNoRegex ' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = { x = «repeated»; y = { a = 1; }; }; y = «repeated»; }' +' '{ x = «repeated»; y = { a = 1; }; }' From e1131b59279f7cf9f9bea93b5355608d78097f65 Mon Sep 17 00:00:00 2001 From: Rodney Lorrimar Date: Sun, 4 Feb 2024 12:02:06 +0800 Subject: [PATCH 249/307] print-dev-env: Avoid using unbound shellHook variable Some tools which consume the "nix print-dev-env" rc script (such as "nix-direnv") are sensitive to the use of unbound variables. They use "set -u". The "nix print-dev-env" rc script initially unsets "shellHook", then loads variables from the derivation, and then evaluates "shellHook". However, most derivations don't have a "shellHook" attribute. So users get the error "shellHook: unbound variable". This can be demonstrated with the command: nix print-dev-env nixpkgs#hello | bash -u This commit changes the rc script to provide an empty fallback value for the "shellHook" variable. 
Closes: #7951 #8253 --- src/nix/develop.cc | 2 +- tests/functional/nix-shell.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 1f2891378..403178a5d 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -354,7 +354,7 @@ struct Common : InstallableCommand, MixProfile for (auto & i : {"TMP", "TMPDIR", "TEMP", "TEMPDIR"}) out << fmt("export %s=\"$NIX_BUILD_TOP\"\n", i); - out << "eval \"$shellHook\"\n"; + out << "eval \"${shellHook:-}\"\n"; auto script = out.str(); diff --git a/tests/functional/nix-shell.sh b/tests/functional/nix-shell.sh index 13403fadb..04c83138e 100644 --- a/tests/functional/nix-shell.sh +++ b/tests/functional/nix-shell.sh @@ -118,10 +118,10 @@ diff $TEST_ROOT/dev-env{,2}.json # Ensure `nix print-dev-env --json` contains variable assignments. [[ $(jq -r .variables.arr1.value[2] $TEST_ROOT/dev-env.json) = '3 4' ]] -# Run tests involving `source <(nix print-dev-inv)` in subshells to avoid modifying the current +# Run tests involving `source <(nix print-dev-env)` in subshells to avoid modifying the current # environment. -set +u # FIXME: Make print-dev-env `set -u` compliant (issue #7951) +set -u # Ensure `source <(nix print-dev-env)` modifies the environment. ( From 5ccb06ee1b4c757ff4ca0aa6eac15d5656f7774c Mon Sep 17 00:00:00 2001 From: pennae Date: Sun, 4 Feb 2024 16:42:00 +0100 Subject: [PATCH 250/307] fix debugger crashing while printing envs fixes #9932 --- .gitignore | 1 + src/libexpr/eval.cc | 8 +++++--- tests/functional/debugger.sh | 13 +++++++++++++ tests/functional/local.mk | 3 ++- 4 files changed, 21 insertions(+), 4 deletions(-) create mode 100644 tests/functional/debugger.sh diff --git a/.gitignore b/.gitignore index a47b195bb..a0a0786ed 100644 --- a/.gitignore +++ b/.gitignore @@ -94,6 +94,7 @@ perl/Makefile.config /tests/functional/ca/config.nix /tests/functional/dyn-drv/config.nix /tests/functional/repl-result-out +/tests/functional/debugger-test-out /tests/functional/test-libstoreconsumer/test-libstoreconsumer # /tests/functional/lang/ diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..398eec410 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -744,7 +744,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & if (se.up && env.up) { std::cout << "static: "; printStaticEnvBindings(st, se); - printWithBindings(st, env); + if (se.isWith) + printWithBindings(st, env); std::cout << std::endl; printEnvBindings(st, *se.up, *env.up, ++lvl); } else { @@ -756,7 +757,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env & std::cout << st[i.first] << " "; std::cout << ANSI_NORMAL; std::cout << std::endl; - printWithBindings(st, env); // probably nothing there for the top level. + if (se.isWith) + printWithBindings(st, env); // probably nothing there for the top level. std::cout << std::endl; } @@ -778,7 +780,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En if (env.up && se.up) { mapStaticEnvBindings(st, *se.up, *env.up, vm); - if (!env.values[0]->isThunk()) { + if (se.isWith && !env.values[0]->isThunk()) { // add 'with' bindings. 
Bindings::iterator j = env.values[0]->attrs->begin(); while (j != env.values[0]->attrs->end()) { diff --git a/tests/functional/debugger.sh b/tests/functional/debugger.sh new file mode 100644 index 000000000..63d88cbf3 --- /dev/null +++ b/tests/functional/debugger.sh @@ -0,0 +1,13 @@ +source common.sh + +clearStore + +# regression #9932 +echo ":env" | expect 1 nix eval --debugger --expr '(_: throw "oh snap") 42' +echo ":env" | expect 1 nix eval --debugger --expr ' + let x.a = 1; in + with x; + (_: builtins.seq x.a (throw "oh snap")) x.a +' >debugger-test-out +grep -P 'with: .*a' debugger-test-out +grep -P 'static: .*x' debugger-test-out diff --git a/tests/functional/local.mk b/tests/functional/local.mk index 888c7e18a..f369c7c2c 100644 --- a/tests/functional/local.mk +++ b/tests/functional/local.mk @@ -127,7 +127,8 @@ nix_tests = \ toString-path.sh \ read-only-store.sh \ nested-sandboxing.sh \ - impure-env.sh + impure-env.sh \ + debugger.sh ifeq ($(HAVE_LIBCPUID), 1) nix_tests += compute-levels.sh From 721fddac2f1cb633823046d97f465c579540de43 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:03:13 +0100 Subject: [PATCH 251/307] use the right heading level (#9935) --- doc/manual/src/installation/upgrading.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/installation/upgrading.md b/doc/manual/src/installation/upgrading.md index 47618e2f5..38edcdbc5 100644 --- a/doc/manual/src/installation/upgrading.md +++ b/doc/manual/src/installation/upgrading.md @@ -16,7 +16,7 @@ nix (Nix) 2.18.1 > Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema! > Reverting to an older version of Nix may therefore require purging the store database before it can be used. -### Linux multi-user +## Linux multi-user ```console $ sudo su From 8b873edcca2ff9f9f11efe3cba42a291dbdd124a Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:15:20 +0100 Subject: [PATCH 252/307] fix anchor link; less weird link texts (#9936) --- doc/manual/src/language/operators.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/manual/src/language/operators.md b/doc/manual/src/language/operators.md index e9cbb5c92..6fd66864b 100644 --- a/doc/manual/src/language/operators.md +++ b/doc/manual/src/language/operators.md @@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths. > > *string* `+` *string* -Concatenate two [string]s and merge their string contexts. +Concatenate two [strings][string] and merge their string contexts. [String concatenation]: #string-concatenation @@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts. > > *path* `+` *path* -Concatenate two [path]s. +Concatenate two [paths][path]. The result is a path. [Path concatenation]: #path-concatenation @@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is Comparison is -- [arithmetic] for [number]s -- lexicographic for [string]s and [path]s -- item-wise lexicographic for [list]s: +- [arithmetic] for [numbers][number] +- lexicographic for [strings][string] and [paths][path] +- item-wise lexicographic for [lists][list]: elements at the same index in both lists are compared according to their type and skipped if they are equal. 
All comparison operators are implemented in terms of `<`, and the following equivalencies hold: @@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi | *a* `>` *b* | *b* `<` *a* | | *a* `>=` *b* | `! (` *a* `<` *b* `)` | -[Comparison]: #comparison-operators +[Comparison]: #comparison ## Equality -- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated. -- Comparison of [function]s always returns `false`. +- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated. +- Comparison of [functions][function] always returns `false`. - Numbers are type-compatible, see [arithmetic] operators. - Floating point numbers only differ up to a limited precision. From 8d4890c3f83366a0d40ed7f9c3ee21dbd6a2ef67 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Sun, 4 Feb 2024 22:45:10 +0100 Subject: [PATCH 253/307] catch multiple use of link reference (#9937) --- doc/manual/src/language/import-from-derivation.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/src/language/import-from-derivation.md b/doc/manual/src/language/import-from-derivation.md index 03b3f9d91..fb12ba51a 100644 --- a/doc/manual/src/language/import-from-derivation.md +++ b/doc/manual/src/language/import-from-derivation.md @@ -1,6 +1,8 @@ # Import From Derivation -The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object). +The value of a Nix expression can depend on the contents of a [store object]. + +[store object]: @docroot@/glossary.md#gloss-store-object Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD): From a6737b7e179fba2681393335c69c97df9bd5a2b0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 5 Feb 2024 15:13:11 +0100 Subject: [PATCH 254/307] CanonPath, SourcePath: Change operator + to / This is less confusing and makes it more similar to std::filesystem::path. --- src/libexpr/eval.cc | 4 ++-- src/libexpr/primops.cc | 2 +- src/libfetchers/filtering-input-accessor.cc | 14 +++++++------- src/libfetchers/fs-input-accessor.cc | 2 +- src/libfetchers/git-utils.cc | 2 +- src/libfetchers/git.cc | 4 ++-- src/libfetchers/mercurial.cc | 2 +- src/libfetchers/path.cc | 2 +- src/libstore/binary-cache-store.cc | 4 ++-- src/libstore/local-fs-store.cc | 2 +- src/libstore/nar-accessor.cc | 2 +- src/libutil/archive.cc | 10 +++++----- src/libutil/canon-path.cc | 4 ++-- src/libutil/canon-path.hh | 4 ++-- src/libutil/fs-sink.cc | 2 +- src/libutil/git.cc | 2 +- src/libutil/source-path.cc | 8 ++++---- src/libutil/source-path.hh | 5 +++-- src/nix-env/nix-env.cc | 4 ++-- src/nix/ls.cc | 2 +- src/nix/run.cc | 2 +- src/nix/why-depends.cc | 2 +- tests/unit/libutil/canon-path.cc | 10 +++++----- 23 files changed, 48 insertions(+), 47 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 91fd3ddf8..bebc94873 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2689,14 +2689,14 @@ SourcePath resolveExprPath(SourcePath path) // Basic cycle/depth limit to avoid infinite loops. 
if (++followCount >= maxFollow) throw Error("too many symbolic links encountered while traversing the path '%s'", path); - auto p = path.parent().resolveSymlinks() + path.baseName(); + auto p = path.parent().resolveSymlinks() / path.baseName(); if (p.lstat().type != InputAccessor::tSymlink) break; path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))}; } /* If `path' refers to a directory, append `/default.nix'. */ if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory) - return path + "default.nix"; + return path / "default.nix"; return path; } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 1197b6e13..f8ded0cf8 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1816,7 +1816,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va // detailed node info quickly in this case we produce a thunk to // query the file type lazily. auto epath = state.allocValue(); - epath->mkPath(path + name); + epath->mkPath(path / name); if (!readFileType) readFileType = &state.getBuiltin("readFileType"); attr.mkApp(readFileType, epath); diff --git a/src/libfetchers/filtering-input-accessor.cc b/src/libfetchers/filtering-input-accessor.cc index 581ce3c1d..087a100af 100644 --- a/src/libfetchers/filtering-input-accessor.cc +++ b/src/libfetchers/filtering-input-accessor.cc @@ -5,26 +5,26 @@ namespace nix { std::string FilteringInputAccessor::readFile(const CanonPath & path) { checkAccess(path); - return next->readFile(prefix + path); + return next->readFile(prefix / path); } bool FilteringInputAccessor::pathExists(const CanonPath & path) { - return isAllowed(path) && next->pathExists(prefix + path); + return isAllowed(path) && next->pathExists(prefix / path); } std::optional FilteringInputAccessor::maybeLstat(const CanonPath & path) { checkAccess(path); - return next->maybeLstat(prefix + path); + return next->maybeLstat(prefix / path); } InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path) { checkAccess(path); DirEntries entries; - for (auto & entry : next->readDirectory(prefix + path)) { - if (isAllowed(path + entry.first)) + for (auto & entry : next->readDirectory(prefix / path)) { + if (isAllowed(path / entry.first)) entries.insert(std::move(entry)); } return entries; @@ -33,12 +33,12 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath std::string FilteringInputAccessor::readLink(const CanonPath & path) { checkAccess(path); - return next->readLink(prefix + path); + return next->readLink(prefix / path); } std::string FilteringInputAccessor::showPath(const CanonPath & path) { - return next->showPath(prefix + path); + return next->showPath(prefix / path); } void FilteringInputAccessor::checkAccess(const CanonPath & path) diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index c3d8d273c..46bc6b70d 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -48,7 +48,7 @@ struct FSInputAccessor : InputAccessor, PosixSourceAccessor CanonPath makeAbsPath(const CanonPath & path) { - return root + path; + return root / path; } std::optional getPhysicalPath(const CanonPath & path) override diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 382a363f0..1256a4c2c 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -295,7 +295,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this throw Error("getting 
working directory status: %s", git_error_last()->message); /* Get submodule info. */ - auto modulesFile = path + ".gitmodules"; + auto modulesFile = path / ".gitmodules"; if (pathExists(modulesFile.abs())) info.submodules = parseSubmodules(modulesFile); diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index f9a1cb1bc..26fe79596 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -319,7 +319,7 @@ struct GitInputScheme : InputScheme if (!repoInfo.isLocal) throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string()); - writeFile((CanonPath(repoInfo.url) + path).abs(), contents); + writeFile((CanonPath(repoInfo.url) / path).abs(), contents); auto result = runProgram(RunOptions { .program = "git", @@ -680,7 +680,7 @@ struct GitInputScheme : InputScheme std::map> mounts; for (auto & submodule : repoInfo.workdirInfo.submodules) { - auto submodulePath = CanonPath(repoInfo.url) + submodule.path; + auto submodulePath = CanonPath(repoInfo.url) / submodule.path; fetchers::Attrs attrs; attrs.insert_or_assign("type", "git"); attrs.insert_or_assign("url", submodulePath.abs()); diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 9982389ab..55e2eae03 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -141,7 +141,7 @@ struct MercurialInputScheme : InputScheme if (!isLocal) throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string()); - auto absPath = CanonPath(repoPath) + path; + auto absPath = CanonPath(repoPath) / path; writeFile(absPath.abs(), contents); diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index f9b973320..d3b0e475d 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -84,7 +84,7 @@ struct PathInputScheme : InputScheme std::string_view contents, std::optional commitMsg) const override { - writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents); + writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents); } CanonPath getAbsPath(const Input & input) const diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index ea1279e2e..189d1d305 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -235,14 +235,14 @@ ref BinaryCacheStore::addToStoreCommon( std::regex regex2("^[0-9a-f]{38}\\.debug$"); for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) { - auto dir = buildIdDir + s1; + auto dir = buildIdDir / s1; if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory || !std::regex_match(s1, regex1)) continue; for (auto & [s2, _type] : narAccessor->readDirectory(dir)) { - auto debugPath = dir + s2; + auto debugPath = dir / s2; if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular || !std::regex_match(s2, regex2)) diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 953f3a264..81c385ddb 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -28,7 +28,7 @@ struct LocalStoreAccessor : PosixSourceAccessor auto [storePath, rest] = store->toStorePath(path.abs()); if (requireValidPath && !store->isValidPath(storePath)) throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath)); - return CanonPath(store->getRealStoreDir()) + storePath.to_string() + CanonPath(rest); + return CanonPath(store->getRealStoreDir()) / storePath.to_string() / CanonPath(rest); } std::optional 
maybeLstat(const CanonPath & path) override diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index b13e4c52c..cecf8148f 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -277,7 +277,7 @@ json listNar(ref accessor, const CanonPath & path, bool recurse) json &res2 = obj["entries"]; for (const auto & [name, type] : accessor->readDirectory(path)) { if (recurse) { - res2[name] = listNar(accessor, path + name, true); + res2[name] = listNar(accessor, path / name, true); } else res2[name] = json::object(); } diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 6062392cd..b783b29e0 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -77,20 +77,20 @@ void SourceAccessor::dumpPath( std::string name(i.first); size_t pos = i.first.find(caseHackSuffix); if (pos != std::string::npos) { - debug("removing case hack suffix from '%s'", path + i.first); + debug("removing case hack suffix from '%s'", path / i.first); name.erase(pos); } if (!unhacked.emplace(name, i.first).second) throw Error("file name collision in between '%s' and '%s'", - (path + unhacked[name]), - (path + i.first)); + (path / unhacked[name]), + (path / i.first)); } else unhacked.emplace(i.first, i.first); for (auto & i : unhacked) - if (filter((path + i.first).abs())) { + if (filter((path / i.first).abs())) { sink << "entry" << "(" << "name" << i.first << "node"; - dump(path + i.second); + dump(path / i.second); sink << ")"; } } diff --git a/src/libutil/canon-path.cc b/src/libutil/canon-path.cc index 0a0f96a05..bf948be5d 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -63,7 +63,7 @@ void CanonPath::extend(const CanonPath & x) path += x.abs(); } -CanonPath CanonPath::operator + (const CanonPath & x) const +CanonPath CanonPath::operator / (const CanonPath & x) const { auto res = *this; res.extend(x); @@ -78,7 +78,7 @@ void CanonPath::push(std::string_view c) path += c; } -CanonPath CanonPath::operator + (std::string_view c) const +CanonPath CanonPath::operator / (std::string_view c) const { auto res = *this; res.push(c); diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index 997c8c731..fb2d9244b 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -190,14 +190,14 @@ public: /** * Concatenate two paths. */ - CanonPath operator + (const CanonPath & x) const; + CanonPath operator / (const CanonPath & x) const; /** * Add a path component to this one. It must not contain any slashes. 
*/ void push(std::string_view c); - CanonPath operator + (std::string_view c) const; + CanonPath operator / (std::string_view c) const; /** * Check whether access to this path is allowed, which is the case diff --git a/src/libutil/fs-sink.cc b/src/libutil/fs-sink.cc index b6f8db592..95b6088da 100644 --- a/src/libutil/fs-sink.cc +++ b/src/libutil/fs-sink.cc @@ -34,7 +34,7 @@ void copyRecursive( sink.createDirectory(to); for (auto & [name, _] : accessor.readDirectory(from)) { copyRecursive( - accessor, from + name, + accessor, from / name, sink, to + "/" + name); break; } diff --git a/src/libutil/git.cc b/src/libutil/git.cc index 3b8c3ebac..5733531fa 100644 --- a/src/libutil/git.cc +++ b/src/libutil/git.cc @@ -259,7 +259,7 @@ Mode dump( { Tree entries; for (auto & [name, _] : accessor.readDirectory(path)) { - auto child = path + name; + auto child = path / name; if (!filter(child.abs())) continue; auto entry = hook(child); diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index d85b0b7fe..341daf39c 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -41,11 +41,11 @@ std::optional SourcePath::getPhysicalPath() const std::string SourcePath::to_string() const { return accessor->showPath(path); } -SourcePath SourcePath::operator+(const CanonPath & x) const -{ return {accessor, path + x}; } +SourcePath SourcePath::operator / (const CanonPath & x) const +{ return {accessor, path / x}; } -SourcePath SourcePath::operator+(std::string_view c) const -{ return {accessor, path + c}; } +SourcePath SourcePath::operator / (std::string_view c) const +{ return {accessor, path / c}; } bool SourcePath::operator==(const SourcePath & x) const { diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bf5625ca5..bde07b08f 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -89,14 +89,15 @@ struct SourcePath /** * Append a `CanonPath` to this path. */ - SourcePath operator + (const CanonPath & x) const; + SourcePath operator / (const CanonPath & x) const; /** * Append a single component `c` to this path. `c` must not * contain a slash. A slash is implicitly added between this path * and `c`. */ - SourcePath operator+(std::string_view c) const; + SourcePath operator / (std::string_view c) const; + bool operator==(const SourcePath & x) const; bool operator!=(const SourcePath & x) const; bool operator<(const SourcePath & x) const; diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index d5b46c57a..dfc6e70eb 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -97,7 +97,7 @@ static bool isNixExpr(const SourcePath & path, struct InputAccessor::Stat & st) { return st.type == InputAccessor::tRegular - || (st.type == InputAccessor::tDirectory && (path + "default.nix").resolveSymlinks().pathExists()); + || (st.type == InputAccessor::tDirectory && (path / "default.nix").resolveSymlinks().pathExists()); } @@ -116,7 +116,7 @@ static void getAllExprs(EvalState & state, are implemented using profiles). 
*/ if (i == "manifest.nix") continue; - auto path2 = (path + i).resolveSymlinks(); + auto path2 = (path / i).resolveSymlinks(); InputAccessor::Stat st; try { diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 231456c9c..63f97f2d3 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -72,7 +72,7 @@ struct MixLs : virtual Args, MixJSON if (st.type == SourceAccessor::Type::tDirectory && !showDirectory) { auto names = accessor->readDirectory(curPath); for (auto & [name, type] : names) - showFile(curPath + name, relPath + "/" + name); + showFile(curPath / name, relPath + "/" + name); } else showFile(curPath, relPath); }; diff --git a/src/nix/run.cc b/src/nix/run.cc index 9bca5b9d0..e86837679 100644 --- a/src/nix/run.cc +++ b/src/nix/run.cc @@ -124,7 +124,7 @@ struct CmdShell : InstallablesCommand, MixEnvironment if (true) pathAdditions.push_back(store->printStorePath(path) + "/bin"); - auto propPath = CanonPath(store->printStorePath(path)) + "nix-support" + "propagated-user-env-packages"; + auto propPath = CanonPath(store->printStorePath(path)) / "nix-support" / "propagated-user-env-packages"; if (auto st = accessor->maybeLstat(propPath); st && st->type == SourceAccessor::tRegular) { for (auto & p : tokenizeString(accessor->readFile(propPath))) todo.push(store->parseStorePath(p)); diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index aecf65922..e299585ff 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -225,7 +225,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (st->type == SourceAccessor::Type::tDirectory) { auto names = accessor->readDirectory(p); for (auto & [name, type] : names) - visitPath(p + name); + visitPath(p / name); } else if (st->type == SourceAccessor::Type::tRegular) { diff --git a/tests/unit/libutil/canon-path.cc b/tests/unit/libutil/canon-path.cc index fc94ccc3d..bf11abe3e 100644 --- a/tests/unit/libutil/canon-path.cc +++ b/tests/unit/libutil/canon-path.cc @@ -80,29 +80,29 @@ namespace nix { { CanonPath p1("a//foo/bar//"); CanonPath p2("xyzzy/bla"); - ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla"); + ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla"); } { CanonPath p1("/"); CanonPath p2("/a/b"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p1("/a/b"); CanonPath p2("/"); - ASSERT_EQ((p1 + p2).abs(), "/a/b"); + ASSERT_EQ((p1 / p2).abs(), "/a/b"); } { CanonPath p("/foo/bar"); - ASSERT_EQ((p + "x").abs(), "/foo/bar/x"); + ASSERT_EQ((p / "x").abs(), "/foo/bar/x"); } { CanonPath p("/"); - ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar"); + ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar"); } } From 601fc7d15978827a04a1bc44e92a8a42a512f50a Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:13:26 -0800 Subject: [PATCH 255/307] Add release note --- ...debugger-more-reliably-in-let-and-calls.md | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md diff --git a/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md new file mode 100644 index 000000000..c93225816 --- /dev/null +++ b/doc/manual/rl-next/enter-debugger-more-reliably-in-let-and-calls.md @@ -0,0 +1,25 @@ +--- +synopsis: The `--debugger` will start more reliably in `let` expressions and function calls +prs: 9917 +issues: 6649 +--- + +Previously, if you attempted to evaluate this file with the debugger: + +```nix +let + a = builtins.trace 
"before inner break" ( + builtins.break "hello" + ); + b = builtins.trace "before outer break" ( + builtins.break a + ); +in + b +``` + +Nix would correctly enter the debugger at `builtins.break a`, but if you asked +it to `:continue`, it would skip over the `builtins.break "hello"` expression +entirely. + +Now, Nix will correctly enter the debugger at both breakpoints. From b63a8d7c46e7a59c3e133c94af24dfcf517fe50b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:15:29 -0800 Subject: [PATCH 256/307] Add release note --- .../rl-next/debugger-locals-for-let-expressions.md | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 doc/manual/rl-next/debugger-locals-for-let-expressions.md diff --git a/doc/manual/rl-next/debugger-locals-for-let-expressions.md b/doc/manual/rl-next/debugger-locals-for-let-expressions.md new file mode 100644 index 000000000..736208724 --- /dev/null +++ b/doc/manual/rl-next/debugger-locals-for-let-expressions.md @@ -0,0 +1,9 @@ +--- +synopsis: "`--debugger` can now access bindings from `let` expressions" +prs: 9918 +issues: 8827. +--- + +Breakpoints and errors in the bindings of a `let` expression can now access +those bindings in the debugger. Previously, only the body of `let` expressions +could access those bindings. From 155bc761f601346c8113cc760aaf26306136403c Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:16:39 -0800 Subject: [PATCH 257/307] Add release note --- doc/manual/rl-next/reduce-debugger-clutter.md | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 doc/manual/rl-next/reduce-debugger-clutter.md diff --git a/doc/manual/rl-next/reduce-debugger-clutter.md b/doc/manual/rl-next/reduce-debugger-clutter.md new file mode 100644 index 000000000..9bc902eee --- /dev/null +++ b/doc/manual/rl-next/reduce-debugger-clutter.md @@ -0,0 +1,37 @@ +--- +synopsis: "Visual clutter in `--debugger` is reduced" +prs: 9919 +--- + +Before: +``` +info: breakpoint reached + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> :continue +error: uh oh + + +Starting REPL to allow you to inspect the current state of the evaluator. + +Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help. + +nix-repl> +``` + +After: + +``` +info: breakpoint reached + +Nix 2.20.0pre20231222_dirty debugger +Type :? for help. +nix-repl> :continue +error: uh oh + +nix-repl> +``` From 657a6078121bf08525e9cd286c6f8887e983a22e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Mon, 5 Feb 2024 13:21:08 -0800 Subject: [PATCH 258/307] Add release note --- .../rl-next/better-errors-in-nix-repl.md | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 doc/manual/rl-next/better-errors-in-nix-repl.md diff --git a/doc/manual/rl-next/better-errors-in-nix-repl.md b/doc/manual/rl-next/better-errors-in-nix-repl.md new file mode 100644 index 000000000..4deaa8c70 --- /dev/null +++ b/doc/manual/rl-next/better-errors-in-nix-repl.md @@ -0,0 +1,40 @@ +--- +synopsis: Concise error printing in `nix repl` +prs: 9928 +--- + +Previously, if an element of a list or attribute set threw an error while +evaluating, `nix repl` would print the entire error (including source location +information) inline. 
This output was clumsy and difficult to parse: + +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: + … while calling the 'throw' builtin + at «string»:1:9: + 1| { err = builtins.throw "uh oh!"; } + | ^ + + error: uh oh!»; } +``` + +Now, only the error message is displayed, making the output much more readable. +``` +nix-repl> { err = builtins.throw "uh oh!"; } +{ err = «error: uh oh!»; } +``` + +However, if the whole expression being evaluated throws an error, source +locations and (if applicable) a stack trace are printed, just like you'd expect: + +``` +nix-repl> builtins.throw "uh oh!" +error: + … while calling the 'throw' builtin + at «string»:1:1: + 1| builtins.throw "uh oh!" + | ^ + + error: uh oh! +``` + From c0a15fb7d03dfb8f53bc6726c414bc88aa362592 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sun, 4 Feb 2024 00:40:30 -0800 Subject: [PATCH 259/307] Pretty-print values in the REPL Pretty-print values in the REPL by printing each item in a list or attrset on a separate line. When possible, single-item lists and attrsets are printed on one line, as long as they don't contain a nested list, attrset, or thunk. Before: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` After: ``` { attrs = { a = { b = { c = { }; }; }; }; list = [ 1 ]; list' = [ 1 2 3 ]; } ``` --- src/libcmd/repl.cc | 3 +- src/libexpr/print-options.hh | 22 ++++ src/libexpr/print.cc | 114 ++++++++++++++++-- ...al-fail-bad-string-interpolation-4.err.exp | 2 +- tests/functional/repl.sh | 69 ++++++++++- tests/unit/libexpr/value/print.cc | 8 +- 6 files changed, 195 insertions(+), 23 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index d7af15153..2c64bd7a6 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -101,7 +101,8 @@ struct NixRepl .ansiColors = true, .force = true, .derivationPaths = true, - .maxDepth = maxDepth + .maxDepth = maxDepth, + .prettyIndent = 2 }); } }; diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index e03746ece..94767df9c 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -17,24 +17,29 @@ struct PrintOptions * If true, output ANSI color sequences. */ bool ansiColors = false; + /** * If true, force values. */ bool force = false; + /** * If true and `force` is set, print derivations as * `«derivation /nix/store/...»` instead of as attribute sets. */ bool derivationPaths = false; + /** * If true, track which values have been printed and skip them on * subsequent encounters. Useful for self-referential values. */ bool trackRepeated = true; + /** * Maximum depth to evaluate to. */ size_t maxDepth = std::numeric_limits::max(); + /** * Maximum number of attributes in attribute sets to print. * @@ -42,6 +47,7 @@ struct PrintOptions * attribute set encountered. */ size_t maxAttrs = std::numeric_limits::max(); + /** * Maximum number of list items to print. * @@ -49,10 +55,26 @@ struct PrintOptions * list encountered. */ size_t maxListItems = std::numeric_limits::max(); + /** * Maximum string length to print. */ size_t maxStringLength = std::numeric_limits::max(); + + /** + * Indentation width for pretty-printing. + * + * If set to 0 (the default), values are not pretty-printed. + */ + size_t prettyIndent = 0; + + /** + * True if pretty-printing is enabled. 
+ */ + inline bool prettyPrint() + { + return prettyIndent > 0; + } }; /** diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 68d381033..1ff026b3d 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -153,6 +153,7 @@ struct ImportantFirstAttrNameCmp }; typedef std::set ValuesSeen; +typedef std::vector> AttrVec; class Printer { @@ -163,6 +164,21 @@ private: std::optional seen; size_t attrsPrinted = 0; size_t listItemsPrinted = 0; + std::string indent; + + void increaseIndent() + { + if (options.prettyPrint()) { + indent.append(options.prettyIndent, ' '); + } + } + + void decreaseIndent() + { + if (options.prettyPrint()) { + indent.resize(indent.size() - options.prettyIndent); + } + } void printRepeated() { @@ -260,6 +276,28 @@ private: } } + bool shouldPrettyPrintAttrs(AttrVec & v) + { + if (!options.prettyPrint() || v.empty()) { + return false; + } + + // Pretty-print attrsets with more than one item. + if (v.size() > 1) { + return true; + } + + auto item = v[0].second; + if (!item) { + return true; + } + + // Pretty-print single-item attrsets only if they contain nested + // structures. + auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; + } + void printAttrs(Value & v, size_t depth) { if (seen && !seen->insert(v.attrs).second) { @@ -270,9 +308,10 @@ private: if (options.force && options.derivationPaths && state.isDerivation(v)) { printDerivation(v); } else if (depth < options.maxDepth) { - output << "{ "; + increaseIndent(); + output << "{"; - std::vector> sorted; + AttrVec sorted; for (auto & i : *v.attrs) sorted.emplace_back(std::pair(state.symbols[i.name], i.value)); @@ -281,7 +320,15 @@ private: else std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp()); + auto prettyPrint = shouldPrettyPrintAttrs(sorted); + for (auto & i : sorted) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); break; @@ -290,13 +337,42 @@ private: printAttributeName(output, i.first); output << " = "; print(*i.second, depth + 1); - output << "; "; + output << ";"; attrsPrinted++; } + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } output << "}"; - } else + } else { output << "{ ... }"; + } + } + + bool shouldPrettyPrintList(std::span list) + { + if (!options.prettyPrint() || list.empty()) { + return false; + } + + // Pretty-print lists with more than one item. + if (list.size() > 1) { + return true; + } + + auto item = list[0]; + if (!item) { + return true; + } + + // Pretty-print single-item lists only if they contain nested + // structures. 
+ auto itemType = item->type(); + return itemType == nList || itemType == nAttrs || itemType == nThunk; } void printList(Value & v, size_t depth) @@ -306,11 +382,20 @@ private: return; } - output << "[ "; if (depth < options.maxDepth) { - for (auto elem : v.listItems()) { + increaseIndent(); + output << "["; + auto listItems = v.listItems(); + auto prettyPrint = shouldPrettyPrintList(listItems); + for (auto elem : listItems) { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + if (listItemsPrinted >= options.maxListItems) { - printElided(v.listSize() - listItemsPrinted, "item", "items"); + printElided(listItems.size() - listItemsPrinted, "item", "items"); break; } @@ -319,13 +404,19 @@ private: } else { printNullptr(); } - output << " "; listItemsPrinted++; } + + decreaseIndent(); + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + output << "]"; + } else { + output << "[ ... ]"; } - else - output << "... "; - output << "]"; } void printFunction(Value & v) @@ -488,6 +579,7 @@ public: { attrsPrinted = 0; listItemsPrinted = 0; + indent.clear(); if (options.trackRepeated) { seen.emplace(); diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp index 5119238d7..6f907106b 100644 --- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp +++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp @@ -6,4 +6,4 @@ error: | ^ 10| - error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided»}; «4294967294 attributes elided»}; «4294967293 attributes elided»} + error: cannot coerce a set to a string: { a = { a = { a = { a = "ha"; b = "ha"; c = "ha"; d = "ha"; e = "ha"; f = "ha"; g = "ha"; h = "ha"; j = "ha"; }; «4294967295 attributes elided» }; «4294967294 attributes elided» }; «4294967293 attributes elided» } diff --git a/tests/functional/repl.sh b/tests/functional/repl.sh index 5f399aa44..4938c2267 100644 --- a/tests/functional/repl.sh +++ b/tests/functional/repl.sh @@ -146,29 +146,86 @@ echo "$replResult" | grepQuiet -s afterChange # Normal output should print attributes in lexicographical order non-recursively testReplResponseNoRegex ' { a = { b = 2; }; l = [ 1 2 3 ]; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { ... }; l = [ ... ]; n = 1234; s = "string"; x = { ... }; }' +' \ +'{ + a = { ... }; + l = [ ... ]; + n = 1234; + s = "string"; + x = { ... }; +} +' # Same for lists, but order is preserved testReplResponseNoRegex ' [ 42 1 "thingy" ({ a = 1; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { ... } [ ... ] ]' +' \ +'[ + 42 + 1 + "thingy" + { ... } + [ ... ] +] +' # Same for let expressions testReplResponseNoRegex ' let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { ... }; }' +' \ +'{ + x = { ... }; + y = { ... 
}; +} +' # The :p command should recursively print sets, but prevent infinite recursion testReplResponseNoRegex ' :p { a = { b = 2; }; s = "string"; n = 1234; x = rec { y = { z = { inherit y; }; }; }; } -' '{ a = { b = 2; }; n = 1234; s = "string"; x = { y = { z = { y = «repeated»; }; }; }; }' +' \ +'{ + a = { b = 2; }; + n = 1234; + s = "string"; + x = { + y = { + z = { + y = «repeated»; + }; + }; + }; +} +' # Same for lists testReplResponseNoRegex ' :p [ 42 1 "thingy" (rec { a = 1; b = { inherit a; inherit b; }; }) ([ 1 2 3 ]) ] -' '[ 42 1 "thingy" { a = 1; b = { a = 1; b = «repeated»; }; } [ 1 2 3 ] ]' +' \ +'[ + 42 + 1 + "thingy" + { + a = 1; + b = { + a = 1; + b = «repeated»; + }; + } + [ + 1 + 2 + 3 + ] +] +' # Same for let expressions testReplResponseNoRegex ' :p let x = { y = { a = 1; }; inherit x; }; in x -' '{ x = «repeated»; y = { a = 1; }; }' +' \ +'{ + x = «repeated»; + y = { a = 1 }; +} +' diff --git a/tests/unit/libexpr/value/print.cc b/tests/unit/libexpr/value/print.cc index c4264a38d..db1e4f3a3 100644 --- a/tests/unit/libexpr/value/print.cc +++ b/tests/unit/libexpr/value/print.cc @@ -756,7 +756,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«1 attribute elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -769,7 +769,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vAttrs.mkAttrs(builder.finish()); test(vAttrs, - "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL "}", + "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", PrintOptions { .ansiColors = true, .maxAttrs = 1 @@ -793,7 +793,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 2; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«1 item elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 @@ -806,7 +806,7 @@ TEST_F(ValuePrintingTests, ansiColorsListElided) vList.bigList.size = 3; test(vList, - "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL "]", + "[ " ANSI_CYAN "1" ANSI_NORMAL " " ANSI_FAINT "«2 items elided»" ANSI_NORMAL " ]", PrintOptions { .ansiColors = true, .maxListItems = 1 From 2d74b56aee84051d386f124c092d143b9cc437f9 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Tue, 6 Feb 2024 23:22:34 +0100 Subject: [PATCH 260/307] fix location of `_redirects` file the Netlify `_redirects` file must be in the root directory [0] of the files to serve, and mdBook copies all the files in `src` that aren't `.md` to the output directory [1]. 
[0]: https://docs.netlify.com/routing/redirects/ [1]: https://rust-lang.github.io/mdBook/guide/creating.html#source-files --- doc/manual/{ => src}/_redirects | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename doc/manual/{ => src}/_redirects (100%) diff --git a/doc/manual/_redirects b/doc/manual/src/_redirects similarity index 100% rename from doc/manual/_redirects rename to doc/manual/src/_redirects From 474fc4078acbe062fcc31ce91c69c8f33bf00d5f Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:49:28 -0800 Subject: [PATCH 261/307] Add comments --- src/libexpr/eval-error.cc | 2 +- src/libexpr/eval-error.hh | 30 ++++++++---------------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index b9411cbf4..250c59a19 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -91,7 +91,7 @@ void EvalErrorBuilder::debugThrow() // `EvalState` is the only class that can construct an `EvalErrorBuilder`, // and it does so in dynamic storage. This is the final method called on - // any such instancve and must delete itself before throwing the underlying + // any such instance and must delete itself before throwing the underlying // error. auto error = std::move(this->error); delete this; diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index ee69dce64..711743886 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -56,6 +56,11 @@ public: } }; +/** + * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow` + * method must be the final method in any such `EvalErrorBuilder` usage, and it + * handles deleting the object. + */ template class EvalErrorBuilder final { @@ -90,29 +95,10 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs); + /** + * Delete the `EvalErrorBuilder` and throw the underlying exception. + */ [[gnu::noinline, gnu::noreturn]] void debugThrow(); }; -/** - * The size needed to allocate any `EvalErrorBuilder`. - * - * The list of classes here needs to be kept in sync with the list of `template - * class` declarations in `eval-error.cc`. - * - * This is used by `EvalState` to preallocate a buffer of sufficient size for - * any `EvalErrorBuilder` to avoid allocating while evaluating Nix code. - */ -constexpr size_t EVAL_ERROR_BUILDER_SIZE = std::max({ - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), - sizeof(EvalErrorBuilder), -}); - } From 9723f533d85133fa3c4d9421a58c7765cb61e733 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 6 Feb 2024 16:50:47 -0800 Subject: [PATCH 262/307] Add comment --- src/libexpr/eval.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index afe89cd30..3c7c5da27 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -239,6 +239,7 @@ public: template [[nodiscard, gnu::noinline]] EvalErrorBuilder & error(const Args & ... args) { + // `EvalErrorBuilder::debugThrow` performs the corresponding `delete`. 
return *new EvalErrorBuilder(*this, args...); } From bc085022494fe90f733aef0832b6d7dcc34709cf Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 26 Jan 2024 15:54:33 -0500 Subject: [PATCH 263/307] Support arbitrary stores in Perl bindings Fix #9859 It's a breaking change but that's fine; we can just update Hydra to use the new bindings. --- perl/.yath.rc | 2 + perl/default.nix | 18 +++- perl/lib/Nix/Store.pm | 19 ++-- perl/lib/Nix/Store.xs | 201 +++++++++++++++++++++++++++--------------- perl/local.mk | 3 + perl/t/init.t | 13 +++ 6 files changed, 171 insertions(+), 85 deletions(-) create mode 100644 perl/.yath.rc create mode 100644 perl/t/init.t diff --git a/perl/.yath.rc b/perl/.yath.rc new file mode 100644 index 000000000..118bf80c8 --- /dev/null +++ b/perl/.yath.rc @@ -0,0 +1,2 @@ +[test] +-I=rel(lib/Nix) diff --git a/perl/default.nix b/perl/default.nix index 4687976a1..7103574c9 100644 --- a/perl/default.nix +++ b/perl/default.nix @@ -5,12 +5,12 @@ , nix, curl, bzip2, xz, boost, libsodium, darwin }: -perl.pkgs.toPerlModule (stdenv.mkDerivation { +perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: { name = "nix-perl-${nix.version}"; src = fileset.toSource { root = ../.; - fileset = fileset.unions [ + fileset = fileset.unions ([ ../.version ../m4 ../mk @@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ./configure.ac ./lib ./local.mk - ]; + ] ++ lib.optionals finalAttrs.doCheck [ + ./.yath.rc + ./t + ]); }; nativeBuildInputs = @@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security; + # `perlPackages.Test2Harness` is marked broken for Darwin + doCheck = !stdenv.isDarwin; + + nativeCheckInputs = [ + perlPackages.Test2Harness + ]; + configureFlags = [ "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}" "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}" @@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation { enableParallelBuilding = true; postUnpack = "sourceRoot=$sourceRoot/perl"; -}) +})) diff --git a/perl/lib/Nix/Store.pm b/perl/lib/Nix/Store.pm index 3e4bbee0a..16f2e17c8 100644 --- a/perl/lib/Nix/Store.pm +++ b/perl/lib/Nix/Store.pm @@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] ); our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } ); our @EXPORT = qw( - setVerbosity - isValidPath queryReferences queryPathInfo queryDeriver queryPathHash - queryPathFromHashPart - topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths + StoreWrapper + StoreWrapper::new + StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash + StoreWrapper::queryPathFromHashPart + StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths + StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath + StoreWrapper::derivationFromPath + StoreWrapper::addTempRoot + StoreWrapper::queryRawRealisation + hashPath hashFile hashString convertHash signString checkSignature - addToStore makeFixedOutputPath - derivationFromPath - addTempRoot getBinDir getStoreDir - queryRawRealisation + setVerbosity ); our $VERSION = '0.15'; diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 423c01cf7..6730197b5 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -17,47 +17,61 @@ #include #include - using namespace nix; +static bool libStoreInitialized = 
false; -static ref store() -{ - static std::shared_ptr _store; - if (!_store) { - try { - initLibStore(); - _store = openStore(); - } catch (Error & e) { - croak("%s", e.what()); - } - } - return ref(_store); -} - +struct StoreWrapper { + ref store; +}; MODULE = Nix::Store PACKAGE = Nix::Store PROTOTYPES: ENABLE +TYPEMAP: < _store; try { - RETVAL = store()->isValidPath(store()->parseStorePath(path)); + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + if (items == 1) { + _store = openStore(); + RETVAL = new StoreWrapper { + .store = ref{_store} + }; + } else { + RETVAL = new StoreWrapper { + .store = openStore(s) + }; + } } catch (Error & e) { croak("%s", e.what()); } @@ -65,52 +79,81 @@ int isValidPath(char * path) RETVAL -SV * queryReferences(char * path) +void init() + CODE: + if (!libStoreInitialized) { + initLibStore(); + libStoreInitialized = true; + } + + +void setVerbosity(int level) + CODE: + verbosity = (Verbosity) level; + + +int +StoreWrapper::isValidPath(char * path) + CODE: + try { + RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path)); + } catch (Error & e) { + croak("%s", e.what()); + } + OUTPUT: + RETVAL + + +SV * +StoreWrapper::queryReferences(char * path) PPCODE: try { - for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references) + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathHash(char * path) +SV * +StoreWrapper::queryPathHash(char * path) PPCODE: try { - auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); + auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryDeriver(char * path) +SV * +StoreWrapper::queryDeriver(char * path) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XSRETURN_UNDEF; - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * queryPathInfo(char * path, int base32) +SV * +StoreWrapper::queryPathInfo(char * path, int base32) PPCODE: try { - auto info = store()->queryPathInfo(store()->parseStorePath(path)); + auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path)); if (!info->deriver) XPUSHs(&PL_sv_undef); else - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0))); auto s = info->narHash.to_string(base32 ? 
HashFormat::Nix32 : HashFormat::Base16, true); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); mXPUSHi(info->registrationTime); mXPUSHi(info->narSize); AV * refs = newAV(); for (auto & i : info->references) - av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); XPUSHs(sv_2mortal(newRV((SV *) refs))); AV * sigs = newAV(); for (auto & i : info->sigs) @@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32) croak("%s", e.what()); } -SV * queryRawRealisation(char * outputId) +SV * +StoreWrapper::queryRawRealisation(char * outputId) PPCODE: try { - auto realisation = store()->queryRealisation(DrvOutput::parse(outputId)); + auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId)); if (realisation) XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0))); else @@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId) } -SV * queryPathFromHashPart(char * hashPart) +SV * +StoreWrapper::queryPathFromHashPart(char * hashPart) PPCODE: try { - auto path = store()->queryPathFromHashPart(hashPart); - XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0))); + auto path = THIS->store->queryPathFromHashPart(hashPart); + XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * computeFSClosure(int flipDirection, int includeOutputs, ...) +SV * +StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...) PPCODE: try { StorePathSet paths; for (int n = 2; n < items; ++n) - store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); + THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs); for (auto & i : paths) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * topoSortPaths(...) +SV * +StoreWrapper::topoSortPaths(...) PPCODE: try { StorePathSet paths; - for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); - auto sorted = store()->topoSortPaths(paths); + for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); + auto sorted = THIS->store->topoSortPaths(paths); for (auto & i : sorted) - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * followLinksToStorePath(char * path) +SV * +StoreWrapper::followLinksToStorePath(char * path) CODE: try { - RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0); + RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0); } catch (Error & e) { croak("%s", e.what()); } @@ -180,29 +228,32 @@ SV * followLinksToStorePath(char * path) RETVAL -void exportPaths(int fd, ...) +void +StoreWrapper::exportPaths(int fd, ...) 
PPCODE: try { StorePathSet paths; - for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n)))); + for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n)))); FdSink sink(fd); - store()->exportPaths(paths, sink); + THIS->store->exportPaths(paths, sink); } catch (Error & e) { croak("%s", e.what()); } -void importPaths(int fd, int dontCheckSigs) +void +StoreWrapper::importPaths(int fd, int dontCheckSigs) PPCODE: try { FdSource source(fd); - store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); + THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs); } catch (Error & e) { croak("%s", e.what()); } -SV * hashPath(char * algo, int base32, char * path) +SV * +hashPath(char * algo, int base32, char * path) PPCODE: try { PosixSourceAccessor accessor; @@ -280,64 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg) RETVAL -SV * addToStore(char * srcPath, int recursive, char * algo) +SV * +StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; PosixSourceAccessor accessor; - auto path = store()->addToStore( + auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), accessor, CanonPath::fromCwd(srcPath), method, parseHashAlgo(algo)); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) +SV * +StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) PPCODE: try { auto h = Hash::parseAny(hash, parseHashAlgo(algo)); auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - auto path = store()->makeFixedOutputPath(name, FixedOutputInfo { + auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo { .method = method, .hash = h, .references = {}, }); - XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0))); + XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { croak("%s", e.what()); } -SV * derivationFromPath(char * drvPath) +SV * +StoreWrapper::derivationFromPath(char * drvPath) PREINIT: HV *hash; CODE: try { - Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath)); + Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath)); hash = newHV(); HV * outputs = newHV(); - for (auto & i : drv.outputsAndOptPaths(*store())) { + for (auto & i : drv.outputsAndOptPaths(*THIS->store)) { hv_store( outputs, i.first.c_str(), i.first.size(), !i.second.second ? newSV(0) /* null value */ - : newSVpv(store()->printStorePath(*i.second.second).c_str(), 0), + : newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0), 0); } hv_stores(hash, "outputs", newRV((SV *) outputs)); AV * inputDrvs = newAV(); for (auto & i : drv.inputDrvs.map) - av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second + av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! 
ignores i->second hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); AV * inputSrcs = newAV(); for (auto & i : drv.inputSrcs) - av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0)); + av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0)); hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); @@ -361,10 +415,11 @@ SV * derivationFromPath(char * drvPath) RETVAL -void addTempRoot(char * storePath) +void +StoreWrapper::addTempRoot(char * storePath) PPCODE: try { - store()->addTempRoot(store()->parseStorePath(storePath)); + THIS->store->addTempRoot(THIS->store->parseStorePath(storePath)); } catch (Error & e) { croak("%s", e.what()); } diff --git a/perl/local.mk b/perl/local.mk index 0eae651d8..ed4764eb9 100644 --- a/perl/local.mk +++ b/perl/local.mk @@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1 Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config + +check: all + yath test diff --git a/perl/t/init.t b/perl/t/init.t new file mode 100644 index 000000000..80197e013 --- /dev/null +++ b/perl/t/init.t @@ -0,0 +1,13 @@ +use strict; +use warnings; +use Test2::V0; + +use Nix::Store; + +my $s = new Nix::Store("dummy://"); + +my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"); + +ok(!$res, "should not have path"); + +done_testing; From 140de3b2780c6c49030b118051e15f32d202bc49 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:00:00 +0100 Subject: [PATCH 264/307] manual: fold sidebar sections the table of contents is very long now, and folded sections allow for a better overview. --- doc/manual/book.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/book.toml b/doc/manual/book.toml index 73fb7e75e..d524dbb13 100644 --- a/doc/manual/book.toml +++ b/doc/manual/book.toml @@ -6,6 +6,8 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" +fold.enable = true +fold.level = 1 [preprocessor.anchors] renderers = ["html"] From e486b76eef135cdb1f112b9bb2ffcbf6a08f7c96 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Thu, 8 Feb 2024 09:08:58 +0100 Subject: [PATCH 265/307] move JSON section into Formats and Protocols --- doc/manual/src/SUMMARY.md.in | 8 ++++---- doc/manual/src/_redirects | 1 + doc/manual/src/{ => protocols}/json/derivation.md | 0 doc/manual/src/{ => protocols}/json/store-object-info.md | 5 +++-- src/nix/derivation-add.md | 2 +- src/nix/derivation-show.md | 2 +- 6 files changed, 10 insertions(+), 8 deletions(-) rename doc/manual/src/{ => protocols}/json/derivation.md (100%) rename doc/manual/src/{ => protocols}/json/store-object-info.md (96%) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index 695d63dfc..167f54206 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -104,10 +104,10 @@ - [Channels](command-ref/files/channels.md) - [Default Nix expression](command-ref/files/default-nix-expression.md) - [Architecture and Design](architecture/architecture.md) -- [JSON Formats](json/index.md) - - [Store Object Info](json/store-object-info.md) - - [Derivation](json/derivation.md) -- [Protocols](protocols/index.md) +- [Formats and Protocols](protocols/index.md) + - [JSON Formats](protocols/json/index.md) + - [Store Object Info](protocols/json/store-object-info.md) + - 
[Derivation](protocols/json/derivation.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) diff --git a/doc/manual/src/_redirects b/doc/manual/src/_redirects index 62c693c97..8bf0e854b 100644 --- a/doc/manual/src/_redirects +++ b/doc/manual/src/_redirects @@ -36,5 +36,6 @@ /package-management/s3-substituter /store/types/s3-binary-cache-store 301! /protocols/protocols /protocols 301! +/json/* /protocols/json/:splat 301! /release-notes/release-notes /release-notes 301! diff --git a/doc/manual/src/json/derivation.md b/doc/manual/src/protocols/json/derivation.md similarity index 100% rename from doc/manual/src/json/derivation.md rename to doc/manual/src/protocols/json/derivation.md diff --git a/doc/manual/src/json/store-object-info.md b/doc/manual/src/protocols/json/store-object-info.md similarity index 96% rename from doc/manual/src/json/store-object-info.md rename to doc/manual/src/protocols/json/store-object-info.md index db43c2fa1..ba4ab098f 100644 --- a/doc/manual/src/json/store-object-info.md +++ b/doc/manual/src/protocols/json/store-object-info.md @@ -14,11 +14,11 @@ Info about a [store object]. * `narHash`: - Hash of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Hash of the [file system object] part of the store object when serialized as a [Nix Archive]. * `narSize`: - Size of the [file system object] part of the store object when serialized as a [Nix Archive](#gloss-nar). + Size of the [file system object] part of the store object when serialized as a [Nix Archive]. * `references`: @@ -30,6 +30,7 @@ Info about a [store object]. [store path]: @docroot@/glossary.md#gloss-store-path [file system object]: @docroot@/store/file-system-object.md +[Nix Archive]: @docroot@/glossary.md#gloss-nar ## Impure fields diff --git a/src/nix/derivation-add.md b/src/nix/derivation-add.md index d9b8467df..331cbdd88 100644 --- a/src/nix/derivation-add.md +++ b/src/nix/derivation-add.md @@ -14,6 +14,6 @@ a Nix expression evaluates. `nix derivation add` takes a single derivation in the following format: -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" diff --git a/src/nix/derivation-show.md b/src/nix/derivation-show.md index 884f1adc6..2437ea08f 100644 --- a/src/nix/derivation-show.md +++ b/src/nix/derivation-show.md @@ -52,6 +52,6 @@ By default, this command only shows top-level derivations, but with [store path]: @docroot@/glossary.md#gloss-store-path -{{#include ../../json/derivation.md}} +{{#include ../../protocols/json/derivation.md}} )"" From d24c8aa49141fc384deafee50da65a05553a124b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Thu, 8 Feb 2024 09:22:30 +0100 Subject: [PATCH 266/307] Simplify a conditional in the repl initialisation --- src/libcmd/repl.cc | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 5b4d3f9d5..9826f0fac 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -255,9 +255,7 @@ void NixRepl::mainLoop() notice("Nix %1%%2%\nType :? 
for help.", nixVersion, debuggerNotice); } - if (isFirstRepl) { - isFirstRepl = false; - } + isFirstRepl = false; loadFiles(); From 4687beecef87b358a514825e3700e47962ca2194 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 6 Feb 2024 16:23:58 -0500 Subject: [PATCH 267/307] Get rid of `CanonPath::fromCwd` As discussed in the last Nix team meeting (2024-02-95), this method doesn't belong because `CanonPath` is a virtual/ideal absolute path format, not used in file systems beyond the native OS format for which a "current working directory" is defined. Progress towards #9205 --- perl/lib/Nix/Store.xs | 8 ++-- src/libcmd/common-eval-args.cc | 6 +-- src/libcmd/common-eval-args.hh | 2 +- src/libcmd/editor-for.cc | 2 +- src/libcmd/installables.cc | 5 +- src/libcmd/repl.cc | 2 +- src/libexpr/eval.cc | 6 +-- src/libexpr/eval.hh | 5 ++ src/libexpr/paths.cc | 6 ++- src/libfetchers/fs-input-accessor.cc | 64 +++++--------------------- src/libfetchers/fs-input-accessor.hh | 5 +- src/libfetchers/git-utils.cc | 22 ++++----- src/libfetchers/git-utils.hh | 2 +- src/libfetchers/git.cc | 18 ++++---- src/libutil/archive.cc | 4 +- src/libutil/canon-path.cc | 5 -- src/libutil/canon-path.hh | 2 - src/libutil/posix-source-accessor.cc | 45 ++++++++++++++---- src/libutil/posix-source-accessor.hh | 29 +++++++++++- src/libutil/source-accessor.hh | 4 +- src/libutil/source-path.cc | 2 +- src/libutil/source-path.hh | 2 +- src/nix-build/nix-build.cc | 4 +- src/nix-env/nix-env.cc | 2 +- src/nix-instantiate/nix-instantiate.cc | 4 +- src/nix-store/nix-store.cc | 16 +++---- src/nix/add-to-store.cc | 4 +- src/nix/eval.cc | 2 +- src/nix/hash.cc | 4 +- src/nix/prefetch.cc | 5 +- 30 files changed, 152 insertions(+), 135 deletions(-) diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs index 6730197b5..4a928594b 100644 --- a/perl/lib/Nix/Store.xs +++ b/perl/lib/Nix/Store.xs @@ -256,9 +256,9 @@ SV * hashPath(char * algo, int base32, char * path) PPCODE: try { - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); Hash h = hashPath( - accessor, CanonPath::fromCwd(path), + accessor, canonPath, FileIngestionMethod::Recursive, parseHashAlgo(algo)).first; auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false); XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); @@ -336,10 +336,10 @@ StoreWrapper::addToStore(char * srcPath, int recursive, char * algo) PPCODE: try { auto method = recursive ? 
FileIngestionMethod::Recursive : FileIngestionMethod::Flat; - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath); auto path = THIS->store->addToStore( std::string(baseNameOf(srcPath)), - accessor, CanonPath::fromCwd(srcPath), + accessor, canonPath, method, parseHashAlgo(algo)); XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0))); } catch (Error & e) { diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index 193972272..58f04e225 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) for (auto & i : autoArgs) { auto v = state.allocValue(); if (i.second[0] == 'E') - state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd()))); + state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath("."))); else v->mkString(((std::string_view) i.second).substr(1)); res.insert(state.symbols.create(i.first), v); @@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state) return res.finish(); } -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir) +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir) { if (EvalSettings::isPseudoUrl(s)) { auto storePath = fetchers::downloadTarball( @@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi } else - return state.rootPath(CanonPath(s, baseDir)); + return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s)); } } diff --git a/src/libcmd/common-eval-args.hh b/src/libcmd/common-eval-args.hh index 4b403d936..2eb63e15d 100644 --- a/src/libcmd/common-eval-args.hh +++ b/src/libcmd/common-eval-args.hh @@ -29,6 +29,6 @@ private: std::map autoArgs; }; -SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd()); +SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr); } diff --git a/src/libcmd/editor-for.cc b/src/libcmd/editor-for.cc index 67653d9c9..6bf36bd64 100644 --- a/src/libcmd/editor-for.cc +++ b/src/libcmd/editor-for.cc @@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line) editor.find("vim") != std::string::npos || editor.find("kak") != std::string::npos)) args.push_back(fmt("+%d", line)); - args.push_back(path->abs()); + args.push_back(path->string()); return args; } diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index 736c41a1e..16d25d3cf 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables( state->eval(e, *vFile); } else if (file) { - state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile); + auto dir = absPath(getCommandBaseDir()); + state->evalFile(lookupFileArg(*state, *file, &dir), *vFile); } else { - CanonPath dir(CanonPath::fromCwd(getCommandBaseDir())); + Path dir = absPath(getCommandBaseDir()); auto e = state->parseExprFromString(*expr, state->rootPath(dir)); state->eval(e, *vFile); } diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index 4b51fe393..137332895 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -899,7 +899,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v) Expr * NixRepl::parseString(std::string s) { - return state->parseExprFromString(std::move(s), 
state->rootPath(CanonPath::fromCwd()), staticEnv); + return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv); } diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 43f8dea07..eb1b3a5f0 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -434,14 +434,14 @@ EvalState::EvalState( , emptyBindings(0) , rootFS( evalSettings.restrictEval || evalSettings.pureEval - ? ref(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {}, + ? ref(AllowListInputAccessor::create(makeFSInputAccessor(), {}, [](const CanonPath & path) -> RestrictedPathError { auto modeInformation = evalSettings.pureEval ? "in pure evaluation mode (use '--impure' to override)" : "in restricted mode"; throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation); })) - : makeFSInputAccessor(CanonPath::root)) + : makeFSInputAccessor()) , corepkgsFS(makeMemoryInputAccessor()) , internalFS(makeMemoryInputAccessor()) , derivationInternal{corepkgsFS->addFile( @@ -2763,7 +2763,7 @@ Expr * EvalState::parseStdin() // drainFD should have left some extra space for terminators buffer.append("\0\0", 2); auto s = make_ref(std::move(buffer)); - return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv); + return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv); } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 2368187b1..b75646dbd 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -372,6 +372,11 @@ public: */ SourcePath rootPath(CanonPath path); + /** + * Variant which accepts relative paths too. + */ + SourcePath rootPath(PathView path); + /** * Allow access to a path. */ diff --git a/src/libexpr/paths.cc b/src/libexpr/paths.cc index 099607638..50d0d9895 100644 --- a/src/libexpr/paths.cc +++ b/src/libexpr/paths.cc @@ -1,5 +1,4 @@ #include "eval.hh" -#include "fs-input-accessor.hh" namespace nix { @@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path) return {rootFS, std::move(path)}; } +SourcePath EvalState::rootPath(PathView path) +{ + return {rootFS, CanonPath(absPath(path))}; +} + } diff --git a/src/libfetchers/fs-input-accessor.cc b/src/libfetchers/fs-input-accessor.cc index 46bc6b70d..ee24c621a 100644 --- a/src/libfetchers/fs-input-accessor.cc +++ b/src/libfetchers/fs-input-accessor.cc @@ -6,72 +6,30 @@ namespace nix { struct FSInputAccessor : InputAccessor, PosixSourceAccessor { - CanonPath root; - - FSInputAccessor(const CanonPath & root) - : root(root) - { - displayPrefix = root.isRoot() ? 
"" : root.abs(); - } - - void readFile( - const CanonPath & path, - Sink & sink, - std::function sizeCallback) override - { - auto absPath = makeAbsPath(path); - PosixSourceAccessor::readFile(absPath, sink, sizeCallback); - } - - bool pathExists(const CanonPath & path) override - { - return PosixSourceAccessor::pathExists(makeAbsPath(path)); - } - - std::optional maybeLstat(const CanonPath & path) override - { - return PosixSourceAccessor::maybeLstat(makeAbsPath(path)); - } - - DirEntries readDirectory(const CanonPath & path) override - { - DirEntries res; - for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path))) - res.emplace(entry); - return res; - } - - std::string readLink(const CanonPath & path) override - { - return PosixSourceAccessor::readLink(makeAbsPath(path)); - } - - CanonPath makeAbsPath(const CanonPath & path) - { - return root / path; - } - - std::optional getPhysicalPath(const CanonPath & path) override - { - return makeAbsPath(path); - } + using PosixSourceAccessor::PosixSourceAccessor; }; -ref makeFSInputAccessor(const CanonPath & root) +ref makeFSInputAccessor() { - return make_ref(root); + return make_ref(); +} + +ref makeFSInputAccessor(std::filesystem::path root) +{ + return make_ref(std::move(root)); } ref makeStorePathAccessor( ref store, const StorePath & storePath) { - return makeFSInputAccessor(CanonPath(store->toRealPath(storePath))); + // FIXME: should use `store->getFSAccessor()` + return makeFSInputAccessor(std::filesystem::path { store->toRealPath(storePath) }); } SourcePath getUnfilteredRootPath(CanonPath path) { - static auto rootFS = makeFSInputAccessor(CanonPath::root); + static auto rootFS = makeFSInputAccessor(); return {rootFS, path}; } diff --git a/src/libfetchers/fs-input-accessor.hh b/src/libfetchers/fs-input-accessor.hh index a98e83511..e60906bd8 100644 --- a/src/libfetchers/fs-input-accessor.hh +++ b/src/libfetchers/fs-input-accessor.hh @@ -8,8 +8,9 @@ namespace nix { class StorePath; class Store; -ref makeFSInputAccessor( - const CanonPath & root); +ref makeFSInputAccessor(); + +ref makeFSInputAccessor(std::filesystem::path root); ref makeStorePathAccessor( ref store, diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 1256a4c2c..cb4a84e53 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -140,15 +140,15 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type) struct GitRepoImpl : GitRepo, std::enable_shared_from_this { /** Location of the repository on disk. 
*/ - CanonPath path; + std::filesystem::path path; Repository repo; - GitRepoImpl(CanonPath _path, bool create, bool bare) + GitRepoImpl(std::filesystem::path _path, bool create, bool bare) : path(std::move(_path)) { initLibGit2(); - if (pathExists(path.abs())) { + if (pathExists(path.native())) { if (git_repository_open(Setter(repo), path.c_str())) throw Error("opening Git repository '%s': %s", path, git_error_last()->message); } else { @@ -221,10 +221,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this return toHash(*oid); } - std::vector parseSubmodules(const CanonPath & configFile) + std::vector parseSubmodules(const std::filesystem::path & configFile) { GitConfig config; - if (git_config_open_ondisk(Setter(config), configFile.abs().c_str())) + if (git_config_open_ondisk(Setter(config), configFile.c_str())) throw Error("parsing .gitmodules file: %s", git_error_last()->message); ConfigIterator it; @@ -296,7 +296,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this /* Get submodule info. */ auto modulesFile = path / ".gitmodules"; - if (pathExists(modulesFile.abs())) + if (pathExists(modulesFile)) info.submodules = parseSubmodules(modulesFile); return info; @@ -389,10 +389,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto dir = this->path; Strings gitArgs; if (shallow) { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec }; } else { - gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec }; + gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--", url, refspec }; } runProgram(RunOptions { @@ -438,7 +438,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this .args = { "-c", "gpg.ssh.allowedSignersFile=" + allowedSignersFile, - "-C", path.abs(), + "-C", path, "verify-commit", rev.gitRev() }, @@ -465,7 +465,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this } }; -ref GitRepo::openRepo(const CanonPath & path, bool create, bool bare) +ref GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare) { return make_ref(path, create, bare); } @@ -781,7 +781,7 @@ std::vector> GitRepoImpl::getSubmodules auto rawAccessor = getRawAccessor(rev); - for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) { + for (auto & submodule : parseSubmodules(pathTemp)) { auto rev = rawAccessor->getSubmoduleRev(submodule.path); result.push_back({std::move(submodule), rev}); } diff --git a/src/libfetchers/git-utils.hh b/src/libfetchers/git-utils.hh index 768554780..e55affb12 100644 --- a/src/libfetchers/git-utils.hh +++ b/src/libfetchers/git-utils.hh @@ -12,7 +12,7 @@ struct GitRepo virtual ~GitRepo() { } - static ref openRepo(const CanonPath & path, bool create = false, bool bare = false); + static ref openRepo(const std::filesystem::path & path, bool create = false, bool bare = false); virtual uint64_t getRevCount(const Hash & rev) = 0; diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 26fe79596..bef945d54 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -415,7 +415,7 @@ struct GitInputScheme : InputScheme // If this is a local directory and no ref or revision is // given, then allow the use of an unclean working tree. 
if (!input.getRef() && !input.getRev() && repoInfo.isLocal) - repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo(); + repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo(); return repoInfo; } @@ -429,7 +429,7 @@ struct GitInputScheme : InputScheme if (auto res = cache->lookup(key)) return getIntAttr(*res, "lastModified"); - auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev); + auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev); cache->upsert(key, Attrs{{"lastModified", lastModified}}); @@ -447,7 +447,7 @@ struct GitInputScheme : InputScheme Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url)); - auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev); + auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev); cache->upsert(key, Attrs{{"revCount", revCount}}); @@ -457,7 +457,7 @@ struct GitInputScheme : InputScheme std::string getDefaultRef(const RepoInfo & repoInfo) const { auto head = repoInfo.isLocal - ? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef() + ? GitRepo::openRepo(repoInfo.url)->getWorkdirRef() : readHeadCached(repoInfo.url); if (!head) { warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url); @@ -510,7 +510,7 @@ struct GitInputScheme : InputScheme if (repoInfo.isLocal) { repoDir = repoInfo.url; if (!input.getRev()) - input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev()); + input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev()); } else { Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input)); repoDir = cacheDir; @@ -519,7 +519,7 @@ struct GitInputScheme : InputScheme createDirs(dirOf(cacheDir)); PathLocks cacheDirLock({cacheDir}); - auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true); + auto repo = GitRepo::openRepo(cacheDir, true, true); Path localRefFile = ref.compare(0, 5, "refs/") == 0 @@ -588,7 +588,7 @@ struct GitInputScheme : InputScheme // cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder } - auto repo = GitRepo::openRepo(CanonPath(repoDir)); + auto repo = GitRepo::openRepo(repoDir); auto isShallow = repo->isShallow(); @@ -664,7 +664,7 @@ struct GitInputScheme : InputScheme for (auto & submodule : repoInfo.workdirInfo.submodules) repoInfo.workdirInfo.files.insert(submodule.path); - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false); + auto repo = GitRepo::openRepo(repoInfo.url, false, false); auto exportIgnore = getExportIgnoreAttr(input); @@ -703,7 +703,7 @@ struct GitInputScheme : InputScheme } if (!repoInfo.workdirInfo.isDirty) { - auto repo = GitRepo::openRepo(CanonPath(repoInfo.url)); + auto repo = GitRepo::openRepo(repoInfo.url); if (auto ref = repo->getWorkdirRef()) input.attrs.insert_or_assign("ref", *ref); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index b783b29e0..351ee094b 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -110,8 +110,8 @@ void SourceAccessor::dumpPath( time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - PosixSourceAccessor accessor; - accessor.dumpPath(CanonPath::fromCwd(path), sink, filter); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + accessor.dumpPath(canonPath, sink, filter); return accessor.mtime; } diff --git a/src/libutil/canon-path.cc 
b/src/libutil/canon-path.cc index bf948be5d..1223ba33c 100644 --- a/src/libutil/canon-path.cc +++ b/src/libutil/canon-path.cc @@ -20,11 +20,6 @@ CanonPath::CanonPath(const std::vector & elems) push(s); } -CanonPath CanonPath::fromCwd(std::string_view path) -{ - return CanonPath(unchecked_t(), absPath(path)); -} - std::optional CanonPath::parent() const { if (isRoot()) return std::nullopt; diff --git a/src/libutil/canon-path.hh b/src/libutil/canon-path.hh index fb2d9244b..2f8ff381e 100644 --- a/src/libutil/canon-path.hh +++ b/src/libutil/canon-path.hh @@ -52,8 +52,6 @@ public: */ CanonPath(const std::vector & elems); - static CanonPath fromCwd(std::string_view path = "."); - static CanonPath root; /** diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index 5f26fa67b..0300de01e 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -6,6 +6,33 @@ namespace nix { +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && root) + : root(std::move(root)) +{ + assert(root.empty() || root.is_absolute()); + displayPrefix = root; +} + +PosixSourceAccessor::PosixSourceAccessor() + : PosixSourceAccessor(std::filesystem::path {}) +{ } + +std::pair PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +{ + std::filesystem::path path2 = absPath(path.native()); + return { + PosixSourceAccessor { path2.root_path() }, + CanonPath { static_cast(path2.relative_path()) }, + }; +} + +std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path) +{ + return root.empty() + ? (std::filesystem::path { path.abs() }) + : root / path.rel(); +} + void PosixSourceAccessor::readFile( const CanonPath & path, Sink & sink, @@ -13,9 +40,11 @@ void PosixSourceAccessor::readFile( { assertNoSymlinks(path); - AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); + auto ap = makeAbsPath(path); + + AutoCloseFD fd = open(ap.c_str(), O_RDONLY | O_CLOEXEC | O_NOFOLLOW); if (!fd) - throw SysError("opening file '%1%'", path); + throw SysError("opening file '%1%'", ap.native()); struct stat st; if (fstat(fd.get(), &st) == -1) @@ -46,7 +75,7 @@ void PosixSourceAccessor::readFile( bool PosixSourceAccessor::pathExists(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::pathExists(path.abs()); + return nix::pathExists(makeAbsPath(path)); } std::optional PosixSourceAccessor::cachedLstat(const CanonPath & path) @@ -60,7 +89,7 @@ std::optional PosixSourceAccessor::cachedLstat(const CanonPath & pa } std::optional st{std::in_place}; - if (::lstat(path.c_str(), &*st)) { + if (::lstat(makeAbsPath(path).c_str(), &*st)) { if (errno == ENOENT || errno == ENOTDIR) st.reset(); else @@ -95,7 +124,7 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & { assertNoSymlinks(path); DirEntries res; - for (auto & entry : nix::readDirectory(path.abs())) { + for (auto & entry : nix::readDirectory(makeAbsPath(path))) { std::optional type; switch (entry.type) { case DT_REG: type = Type::tRegular; break; @@ -110,12 +139,12 @@ SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & std::string PosixSourceAccessor::readLink(const CanonPath & path) { if (auto parent = path.parent()) assertNoSymlinks(*parent); - return nix::readLink(path.abs()); + return nix::readLink(makeAbsPath(path)); } -std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & path) +std::optional PosixSourceAccessor::getPhysicalPath(const CanonPath & 
path) { - return path; + return makeAbsPath(path); } void PosixSourceAccessor::assertNoSymlinks(CanonPath path) diff --git a/src/libutil/posix-source-accessor.hh b/src/libutil/posix-source-accessor.hh index b2bd39805..717c8f017 100644 --- a/src/libutil/posix-source-accessor.hh +++ b/src/libutil/posix-source-accessor.hh @@ -9,6 +9,16 @@ namespace nix { */ struct PosixSourceAccessor : virtual SourceAccessor { + /** + * Optional root path to prefix all operations into the native file + * system. This allows prepending funny things like `C:\` that + * `CanonPath` intentionally doesn't support. + */ + const std::filesystem::path root; + + PosixSourceAccessor(); + PosixSourceAccessor(std::filesystem::path && root); + /** * The most recent mtime seen by lstat(). This is a hack to * support dumpPathAndGetMtime(). Should remove this eventually. @@ -28,7 +38,22 @@ struct PosixSourceAccessor : virtual SourceAccessor std::string readLink(const CanonPath & path) override; - std::optional getPhysicalPath(const CanonPath & path) override; + std::optional getPhysicalPath(const CanonPath & path) override; + + /** + * Create a `PosixSourceAccessor` and `CanonPath` corresponding to + * some native path. + * + * The `PosixSourceAccessor` is rooted as far up the tree as + * possible, (e.g. on Windows it could scoped to a drive like + * `C:\`). This allows more `..` parent accessing to work. + * + * See + * [`std::filesystem::path::root_path`](https://en.cppreference.com/w/cpp/filesystem/path/root_path) + * and + * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). + */ + static std::pair createAtRoot(const std::filesystem::path & path); private: @@ -38,6 +63,8 @@ private: void assertNoSymlinks(CanonPath path); std::optional cachedLstat(const CanonPath & path); + + std::filesystem::path makeAbsPath(const CanonPath & path); }; } diff --git a/src/libutil/source-accessor.hh b/src/libutil/source-accessor.hh index 4f4ff09c1..aff7da09c 100644 --- a/src/libutil/source-accessor.hh +++ b/src/libutil/source-accessor.hh @@ -1,5 +1,7 @@ #pragma once +#include + #include "canon-path.hh" #include "hash.hh" @@ -119,7 +121,7 @@ struct SourceAccessor * possible. This is only possible for filesystems that are * materialized in the root filesystem. */ - virtual std::optional getPhysicalPath(const CanonPath & path) + virtual std::optional getPhysicalPath(const CanonPath & path) { return std::nullopt; } bool operator == (const SourceAccessor & x) const diff --git a/src/libutil/source-path.cc b/src/libutil/source-path.cc index 341daf39c..0f154e779 100644 --- a/src/libutil/source-path.cc +++ b/src/libutil/source-path.cc @@ -35,7 +35,7 @@ void SourcePath::dumpPath( PathFilter & filter) const { return accessor->dumpPath(path, sink, filter); } -std::optional SourcePath::getPhysicalPath() const +std::optional SourcePath::getPhysicalPath() const { return accessor->getPhysicalPath(path); } std::string SourcePath::to_string() const diff --git a/src/libutil/source-path.hh b/src/libutil/source-path.hh index bde07b08f..a2f4ddd1e 100644 --- a/src/libutil/source-path.hh +++ b/src/libutil/source-path.hh @@ -82,7 +82,7 @@ struct SourcePath * Return the location of this path in the "real" filesystem, if * it has a physical location. 
*/ - std::optional getPhysicalPath() const; + std::optional getPhysicalPath() const; std::string to_string() const; diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc index 549adfbf7..a372e4b1c 100644 --- a/src/nix-build/nix-build.cc +++ b/src/nix-build/nix-build.cc @@ -299,7 +299,7 @@ static void main_nix_build(int argc, char * * argv) else for (auto i : left) { if (fromArgs) - exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath(CanonPath::fromCwd()))); + exprs.push_back(state->parseExprFromString(std::move(i), state->rootPath("."))); else { auto absolute = i; try { @@ -400,7 +400,7 @@ static void main_nix_build(int argc, char * * argv) try { auto expr = state->parseExprFromString( "(import {}).bashInteractive", - state->rootPath(CanonPath::fromCwd())); + state->rootPath(".")); Value v; state->eval(expr, v); diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc index dfc6e70eb..1f311733b 100644 --- a/src/nix-env/nix-env.cc +++ b/src/nix-env/nix-env.cc @@ -413,7 +413,7 @@ static void queryInstSources(EvalState & state, loadSourceExpr(state, *instSource.nixExprPath, vArg); for (auto & i : args) { - Expr * eFun = state.parseExprFromString(i, state.rootPath(CanonPath::fromCwd())); + Expr * eFun = state.parseExprFromString(i, state.rootPath(".")); Value vFun, vTmp; state.eval(eFun, vFun); vTmp.mkApp(&vFun, &vArg); diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc index b9e626aed..86e6f008d 100644 --- a/src/nix-instantiate/nix-instantiate.cc +++ b/src/nix-instantiate/nix-instantiate.cc @@ -168,7 +168,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { auto p = state->findFile(i); if (auto fn = p.getPhysicalPath()) - std::cout << fn->abs() << std::endl; + std::cout << fn->native() << std::endl; else throw Error("'%s' has no physical path", p); } @@ -184,7 +184,7 @@ static int main_nix_instantiate(int argc, char * * argv) for (auto & i : files) { Expr * e = fromArgs - ? state->parseExprFromString(i, state->rootPath(CanonPath::fromCwd())) + ? 
state->parseExprFromString(i, state->rootPath(".")) : state->parseExprFromFile(resolveExprPath(lookupFileArg(*state, i))); processExpr(*state, attrPaths, parseOnly, strict, autoArgs, evalOnly, outputKind, xmlOutputSourceLocation, e); diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc index 40378e123..f6a36da0d 100644 --- a/src/nix-store/nix-store.cc +++ b/src/nix-store/nix-store.cc @@ -176,12 +176,11 @@ static void opAdd(Strings opFlags, Strings opArgs) { if (!opFlags.empty()) throw UsageError("unknown flag"); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); cout << fmt("%s\n", store->printStorePath(store->addToStore( - std::string(baseNameOf(i)), - accessor, - CanonPath::fromCwd(i)))); + std::string(baseNameOf(i)), accessor, canonPath))); + } } @@ -201,14 +200,15 @@ static void opAddFixed(Strings opFlags, Strings opArgs) HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front()); opArgs.pop_front(); - PosixSourceAccessor accessor; - for (auto & i : opArgs) + for (auto & i : opArgs) { + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(i); std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow( baseNameOf(i), accessor, - CanonPath::fromCwd(i), + canonPath, method, hashAlgo).path)); + } } diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc index 7c534517d..d3e66dc21 100644 --- a/src/nix/add-to-store.cc +++ b/src/nix/add-to-store.cc @@ -60,9 +60,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand { if (!namePart) namePart = baseNameOf(path); - PosixSourceAccessor accessor; - - auto path2 = CanonPath::fromCwd(path); + auto [accessor, path2] = PosixSourceAccessor::createAtRoot(path); auto storePath = dryRun ? 
store->computeStorePath( diff --git a/src/nix/eval.cc b/src/nix/eval.cc index a89fa7412..31b2ccd3c 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -66,7 +66,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption if (apply) { auto vApply = state->allocValue(); - state->eval(state->parseExprFromString(*apply, state->rootPath(CanonPath::fromCwd())), *vApply); + state->eval(state->parseExprFromString(*apply, state->rootPath(".")), *vApply); auto vRes = state->allocValue(); state->callFunction(*vApply, *v, *vRes, noPos); v = vRes; diff --git a/src/nix/hash.cc b/src/nix/hash.cc index 4837891c6..eec1c0eae 100644 --- a/src/nix/hash.cc +++ b/src/nix/hash.cc @@ -89,8 +89,8 @@ struct CmdHashBase : Command else hashSink = std::make_unique(ha); - PosixSourceAccessor accessor; - dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode); + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path); + dumpPath(accessor, canonPath, *hashSink, mode); Hash h = hashSink->finish().first; if (truncate && h.hashSize > 20) h = compressHash(h, 20); diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc index 84b79ea28..6e3f878d9 100644 --- a/src/nix/prefetch.cc +++ b/src/nix/prefetch.cc @@ -123,10 +123,9 @@ std::tuple prefetchFile( Activity act(*logger, lvlChatty, actUnknown, fmt("adding '%s' to the store", url)); - PosixSourceAccessor accessor; + auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(tmpFile); auto info = store->addToStoreSlow( - *name, - accessor, CanonPath::fromCwd(tmpFile), + *name, accessor, canonPath, ingestionMethod, hashAlgo, {}, expectedHash); storePath = info.path; assert(info.ca); From a27651908fc1b5ef73a81e46434a408c5868fa7b Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:11:45 -0800 Subject: [PATCH 268/307] Add assertion for decreasing the indent MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Théophane Hufschmitt <7226587+thufschmitt@users.noreply.github.com> --- src/libexpr/print.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 1ff026b3d..cdc9f6dbe 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -176,6 +176,7 @@ private: void decreaseIndent() { if (options.prettyPrint()) { + assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } } From 1c5f5d4291df7bf80806e57c75d2ec67bced8616 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:20 -0800 Subject: [PATCH 269/307] `prettyPrint` -> `shouldPrettyPrint` --- src/libexpr/print-options.hh | 2 +- src/libexpr/print.cc | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/libexpr/print-options.hh b/src/libexpr/print-options.hh index 94767df9c..6c5e80c61 100644 --- a/src/libexpr/print-options.hh +++ b/src/libexpr/print-options.hh @@ -71,7 +71,7 @@ struct PrintOptions /** * True if pretty-printing is enabled. 
*/ - inline bool prettyPrint() + inline bool shouldPrettyPrint() { return prettyIndent > 0; } diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index cdc9f6dbe..a8eac8288 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -168,14 +168,14 @@ private: void increaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { indent.append(options.prettyIndent, ' '); } } void decreaseIndent() { - if (options.prettyPrint()) { + if (options.shouldPrettyPrint()) { assert(indent.size() >= options.prettyIndent); indent.resize(indent.size() - options.prettyIndent); } @@ -279,7 +279,7 @@ private: bool shouldPrettyPrintAttrs(AttrVec & v) { - if (!options.prettyPrint() || v.empty()) { + if (!options.shouldPrettyPrint() || v.empty()) { return false; } @@ -356,7 +356,7 @@ private: bool shouldPrettyPrintList(std::span list) { - if (!options.prettyPrint() || list.empty()) { + if (!options.shouldPrettyPrint() || list.empty()) { return false; } From 403c90ddf58a3f16a44dfe1f20004b6baa4e5ce2 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 10:17:33 -0800 Subject: [PATCH 270/307] Extract `printSpace` helper --- src/libexpr/print.cc | 39 +++++++++++++++++++-------------------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index a8eac8288..5605aad28 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -181,6 +181,21 @@ private: } } + /** + * Print a space (for separating items or attributes). + * + * If pretty-printing is enabled, a newline and the current `indent` is + * printed instead. + */ + void printSpace(bool prettyPrint) + { + if (prettyPrint) { + output << "\n" << indent; + } else { + output << " "; + } + } + void printRepeated() { if (options.ansiColors) @@ -324,11 +339,7 @@ private: auto prettyPrint = shouldPrettyPrintAttrs(sorted); for (auto & i : sorted) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (attrsPrinted >= options.maxAttrs) { printElided(sorted.size() - attrsPrinted, "attribute", "attributes"); @@ -343,11 +354,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "}"; } else { output << "{ ... }"; @@ -389,11 +396,7 @@ private: auto listItems = v.listItems(); auto prettyPrint = shouldPrettyPrintList(listItems); for (auto elem : listItems) { - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); if (listItemsPrinted >= options.maxListItems) { printElided(listItems.size() - listItemsPrinted, "item", "items"); @@ -409,11 +412,7 @@ private: } decreaseIndent(); - if (prettyPrint) { - output << "\n" << indent; - } else { - output << " "; - } + printSpace(prettyPrint); output << "]"; } else { output << "[ ... ]"; From 149bd63afb30c5ae58eb3cc03fc208f89547cc16 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 19:16:30 -0800 Subject: [PATCH 271/307] Cleanup `fmt.hh` When I started contributing to Nix, I found the mix of definitions and names in `fmt.hh` to be rather confusing, especially the small difference between `hintfmt` and `hintformat`. I've renamed many classes and added documentation to most definitions. - `formatHelper` is no longer exported. - `fmt`'s documentation is now with `fmt` rather than (misleadingly) above `formatHelper`. - `yellowtxt` is renamed to `Magenta`. 
`yellowtxt` wraps its value with `ANSI_WARNING`, but `ANSI_WARNING` has been equal to `ANSI_MAGENTA` for a long time. Now the name is updated. - `normaltxt` is renamed to `Uncolored`. - `hintfmt` has been merged into `hintformat` as extra constructor functions. - `hintformat` has been renamed to `hintfmt`. - The single-argument `hintformat(std::string)` constructor has been renamed to a static member `hintformat::interpolate` to avoid pitfalls with using user-generated strings as format strings. --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval.hh | 2 +- src/libexpr/value/context.hh | 2 +- src/libstore/build/derivation-goal.cc | 8 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 2 +- src/libstore/sqlite.cc | 6 +- src/libstore/sqlite.hh | 6 +- src/libutil/error.cc | 4 +- src/libutil/error.hh | 10 +- src/libutil/fmt.hh | 157 +++++++++++++------- src/libutil/logging.hh | 11 ++ tests/unit/libexpr/error_traces.cc | 1 - tests/unit/libutil/logging.cc | 2 +- 14 files changed, 135 insertions(+), 80 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 519e03242..94b672976 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintformat(errorText); + auto error = hintfmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index 3c7c5da27..f72135527 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintformat hint; + hintfmt hint; bool isError; }; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 51fd30a44..2abd1c9d4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -20,7 +20,7 @@ public: { raw = raw_; auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw); + err.msg = hintfmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 454c35763..d3bbdf1ed 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild() if (!outputLocks.lockPaths(lockFiles, "", false)) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for lock on %s", magentatxt(showPaths(lockFiles)))); + fmt("waiting for lock on %s", Magenta(showPaths(lockFiles)))); worker.waitForAWhile(shared_from_this()); return; } @@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild() the wake-up timeout expires. 
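As an illustrative aside, not part of the patches themselves: a minimal sketch of how the renamed helpers described in the commit message above might be used, assuming the declarations from `fmt.hh` as shown in this patch. The `report` function, its strings, and the include path are hypothetical; note that the following patch in the series renames `hintfmt` to `HintFmt`.

```cpp
#include "fmt.hh"   // assumed include path for the declarations in this patch

#include <iostream>
#include <string>

using namespace nix;

void report(const std::string & userInput)
{
    // The single-argument fmt() overload returns the string unchanged, so '%'
    // characters in user-controlled input are never treated as placeholders.
    std::cout << fmt(userInput) << std::endl;

    // The variadic form interpolates boost::format-style placeholders.
    std::cout << fmt("copying %d paths from '%s'", 3, userInput) << std::endl;

    // hintfmt prints interpolated arguments in magenta by default; wrapping
    // an argument in Uncolored() suppresses the coloring for that argument.
    std::string file = "flake.nix";
    hintfmt hint("bad value %1% in '%2%'", userInput, Uncolored(file));
    std::cout << hint.str() << std::endl;
}
```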
*/ if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a machine to build '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); outputLocks.unlock(); return; @@ -987,7 +987,7 @@ void DerivationGoal::buildDone() diskFull |= cleanupDecideWhetherDiskFull(); auto msg = fmt("builder for '%s' %s", - magentatxt(worker.store.printStorePath(drvPath)), + Magenta(worker.store.printStorePath(drvPath)), statusToString(status)); if (!logger->isVerbose() && !logTail.empty()) { @@ -1523,7 +1523,7 @@ void DerivationGoal::done( outputLocks.unlock(); buildResult.status = status; if (ex) - buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg)); + buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg)); if (buildResult.status == BuildResult::TimedOut) worker.timedOut = true; if (buildResult.status == BuildResult::PermanentFailure) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index ce8943efe..a2f411b8a 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -232,7 +232,7 @@ void LocalDerivationGoal::tryLocalBuild() if (!buildUser) { if (!actLock) actLock = std::make_unique(*logger, lvlWarn, actBuildWaiting, - fmt("waiting for a free build user ID for '%s'", magentatxt(worker.store.printStorePath(drvPath)))); + fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath)))); worker.waitForAWhile(shared_from_this()); return; } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index dcbec4acd..eb39be158 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -887,7 +887,7 @@ FileTransferError::FileTransferError(FileTransfer::Error error, std::optionalsize() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response)); + err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index d7432a305..ff14ec420 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,19 +10,19 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": "; err.msg = hintfmt("%s: %s%s, %s (in '%s')", - normaltxt(hf.str()), + Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), errMsg, path ? 
path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintformat && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 0c08267f7..33ebb5892 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -145,16 +145,16 @@ struct SQLiteError : Error throw_(db, hintfmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args) - : SQLiteError(path, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintformat && hf); + [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); }; diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e4e50d73b..e3b30b3a1 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintformat hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintformat & hf) +std::ostream & operator <<(std::ostream & os, const hintfmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 4fb822843..966f4d770 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintformat hint; + hintfmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintformat msg; + hintfmt msg; std::shared_ptr pos; std::list traces; @@ -126,7 +126,7 @@ public: : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } { } - BaseError(hintformat hint) + BaseError(hintfmt hint) : err { .level = lvlError, .msg = hint } { } @@ -162,7 +162,7 @@ public: addTrace(std::move(e), hintfmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintformat hint, bool frame = false); + void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -215,7 +215,7 @@ public: : SystemError(""), errNo(errNo) { auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo)); + err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 6430c7707..9c2cc1e85 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -8,37 +8,53 @@ namespace nix { - +namespace { /** - * Inherit some names from other namespaces for convenience. - */ -using boost::format; - - -/** - * A variadic template that does nothing. Useful to call a function - * for all variadic arguments but ignoring the result. - */ -struct nop { template nop(T...) 
{} }; - - -/** - * A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is - * equivalent to ‘boost::format(format) % a_0 % ... % - * ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion - * takes place). + * A helper for writing `boost::format` expressions. + * + * These are equivalent: + * + * ``` + * formatHelper(formatter, a_0, ..., a_n) + * formatter % a_0 % ... % a_n + * ``` + * + * With a single argument, `formatHelper(s)` is a no-op. */ template inline void formatHelper(F & f) -{ -} +{ } template inline void formatHelper(F & f, const T & x, const Args & ... args) { + // Interpolate one argument and then recurse. formatHelper(f % x, args...); } +} +/** + * A helper for writing a `boost::format` expression to a string. + * + * These are (roughly) equivalent: + * + * ``` + * fmt(formatString, a_0, ..., a_n) + * (boost::format(formatString) % a_0 % ... % a_n).str() + * ``` + * + * However, when called with a single argument, the string is returned + * unchanged. + * + * If you write code like this: + * + * ``` + * std::cout << boost::format(stringFromUserInput) << std::endl; + * ``` + * + * And `stringFromUserInput` contains formatting placeholders like `%s`, then + * the code will crash at runtime. `fmt` helps you avoid this pitfall. + */ inline std::string fmt(const std::string & s) { return s; @@ -63,61 +79,107 @@ inline std::string fmt(const std::string & fs, const Args & ... args) return f.str(); } -// format function for hints in errors. same as fmt, except templated values -// are always in magenta. +/** + * Values wrapped in this struct are printed in magenta. + * + * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * either wrap the argument in `Uncolored` or add a specialization of + * `hintfmt::operator%`. + */ template -struct magentatxt +struct Magenta { - magentatxt(const T &s) : value(s) {} + Magenta(const T &s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const magentatxt & y) +std::ostream & operator<<(std::ostream & out, const Magenta & y) { return out << ANSI_WARNING << y.value << ANSI_NORMAL; } +/** + * Values wrapped in this class are printed without coloring. + * + * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). + */ template -struct normaltxt +struct Uncolored { - normaltxt(const T & s) : value(s) {} + Uncolored(const T & s) : value(s) {} const T & value; }; template -std::ostream & operator<<(std::ostream & out, const normaltxt & y) +std::ostream & operator<<(std::ostream & out, const Uncolored & y) { return out << ANSI_NORMAL << y.value; } -class hintformat +/** + * A wrapper around `boost::format` which colors interpolated arguments in + * magenta by default. + */ +class hintfmt { +private: + boost::format fmt; + public: - hintformat(const std::string & format) : fmt(format) + /** + * Construct a `hintfmt` from a format string, with values to be + * interpolated later with `%`. + * + * This isn't exposed as a single-argument constructor to avoid + * accidentally constructing `hintfmt`s with user-controlled strings. See + * the note on `fmt` for more information. 
+ */ + static hintfmt interpolate(const std::string & formatString) { - fmt.exceptions(boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); + hintfmt result((boost::format(formatString))); + result.fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); + return result; } - hintformat(const hintformat & hf) + /** + * Format the given string literally, without interpolating format + * placeholders. + */ + hintfmt(const std::string & literal) + : hintfmt("%s", Uncolored(literal)) + { } + + /** + * Interpolate the given arguments into the format string. + */ + template + hintfmt(const std::string & format, const Args & ... args) + : fmt(format) + { + formatHelper(*this, args...); + } + + hintfmt(const hintfmt & hf) : fmt(hf.fmt) { } - hintformat(format && fmt) + hintfmt(boost::format && fmt) : fmt(std::move(fmt)) { } template - hintformat & operator%(const T & value) + hintfmt & operator%(const T & value) { - fmt % magentatxt(value); + fmt % Magenta(value); return *this; } template - hintformat & operator%(const normaltxt & value) + hintfmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -127,25 +189,8 @@ public: { return fmt.str(); } - -private: - format fmt; }; -std::ostream & operator<<(std::ostream & os, const hintformat & hf); - -template -inline hintformat hintfmt(const std::string & fs, const Args & ... args) -{ - hintformat f(fs); - formatHelper(f, args...); - return f; -} - -inline hintformat hintfmt(const std::string & plain_string) -{ - // we won't be receiving any args in this case, so just print the original string - return hintfmt("%s", normaltxt(plain_string)); -} +std::ostream & operator<<(std::ostream & os, const hintfmt & hf); } diff --git a/src/libutil/logging.hh b/src/libutil/logging.hh index 183f2d8e1..9e81132e3 100644 --- a/src/libutil/logging.hh +++ b/src/libutil/logging.hh @@ -120,6 +120,17 @@ public: { } }; +/** + * A variadic template that does nothing. + * + * Useful to call a function with each argument in a parameter pack. + */ +struct nop +{ + template nop(T...) 
+ { } +}; + ActivityId getCurActivity(); void setCurActivity(const ActivityId activityId); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 8e8726195..3cfa2b61b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -53,7 +53,6 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); - //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index 8950a26d4..c8c7c091f 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", normaltxt(e.info().msg.str())); + ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); From c0e7f50c1a46693d06fab8a36526a4beaa702389 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Sat, 3 Feb 2024 20:35:19 -0800 Subject: [PATCH 272/307] Rename `hintfmt` to `HintFmt` --- src/build-remote/build-remote.cc | 2 +- src/libexpr/eval-error.cc | 10 +- src/libexpr/eval-error.hh | 2 +- src/libexpr/eval.cc | 8 +- src/libexpr/eval.hh | 2 +- src/libexpr/flake/flake.cc | 4 +- src/libexpr/lexer.l | 6 +- src/libexpr/parser-state.hh | 8 +- src/libexpr/parser.y | 8 +- src/libexpr/primops.cc | 10 +- src/libexpr/primops/fetchClosure.cc | 22 +- src/libexpr/print.cc | 2 +- src/libexpr/print.hh | 2 +- src/libexpr/value-to-json.cc | 4 +- src/libexpr/value/context.hh | 4 +- src/libstore/build/local-derivation-goal.cc | 2 +- src/libstore/filetransfer.cc | 4 +- src/libstore/sqlite.cc | 10 +- src/libstore/sqlite.hh | 8 +- src/libutil/current-process.cc | 2 +- src/libutil/error.cc | 4 +- src/libutil/error.hh | 20 +- src/libutil/fmt.hh | 67 +- src/libutil/serialise.cc | 4 +- src/nix/daemon.cc | 2 +- src/nix/eval.cc | 2 +- src/nix/flake.cc | 18 +- tests/unit/libexpr/error_traces.cc | 651 ++++++++++---------- tests/unit/libutil/logging.cc | 36 +- 29 files changed, 460 insertions(+), 464 deletions(-) diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc index 94b672976..118468477 100644 --- a/src/build-remote/build-remote.cc +++ b/src/build-remote/build-remote.cc @@ -202,7 +202,7 @@ static int main_build_remote(int argc, char * * argv) else drvstr = ""; - auto error = hintfmt(errorText); + auto error = HintFmt(errorText); error % drvstr % neededSystem diff --git a/src/libexpr/eval-error.cc b/src/libexpr/eval-error.cc index 250c59a19..f4cdeec5c 100644 --- a/src/libexpr/eval-error.cc +++ b/src/libexpr/eval-error.cc @@ -28,7 +28,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintfmt(std::string(text)), .frame = false}); + Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false}); return *this; } @@ -36,7 +36,7 @@ template EvalErrorBuilder & EvalErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text) { error.err.traces.push_front( - Trace{.pos = error.state.positions[pos], .hint = hintformat(std::string(text)), .frame = true}); + Trace{.pos = error.state.positions[pos], .hint = 
HintFmt(std::string(text)), .frame = true}); return *this; } @@ -57,13 +57,13 @@ EvalErrorBuilder & EvalErrorBuilder::withFrame(const Env & env, const Expr .pos = error.state.positions[expr.getPos()], .expr = expr, .env = env, - .hint = hintformat("Fake frame for debugging purposes"), + .hint = HintFmt("Fake frame for debugging purposes"), .isError = true}); return *this; } template -EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, hintformat hint, bool frame) +EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, HintFmt hint, bool frame) { error.addTrace(error.state.positions[pos], hint, frame); return *this; @@ -75,7 +75,7 @@ EvalErrorBuilder & EvalErrorBuilder::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs) { - addTrace(error.state.positions[pos], hintfmt(std::string(formatString), formatArgs...)); + addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...)); return *this; } diff --git a/src/libexpr/eval-error.hh b/src/libexpr/eval-error.hh index 711743886..392902ad2 100644 --- a/src/libexpr/eval-error.hh +++ b/src/libexpr/eval-error.hh @@ -89,7 +89,7 @@ public: [[nodiscard, gnu::noinline]] EvalErrorBuilder & withFrame(const Env & e, const Expr & ex); - [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, hintformat hint, bool frame = false); + [[nodiscard, gnu::noinline]] EvalErrorBuilder & addTrace(PosIdx pos, HintFmt hint, bool frame = false); template [[nodiscard, gnu::noinline]] EvalErrorBuilder & diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 5bc62589c..bffbd5f1a 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -803,7 +803,7 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2) void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const { - e.addTrace(positions[pos], hintfmt(s, s2), frame); + e.addTrace(positions[pos], HintFmt(s, s2), frame); } template @@ -819,7 +819,7 @@ static std::unique_ptr makeDebugTraceStacker( .pos = std::move(pos), .expr = expr, .env = env, - .hint = hintfmt(formatArgs...), + .hint = HintFmt(formatArgs...), .isError = false }); } @@ -2792,7 +2792,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { store->toRealPath(storePath) }; } catch (FileTransferError & e) { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value) }); } } @@ -2825,7 +2825,7 @@ std::optional EvalState::resolveSearchPathPath(const SearchPath::Pa res = { path }; else { logWarning({ - .msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value) + .msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value) }); res = std::nullopt; } diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh index f72135527..756ab98e3 100644 --- a/src/libexpr/eval.hh +++ b/src/libexpr/eval.hh @@ -148,7 +148,7 @@ struct DebugTrace { std::shared_ptr pos; const Expr & expr; const Env & env; - hintfmt hint; + HintFmt hint; bool isError; }; diff --git a/src/libexpr/flake/flake.cc b/src/libexpr/flake/flake.cc index 3396b0219..451780c89 100644 --- a/src/libexpr/flake/flake.cc +++ b/src/libexpr/flake/flake.cc @@ -155,7 +155,7 @@ static FlakeInput parseFlakeInput(EvalState & state, } catch (Error & e) { e.addTrace( state.positions[attr.pos], - hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); + 
HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name])); throw; } } @@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state, try { input.ref = FlakeRef::fromAttrs(attrs); } catch (Error & e) { - e.addTrace(state.positions[pos], hintfmt("while evaluating flake input")); + e.addTrace(state.positions[pos], HintFmt("while evaluating flake input")); throw; } else { diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index af67e847d..380048c77 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -147,7 +147,7 @@ or { return OR_KW; } yylval->n = boost::lexical_cast(yytext); } catch (const boost::bad_lexical_cast &) { throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid integer '%1%'", yytext), + .msg = HintFmt("invalid integer '%1%'", yytext), .pos = state->positions[CUR_POS], }); } @@ -157,7 +157,7 @@ or { return OR_KW; } yylval->nf = strtod(yytext, 0); if (errno != 0) throw ParseError(ErrorInfo{ - .msg = hintfmt("invalid float '%1%'", yytext), + .msg = HintFmt("invalid float '%1%'", yytext), .pos = state->positions[CUR_POS], }); return FLOAT_LIT; @@ -286,7 +286,7 @@ or { return OR_KW; } {ANY} | <> { throw ParseError(ErrorInfo{ - .msg = hintfmt("path has a trailing slash"), + .msg = HintFmt("path has a trailing slash"), .pos = state->positions[CUR_POS], }); } diff --git a/src/libexpr/parser-state.hh b/src/libexpr/parser-state.hh index bdd5bbabe..87aeaeef5 100644 --- a/src/libexpr/parser-state.hh +++ b/src/libexpr/parser-state.hh @@ -64,7 +64,7 @@ struct ParserState inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", + .msg = HintFmt("attribute '%1%' already defined at %2%", showAttrPath(symbols, attrPath), positions[prevPos]), .pos = positions[pos] }); @@ -73,7 +73,7 @@ inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, co inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos) { throw ParseError({ - .msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), + .msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]), .pos = positions[pos] }); } @@ -154,13 +154,13 @@ inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Sym } if (duplicate) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]), .pos = positions[duplicate->second] }); if (arg && formals->has(arg)) throw ParseError({ - .msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]), + .msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos] }); diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 95f45c80a..a3ba13c66 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -65,7 +65,7 @@ using namespace nix; void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error) { throw ParseError({ - .msg = hintfmt(error), + .msg = HintFmt(error), .pos = state->positions[state->at(*loc)] }); } @@ -154,7 +154,7 @@ expr_function | LET binds IN_KW expr_function { if (!$2->dynamicAttrs.empty()) throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in let"), + .msg = HintFmt("dynamic attributes not allowed in let"), .pos = state->positions[CUR_POS] }); $$ = new ExprLet($2, $4); @@ 
-244,7 +244,7 @@ expr_simple static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals); if (noURLLiterals) throw ParseError({ - .msg = hintfmt("URL literals are disabled"), + .msg = HintFmt("URL literals are disabled"), .pos = state->positions[CUR_POS] }); $$ = new ExprString(std::string($1)); @@ -340,7 +340,7 @@ attrs delete str; } else throw ParseError({ - .msg = hintfmt("dynamic attributes not allowed in inherit"), + .msg = HintFmt("dynamic attributes not allowed in inherit"), .pos = state->positions[state->at(@2)] }); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5e2bbe16f..8c6aeffac 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -754,7 +754,7 @@ static RegisterPrimOp primop_break({ if (state.debugRepl && !state.debugTraces.empty()) { auto error = Error(ErrorInfo { .level = lvlInfo, - .msg = hintfmt("breakpoint reached"), + .msg = HintFmt("breakpoint reached"), .pos = state.positions[pos], }); @@ -765,7 +765,7 @@ static RegisterPrimOp primop_break({ // If the user elects to quit the repl, throw an exception. throw Error(ErrorInfo{ .level = lvlInfo, - .msg = hintfmt("quit the debugger"), + .msg = HintFmt("quit the debugger"), .pos = nullptr, }); } @@ -820,7 +820,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * * auto message = state.coerceToString(pos, *args[0], context, "while evaluating the error message passed to builtins.addErrorContext", false, false).toOwned(); - e.addTrace(nullptr, hintfmt(message), true); + e.addTrace(nullptr, HintFmt(message), true); throw; } } @@ -1071,7 +1071,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * * * often results from the composition of several functions * (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.) 
*/ - e.addTrace(nullptr, hintfmt( + e.addTrace(nullptr, HintFmt( "while evaluating derivation '%s'\n" " whose name attribute is located at %s", drvName, pos), true); @@ -1232,7 +1232,7 @@ drvName, Bindings * attrs, Value & v) } catch (Error & e) { e.addTrace(state.positions[i->pos], - hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), + HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName), true); throw; } diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 5806b3ff9..f51a6465d 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -23,7 +23,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath); if (toPathMaybe && *toPathMaybe != rewrittenPath) throw Error({ - .msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", + .msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected", state.store->printStorePath(fromPath), state.store->printStorePath(rewrittenPath), state.store->printStorePath(*toPathMaybe)), @@ -31,7 +31,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor }); if (!toPathMaybe) throw Error({ - .msg = hintfmt( + .msg = HintFmt( "rewriting '%s' to content-addressed form yielded '%s'\n" "Use this value for the 'toPath' attribute passed to 'fetchClosure'", state.store->printStorePath(fromPath), @@ -50,7 +50,7 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor // We don't perform the rewriting when outPath already exists, as an optimisation. // However, we can quickly detect a mistake if the toPath is input addressed. 
throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n" "Set 'toPath' to an empty string to make Nix report the correct content-addressed path.", state.store->printStorePath(toPath)), @@ -73,7 +73,7 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos if (!info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n" "If you do intend to fetch an input-addressed store path, add\n\n" " inputAddressed = true;\n\n" @@ -99,7 +99,7 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId if (info->isContentAddressed(*state.store)) { throw Error({ - .msg = hintfmt( + .msg = HintFmt( "The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n" "Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed", state.store->printStorePath(fromPath)), @@ -153,14 +153,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg else throw Error({ - .msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), + .msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName), .pos = state.positions[pos] }); } if (!fromPath) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"), .pos = state.positions[pos] }); @@ -169,7 +169,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (inputAddressed) { if (toPath) throw Error({ - .msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them", + .msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. 
Please remove one of them", "inputAddressed", "toPath"), .pos = state.positions[pos] @@ -178,7 +178,7 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg if (!fromStoreUrl) throw Error({ - .msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), + .msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"), .pos = state.positions[pos] }); @@ -188,13 +188,13 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg parsedURL.scheme != "https" && !(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file")) throw Error({ - .msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"), + .msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"), .pos = state.positions[pos] }); if (!parsedURL.query.empty()) throw Error({ - .msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), + .msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl), .pos = state.positions[pos] }); diff --git a/src/libexpr/print.cc b/src/libexpr/print.cc index 9f31f3340..7e90e47eb 100644 --- a/src/libexpr/print.cc +++ b/src/libexpr/print.cc @@ -512,7 +512,7 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer) } template<> -hintformat & hintformat::operator%(const ValuePrinter & value) +HintFmt & HintFmt::operator%(const ValuePrinter & value) { fmt % value; return *this; diff --git a/src/libexpr/print.hh b/src/libexpr/print.hh index a542bc7b1..7ddda81b8 100644 --- a/src/libexpr/print.hh +++ b/src/libexpr/print.hh @@ -86,6 +86,6 @@ std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer); * magenta. */ template<> -hintformat & hintformat::operator%(const ValuePrinter & value); +HintFmt & HintFmt::operator%(const ValuePrinter & value); } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index b2f116390..3f877a7fd 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict, out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore); } catch (Error & e) { e.addTrace(state.positions[a.pos], - hintfmt("while evaluating attribute '%1%'", j)); + HintFmt("while evaluating attribute '%1%'", j)); throw; } } @@ -81,7 +81,7 @@ json printValueAsJSON(EvalState & state, bool strict, out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore)); } catch (Error & e) { e.addTrace(state.positions[pos], - hintfmt("while evaluating list element at index %1%", i)); + HintFmt("while evaluating list element at index %1%", i)); throw; } i++; diff --git a/src/libexpr/value/context.hh b/src/libexpr/value/context.hh index 2abd1c9d4..7f23cd3a4 100644 --- a/src/libexpr/value/context.hh +++ b/src/libexpr/value/context.hh @@ -19,8 +19,8 @@ public: : Error("") { raw = raw_; - auto hf = hintfmt(args...); - err.msg = hintfmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); + auto hf = HintFmt(args...); + err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw); } }; diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index a2f411b8a..2f60d2f38 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -92,7 +92,7 @@ void handleDiffHook( } catch (Error & error) { ErrorInfo ei = error.info(); // 
FIXME: wrap errors. - ei.msg = hintfmt("diff hook execution failed: %s", ei.msg.str()); + ei.msg = HintFmt("diff hook execution failed: %s", ei.msg.str()); logError(ei); } } diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index eb39be158..ebfae346f 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -882,12 +882,12 @@ template FileTransferError::FileTransferError(FileTransfer::Error error, std::optional response, const Args & ... args) : Error(args...), error(error), response(response) { - const auto hf = hintfmt(args...); + const auto hf = HintFmt(args...); // FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how // to print different messages for different verbosity levels. For now // we add some heuristics for detecting when we want to show the response. if (response && (response->size() < 1024 || response->find("") != std::string::npos)) - err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); + err.msg = HintFmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response)); else err.msg = hf; } diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc index ff14ec420..06abfb90b 100644 --- a/src/libstore/sqlite.cc +++ b/src/libstore/sqlite.cc @@ -10,11 +10,11 @@ namespace nix { -SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf) +SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf) : Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset) { auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": "; - err.msg = hintfmt("%s: %s%s, %s (in '%s')", + err.msg = HintFmt("%s: %s%s, %s (in '%s')", Uncolored(hf.str()), offsetStr, sqlite3_errstr(extendedErrNo), @@ -22,7 +22,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex path ? path : "(in-memory)"); } -[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintfmt && hf) +[[noreturn]] void SQLiteError::throw_(sqlite3 * db, HintFmt && hf) { int err = sqlite3_errcode(db); int exterr = sqlite3_extended_errcode(db); @@ -33,7 +33,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf)); - exp.err.msg = hintfmt( + exp.err.msg = HintFmt( err == SQLITE_PROTOCOL ? "SQLite database '%s' is busy (SQLITE_PROTOCOL)" : "SQLite database '%s' is busy", @@ -249,7 +249,7 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning) if (now > nextWarning) { nextWarning = now + 10; logWarning({ - .msg = hintfmt(e.what()) + .msg = HintFmt(e.what()) }); } diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh index 33ebb5892..003e4d101 100644 --- a/src/libstore/sqlite.hh +++ b/src/libstore/sqlite.hh @@ -142,19 +142,19 @@ struct SQLiteError : Error template [[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... 
args) { - throw_(db, hintfmt(fs, args...)); + throw_(db, HintFmt(fs, args...)); } - SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintfmt && hf); + SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf); protected: template SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args) - : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, hintfmt(fs, args...)) + : SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...)) { } - [[noreturn]] static void throw_(sqlite3 * db, hintfmt && hf); + [[noreturn]] static void throw_(sqlite3 * db, HintFmt && hf); }; diff --git a/src/libutil/current-process.cc b/src/libutil/current-process.cc index 01f64f211..47aa137d8 100644 --- a/src/libutil/current-process.cc +++ b/src/libutil/current-process.cc @@ -63,7 +63,7 @@ void setStackSize(rlim_t stackSize) if (setrlimit(RLIMIT_STACK, &limit) != 0) { logger->log( lvlError, - hintfmt( + HintFmt( "Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%", savedStackSize, stackSize, diff --git a/src/libutil/error.cc b/src/libutil/error.cc index e3b30b3a1..4a9efc0b5 100644 --- a/src/libutil/error.cc +++ b/src/libutil/error.cc @@ -11,7 +11,7 @@ namespace nix { -void BaseError::addTrace(std::shared_ptr && e, hintfmt hint, bool frame) +void BaseError::addTrace(std::shared_ptr && e, HintFmt hint, bool frame) { err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame }); } @@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const std::optional ErrorInfo::programName = std::nullopt; -std::ostream & operator <<(std::ostream & os, const hintfmt & hf) +std::ostream & operator <<(std::ostream & os, const HintFmt & hf) { return os << hf.str(); } diff --git a/src/libutil/error.hh b/src/libutil/error.hh index 966f4d770..2e5de5d32 100644 --- a/src/libutil/error.hh +++ b/src/libutil/error.hh @@ -63,7 +63,7 @@ void printCodeLines(std::ostream & out, struct Trace { std::shared_ptr pos; - hintfmt hint; + HintFmt hint; bool frame; }; @@ -74,7 +74,7 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs); struct ErrorInfo { Verbosity level; - hintfmt msg; + HintFmt msg; std::shared_ptr pos; std::list traces; @@ -113,20 +113,20 @@ public: template BaseError(unsigned int status, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...), .status = status } + : err { .level = lvlError, .msg = HintFmt(args...), .status = status } { } template explicit BaseError(const std::string & fs, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(fs, args...) } + : err { .level = lvlError, .msg = HintFmt(fs, args...) } { } template BaseError(const Suggestions & sug, const Args & ... args) - : err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug } + : err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug } { } - BaseError(hintfmt hint) + BaseError(HintFmt hint) : err { .level = lvlError, .msg = hint } { } @@ -159,10 +159,10 @@ public: template void addTrace(std::shared_ptr && e, std::string_view fs, const Args & ... 
args) { - addTrace(std::move(e), hintfmt(std::string(fs), args...)); + addTrace(std::move(e), HintFmt(std::string(fs), args...)); } - void addTrace(std::shared_ptr && e, hintfmt hint, bool frame = false); + void addTrace(std::shared_ptr && e, HintFmt hint, bool frame = false); bool hasTrace() const { return !err.traces.empty(); } @@ -214,8 +214,8 @@ public: SysError(int errNo, const Args & ... args) : SystemError(""), errNo(errNo) { - auto hf = hintfmt(args...); - err.msg = hintfmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); + auto hf = HintFmt(args...); + err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo)); } /** diff --git a/src/libutil/fmt.hh b/src/libutil/fmt.hh index 9c2cc1e85..e996f4ba2 100644 --- a/src/libutil/fmt.hh +++ b/src/libutil/fmt.hh @@ -31,6 +31,17 @@ inline void formatHelper(F & f, const T & x, const Args & ... args) // Interpolate one argument and then recurse. formatHelper(f % x, args...); } + +/** + * Set the correct exceptions for `fmt`. + */ +void setExceptions(boost::format & fmt) +{ + fmt.exceptions( + boost::io::all_error_bits ^ + boost::io::too_many_args_bit ^ + boost::io::too_few_args_bit); +} } /** @@ -74,7 +85,7 @@ template inline std::string fmt(const std::string & fs, const Args & ... args) { boost::format f(fs); - f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit); + setExceptions(f); formatHelper(f, args...); return f.str(); } @@ -82,9 +93,9 @@ inline std::string fmt(const std::string & fs, const Args & ... args) /** * Values wrapped in this struct are printed in magenta. * - * By default, arguments to `hintfmt` are printed in magenta. To avoid this, + * By default, arguments to `HintFmt` are printed in magenta. To avoid this, * either wrap the argument in `Uncolored` or add a specialization of - * `hintfmt::operator%`. + * `HintFmt::operator%`. */ template struct Magenta @@ -102,7 +113,7 @@ std::ostream & operator<<(std::ostream & out, const Magenta & y) /** * Values wrapped in this class are printed without coloring. * - * By default, arguments to `hintfmt` are printed in magenta (see `Magenta`). + * By default, arguments to `HintFmt` are printed in magenta (see `Magenta`). */ template struct Uncolored @@ -121,65 +132,49 @@ std::ostream & operator<<(std::ostream & out, const Uncolored & y) * A wrapper around `boost::format` which colors interpolated arguments in * magenta by default. */ -class hintfmt +class HintFmt { private: boost::format fmt; public: - /** - * Construct a `hintfmt` from a format string, with values to be - * interpolated later with `%`. - * - * This isn't exposed as a single-argument constructor to avoid - * accidentally constructing `hintfmt`s with user-controlled strings. See - * the note on `fmt` for more information. - */ - static hintfmt interpolate(const std::string & formatString) - { - hintfmt result((boost::format(formatString))); - result.fmt.exceptions( - boost::io::all_error_bits ^ - boost::io::too_many_args_bit ^ - boost::io::too_few_args_bit); - return result; - } - /** * Format the given string literally, without interpolating format * placeholders. */ - hintfmt(const std::string & literal) - : hintfmt("%s", Uncolored(literal)) + HintFmt(const std::string & literal) + : HintFmt("%s", Uncolored(literal)) { } /** * Interpolate the given arguments into the format string. */ template - hintfmt(const std::string & format, const Args & ... args) - : fmt(format) - { - formatHelper(*this, args...); - } + HintFmt(const std::string & format, const Args & ... 
args) + : HintFmt(boost::format(format), args...) + { } - hintfmt(const hintfmt & hf) + HintFmt(const HintFmt & hf) : fmt(hf.fmt) { } - hintfmt(boost::format && fmt) + template + HintFmt(boost::format && fmt, const Args & ... args) : fmt(std::move(fmt)) - { } + { + setExceptions(fmt); + formatHelper(*this, args...); + } template - hintfmt & operator%(const T & value) + HintFmt & operator%(const T & value) { fmt % Magenta(value); return *this; } template - hintfmt & operator%(const Uncolored & value) + HintFmt & operator%(const Uncolored & value) { fmt % value.value; return *this; @@ -191,6 +186,6 @@ public: } }; -std::ostream & operator<<(std::ostream & os, const hintfmt & hf); +std::ostream & operator<<(std::ostream & os, const HintFmt & hf); } diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 7fc211491..70c16ff0d 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -448,7 +448,7 @@ Error readError(Source & source) auto msg = readString(source); ErrorInfo info { .level = level, - .msg = hintfmt(msg), + .msg = HintFmt(msg), }; auto havePos = readNum(source); assert(havePos == 0); @@ -457,7 +457,7 @@ Error readError(Source & source) havePos = readNum(source); assert(havePos == 0); info.traces.push_back(Trace { - .hint = hintfmt(readString(source)) + .hint = HintFmt(readString(source)) }); } return Error(std::move(info)); diff --git a/src/nix/daemon.cc b/src/nix/daemon.cc index 4dada8e0e..8afcbe982 100644 --- a/src/nix/daemon.cc +++ b/src/nix/daemon.cc @@ -377,7 +377,7 @@ static void daemonLoop(std::optional forceTrustClientOpt) } catch (Error & error) { auto ei = error.info(); // FIXME: add to trace? - ei.msg = hintfmt("error processing connection: %1%", ei.msg.str()); + ei.msg = HintFmt("error processing connection: %1%", ei.msg.str()); logError(ei); } } diff --git a/src/nix/eval.cc b/src/nix/eval.cc index 2e0837c8e..e6a022e5f 100644 --- a/src/nix/eval.cc +++ b/src/nix/eval.cc @@ -98,7 +98,7 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption } catch (Error & e) { e.addTrace( state->positions[attr.pos], - hintfmt("while evaluating the attribute '%s'", name)); + HintFmt("while evaluating the attribute '%s'", name)); throw; } } diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 646e4c831..4504bb22e 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -411,7 +411,7 @@ struct CmdFlakeCheck : FlakeCommand return storePath; } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the derivation '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the derivation '%s'", attrPath)); reportError(e); } return std::nullopt; @@ -430,7 +430,7 @@ struct CmdFlakeCheck : FlakeCommand } #endif } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the app definition '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the app definition '%s'", attrPath)); reportError(e); } }; @@ -454,7 +454,7 @@ struct CmdFlakeCheck : FlakeCommand // FIXME: if we have a 'nixpkgs' input, use it to // evaluate the overlay. 
} catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the overlay '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the overlay '%s'", attrPath)); reportError(e); } }; @@ -465,7 +465,7 @@ struct CmdFlakeCheck : FlakeCommand fmt("checking NixOS module '%s'", attrPath)); state->forceValue(v, pos); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS module '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS module '%s'", attrPath)); reportError(e); } }; @@ -491,7 +491,7 @@ struct CmdFlakeCheck : FlakeCommand } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the Hydra jobset '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the Hydra jobset '%s'", attrPath)); reportError(e); } }; @@ -506,7 +506,7 @@ struct CmdFlakeCheck : FlakeCommand if (!state->isDerivation(*vToplevel)) throw Error("attribute 'config.system.build.toplevel' is not a derivation"); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the NixOS configuration '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the NixOS configuration '%s'", attrPath)); reportError(e); } }; @@ -540,7 +540,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("template '%s' has unsupported attribute '%s'", attrPath, name); } } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -554,7 +554,7 @@ struct CmdFlakeCheck : FlakeCommand throw Error("bundler must be a function"); // TODO: check types of inputs/outputs? } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking the template '%s'", attrPath)); + e.addTrace(resolve(pos), HintFmt("while checking the template '%s'", attrPath)); reportError(e); } }; @@ -774,7 +774,7 @@ struct CmdFlakeCheck : FlakeCommand warn("unknown flake output '%s'", name); } catch (Error & e) { - e.addTrace(resolve(pos), hintfmt("while checking flake output '%s'", name)); + e.addTrace(resolve(pos), HintFmt("while checking flake output '%s'", name)); reportError(e); } }); diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index 3cfa2b61b..a899d3113 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -31,14 +31,14 @@ namespace nix { } } catch (BaseError & e) { ASSERT_EQ(PrintToString(e.info().msg), - PrintToString(hintfmt("puppy"))); + PrintToString(HintFmt("puppy"))); auto trace = e.info().traces.rbegin(); ASSERT_EQ(e.info().traces.size(), 2); ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("doggy"))); + PrintToString(HintFmt("doggy"))); trace++; ASSERT_EQ(PrintToString(trace->hint), - PrintToString(hintfmt("beans"))); + PrintToString(HintFmt("beans"))); throw; } , EvalError @@ -53,6 +53,7 @@ namespace nix { state.error("beans").debugThrow(); } catch (Error & e2) { e.addTrace(state.positions[noPos], "beans2", ""); + //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); ASSERT_FALSE(&e.info() == &e2.info()); @@ -73,7 +74,7 @@ namespace nix { ASSERT_EQ(e.info().traces.size(), 1) << "while testing " args << std::endl << e.what(); \ auto trace = e.info().traces.rbegin(); \ ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while 
calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -95,7 +96,7 @@ namespace nix { PrintToString(context)); \ ++trace; \ ASSERT_EQ(PrintToString(trace->hint), \ - PrintToString(hintfmt("while calling the '%s' builtin", name))); \ + PrintToString(HintFmt("while calling the '%s' builtin", name))); \ throw; \ } \ , type \ @@ -104,48 +105,48 @@ namespace nix { TEST_F(ErrorTraceTest, genericClosure) { ASSERT_TRACE2("genericClosure 1", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure {}", TypeError, - hintfmt("attribute '%s' missing", "startSet"), - hintfmt("in the attrset passed as argument to builtins.genericClosure")); + HintFmt("attribute '%s' missing", "startSet"), + HintFmt("in the attrset passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = 1; }", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = true; }", TypeError, - hintfmt("expected a function but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); + HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", TypeError, - hintfmt("attribute '%s' missing", "key"), - hintfmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); + HintFmt("attribute '%s' missing", "key"), + HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); ASSERT_TRACE2("genericClosure { startSet = [{ key = 
1;}]; operator = item: [{ key = ''a''; }]; }", EvalError, - hintfmt("cannot compare %s with %s", "a string", "an integer"), - hintfmt("while comparing the `key` attributes of two genericClosure elements")); + HintFmt("cannot compare %s with %s", "a string", "an integer"), + HintFmt("while comparing the `key` attributes of two genericClosure elements")); ASSERT_TRACE2("genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, - hintfmt("expected a set but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); } @@ -153,32 +154,32 @@ namespace nix { TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2("replaceStrings 0 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [] 0 {}", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "0" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.replaceStrings")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "0" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.replaceStrings")); ASSERT_TRACE1("replaceStrings [ 0 ] [] {}", EvalError, - hintfmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); + HintFmt("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths")); ASSERT_TRACE2("replaceStrings [ 1 ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one of the strings to replace passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"oo\" ] [ true ] \"foo\"", TypeError, - hintfmt("expected a string but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating one of the replacement strings passed to builtins.replaceStrings")); ASSERT_TRACE2("replaceStrings [ \"old\" ] [ \"new\" ] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument passed to builtins.replaceStrings")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument passed to builtins.replaceStrings")); } @@ -242,8 +243,8 @@ namespace nix { TEST_F(ErrorTraceTest, ceil) { ASSERT_TRACE2("ceil \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", 
normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.ceil")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.ceil")); } @@ -251,8 +252,8 @@ namespace nix { TEST_F(ErrorTraceTest, floor) { ASSERT_TRACE2("floor \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.floor")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.floor")); } @@ -264,8 +265,8 @@ namespace nix { TEST_F(ErrorTraceTest, getEnv) { ASSERT_TRACE2("getEnv [ ]", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.getEnv")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getEnv")); } @@ -285,8 +286,8 @@ namespace nix { TEST_F(ErrorTraceTest, placeholder) { ASSERT_TRACE2("placeholder []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.placeholder")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.placeholder")); } @@ -294,13 +295,13 @@ namespace nix { TEST_F(ErrorTraceTest, toPath) { ASSERT_TRACE2("toPath []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.toPath")); ASSERT_TRACE2("toPath \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the first argument passed to builtins.toPath")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the first argument passed to builtins.toPath")); } @@ -308,8 +309,8 @@ namespace nix { TEST_F(ErrorTraceTest, storePath) { ASSERT_TRACE2("storePath true", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to 'builtins.storePath'")); + HintFmt("cannot coerce %s to a string: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 'builtins.storePath'")); } @@ -317,13 +318,13 @@ namespace nix { TEST_F(ErrorTraceTest, pathExists) { ASSERT_TRACE2("pathExists []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while realising the context of a path")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while realising the context of a path")); ASSERT_TRACE2("pathExists \"zorglub\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "zorglub"), - hintfmt("while realising the context of a path")); + HintFmt("string '%s' doesn't represent an absolute path", "zorglub"), + HintFmt("while realising the context of a path")); } @@ 
-331,8 +332,8 @@ namespace nix { TEST_F(ErrorTraceTest, baseNameOf) { ASSERT_TRACE2("baseNameOf []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.baseNameOf")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.baseNameOf")); } @@ -376,30 +377,30 @@ namespace nix { TEST_F(ErrorTraceTest, filterSource) { ASSERT_TRACE2("filterSource [] []", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("cannot coerce %s to a string: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] \"foo\"", EvalError, - hintfmt("string '%s' doesn't represent an absolute path", "foo"), - hintfmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); + HintFmt("string '%s' doesn't represent an absolute path", "foo"), + HintFmt("while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'")); ASSERT_TRACE2("filterSource [] ./.", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.filterSource")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.filterSource")); // Usupported by store "dummy" // ASSERT_TRACE2("filterSource (_: 1) ./.", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while adding path '/home/layus/projects/nix'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while adding path '/home/layus/projects/nix'")); // ASSERT_TRACE2("filterSource (_: _: 1) ./.", // TypeError, - // hintfmt("expected a Boolean but found %s: %s", "an integer", "1"), - // hintfmt("while evaluating the return value of the path filter function")); + // HintFmt("expected a Boolean but found %s: %s", "an integer", "1"), + // HintFmt("while evaluating the return value of the path filter function")); } @@ -411,8 +412,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrNames) { ASSERT_TRACE2("attrNames []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrNames")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrNames")); } @@ -420,8 +421,8 @@ namespace nix { TEST_F(ErrorTraceTest, attrValues) { ASSERT_TRACE2("attrValues []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the argument passed to builtins.attrValues")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the argument passed to builtins.attrValues")); } @@ -429,18 +430,18 @@ namespace nix { TEST_F(ErrorTraceTest, getAttr) { ASSERT_TRACE2("getAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.getAttr")); + 
HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.getAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.getAttr")); ASSERT_TRACE2("getAttr \"foo\" {}", TypeError, - hintfmt("attribute '%s' missing", "foo"), - hintfmt("in the attribute set under consideration")); + HintFmt("attribute '%s' missing", "foo"), + HintFmt("in the attribute set under consideration")); } @@ -452,13 +453,13 @@ namespace nix { TEST_F(ErrorTraceTest, hasAttr) { ASSERT_TRACE2("hasAttr [] []", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.hasAttr")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.hasAttr")); ASSERT_TRACE2("hasAttr \"foo\" []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.hasAttr")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.hasAttr")); } @@ -470,18 +471,18 @@ namespace nix { TEST_F(ErrorTraceTest, removeAttrs) { ASSERT_TRACE2("removeAttrs \"\" \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); ASSERT_TRACE2("removeAttrs \"\" [ \"1\" ]", TypeError, - hintfmt("expected a set but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.removeAttrs")); + HintFmt("expected a set but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.removeAttrs")); } @@ -489,28 +490,28 @@ namespace nix { TEST_F(ErrorTraceTest, listToAttrs) { ASSERT_TRACE2("listToAttrs 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the argument passed to builtins.listToAttrs")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the argument passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element of the list passed to 
builtins.listToAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ {} ]", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in a {name=...; value=...;} pair")); + HintFmt("attribute '%s' missing", "name"), + HintFmt("in a {name=...; value=...;} pair")); ASSERT_TRACE2("listToAttrs [ { name = 1; } ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the `name` attribute of an element of the list passed to builtins.listToAttrs")); ASSERT_TRACE2("listToAttrs [ { name = \"foo\"; } ]", TypeError, - hintfmt("attribute '%s' missing", "value"), - hintfmt("in a {name=...; value=...;} pair")); + HintFmt("attribute '%s' missing", "value"), + HintFmt("in a {name=...; value=...;} pair")); } @@ -518,13 +519,13 @@ namespace nix { TEST_F(ErrorTraceTest, intersectAttrs) { ASSERT_TRACE2("intersectAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.intersectAttrs")); ASSERT_TRACE2("intersectAttrs {} []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.intersectAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.intersectAttrs")); } @@ -532,23 +533,23 @@ namespace nix { TEST_F(ErrorTraceTest, catAttrs) { ASSERT_TRACE2("catAttrs [] {}", TypeError, - hintfmt("expected a string but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.catAttrs")); + HintFmt("expected a string but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.catAttrs")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); ASSERT_TRACE2("catAttrs \"foo\" [ { foo = 1; } 1 { bar = 5;} ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); + HintFmt("expected a set but found %s: %s", "an 
integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating an element in the list passed as second argument to builtins.catAttrs")); } @@ -556,7 +557,7 @@ namespace nix { TEST_F(ErrorTraceTest, functionArgs) { ASSERT_TRACE1("functionArgs {}", TypeError, - hintfmt("'functionArgs' requires a function")); + HintFmt("'functionArgs' requires a function")); } @@ -564,24 +565,24 @@ namespace nix { TEST_F(ErrorTraceTest, mapAttrs) { ASSERT_TRACE2("mapAttrs [] []", TypeError, - hintfmt("expected a set but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the second argument passed to builtins.mapAttrs")); + HintFmt("expected a set but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the second argument passed to builtins.mapAttrs")); // XXX: defered // ASSERT_TRACE2("mapAttrs \"\" { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: x + \"1\") { foo.bar = 1; }", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "a string"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "a string"), + // HintFmt("while evaluating the attribute 'foo'")); // ASSERT_TRACE2("mapAttrs (x: y: x + 1) { foo.bar = 1; }", // TypeError, - // hintfmt("cannot coerce %s to a string", "an integer"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "an integer"), + // HintFmt("while evaluating a path segment")); } @@ -589,27 +590,27 @@ namespace nix { TEST_F(ErrorTraceTest, zipAttrsWith) { ASSERT_TRACE2("zipAttrsWith [] [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "a list", normaltxt("[ ]")), - hintfmt("while evaluating the first argument passed to builtins.zipAttrsWith")); + HintFmt("expected a function but found %s: %s", "a list", Uncolored("[ ]")), + HintFmt("while evaluating the first argument passed to builtins.zipAttrsWith")); ASSERT_TRACE2("zipAttrsWith (_: 1) [ 1 ]", TypeError, - hintfmt("expected a set but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); + HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed as second argument to builtins.zipAttrsWith")); // XXX: How to properly tell that the fucntion takes two arguments ? // The same question also applies to sort, and maybe others. // Due to lazyness, we only create a thunk, and it fails later on. 
// ASSERT_TRACE2("zipAttrsWith (_: 1) [ { foo = 1; } ]", // TypeError, - // hintfmt("attempt to call something which is not a function but %s", "an integer"), - // hintfmt("while evaluating the attribute 'foo'")); + // HintFmt("attempt to call something which is not a function but %s", "an integer"), + // HintFmt("while evaluating the attribute 'foo'")); // XXX: Also deferred deeply // ASSERT_TRACE2("zipAttrsWith (a: b: a + b) [ { foo = 1; } { foo = 2; } ]", // TypeError, - // hintfmt("cannot coerce %s to a string", "a list"), - // hintfmt("while evaluating a path segment")); + // HintFmt("cannot coerce %s to a string", "a list"), + // HintFmt("while evaluating a path segment")); } @@ -621,16 +622,16 @@ namespace nix { TEST_F(ErrorTraceTest, elemAt) { ASSERT_TRACE2("elemAt \"foo\" (-1)", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("elemAt [] (-1)", Error, - hintfmt("list index %d is out of bounds", -1)); + HintFmt("list index %d is out of bounds", -1)); ASSERT_TRACE1("elemAt [\"foo\"] 3", Error, - hintfmt("list index %d is out of bounds", 3)); + HintFmt("list index %d is out of bounds", 3)); } @@ -638,12 +639,12 @@ namespace nix { TEST_F(ErrorTraceTest, head) { ASSERT_TRACE2("head 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.elemAt")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.elemAt")); ASSERT_TRACE1("head []", Error, - hintfmt("list index %d is out of bounds", 0)); + HintFmt("list index %d is out of bounds", 0)); } @@ -651,12 +652,12 @@ namespace nix { TEST_F(ErrorTraceTest, tail) { ASSERT_TRACE2("tail 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.tail")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.tail")); ASSERT_TRACE1("tail []", Error, - hintfmt("'tail' called on an empty list")); + HintFmt("'tail' called on an empty list")); } @@ -664,13 +665,13 @@ namespace nix { TEST_F(ErrorTraceTest, map) { ASSERT_TRACE2("map 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.map")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.map")); ASSERT_TRACE2("map 1 [ 1 ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.map")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.map")); } @@ -678,18 +679,18 @@ namespace nix { TEST_F(ErrorTraceTest, 
filter) { ASSERT_TRACE2("filter 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.filter")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.filter")); ASSERT_TRACE2("filter 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.filter")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.filter")); ASSERT_TRACE2("filter (_: 5) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "5" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the filtering function passed to builtins.filter")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "5" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the filtering function passed to builtins.filter")); } @@ -697,8 +698,8 @@ namespace nix { TEST_F(ErrorTraceTest, elem) { ASSERT_TRACE2("elem 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.elem")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.elem")); } @@ -706,18 +707,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatLists) { ASSERT_TRACE2("concatLists 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ 1 ]", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); ASSERT_TRACE2("concatLists [ [1] \"foo\" ]", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating a value of the list passed to builtins.concatLists")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating a value of the list passed to builtins.concatLists")); } @@ -725,13 +726,13 @@ namespace nix { TEST_F(ErrorTraceTest, length) { ASSERT_TRACE2("length 1", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 
builtins.length")); ASSERT_TRACE2("length \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.length")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.length")); } @@ -739,22 +740,22 @@ namespace nix { TEST_F(ErrorTraceTest, foldlPrime) { ASSERT_TRACE2("foldl' 1 \"foo\" true", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.foldlStrict")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.foldlStrict")); ASSERT_TRACE2("foldl' (_: 1) \"foo\" true", TypeError, - hintfmt("expected a list but found %s: %s", "a Boolean", normaltxt(ANSI_CYAN "true" ANSI_NORMAL)), - hintfmt("while evaluating the third argument passed to builtins.foldlStrict")); + HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), + HintFmt("while evaluating the third argument passed to builtins.foldlStrict")); ASSERT_TRACE1("foldl' (_: 1) \"foo\" [ true ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("foldl' (a: b: a && b) \"foo\" [ true ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("in the left operand of the AND (&&) operator")); + HintFmt("expected a Boolean but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("in the left operand of the AND (&&) operator")); } @@ -762,18 +763,18 @@ namespace nix { TEST_F(ErrorTraceTest, any) { ASSERT_TRACE2("any 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.any")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.any")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.any")); ASSERT_TRACE2("any (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.any")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.any")); } @@ -781,18 +782,18 @@ namespace nix { TEST_F(ErrorTraceTest, all) { ASSERT_TRACE2("all 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", 
normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.all")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.all")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.all")); ASSERT_TRACE2("all (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.all")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.all")); } @@ -800,23 +801,23 @@ namespace nix { TEST_F(ErrorTraceTest, genList) { ASSERT_TRACE2("genList 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.genList")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.genList")); ASSERT_TRACE2("genList 1 2", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", // TypeError, - // hintfmt("cannot add %s to an integer", "a string"), - // hintfmt("while evaluating anonymous lambda")); + // HintFmt("cannot add %s to an integer", "a string"), + // HintFmt("while evaluating anonymous lambda")); ASSERT_TRACE1("genList false (-3)", EvalError, - hintfmt("cannot create list of size %d", -3)); + HintFmt("cannot create list of size %d", -3)); } @@ -824,31 +825,31 @@ namespace nix { TEST_F(ErrorTraceTest, sort) { ASSERT_TRACE2("sort 1 \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.sort")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.sort")); ASSERT_TRACE2("sort 1 [ \"foo\" ]", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.sort")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.sort")); ASSERT_TRACE1("sort (_: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("attempt to call something which is not a function but %s: %s", "an integer", normaltxt(ANSI_CYAN 
"1" ANSI_NORMAL))); + HintFmt("attempt to call something which is not a function but %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL))); ASSERT_TRACE2("sort (_: _: 1) [ \"foo\" \"bar\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the sorting function passed to builtins.sort")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the sorting function passed to builtins.sort")); // XXX: Trace too deep, need better asserts // ASSERT_TRACE1("sort (a: b: a <= b) [ \"foo\" {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s", "a string", "a set")); + // HintFmt("cannot compare %s with %s", "a string", "a set")); // ASSERT_TRACE1("sort (a: b: a <= b) [ {} {} ] # TODO", // TypeError, - // hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + // HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); } @@ -856,18 +857,18 @@ namespace nix { TEST_F(ErrorTraceTest, partition) { ASSERT_TRACE2("partition 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.partition")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.partition")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.partition")); ASSERT_TRACE2("partition (_: 1) [ \"foo\" ]", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the partition function passed to builtins.partition")); + HintFmt("expected a Boolean but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the partition function passed to builtins.partition")); } @@ -875,18 +876,18 @@ namespace nix { TEST_F(ErrorTraceTest, groupBy) { ASSERT_TRACE2("groupBy 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.groupBy")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (_: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.groupBy")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.groupBy")); ASSERT_TRACE2("groupBy (x: x) [ \"foo\" \"bar\" 1 ]", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", 
normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the grouping function passed to builtins.groupBy")); } @@ -894,23 +895,23 @@ namespace nix { TEST_F(ErrorTraceTest, concatMap) { ASSERT_TRACE2("concatMap 1 \"foo\"", TypeError, - hintfmt("expected a function but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.concatMap")); + HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) \"foo\"", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO", TypeError, - hintfmt("expected a list but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the return value of the function passed to builtins.concatMap")); + HintFmt("expected a list but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the return value of the function passed to builtins.concatMap")); } @@ -918,13 +919,13 @@ namespace nix { TEST_F(ErrorTraceTest, add) { ASSERT_TRACE2("add \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the addition")); ASSERT_TRACE2("add 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the addition")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the addition")); } @@ -932,13 +933,13 @@ namespace nix { TEST_F(ErrorTraceTest, sub) { ASSERT_TRACE2("sub \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the subtraction")); ASSERT_TRACE2("sub 1 \"foo\"", TypeError, - 
hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the subtraction")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the subtraction")); } @@ -946,13 +947,13 @@ namespace nix { TEST_F(ErrorTraceTest, mul) { ASSERT_TRACE2("mul \"foo\" 1", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first argument of the multiplication")); ASSERT_TRACE2("mul 1 \"foo\"", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument of the multiplication")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument of the multiplication")); } @@ -960,17 +961,17 @@ namespace nix { TEST_F(ErrorTraceTest, div) { ASSERT_TRACE2("div \"foo\" 1 # TODO: an integer was expected -> a number", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the first operand of the division")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the first operand of the division")); ASSERT_TRACE2("div 1 \"foo\"", TypeError, - hintfmt("expected a float but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second operand of the division")); + HintFmt("expected a float but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second operand of the division")); ASSERT_TRACE1("div \"foo\" 0", EvalError, - hintfmt("division by zero")); + HintFmt("division by zero")); } @@ -978,13 +979,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitAnd) { ASSERT_TRACE2("bitAnd 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitAnd")); ASSERT_TRACE2("bitAnd 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitAnd")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitAnd")); } @@ -992,13 +993,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitOr) { ASSERT_TRACE2("bitOr 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first 
argument passed to builtins.bitOr")); ASSERT_TRACE2("bitOr 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitOr")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitOr")); } @@ -1006,13 +1007,13 @@ namespace nix { TEST_F(ErrorTraceTest, bitXor) { ASSERT_TRACE2("bitXor 1.1 2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "1.1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "1.1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.bitXor")); ASSERT_TRACE2("bitXor 1 2.2", TypeError, - hintfmt("expected an integer but found %s: %s", "a float", normaltxt(ANSI_CYAN "2.2" ANSI_NORMAL)), - hintfmt("while evaluating the second argument passed to builtins.bitXor")); + HintFmt("expected an integer but found %s: %s", "a float", Uncolored(ANSI_CYAN "2.2" ANSI_NORMAL)), + HintFmt("while evaluating the second argument passed to builtins.bitXor")); } @@ -1020,16 +1021,16 @@ namespace nix { TEST_F(ErrorTraceTest, lessThan) { ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string")); + HintFmt("cannot compare %s with %s", "an integer", "a string")); ASSERT_TRACE1("lessThan {} {}", EvalError, - hintfmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); ASSERT_TRACE2("lessThan [ 1 2 ] [ \"foo\" ]", EvalError, - hintfmt("cannot compare %s with %s", "an integer", "a string"), - hintfmt("while comparing two list elements")); + HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt("while comparing two list elements")); } @@ -1037,8 +1038,8 @@ namespace nix { TEST_F(ErrorTraceTest, toString) { ASSERT_TRACE2("toString { a = 1; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), - hintfmt("while evaluating the first argument passed to builtins.toString")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ a = " ANSI_CYAN "1" ANSI_NORMAL "; }")), + HintFmt("while evaluating the first argument passed to builtins.toString")); } @@ -1046,22 +1047,22 @@ namespace nix { TEST_F(ErrorTraceTest, substring) { ASSERT_TRACE2("substring {} \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the first argument (the start offset) passed to builtins.substring")); + HintFmt("expected an integer but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the first argument (the start offset) passed to builtins.substring")); ASSERT_TRACE2("substring 3 \"foo\" true", TypeError, - hintfmt("expected an integer but found %s: %s", "a string", normaltxt(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), - hintfmt("while evaluating the second argument (the substring length) passed to builtins.substring")); + HintFmt("expected an integer but found %s: %s", "a string", Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), + HintFmt("while evaluating the second argument (the substring length) passed to 
builtins.substring")); ASSERT_TRACE2("substring 0 3 {}", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the third argument (the string) passed to builtins.substring")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the third argument (the string) passed to builtins.substring")); ASSERT_TRACE1("substring (-3) 3 \"sometext\"", EvalError, - hintfmt("negative start position in 'substring'")); + HintFmt("negative start position in 'substring'")); } @@ -1069,8 +1070,8 @@ namespace nix { TEST_F(ErrorTraceTest, stringLength) { ASSERT_TRACE2("stringLength {} # TODO: context is missing ???", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the argument passed to builtins.stringLength")); + HintFmt("cannot coerce %s to a string: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the argument passed to builtins.stringLength")); } @@ -1078,17 +1079,17 @@ namespace nix { TEST_F(ErrorTraceTest, hashString) { ASSERT_TRACE2("hashString 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.hashString")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.hashString")); ASSERT_TRACE1("hashString \"foo\" \"content\"", UsageError, - hintfmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); + HintFmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo")); ASSERT_TRACE2("hashString \"sha256\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.hashString")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.hashString")); } @@ -1096,17 +1097,17 @@ namespace nix { TEST_F(ErrorTraceTest, match) { ASSERT_TRACE2("match 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.match")); ASSERT_TRACE2("match \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.match")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.match")); ASSERT_TRACE1("match \"(.*\" \"\"", EvalError, - hintfmt("invalid regular expression '%s'", "(.*")); + HintFmt("invalid regular expression '%s'", "(.*")); } @@ -1114,17 +1115,17 @@ namespace nix { TEST_F(ErrorTraceTest, split) { ASSERT_TRACE2("split 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to 
builtins.split")); ASSERT_TRACE2("split \"foo\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.split")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.split")); ASSERT_TRACE1("split \"f(o*o\" \"1foo2\"", EvalError, - hintfmt("invalid regular expression '%s'", "f(o*o")); + HintFmt("invalid regular expression '%s'", "f(o*o")); } @@ -1132,18 +1133,18 @@ namespace nix { TEST_F(ErrorTraceTest, concatStringsSep) { ASSERT_TRACE2("concatStringsSep 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument (the separator string) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" {}", TypeError, - hintfmt("expected a list but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); + HintFmt("expected a list but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument (the list of strings to concat) passed to builtins.concatStringsSep")); ASSERT_TRACE2("concatStringsSep \"foo\" [ 1 2 {} ] # TODO: coerce to string is buggy", TypeError, - hintfmt("cannot coerce %s to a string: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); + HintFmt("cannot coerce %s to a string: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep")); } @@ -1151,8 +1152,8 @@ namespace nix { TEST_F(ErrorTraceTest, parseDrvName) { ASSERT_TRACE2("parseDrvName 1", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.parseDrvName")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.parseDrvName")); } @@ -1160,13 +1161,13 @@ namespace nix { TEST_F(ErrorTraceTest, compareVersions) { ASSERT_TRACE2("compareVersions 1 {}", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.compareVersions")); ASSERT_TRACE2("compareVersions \"abd\" {}", TypeError, - hintfmt("expected a string but found %s: %s", "a set", normaltxt("{ }")), - hintfmt("while evaluating the second argument passed to builtins.compareVersions")); + HintFmt("expected a string but found %s: %s", "a set", Uncolored("{ }")), + HintFmt("while evaluating the second argument passed to builtins.compareVersions")); } @@ -1174,8 +1175,8 @@ namespace nix { TEST_F(ErrorTraceTest, splitVersion) { ASSERT_TRACE2("splitVersion 1", 
TypeError, - hintfmt("expected a string but found %s: %s", "an integer", normaltxt(ANSI_CYAN "1" ANSI_NORMAL)), - hintfmt("while evaluating the first argument passed to builtins.splitVersion")); + HintFmt("expected a string but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("while evaluating the first argument passed to builtins.splitVersion")); } @@ -1188,108 +1189,108 @@ namespace nix { TEST_F(ErrorTraceTest, derivationStrict) { ASSERT_TRACE2("derivationStrict \"\"", TypeError, - hintfmt("expected a set but found %s: %s", "a string", "\"\""), - hintfmt("while evaluating the argument passed to builtins.derivationStrict")); + HintFmt("expected a set but found %s: %s", "a string", "\"\""), + HintFmt("while evaluating the argument passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict {}", TypeError, - hintfmt("attribute '%s' missing", "name"), - hintfmt("in the attrset passed as argument to builtins.derivationStrict")); + HintFmt("attribute '%s' missing", "name"), + HintFmt("in the attrset passed as argument to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = 1; }", TypeError, - hintfmt("expected a string but found %s: %s", "an integer", "1"), - hintfmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); + HintFmt("expected a string but found %s: %s", "an integer", "1"), + HintFmt("while evaluating the `name` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; }", TypeError, - hintfmt("required attribute 'builder' missing"), - hintfmt("while evaluating derivation 'foo'")); + HintFmt("required attribute 'builder' missing"), + HintFmt("while evaluating derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __structuredAttrs = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__structuredAttrs` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; __ignoreNulls = 15; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "an integer", "15"), - hintfmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); + HintFmt("expected a Boolean but found %s: %s", "an integer", "15"), + HintFmt("while evaluating the `__ignoreNulls` attribute passed to builtins.derivationStrict")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = 15; }", TypeError, - hintfmt("invalid value '15' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value '15' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; outputHashMode = \"custom\"; }", TypeError, - hintfmt("invalid value 'custom' for 'outputHashMode' attribute"), - hintfmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); + HintFmt("invalid value 'custom' for 'outputHashMode' attribute"), + HintFmt("while evaluating the attribute 'outputHashMode' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = {}; }", TypeError, - hintfmt("cannot coerce %s to a 
string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'system' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'system' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"drv\"; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = []; }", TypeError, - hintfmt("derivation cannot have an empty set of outputs"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("derivation cannot have an empty set of outputs"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"drv\" ]; }", TypeError, - hintfmt("invalid derivation output name 'drv'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("invalid derivation output name 'drv'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = [ \"out\" \"out\" ]; }", TypeError, - hintfmt("duplicate derivation output 'out'"), - hintfmt("while evaluating the attribute 'outputs' of derivation 'foo'")); + HintFmt("duplicate derivation output 'out'"), + HintFmt("while evaluating the attribute 'outputs' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __contentAddressed = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__contentAddressed' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; __impure = \"true\"; }", TypeError, - hintfmt("expected a Boolean but found %s: %s", "a string", "\"true\""), - hintfmt("while evaluating the attribute '__impure' of derivation 'foo'")); + HintFmt("expected a Boolean but found %s: %s", "a string", "\"true\""), + HintFmt("while evaluating the attribute '__impure' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = \"foo\"; }", TypeError, - 
hintfmt("expected a list but found %s: %s", "a string", "\"foo\""), - hintfmt("while evaluating the attribute 'args' of derivation 'foo'")); + HintFmt("expected a list but found %s: %s", "a string", "\"foo\""), + HintFmt("while evaluating the attribute 'args' of derivation 'foo'")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; args = [ \"a\" {} ]; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating an element of the argument list")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating an element of the argument list")); ASSERT_TRACE2("derivationStrict { name = \"foo\"; builder = 1; system = 1; outputs = \"out\"; FOO = {}; }", TypeError, - hintfmt("cannot coerce %s to a string: %s", "a set", "{ }"), - hintfmt("while evaluating the attribute 'FOO' of derivation 'foo'")); + HintFmt("cannot coerce %s to a string: %s", "a set", "{ }"), + HintFmt("while evaluating the attribute 'FOO' of derivation 'foo'")); } */ diff --git a/tests/unit/libutil/logging.cc b/tests/unit/libutil/logging.cc index c8c7c091f..1d7304f05 100644 --- a/tests/unit/libutil/logging.cc +++ b/tests/unit/libutil/logging.cc @@ -42,7 +42,7 @@ namespace nix { makeJSONLogger(*logger)->logEI({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -62,7 +62,7 @@ namespace nix { throw TestError(e.info()); } catch (Error &e) { ErrorInfo ei = e.info(); - ei.msg = hintfmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); + ei.msg = HintFmt("%s; subsequent error message.", Uncolored(e.info().msg.str())); testing::internal::CaptureStderr(); logger->logEI(ei); @@ -176,7 +176,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foString, problem_file, 02, 13), @@ -193,7 +193,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foFile, problem_file, 02, 13) @@ -208,7 +208,7 @@ namespace nix { logError({ .name = "error name", - .msg = hintfmt("hint %1%", "only"), + .msg = HintFmt("hint %1%", "only"), }); auto str = testing::internal::GetCapturedStderr(); @@ -225,7 +225,7 @@ namespace nix { logWarning({ .name = "name", - .msg = hintfmt("there was a %1%", "warning"), + .msg = HintFmt("there was a %1%", "warning"), }); auto str = testing::internal::GetCapturedStderr(); @@ -241,7 +241,7 @@ namespace nix { logWarning({ .name = "warning name", - .msg = hintfmt("this hint has %1% templated %2%!!", + .msg = HintFmt("this hint has %1% templated %2%!!", "yellow", "values"), .errPos = Pos(foStdin, problem_file, 2, 13), @@ -264,7 +264,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% 
days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -290,7 +290,7 @@ namespace nix { auto e = AssertionError(ErrorInfo { .name = "wat", - .msg = hintfmt("it has been %1% days since our last error", "zero"), + .msg = HintFmt("it has been %1% days since our last error", "zero"), .errPos = Pos(foString, problem_file, 2, 13), }); @@ -310,39 +310,39 @@ namespace nix { /* ---------------------------------------------------------------------------- - * hintfmt + * HintFmt * --------------------------------------------------------------------------*/ - TEST(hintfmt, percentStringWithoutArgs) { + TEST(HintFmt, percentStringWithoutArgs) { const char *teststr = "this is 100%s correct!"; ASSERT_STREQ( - hintfmt(teststr).str().c_str(), + HintFmt(teststr).str().c_str(), teststr); } - TEST(hintfmt, fmtToHintfmt) { + TEST(HintFmt, fmtToHintfmt) { ASSERT_STREQ( - hintfmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), + HintFmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(), "the color of this this text is not yellow"); } - TEST(hintfmt, tooFewArguments) { + TEST(HintFmt, tooFewArguments) { ASSERT_STREQ( - hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(), + HintFmt("only one arg %1% %2%", "fulfilled").str().c_str(), "only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " "); } - TEST(hintfmt, tooManyArguments) { + TEST(HintFmt, tooManyArguments) { ASSERT_STREQ( - hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(), + HintFmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(), "what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL); } From 953eb0cba2aad89753a39da6c98d409d1b88f88e Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Thu, 8 Feb 2024 15:55:20 -0800 Subject: [PATCH 273/307] Fix tests --- tests/unit/libexpr/error_traces.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/libexpr/error_traces.cc b/tests/unit/libexpr/error_traces.cc index a899d3113..7b32b320b 100644 --- a/tests/unit/libexpr/error_traces.cc +++ b/tests/unit/libexpr/error_traces.cc @@ -26,7 +26,7 @@ namespace nix { try { state.error("puppy").withTrace(noPos, "doggy").debugThrow(); } catch (Error & e) { - e.addTrace(state.positions[noPos], "beans", ""); + e.addTrace(state.positions[noPos], "beans"); throw; } } catch (BaseError & e) { @@ -52,7 +52,7 @@ namespace nix { try { state.error("beans").debugThrow(); } catch (Error & e2) { - e.addTrace(state.positions[noPos], "beans2", ""); + e.addTrace(state.positions[noPos], "beans2"); //e2.addTrace(state.positions[noPos], "Something", ""); ASSERT_TRUE(e.info().traces.size() == 2); ASSERT_TRUE(e2.info().traces.size() == 0); @@ -807,7 +807,7 @@ namespace nix { ASSERT_TRACE2("genList 1 2", TypeError, HintFmt("expected a function but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genList", "an integer")); + HintFmt("while evaluating the first argument passed to builtins.genList")); // XXX: defered // ASSERT_TRACE2("genList (x: x + \"foo\") 2 #TODO", From 1fe7b016699c4e2a7435ba29d1ecc6830ae88946 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Th=C3=A9ophane=20Hufschmitt?= <7226587+thufschmitt@users.noreply.github.com> Date: Fri, 9 Feb 2024 06:27:24 +0100 Subject: [PATCH 274/307] Don't hardcode the `-O2` compiler flag autoconf authors apparently decided that setting `-O2` by default was a good idea. 
I disagree, and Nix has its own way of deciding that (with `OPTIMIZE={0,1}`). Explicitly set `CFLAGS` and `CXXFLAGS` in the configure script to disable that behaviour. Fix #9965 --- configure.ac | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/configure.ac b/configure.ac index 8c29c1e62..676b145a5 100644 --- a/configure.ac +++ b/configure.ac @@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')]) # State should be stored in /nix/var, unless the user overrides it explicitly. test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var +# Assign a default value to C{,XX}FLAGS as the default configure script sets them +# to -O2 otherwise, which we don't want to have hardcoded +CFLAGS=${CFLAGS-""} +CXXFLAGS=${CXXFLAGS-""} AC_PROG_CC AC_PROG_CXX From 60045f9c9650ae87f04a2fe507817ad9b5318104 Mon Sep 17 00:00:00 2001 From: Valentin Gagarin Date: Fri, 9 Feb 2024 10:41:03 +0100 Subject: [PATCH 275/307] add clickable anchor links how the different invocations relate to each other seems be confusing, which is relatable because one has to wire it up in your head while reading. an explicit reference should make it unambiguous and easier to notice due to links being highlighted. --- doc/manual/src/command-ref/nix-collect-garbage.md | 2 +- doc/manual/src/command-ref/nix-env/delete-generations.md | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/manual/src/command-ref/nix-collect-garbage.md b/doc/manual/src/command-ref/nix-collect-garbage.md index 3cab79f0e..1bc88d858 100644 --- a/doc/manual/src/command-ref/nix-collect-garbage.md +++ b/doc/manual/src/command-ref/nix-collect-garbage.md @@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto - [`--delete-old`](#opt-delete-old) / `-d`\ Delete all old generations of profiles. - This is the equivalent of invoking `nix-env --delete-generations old` on each found profile. + This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile. - [`--delete-older-than`](#opt-delete-older-than) *period*\ Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time). diff --git a/doc/manual/src/command-ref/nix-env/delete-generations.md b/doc/manual/src/command-ref/nix-env/delete-generations.md index adc6fc219..6b6ea798e 100644 --- a/doc/manual/src/command-ref/nix-env/delete-generations.md +++ b/doc/manual/src/command-ref/nix-env/delete-generations.md @@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile. *generations* can be a one of the following: -- `...`:\ +- [`...`](#generations-list):\ A list of generation numbers, each one a separate command-line argument. Delete exactly the profile generations given by their generation number. Deleting the current generation is not allowed. -- The special value `old` +- [The special value `old`](#generations-old) Delete all generations except the current one. @@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile. > Because one can roll back to a previous generation, it is possible to have generations newer than the current one. > They will also be deleted. -- `d`:\ +- [`d`](#generations-time):\ The last *number* days *Example*: `30d` @@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile. 
Delete all generations created more than *number* days ago, except the most recent one of them. This allows rolling back to generations that were available within the specified period. -- `+`:\ +- [`+`](#generations-count):\ The last *number* generations up to the present *Example*: `+5` From fb5a792280a55bf783528f0903204e674417c70a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Fri, 9 Feb 2024 15:55:24 +0100 Subject: [PATCH 276/307] runPostBuildHook(): Be less chatty Don't spam the user with "running post-build-hook" messages. It's up to the post-build hook if it has something interesting to say. --- src/libstore/build/derivation-goal.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index d3bbdf1ed..1b326ee13 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -891,7 +891,7 @@ void runPostBuildHook( if (hook == "") return; - Activity act(logger, lvlInfo, actPostBuildHook, + Activity act(logger, lvlTalkative, actPostBuildHook, fmt("running post-build-hook '%s'", settings.postBuildHook), Logger::Fields{store.printStorePath(drvPath)}); PushActivity pact(act.id); From 8f3253c6f4041f500631e1dac5ba75f335e9c70a Mon Sep 17 00:00:00 2001 From: Alois Wohlschlager Date: Fri, 9 Feb 2024 18:56:42 +0100 Subject: [PATCH 277/307] Restore manual pages Commit d536c57e878a04f795c1ef8ee3232a47035da2cf inadvertedly broke build and installation of all non-autogenerated manual pages (in particular, all the ones documenting the stable CLI), by moving the definition of the man-pages variable in doc/manual/local.mk after its usage in mk/lib.mk. Move including the former earlier so that the correct order is restored. --- Makefile | 25 ++++++++++++++----------- mk/lib.mk | 4 ++++ 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 7bbfbddbe..d3542c3e9 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,17 @@ makefiles += \ tests/functional/plugins/local.mk endif +# Some makefiles require access to built programs and must be included late. +makefiles-late = + +ifeq ($(ENABLE_DOC_GEN), yes) +makefiles-late += doc/manual/local.mk +endif + +ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) +makefiles-late += doc/internal-api/local.mk +endif + # Miscellaneous global Flags OPTIMIZE = 1 @@ -95,24 +106,16 @@ installcheck: @exit 1 endif -# Documentation or else fallback stub rules. -# -# The documentation makefiles be included after `mk/lib.mk` so rules -# refer to variables defined by `mk/lib.mk`. Rules are not "lazy" like -# variables, unfortunately. +# Documentation fallback stub rules. -ifeq ($(ENABLE_DOC_GEN), yes) -$(eval $(call include-sub-makefile, doc/manual/local.mk)) -else +ifneq ($(ENABLE_DOC_GEN), yes) .PHONY: manual-html manpages manual-html manpages: @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'." @exit 1 endif -ifeq ($(ENABLE_INTERNAL_API_DOCS), yes) -$(eval $(call include-sub-makefile, doc/internal-api/local.mk)) -else +ifneq ($(ENABLE_INTERNAL_API_DOCS), yes) .PHONY: internal-api-html internal-api-html: @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'." 
diff --git a/mk/lib.mk b/mk/lib.mk index 10ce8d436..fe0add1c9 100644 --- a/mk/lib.mk +++ b/mk/lib.mk @@ -97,6 +97,10 @@ $(foreach test-group, $(install-tests-groups), \ $(eval $(call run-test,$(test),$(install_test_init))) \ $(eval $(test-group).test-group: $(test).test))) +# Include makefiles requiring built programs. +$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf)))) + + $(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file)))))) From 53eecae52546219f3f3e7bebac9792ea5d816ffc Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:17:48 +0100 Subject: [PATCH 278/307] Fix link to derivation in string interpolation doc The reference link definition for it pointing to the glossary was removed, so it is currently not displayed as a link. --- doc/manual/src/language/string-interpolation.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/string-interpolation.md b/doc/manual/src/language/string-interpolation.md index 6e28d2664..7d81c2020 100644 --- a/doc/manual/src/language/string-interpolation.md +++ b/doc/manual/src/language/string-interpolation.md @@ -20,6 +20,8 @@ Rather than writing (where `freetype` is a [derivation]), you can instead write +[derivation]: ../glossary.md#gloss-derivation + ```nix "--with-freetype2-library=${freetype}/lib" ``` From fae8c15737a8a1df85cc75f55c0bffa712b9ac0a Mon Sep 17 00:00:00 2001 From: BOHverkill Date: Sat, 10 Feb 2024 17:44:33 +0100 Subject: [PATCH 279/307] Fix link to manual in CONTRIBUTING.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ffcc0268f..a0c2b16f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -63,7 +63,7 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy). - Functional tests – [`tests/functional/**.sh`](./tests/functional) - Unit tests – [`src/*/tests`](./src/) - Integration tests – [`tests/nixos/*`](./tests/nixos) - - [ ] User documentation in the [manual](..doc/manual/src) + - [ ] User documentation in the [manual](./doc/manual/src) - [ ] API documentation in header files - [ ] Code and comments are self-explanatory - [ ] Commit message explains **why** the change was made From f298159a2bac2932208907f6319a0ba80b2721c6 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sat, 10 Feb 2024 18:52:39 -0800 Subject: [PATCH 280/307] Add a note about lists values.md There's probably more that can be said, but I thought it might be helpful to put something here about how to access elements of a list for folks coming from more or less any other programming language. If this is rarely used, it might be nice to add to the documentation something about why it's rarely used. --- doc/manual/src/language/values.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index aea68a441..99dc0245d 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,6 +156,8 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. +Elements in a list can be accessed using `builtins.elemAt`. + ## Attribute Set An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`). 
From 4496a4537b56d69c7227088c4174a1ecbedd2ed5 Mon Sep 17 00:00:00 2001 From: nbelakovski Date: Sun, 11 Feb 2024 22:52:49 -0800 Subject: [PATCH 281/307] Update values.md Link to elemAt --- doc/manual/src/language/values.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/src/language/values.md b/doc/manual/src/language/values.md index 99dc0245d..74ffc7070 100644 --- a/doc/manual/src/language/values.md +++ b/doc/manual/src/language/values.md @@ -156,7 +156,7 @@ function and the fifth being a set. Note that lists are only lazy in values, and they are strict in length. -Elements in a list can be accessed using `builtins.elemAt`. +Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt). ## Attribute Set From 619ca631d07218dfe04bb53e5abb855ecf2bb67a Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 15:29:48 +0100 Subject: [PATCH 282/307] Fix "may be used uninitialized" warning --- src/libstore/store-api.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 439c9530c..e3715343e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -847,7 +847,7 @@ void Store::substitutePaths(const StorePathSet & paths) if (!willSubstitute.empty()) try { std::vector subs; - for (auto & p : willSubstitute) subs.push_back(DerivedPath::Opaque{p}); + for (auto & p : willSubstitute) subs.emplace_back(DerivedPath::Opaque{p}); buildPaths(subs); } catch (Error & e) { logWarning(e.info()); From a9b69b2fff8b33bc62234f8031f9acf257d9fbe0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 12 Feb 2024 16:34:59 +0100 Subject: [PATCH 283/307] builtin:{unpack-channel,buildenv}: Get output path from the derivation Similar to 1ee42c5b88eb0533ebcf8b2579ec82f2be80e4b2, get the "out" path from the derivation (and complain if it doesn't exist), rather than getting it from the environment. 
--- src/libstore/build/local-derivation-goal.cc | 13 +++++++------ src/libstore/build/local-derivation-goal.hh | 2 +- src/libstore/builtins.hh | 10 ++++++++-- src/libstore/builtins/buildenv.cc | 6 ++++-- src/libstore/builtins/buildenv.hh | 4 +++- src/libstore/builtins/fetchurl.cc | 15 ++++++--------- src/libstore/builtins/unpack-channel.cc | 6 ++++-- 7 files changed, 33 insertions(+), 23 deletions(-) diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc index 2f60d2f38..b373c74b2 100644 --- a/src/libstore/build/local-derivation-goal.cc +++ b/src/libstore/build/local-derivation-goal.cc @@ -2130,16 +2130,17 @@ void LocalDerivationGoal::runChild() try { logger = makeJSONLogger(*logger); - BasicDerivation & drv2(*drv); - for (auto & e : drv2.env) - e.second = rewriteStrings(e.second, inputRewrites); + std::map outputs; + for (auto & e : drv->outputs) + outputs.insert_or_assign(e.first, + worker.store.printStorePath(scratchOutputs.at(e.first))); if (drv->builder == "builtin:fetchurl") - builtinFetchurl(drv2, netrcData); + builtinFetchurl(*drv, outputs, netrcData); else if (drv->builder == "builtin:buildenv") - builtinBuildenv(drv2); + builtinBuildenv(*drv, outputs); else if (drv->builder == "builtin:unpack-channel") - builtinUnpackChannel(drv2); + builtinUnpackChannel(*drv, outputs); else throw Error("unsupported builtin builder '%1%'", drv->builder.substr(8)); _exit(0); diff --git a/src/libstore/build/local-derivation-goal.hh b/src/libstore/build/local-derivation-goal.hh index 88152a645..f25cb9424 100644 --- a/src/libstore/build/local-derivation-goal.hh +++ b/src/libstore/build/local-derivation-goal.hh @@ -106,7 +106,7 @@ struct LocalDerivationGoal : public DerivationGoal RedirectedOutputs redirectedOutputs; /** - * The outputs paths used during the build. + * The output paths used during the build. * * - Input-addressed derivations or fixed content-addressed outputs are * sometimes built when some of their outputs already exist, and can not diff --git a/src/libstore/builtins.hh b/src/libstore/builtins.hh index d201fb3ac..93558b49e 100644 --- a/src/libstore/builtins.hh +++ b/src/libstore/builtins.hh @@ -6,7 +6,13 @@ namespace nix { // TODO: make pluggable. 
-void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData); -void builtinUnpackChannel(const BasicDerivation & drv); +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData); + +void builtinUnpackChannel( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 9283251ac..1ed7b39cc 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -161,7 +161,9 @@ void buildProfile(const Path & out, Packages && pkgs) debug("created %d symlinks in user environment", state.symlinks); } -void builtinBuildenv(const BasicDerivation & drv) +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -169,7 +171,7 @@ void builtinBuildenv(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); createDirs(out); /* Convert the stuff we get from the environment back into a diff --git a/src/libstore/builtins/buildenv.hh b/src/libstore/builtins/buildenv.hh index b24633e27..8e112e176 100644 --- a/src/libstore/builtins/buildenv.hh +++ b/src/libstore/builtins/buildenv.hh @@ -45,6 +45,8 @@ typedef std::vector Packages; void buildProfile(const Path & out, Packages && pkgs); -void builtinBuildenv(const BasicDerivation & drv); +void builtinBuildenv( + const BasicDerivation & drv, + const std::map & outputs); } diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc index a9f2e748e..4fb67f933 100644 --- a/src/libstore/builtins/fetchurl.cc +++ b/src/libstore/builtins/fetchurl.cc @@ -6,7 +6,10 @@ namespace nix { -void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) +void builtinFetchurl( + const BasicDerivation & drv, + const std::map & outputs, + const std::string & netrcData) { /* Make the host's netrc data available. Too bad curl requires this to be stored in a file. 
It would be nice if we could just @@ -24,14 +27,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData) if (!dof) throw Error("'builtin:fetchurl' must be a fixed-output derivation"); - auto getAttr = [&](const std::string & name) { - auto i = drv.env.find(name); - if (i == drv.env.end()) throw Error("attribute '%s' missing", name); - return i->second; - }; - - Path storePath = getAttr("out"); - auto mainUrl = getAttr("url"); + auto storePath = outputs.at("out"); + auto mainUrl = drv.env.at("url"); bool unpack = getOr(drv.env, "unpack", "") == "1"; /* Note: have to use a fresh fileTransfer here because we're in diff --git a/src/libstore/builtins/unpack-channel.cc b/src/libstore/builtins/unpack-channel.cc index ba04bb16c..6f68d4c0b 100644 --- a/src/libstore/builtins/unpack-channel.cc +++ b/src/libstore/builtins/unpack-channel.cc @@ -3,7 +3,9 @@ namespace nix { -void builtinUnpackChannel(const BasicDerivation & drv) +void builtinUnpackChannel( + const BasicDerivation & drv, + const std::map & outputs) { auto getAttr = [&](const std::string & name) { auto i = drv.env.find(name); @@ -11,7 +13,7 @@ void builtinUnpackChannel(const BasicDerivation & drv) return i->second; }; - Path out = getAttr("out"); + auto out = outputs.at("out"); auto channelName = getAttr("channelName"); auto src = getAttr("src"); From 91557df4a78e47fdadcea59fbca7751511b73bf5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:12 -0500 Subject: [PATCH 284/307] Apply suggestions from code review Co-authored-by: Robert Hensing --- doc/manual/src/SUMMARY.md.in | 2 +- doc/manual/src/protocols/store-path.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/src/SUMMARY.md.in b/doc/manual/src/SUMMARY.md.in index e6390c60a..d86372845 100644 --- a/doc/manual/src/SUMMARY.md.in +++ b/doc/manual/src/SUMMARY.md.in @@ -106,7 +106,7 @@ - [Architecture and Design](architecture/architecture.md) - [Protocols](protocols/index.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - - [Exact Store Path Specification](protocols/store-path.md) + - [Store Path Specification](protocols/store-path.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [Glossary](glossary.md) - [Contributing](contributing/index.md) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index d1c35b05e..e7bc050e7 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -47,7 +47,7 @@ where For either the outputs built from derivations, paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256]. - (in that case "source" is used; it's silly, but it's done that way for compatibility). + (in that case "source" is used; this is only necessary for compatibility). `` is the name of the output (usually, "out"). For content-addressed store objects, ``, is always "out". 
From ac1301ddfdc0d92a23378f2ea75b221740c15bab Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 12 Feb 2024 11:16:53 -0500 Subject: [PATCH 285/307] Convert store path "grammar" to EBNF --- doc/manual/src/protocols/store-path.md | 70 +++++++++++++++----------- 1 file changed, 40 insertions(+), 30 deletions(-) diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md index e7bc050e7..d5dec77b5 100644 --- a/doc/manual/src/protocols/store-path.md +++ b/doc/manual/src/protocols/store-path.md @@ -5,66 +5,69 @@ This is the complete specification for how store paths are calculated. Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to. But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful. -```bnf - ::= /- +```ebnf +store-path = store-dir "/" digest "-" name ``` where -- `` = base-32 representation of the first 160 bits of a [SHA-256] hash of `
`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
 
   Th is :the hash part of the store name
 
-- `<pre>` = the string `<type>:sha256:<inner-digest>:<store>:<name>`;
+- `pre` = the string
+
+  ```ebnf
+  type ":" sha256 ":" inner-digest ":" store ":" name
+  ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).
 
-- `<name>` = the name of the store object.
+- `name` = the name of the store object.
 
-- `<store>` = the [store directory](@docroot@/store/store-path.md#store-directory)
+- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
 
-- `<type>` = one of:
+- `type` = one of:
 
-  - ```bnf
-    text:<r1>:<r2>:...<rN>
+  - ```ebnf
+    "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
-    `<r1> ... <rN>` are the store paths referenced by this path.
-    Those are encoded in the form described by ``.
+    The optional trailing store paths are the references of the store object.
 
-  - ```bnf
-    source:::...::self
+  - ```ebnf
+    "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
     Just like in the text case, we can have the store objects referenced by their paths.
     Additionally, we can have an optional `:self` label to denote self reference.
 
-  - ```bnf
-    output:<id>
+  - ```ebnf
+    "output:" id
     ```
 
     For either the outputs built from derivations,
     paths copied to the store hashed that area single file hashed directly, or the via a hash algorithm other than [SHA-256][sha-256].
     (in that case "source" is used; this is only necessary for compatibility).
 
-    `<id>` is the name of the output (usually, "out").
-    For content-addressed store objects, `<id>`, is always "out".
+    `id` is the name of the output (usually, "out").
+    For content-addressed store objects, `id`, is always "out".
 
-- `<inner-digest>` = base-16 representation of a SHA-256 hash of `<inner-pre>`
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
 
-- `<inner-pre>` = one of the following based on `<type>`:
+- `inner-pre` = one of the following based on `type`:
 
-  - if `<type>` = `text:...`:
+  - if `type` = `"text:" ...`:
 
     the string written to the resulting store path.
 
-  - if `<type>` = `source:...`:
+  - if `type` = `"source:" ...`:
 
     the the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
 
-  - if `<type>` = `output:<id>`:
+  - if `type` = `"output:" id`:
 
     - For input-addressed derivation outputs:
 
@@ -72,31 +75,38 @@ where
 
     - For content-addressed store paths:
 
-      the string `fixed:out:<rec><algo>:<hash>:`, where
+      the string
 
-      - `<rec>` = one of:
+      ```ebnf
+      "fixed:out:" rec algo ":" hash ":"
+      ```
+
+      where
+
+      - `rec` = one of:
 
         - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - `` (empty string) for hashes of the flat (single file) serialization
 
-      - `<algo>` = `md5`, `sha1` or `sha256`
+      - `algo` = `md5`, `sha1` or `sha256`
 
-      - `<hash>` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
+      - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `<id>` = `out`, regardless of the name part of the store path.
-      Also note that NAR + SHA-256 must not use this case, and instead must use the `<type>` = `source:...` case.
+      Note that `id` = `out`, regardless of the name part of the store path.
+      Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
 ## Historical Note
 
-The `<type>` = `source:...` and `<type>` = `output:out` grammars technically overlap, in that both can represent data hashed by its SHA-256 NAR serialization.
+The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
+in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `<inner-pre>` starting with `output:out:`, while the latter would have an `<inner-pre>` starting with `source:`.
+The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
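
To make the grammar introduced in this patch concrete, here is a minimal, hedged sketch of how the pieces compose for a `source` store object. It is an illustration only, not part of the patch: the store name `hello-2.12`, the store directory `/nix/store`, and the NAR digest are made-up placeholders, OpenSSL's `SHA256` (link with `-lcrypto`) stands in for Nix's own hash machinery, and the result is printed as plain hex rather than Nix's base-32 alphabet.

```cpp
#include <openssl/sha.h>

#include <cstdio>
#include <string>

int main()
{
    // Placeholder for the base-16 SHA-256 hash of the object's NAR
    // serialization (the "inner digest" described by the specification).
    std::string innerDigest =
        "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";

    // fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
    std::string fingerprint =
        "source:sha256:" + innerDigest + ":/nix/store:hello-2.12";

    // Hash the fingerprint with SHA-256.
    unsigned char full[SHA256_DIGEST_LENGTH];
    SHA256(reinterpret_cast<const unsigned char *>(fingerprint.data()),
           fingerprint.size(), full);

    // The digest in the store path is derived from the first 160 bits of
    // that hash; real Nix renders it in its own base-32 alphabet, which this
    // sketch replaces with plain hex.
    std::printf("/nix/store/");
    for (int i = 0; i < 20; ++i)
        std::printf("%02x", full[i]);
    std::printf("-hello-2.12\n");
    return 0;
}
```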

From 95190e68ed8f6c152f8ba01b2da7baeacb342c0e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:21:54 -0500
Subject: [PATCH 286/307] =?UTF-8?q?Mention=20the=20use=20of=20Extended=20B?=
 =?UTF-8?q?ackus=E2=80=93Naur=20form?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 doc/manual/src/protocols/store-path.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index d5dec77b5..57da808f9 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -2,6 +2,8 @@
 
 This is the complete specification for how store paths are calculated.
 
+The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.
+
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 

From 30f6b0f9c55407207bd421b9a5446b455acd1e8e Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:22:08 -0500
Subject: [PATCH 287/307] `pre` -> `fingerprint` in store path grammar

As suggested by @roberth in
https://github.com/NixOS/nix/pull/9295#discussion_r1486402040.

Thanks!
---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 57da808f9..649bb4c45 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,11 +12,11 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
-- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `pre`
+- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
   Th is :the hash part of the store name
 
-- `pre` = the string
+- `fingerprint` = the string
 
   ```ebnf
   type ":" sha256 ":" inner-digest ":" store ":" name
@@ -57,9 +57,9 @@ where
     `id` is the name of the output (usually, "out").
     For content-addressed store objects, `id`, is always "out".
 
-- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-pre`
+- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
-- `inner-pre` = one of the following based on `type`:
+- `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
 
@@ -108,7 +108,7 @@ in that both can represent data hashed by its SHA-256 NAR serialization.
 
 The original reason for this way of computing names was to prevent name collisions (for security).
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
-The former would have an `inner-pre` starting with `output:out:`, while the latter would have an `inner-pre` starting with `source:`.
+The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).

From 0862d7ce57f7e16cf7f8ded3db7586a20fa8da28 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:26:02 -0500
Subject: [PATCH 288/307] Move around non-terminals

---
 doc/manual/src/protocols/store-path.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 649bb4c45..61f9d1604 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -12,9 +12,13 @@ store-path = store-dir "/" digest "-" name
 ```
 where
 
+- `name` = the name of the store object.
+
+- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)
+
 - `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
 
-  Th is :the hash part of the store name
+  This the hash part of the store name
 
 - `fingerprint` = the string
 
@@ -25,10 +29,6 @@ where
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
   (e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).
 
-- `name` = the name of the store object.
-
-- `store` = the [store directory](@docroot@/store/store-path.md#store-directory)
-
 - `type` = one of:
 
   - ```ebnf

From 4c3e4d6d7167e4fbd284eb00063882b8442e3e99 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:33:06 -0500
Subject: [PATCH 289/307] Sections, EBNF tweaks

---
 doc/manual/src/protocols/store-path.md | 34 +++++++++++++++++---------
 1 file changed, 22 insertions(+), 12 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 61f9d1604..ff075b3b6 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -7,6 +7,8 @@ The format of this specification is close to [Extended Backus–Naur form](https
 Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
 But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
 
+## Store path proper
+
 ```ebnf
 store-path = store-dir "/" digest "-" name
 ```
@@ -20,10 +22,10 @@ where
 
   This the hash part of the store name
 
-- `fingerprint` = the string
+## Fingerprint
 
-  ```ebnf
-  type ":" sha256 ":" inner-digest ":" store ":" name
+- ```ebnf
+  fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
   ```
 
   Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
@@ -32,14 +34,14 @@ where
 - `type` = one of:
 
   - ```ebnf
-    "text" ( ":" store-path )*
+    | "text" ( ":" store-path )*
     ```
 
     for encoded derivations written to the store.
     The optional trailing store paths are the references of the store object.
 
   - ```ebnf
-    "source" ( ":" store-path )*
+    | "source" ( ":" store-path )*
     ```
 
     For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
@@ -47,7 +49,7 @@ where
     Additionally, we can have an optional `:self` label to denote self reference.
 
   - ```ebnf
-    "output:" id
+    | "output:" id
     ```
 
     For either the outputs built from derivations,
@@ -59,6 +61,8 @@ where
 
 - `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
 
+## Inner fingerprint
+
 - `inner-fingerprint` = one of the following based on `type`:
 
   - if `type` = `"text:" ...`:
@@ -77,8 +81,6 @@ where
 
     - For content-addressed store paths:
 
-      the string
-
       ```ebnf
       "fixed:out:" rec algo ":" hash ":"
       ```
@@ -87,15 +89,23 @@ where
 
       - `rec` = one of:
 
-        - `r:` hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
+        - ```ebnf
+          | "r:"
+          ```
+          hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
-        - `` (empty string) for hashes of the flat (single file) serialization
+        - ```ebnf
+          |
+          ```
+          (empty string) for hashes of the flat (single file) serialization
 
-      - `algo` = `md5`, `sha1` or `sha256`
+      - ```ebf
+        algo = "md5" | "sha1" | "sha256"
+        ```
 
       - `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
 
-      Note that `id` = `out`, regardless of the name part of the store path.
+      Note that `id` = `"out"`, regardless of the name part of the store path.
       Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
 
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR

From c873a140d711eb1c9f268f0903021bb68e764684 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 11:34:54 -0500
Subject: [PATCH 290/307] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index ff075b3b6..2fc4bf7af 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -120,7 +120,7 @@ The original reason for this way of computing names was to prevent name collisio
 For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
-Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separting derivation-produced vs manually-hashed content-addressed data like this was not useful.
+Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
 Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From 5169f5f4d9743fa10a8578625d2c290141949d54 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:01:54 -0500
Subject: [PATCH 291/307] Apply suggestions from code review

Co-authored-by: Robert Hensing 
---
 doc/manual/src/protocols/store-path.md | 2 +-
 src/libstore/store-api.cc              | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 2fc4bf7af..1f619e6a2 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -111,7 +111,7 @@ where
 [Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
 [sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
 
-## Historical Note
+### Historical Note
 
 The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
 in that both can represent data hashed by its SHA-256 NAR serialization.
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 1fb6cdce7..4238cbbf5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -69,7 +69,7 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
 The exact specification of store paths is in `protocols/store-path.md`
 in the Nix manual. These few functions implement that specification.
 
-If changes do these functions go behind mere implementation changes but
+If changes to these functions go beyond mere implementation changes i.e.
 also update the user-visible behavior, please update the specification
 to match.
 */

From 898fd1e48d117c7cd28bbc04cd230450f1df9adc Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:04:37 -0500
Subject: [PATCH 292/307] Update doc/manual/src/protocols/store-path.md

---
 doc/manual/src/protocols/store-path.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 1f619e6a2..595c7a10e 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -121,6 +121,6 @@ For instance, the thinking was that it shouldn't be feasible to come up with a d
 The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
 
 Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
-Now, data this is to be SHA-256 + NAR-serialization content-addressed always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
+Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
 This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
 It also removes the ambiguity from the grammar.

From f29d2a9d11c6b1c4cb8011e45dc45d99e4d572bd Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Mon, 12 Feb 2024 12:30:28 -0500
Subject: [PATCH 293/307] Small EBNF fix

---
 doc/manual/src/protocols/store-path.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/manual/src/protocols/store-path.md b/doc/manual/src/protocols/store-path.md
index 595c7a10e..fcf8038fc 100644
--- a/doc/manual/src/protocols/store-path.md
+++ b/doc/manual/src/protocols/store-path.md
@@ -95,11 +95,11 @@ where
           hashes of the for [Nix Archive (NAR)] (arbitrary file system object) serialization
 
         - ```ebnf
-          |
+          | ""
           ```
           (empty string) for hashes of the flat (single file) serialization
 
-      - ```ebf
+      - ```ebnf
         algo = "md5" | "sha1" | "sha256"
         ```
 

From 64cbd4c05a413eae55cde784594472f921fc7367 Mon Sep 17 00:00:00 2001
From: Anton Samokhvalov 
Date: Mon, 12 Feb 2024 23:37:40 +0300
Subject: [PATCH 294/307] Update nar-info-disk-cache.cc

fix case when asserts are no-ops, like in release builds
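
For context on why the `assert` had to go: with `NDEBUG` defined (the usual release configuration), the whole `assert(...)` expression is compiled out, so a side effect placed inside it silently disappears. A minimal sketch of the pitfall, using a made-up `FakeCursor` type rather than the real SQLite statement wrapper:

```cpp
#include <cassert>
#include <cstdio>
#include <cstdlib>

// Stand-in for the SQLite statement cursor: next() has the side effect of
// consuming a row, which must happen even in release builds.
struct FakeCursor
{
    int rows = 1;
    bool next() { return rows-- > 0; }
};

int main()
{
    FakeCursor a;
    // Risky: if this translation unit is compiled with -DNDEBUG, the call to
    // a.next() is removed together with the assert, so the row is never read.
    assert(a.next());

    FakeCursor b;
    // Safe: the call always runs; only the failure handling is explicit.
    if (!b.next()) { abort(); }

    std::printf("rows left: a=%d b=%d\n", a.rows, b.rows);
    return 0;
}
```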
---
 src/libstore/nar-info-disk-cache.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 310105c75..07beb8acb 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -209,7 +209,7 @@ public:
 
             {
                 auto r(state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority));
-                assert(r.next());
+                if (!r.next()) { abort(); }
                 ret.id = (int) r.getInt(0);
             }
 

From 64a076fe0678ee46fbec1446df1fcfbb713cfdf6 Mon Sep 17 00:00:00 2001
From: "Travis A. Everett" 
Date: Tue, 13 Feb 2024 01:18:08 -0600
Subject: [PATCH 295/307] install-darwin: fix symbolic perms for install cmd

The symbolic form in use here doesn't seem to have an effect
in either the BSD or coreutils install commands, leaving the
daemon plist with empty permissions. This seems to cause its
own problems.

I think I've got the right symbolic syntax now :)
---
 scripts/install-darwin-multi-user.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 766f81bde..24c9052f9 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -102,7 +102,7 @@ poly_extra_try_me_commands() {
 poly_configure_nix_daemon_service() {
     task "Setting up the nix-daemon LaunchDaemon"
     _sudo "to set up the nix-daemon as a LaunchDaemon" \
-          /usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
+          /usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
 
     _sudo "to load the LaunchDaemon plist for nix-daemon" \
           launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist

From ce19338f9fa4e8fa1fea7faf33c0f2c384e590dd Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:30:17 +0100
Subject: [PATCH 296/307] update glossary entry on store types

the interesting information is on the proper pages, and is now presented
a bit more prominently.

the paragraph was a bit confusing to read, also because an anchor link
to an inline definition was in the middle of the sentence. "local store"
now has its own glossary entry.
---
 doc/manual/src/glossary.md | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..51f1e3a71 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -59,18 +59,21 @@
 
 - [store]{#gloss-store}
 
-  A collection of store objects, with operations to manipulate that collection.
-  See [Nix store](./store/index.md) for details.
+  A collection of [store objects][store object], with operations to manipulate that collection.
+  See [Nix Store](./store/index.md) for details.
 
-  There are many types of stores.
-  See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
-
-  From the perspective of the location where Nix is invoked, the Nix store can be  referred to _local_ or _remote_.
-  Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
-  Local stores can be used for building [derivations](#gloss-derivation).
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+  There are many types of stores, see [Store Types](./store/types/index.md) for details.
 
   [store]: #gloss-store
+
+- [local store]{#gloss-local-store}
+
+  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to _local_ or _remote_.
+  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
+  Local stores can be used for building [derivations][derivation].
+
+  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
+
   [local store]: #gloss-local-store
 
 - [chroot store]{#gloss-chroot-store}
@@ -87,7 +90,7 @@
 
 - [store path]{#gloss-store-path}
 
-  The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
+  The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.
 
   > **Example**
   >

From e37d50289509dcac2303bc4de7065879dd58c731 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 13:34:51 +0100
Subject: [PATCH 297/307] add instructions to wipe the substituter lookup cache
 (#9498)

* add instructions to wipe the substituter lookup cache
---
 src/libstore/globals.hh | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 070e252b6..941adba78 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -793,10 +793,17 @@ public:
     Setting ttlNegativeNarInfoCache{
         this, 3600, "narinfo-cache-negative-ttl",
         R"(
-          The TTL in seconds for negative lookups. If a store path is queried
-          from a substituter but was not found, there will be a negative
-          lookup cached in the local disk cache database for the specified
-          duration.
+          The TTL in seconds for negative lookups.
+          If a store path is queried from a [substituter](#conf-substituters) but was not found, there will be a negative lookup cached in the local disk cache database for the specified duration.
+
+          Set to `0` to force updating the lookup cache.
+
+          To wipe the lookup cache completely:
+
+          ```shell-session
+          $ rm $HOME/.cache/nix/binary-cache-v*.sqlite*
+          # rm /root/.cache/nix/binary-cache-v*.sqlite*
+          ```
         )"};
 
     Setting ttlPositiveNarInfoCache{

From fd82ba0985aefc2a5498045f0caf16f8b2566cf1 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:13:56 +0100
Subject: [PATCH 298/307] extract reference documentation on remote builds
 (#9526)

- move all reference documentation to the `builders` configuration setting
- reword documentation on machine specification, add examples
- disable showing the default value, as it rendered as `@/dummy/machines`, which is wrong
- highlight the examples
- link to the configuration docs for distributed builds
- builder -> build machine

Co-authored-by: Janik H 
---
 doc/manual/redirects.js                       |   2 +-
 .../src/advanced-topics/distributed-builds.md | 101 ++-------------
 doc/manual/src/contributing/hacking.md        |   4 +-
 doc/manual/src/glossary.md                    |   2 +-
 .../src/language/advanced-attributes.md       |   2 +-
 doc/manual/src/language/derivations.md        |   2 +-
 src/libstore/build/derivation-goal.cc         |  10 +-
 src/libstore/build/worker.cc                  |  22 +++-
 src/libstore/globals.hh                       | 118 +++++++++++++++++-
 9 files changed, 152 insertions(+), 111 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..27ab1853c 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -18,7 +18,7 @@ const redirects = {
     "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
     "chap-diff-hook": "advanced-topics/diff-hook.html",
     "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
-    "chap-distributed-builds": "advanced-topics/distributed-builds.html",
+    "chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
     "chap-post-build-hook": "advanced-topics/post-build-hook.html",
     "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
     "chap-writing-nix-expressions": "language/index.html",
diff --git a/doc/manual/src/advanced-topics/distributed-builds.md b/doc/manual/src/advanced-topics/distributed-builds.md
index 507c5ecb7..52acd039c 100644
--- a/doc/manual/src/advanced-topics/distributed-builds.md
+++ b/doc/manual/src/advanced-topics/distributed-builds.md
@@ -36,16 +36,8 @@ error: cannot connect to 'mac'
 then you need to ensure that the `PATH` of non-interactive login shells
 contains Nix.
 
-> **Warning**
->
-> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
->
-> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.
-
-The list of remote machines can be specified on the command line or in
-the Nix configuration file. The former is convenient for testing. For
-example, the following command allows you to build a derivation for
-`x86_64-darwin` on a Linux machine:
+The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
+For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:
 
 ```console
 $ uname
@@ -60,97 +52,20 @@ $ cat ./result
 Darwin
 ```
 
-It is possible to specify multiple builders separated by a semicolon or
-a newline, e.g.
+It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.
 
 ```console
   --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
 ```
 
-Each machine specification consists of the following elements, separated
-by spaces. Only the first element is required. To leave a field at its
-default, set it to `-`.
-
-1.  The URI of the remote store in the format
-    `ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
-    For backward compatibility, `ssh://` may be omitted. The hostname
-    may be an alias defined in your `~/.ssh/config`.
-
-2.  A comma-separated list of Nix platform type identifiers, such as
-    `x86_64-darwin`. It is possible for a machine to support multiple
-    platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
-    defaults to the local platform type.
-
-3.  The SSH identity file to be used to log in to the remote machine. If
-    omitted, SSH will use its regular identities.
-
-4.  The maximum number of builds that Nix will execute in parallel on
-    the machine. Typically this should be equal to the number of CPU
-    cores. For instance, the machine `itchy` in the example will execute
-    up to 8 builds in parallel.
-
-5.  The “speed factor”, indicating the relative speed of the machine. If
-    there are multiple machines of the right type, Nix will prefer the
-    fastest, taking load into account.
-
-6.  A comma-separated list of *supported features*. If a derivation has
-    the `requiredSystemFeatures` attribute, then Nix will only perform
-    the derivation on a machine that has the specified features. For
-    instance, the attribute
-
-    ```nix
-    requiredSystemFeatures = [ "kvm" ];
-    ```
-
-    will cause the build to be performed on a machine that has the `kvm`
-    feature.
-
-7.  A comma-separated list of *mandatory features*. A machine will only
-    be used to build a derivation if all of the machine’s mandatory
-    features appear in the derivation’s `requiredSystemFeatures`
-    attribute.
-
-8.  The (base64-encoded) public host key of the remote machine. If omitted, SSH
-    will use its regular known-hosts file. Specifically, the field is calculated
-    via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
-
-For example, the machine specification
-
-    nix@scratchy.labs.cs.uu.nl  i686-linux      /home/nix/.ssh/id_scratchy_auto        8 1 kvm
-    nix@itchy.labs.cs.uu.nl     i686-linux      /home/nix/.ssh/id_scratchy_auto        8 2
-    nix@poochie.labs.cs.uu.nl   i686-linux      /home/nix/.ssh/id_scratchy_auto        1 2 kvm benchmark
-
-specifies several machines that can perform `i686-linux` builds.
-However, `poochie` will only do builds that have the attribute
-
-```nix
-requiredSystemFeatures = [ "benchmark" ];
-```
-
-or
-
-```nix
-requiredSystemFeatures = [ "benchmark" "kvm" ];
-```
-
-`itchy` cannot do builds that require `kvm`, but `scratchy` does support
-such builds. For regular builds, `itchy` will be preferred over
-`scratchy` because it has a higher speed factor.
-
-Remote builders can also be configured in `nix.conf`, e.g.
+Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.
 
     builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd
 
-Finally, remote builders can be configured in a separate configuration
-file included in `builders` via the syntax `@file`. For example,
+Finally, remote build machines can be configured in a separate configuration
+file included in `builders` via the syntax `@/path/to/file`. For example,
 
     builders = @/etc/nix/machines
 
-causes the list of machines in `/etc/nix/machines` to be included. (This
-is the default.)
-
-If you want the builders to use caches, you likely want to set the
-option `builders-use-substitutes` in your local `nix.conf`.
-
-To build only on remote builders and disable building on the local
-machine, you can use the option `--max-jobs 0`.
+causes the list of machines in `/etc/nix/machines` to be included.
+(This is the default.)
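
Since it is the daemon user (typically `root`) that opens the SSH connection, a connection failure like the one above can usually be reproduced outside of Nix. A sketch, reusing the hypothetical `mac` host from the example:

```console
# Must succeed for the account the daemon runs as, and `nix` must be on the
# PATH of the remote non-interactive shell:
$ sudo ssh mac 'nix --version'
```
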
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 9e2470859..6c9be3635 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -147,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:
 
 In order to build Nix for a different platform than the one you're currently
 on, you need a way for your current Nix installation to build code for that
-platform. Common solutions include [remote builders] and [binary format emulation]
+platform. Common solutions include [remote build machines] and [binary format emulation]
 (only supported on NixOS).
 
-[remote builders]: ../advanced-topics/distributed-builds.md
+[remote build machines]: @docroot@/language/derivations.md#attr-builder
 [binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
 
 Given such a setup, executing the build only requires selecting the respective attribute.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 13b2906f7..359f727d7 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -37,7 +37,7 @@
   This can be achieved by:
   - Fetching a pre-built [store object] from a [substituter]
   - Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
-  - Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
+  - Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
   
 
   See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
diff --git a/doc/manual/src/language/advanced-attributes.md b/doc/manual/src/language/advanced-attributes.md
index 5a6c00cd4..7306fc182 100644
--- a/doc/manual/src/language/advanced-attributes.md
+++ b/doc/manual/src/language/advanced-attributes.md
@@ -257,7 +257,7 @@ Derivations can declare some infrequently used optional attributes.
     of the environment (typically, a few hundred kilobyte).
 
   - [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
-    If this attribute is set to `true` and [distributed building is enabled](../advanced-topics/distributed-builds.md), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
+    If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
     This is useful for derivations that are cheapest to build locally.
 
   - [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
diff --git a/doc/manual/src/language/derivations.md b/doc/manual/src/language/derivations.md
index cbb30d074..75f824a34 100644
--- a/doc/manual/src/language/derivations.md
+++ b/doc/manual/src/language/derivations.md
@@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
   The system type on which the [`builder`](#attr-builder) executable is meant to be run.
 
   A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
-  It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
+  It can automatically [build on other platforms](@docroot@/language/derivations.md#attr-builder) by forwarding build requests to other machines.
 
   [`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
 
diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc
index 1b326ee13..29bf2852f 100644
--- a/src/libstore/build/derivation-goal.cc
+++ b/src/libstore/build/derivation-goal.cc
@@ -780,9 +780,13 @@ void DerivationGoal::tryToBuild()
 
 void DerivationGoal::tryLocalBuild() {
     throw Error(
-        "unable to build with a primary store that isn't a local store; "
-        "either pass a different '--store' or enable remote builds."
-        "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+        R"(
+        Unable to build with a primary store that isn't a local store;
+        either pass a different '--store' or enable remote builds.
+
+        For more information check 'man nix.conf' and search for '/machines'.
+        )"
+    );
 }
 
 
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index d57e22393..3a34f4006 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -331,13 +331,23 @@ void Worker::run(const Goals & _topGoals)
             if (awake.empty() && 0U == settings.maxBuildJobs)
             {
                 if (getMachines().empty())
-                   throw Error("unable to start any build; either increase '--max-jobs' "
-                            "or enable remote builds."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        either increase '--max-jobs' or enable remote builds.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
                 else
-                   throw Error("unable to start any build; remote machines may not have "
-                            "all required system features."
-                            "\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
+                   throw Error(
+                        R"(
+                        Unable to start any build;
+                        remote machines may not have all required system features.
+
+                        For more information run 'man nix.conf' and search for '/machines'.
+                        )"
+                    );
 
             }
             assert(!awake.empty());
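
The first of these error paths can be reproduced deliberately by disabling local builds while leaving the machine list empty; a sketch, using a throwaway derivation that cannot be substituted:

```console
# Fails with: "Unable to start any build; either increase '--max-jobs' or enable remote builds."
$ nix-build -E 'with import <nixpkgs> {}; runCommand "x" {} "echo hi > $out"' \
    --max-jobs 0 --builders ''
```
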
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 941adba78..fa2dc8681 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -270,9 +270,121 @@ public:
     Setting<Strings> builders{
         this, "@" + nixConfDir + "/machines", "builders",
         R"(
-          A semicolon-separated list of build machines.
-          For the exact format and examples, see [the manual chapter on remote builds](../advanced-topics/distributed-builds.md)
-        )"};
+          A semicolon- or newline-separated list of build machines.
+
+          In addition to the [usual ways of setting configuration options](@docroot@/command-ref/conf-file.md), the value can be read from a file by prefixing its absolute path with `@`.
+
+          > **Example**
+          >
+          > This is the default setting:
+          >
+          > ```
+          > builders = @/etc/nix/machines
+          > ```
+
+          Each machine specification consists of the following elements, separated by spaces.
+          Only the first element is required.
+          To leave a field at its default, set it to `-`.
+
+          1. The URI of the remote store in the format `ssh://[username@]hostname`.
+
+             > **Example**
+             >
+             > `ssh://nix@mac`
+
+             For backward compatibility, `ssh://` may be omitted.
+             The hostname may be an alias defined in `~/.ssh/config`.
+
+          2. A comma-separated list of [Nix system types](@docroot@/contributing/hacking.md#system-type).
+             If omitted, this defaults to the local platform type.
+
+             > **Example**
+             >
+             > `aarch64-darwin`
+
+             It is possible for a machine to support multiple platform types.
+
+             > **Example**
+             >
+             > `i686-linux,x86_64-linux`
+
+          3. The SSH identity file to be used to log in to the remote machine.
+             If omitted, SSH will use its regular identities.
+
+             > **Example**
+             >
+             > `/home/user/.ssh/id_mac`
+
+          4. The maximum number of builds that Nix will execute in parallel on the machine.
+             Typically this should be equal to the number of CPU cores.
+
+          5. The “speed factor”, indicating the relative speed of the machine as a positive integer.
+             If there are multiple machines of the right type, Nix will prefer the fastest, taking load into account.
+
+          6. A comma-separated list of supported [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all the features in the derivation's [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute are supported by that machine.
+
+          7. A comma-separated list of required [system features](#conf-system-features).
+
+             A machine will only be used to build a derivation if all of the machine’s required features appear in the derivation’s [`requiredSystemFeatures`](@docroot@/language/advanced-attributes.html#adv-attr-requiredSystemFeatures) attribute.
+
+          8. The (base64-encoded) public host key of the remote machine.
+             If omitted, SSH will use its regular `known_hosts` file.
+
+             The value for this field can be obtained via `base64 -w0`.
+
+          > **Example**
+          >
+          > Multiple builders specified on the command line:
+          >
+          > ```console
+          > --builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
+          > ```
+
+          > **Example**
+          >
+          > This specifies several machines that can perform `i686-linux` builds:
+          >
+          > ```
+          > nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy 8 1 kvm
+          > nix@itchy.labs.cs.uu.nl    i686-linux /home/nix/.ssh/id_scratchy 8 2
+          > nix@poochie.labs.cs.uu.nl  i686-linux /home/nix/.ssh/id_scratchy 1 2 kvm benchmark
+          > ```
+          >
+          > However, `poochie` will only build derivations that have the attribute
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" ];
+          > ```
+          >
+          > or
+          >
+          > ```nix
+          > requiredSystemFeatures = [ "benchmark" "kvm" ];
+          > ```
+          >
+          > `itchy` cannot do builds that require `kvm`, but `scratchy` does support such builds.
+          > For regular builds, `itchy` will be preferred over `scratchy` because it has a higher speed factor.
+
+          For Nix to use substituters, the calling user must be in the [`trusted-users`](#conf-trusted-users) list.
+
+          > **Note**
+          >
+          > A build machine must be accessible via SSH and have Nix installed.
+          > `nix` must be available in `$PATH` for the user connecting over SSH.
+
+          > **Warning**
+          >
+          > If you are building via the Nix daemon (default), the Nix daemon user account on the local machine (that is, `root`) requires access to a user account on the remote machine (not necessarily `root`).
+          >
+          > If you can’t or don’t want to configure `root` to be able to access the remote machine, set [`store`](#conf-store) to any [local store](@docroot@/store/types/local-store.html), e.g. by passing `--store /tmp` to the command on the local machine.
+
+          To build only on remote machines and disable local builds, set [`max-jobs`](#conf-max-jobs) to 0.
+
+          If you want the remote machines to use substituters, set [`builders-use-substitutes`](#conf-builders-use-substituters) to `true`.
+        )",
+        {}, false};
 
     Setting<bool> alwaysAllowSubstitutes{
         this, false, "always-allow-substitutes",
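
To illustrate how the eight fields above compose, here is a sketch of adding an entry to the default machines file; the hostname, user, and identity file are placeholders, and field 8 is produced with the `base64 -w0` invocation mentioned above:

```console
# Field 8: the remote machine's base64-encoded public host key.
$ hostKey=$(ssh nix@builder 'base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub')

# URI, platforms, identity file, max jobs, speed factor,
# supported features, mandatory features, host key:
$ echo "ssh://nix@builder x86_64-linux /root/.ssh/id_builder 8 1 kvm - $hostKey" \
    | sudo tee -a /etc/nix/machines
```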

From bb63bd50e6d817e5ca52c1d1d21232164a64f993 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Tue, 13 Feb 2024 14:14:20 +0100
Subject: [PATCH 299/307] <nix/fetchurl.nix>: Restore support for "impure =
 true"

---
 src/libstore/builtins/fetchurl.cc      | 8 ++++----
 tests/functional/fetchurl.sh           | 2 +-
 tests/functional/impure-derivations.sh | 4 ++++
 3 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index a9f2e748e..559efcc17 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -20,9 +20,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     if (!out)
         throw Error("'builtin:fetchurl' requires an 'out' output");
 
-    auto dof = std::get_if<DerivationOutput::CAFixed>(&out->raw);
-    if (!dof)
-        throw Error("'builtin:fetchurl' must be a fixed-output derivation");
+    if (!(drv.type().isFixed() || drv.type().isImpure()))
+        throw Error("'builtin:fetchurl' must be a fixed-output or impure derivation");
 
     auto getAttr = [&](const std::string & name) {
         auto i = drv.env.find(name);
@@ -67,7 +66,8 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
     };
 
     /* Try the hashed mirrors first. */
-    if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
+    auto dof = std::get_if<DerivationOutput::CAFixed>(&out->raw);
+    if (dof && dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
         for (auto hashedMirror : settings.hashedMirrors.get())
             try {
                 if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
diff --git a/tests/functional/fetchurl.sh b/tests/functional/fetchurl.sh
index 5a05cc5e1..a3620f52b 100644
--- a/tests/functional/fetchurl.sh
+++ b/tests/functional/fetchurl.sh
@@ -83,4 +83,4 @@ test -L $outPath/symlink
 requireDaemonNewerThan "2.20"
 expected=100
 if [[ -v NIX_DAEMON_PACKAGE ]]; then expected=1; fi # work around the daemon not returning a 100 status correctly
-expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output derivation'
+expectStderr $expected nix-build --expr '{ url }: builtins.derivation { name = "nix-cache-info"; system = "x86_64-linux"; builder = "builtin:fetchurl"; inherit url; outputHashMode = "flat"; }' --argstr url file://$narxz 2>&1 | grep 'must be a fixed-output or impure derivation'
diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh
index 39d053a04..54ed6f5dd 100644
--- a/tests/functional/impure-derivations.sh
+++ b/tests/functional/impure-derivations.sh
@@ -63,3 +63,7 @@ path5=$(nix build -L --no-link --json --file ./impure-derivations.nix contentAdd
 path6=$(nix build -L --no-link --json --file ./impure-derivations.nix inputAddressedAfterCA | jq -r .[].outputs.out)
 [[ $(< $path6) = X ]]
 [[ $(< $TEST_ROOT/counter) = 5 ]]
+
+# Test nix/fetchurl.nix.
+path7=$(nix build -L --no-link --print-out-paths --expr "import <nix/fetchurl.nix> { impure = true; url = file://$PWD/impure-derivations.sh; }")
+cmp $path7 $PWD/impure-derivations.sh
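
Outside of the test suite, the restored behaviour can be exercised directly; a sketch, assuming the `impure-derivations` (and `ca-derivations`) experimental features are enabled as in the test harness:

```console
$ echo hello > /tmp/data
$ nix build -L --no-link --print-out-paths \
    --extra-experimental-features 'ca-derivations impure-derivations' \
    --expr 'import <nix/fetchurl.nix> { impure = true; url = "file:///tmp/data"; }'
```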

From 39c353f6fa40a5e0ace9e2c3e69848108944845c Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 14:52:51 +0100
Subject: [PATCH 300/307] reword description of the `cores` setting (#9522)

* reword description of the `cores` setting

- be precise about the `builder` executable
- clearly distinguish between `builder` and job parallelism
- clarify the role of `mkDerivation` in the example
- remove prose for the default, it's shown programmatically
- mention relation to `max-jobs`
---
 src/libstore/globals.hh | 23 +++++++++++++++--------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index fa2dc8681..8330d6571 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -180,14 +180,21 @@ public:
         getDefaultCores(),
         "cores",
         R"(
-          Sets the value of the `NIX_BUILD_CORES` environment variable in the
-          invocation of builders. Builders can use this variable at their
-          discretion to control the maximum amount of parallelism. For
-          instance, in Nixpkgs, if the derivation attribute
-          `enableParallelBuilding` is set to `true`, the builder passes the
-          `-jN` flag to GNU Make. It can be overridden using the `--cores`
-          command line switch and defaults to `1`. The value `0` means that
-          the builder should use all available CPU cores in the system.
+          Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation.
+          The `builder` executable can use this variable to control its own maximum amount of parallelism.
+
+          
+          For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it will pass the `-j${NIX_BUILD_CORES}` flag to GNU Make.
+
+          The value `0` means that the `builder` should use all available CPU cores in the system.
+
+          > **Note**
+          >
+          > The number of parallel local Nix build jobs is independently controlled with the [`max-jobs`](#conf-max-jobs) setting.
         )",
         {"build-cores"},
         // Don't document the machine-specific default value
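
For reference, a minimal sketch of how a `builder` script consumes this variable, mirroring what `mkDerivation` does when `enableParallelBuilding = true`:

```bash
# NIX_BUILD_CORES=0 means "use all available cores" (see the description above).
cores="${NIX_BUILD_CORES:-1}"
if [ "$cores" -eq 0 ]; then cores="$(nproc)"; fi
make -j"$cores"
```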

From 8bebf9607cbf07fbf0f98d835f20df1f9736d5ff Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:37:45 -0500
Subject: [PATCH 301/307] Split `hash.sh` test in two

Converting hashes and hashing files are pretty separate tasks, and more
test parallelism is better.
---
 tests/functional/hash-convert.sh           | 105 +++++++++++++++++++++
 tests/functional/{hash.sh => hash-path.sh} | 104 --------------------
 tests/functional/local.mk                  |   3 +-
 3 files changed, 107 insertions(+), 105 deletions(-)
 create mode 100644 tests/functional/hash-convert.sh
 rename tests/functional/{hash.sh => hash-path.sh} (51%)

diff --git a/tests/functional/hash-convert.sh b/tests/functional/hash-convert.sh
new file mode 100644
index 000000000..9b3afc10b
--- /dev/null
+++ b/tests/functional/hash-convert.sh
@@ -0,0 +1,105 @@
+source common.sh
+
+# Conversion with `nix hash` `nix-hash` and `nix hash convert`
+try3() {
+    # $1 = hash algo
+    # $2 = expected hash in base16
+    # $3 = expected hash in base32
+    # $4 = expected hash in base64
+    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
+    [ "$h64" = "$4" ]
+    h64=$(nix-hash --type "$1" --to-base64 "$2")
+    [ "$h64" = "$4" ]
+    # Deprecated experiment
+    h64=$(nix hash to-base64 --type "$1" "$2")
+    [ "$h64" = "$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix-hash --type "$1" --to-sri "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash to-sri --type "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix-hash --type "$1" --to-base32 "$2")
+    [ "$h32" = "$3" ]
+    h32=$(nix hash to-base32 --type "$1" "$2")
+    [ "$h32" = "$3" ]
+    h16=$(nix-hash --type "$1" --to-base16 "$h32")
+    [ "$h16" = "$2" ]
+
+    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 --type "$1" "$h64")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash convert --to base16 "$sri")
+    [ "$h16" = "$2" ]
+    h16=$(nix hash to-base16 "$sri")
+    [ "$h16" = "$2" ]
+
+    #
+    # Converting from SRI
+    #
+
+    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
+    sri=$(nix hash convert "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --from sri --to sri "$1-$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --to base64 "$1-$4")
+    [ "$sri" = "$4" ]
+
+    #
+    # Auto-detecting the input from algo and length.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    sri=$(nix hash convert --hash-algo "$1" "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format succeeds.
+    #
+
+    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
+    [ "$sri" = "$1-$4" ]
+    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
+    [ "$sri" = "$1-$4" ]
+
+    #
+    # Asserting input format fails.
+    #
+
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
+    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
+
+}
+
+try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
+try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
+try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
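
For readers unfamiliar with the new subcommand, the conversions asserted by `try3` look like this interactively (values taken from the sha256 test vector above):

```console
$ nix hash convert --hash-algo sha256 --to base64 \
    ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=

$ nix hash convert --to base16 sha256-ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0=
ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
```
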
diff --git a/tests/functional/hash.sh b/tests/functional/hash-path.sh
similarity index 51%
rename from tests/functional/hash.sh
rename to tests/functional/hash-path.sh
index ff270076e..6d096b29b 100644
--- a/tests/functional/hash.sh
+++ b/tests/functional/hash-path.sh
@@ -80,107 +80,3 @@ try2 md5 "20f3ffe011d4cfa7d72bfabef7882836"
 rm $TEST_ROOT/hash-path/hello
 ln -s x $TEST_ROOT/hash-path/hello
 try2 md5 "f78b733a68f5edbdf9413899339eaa4a"
-
-# Conversion with `nix hash` `nix-hash` and `nix hash convert`
-try3() {
-    # $1 = hash algo
-    # $2 = expected hash in base16
-    # $3 = expected hash in base32
-    # $4 = expected hash in base64
-    h64=$(nix hash convert --hash-algo "$1" --to base64 "$2")
-    [ "$h64" = "$4" ]
-    h64=$(nix-hash --type "$1" --to-base64 "$2")
-    [ "$h64" = "$4" ]
-    # Deprecated experiment
-    h64=$(nix hash to-base64 --type "$1" "$2")
-    [ "$h64" = "$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" --to sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix-hash --type "$1" --to-sri "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash to-sri --type "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    h32=$(nix hash convert --hash-algo "$1" --to base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix-hash --type "$1" --to-base32 "$2")
-    [ "$h32" = "$3" ]
-    h32=$(nix hash to-base32 --type "$1" "$2")
-    [ "$h32" = "$3" ]
-    h16=$(nix-hash --type "$1" --to-base16 "$h32")
-    [ "$h16" = "$2" ]
-
-    h16=$(nix hash convert --hash-algo "$1" --to base16 "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 --type "$1" "$h64")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash convert --to base16 "$sri")
-    [ "$h16" = "$2" ]
-    h16=$(nix hash to-base16 "$sri")
-    [ "$h16" = "$2" ]
-
-    #
-    # Converting from SRI
-    #
-
-    # Input hash algo auto-detected from SRI and output defaults to SRI as well.
-    sri=$(nix hash convert "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --from sri --to sri "$1-$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --to base64 "$1-$4")
-    [ "$sri" = "$4" ]
-
-    #
-    # Auto-detecting the input from algo and length.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    sri=$(nix hash convert --hash-algo "$1" "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format succeeds.
-    #
-
-    sri=$(nix hash convert --hash-algo "$1" --from base16 "$2")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from nix32 "$3")
-    [ "$sri" = "$1-$4" ]
-    sri=$(nix hash convert --hash-algo "$1" --from base64 "$4")
-    [ "$sri" = "$1-$4" ]
-
-    #
-    # Asserting input format fails.
-    #
-
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$2" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from base16 "$3" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-    fail=$(nix hash convert --hash-algo "$1" --from nix32 "$4" 2>&1 || echo "exit: $?")
-    [[ "$fail" == *"error: input hash"*"exit: 1" ]]
-
-}
-
-try3 sha1 "800d59cfcd3c05e900cb4e214be48f6b886a08df" "vw46m23bizj4n8afrc0fj19wrp7mj3c0" "gA1Zz808BekAy04hS+SPa4hqCN8="
-try3 sha256 "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" "1b8m03r63zqhnjf7l5wnldhh7c134ap5vpj0850ymkq1iyzicy5s" "ungWv48Bz+pBQUDeXa4iI7ADYaOWF3qctBD/YfIAFa0="
-try3 sha512 "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c33596fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" "12k9jiq29iyqm03swfsgiw5mlqs173qazm3n7daz43infy12pyrcdf30fkk3qwv4yl2ick8yipc2mqnlh48xsvvxl60lbx8vp38yji0" "IEqPxt2oLwoM7XvrjgikFlfBbvRosiioJ5vjMacDwzWW/RXBOxsH+aodO+pXeJygMa2Fx6cd1wNU7GMSOMo0RQ=="
diff --git a/tests/functional/local.mk b/tests/functional/local.mk
index f369c7c2c..18eb887cd 100644
--- a/tests/functional/local.mk
+++ b/tests/functional/local.mk
@@ -47,7 +47,8 @@ nix_tests = \
   optimise-store.sh \
   substitute-with-invalid-ca.sh \
   signing.sh \
-  hash.sh \
+  hash-convert.sh \
+  hash-path.sh \
   gc-non-blocking.sh \
   check.sh \
   nix-shell.sh \

From 5b69409f6b479ff28870c0502682882ee14a9dc8 Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:08:44 +0100
Subject: [PATCH 302/307] only refer to the local store page

---
 doc/manual/src/glossary.md | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 51f1e3a71..6126b7e47 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -68,10 +68,6 @@
 
 - [local store]{#gloss-local-store}
 
-  From the perspective of the location where Nix is invoked, the Nix [store] can be referred to _local_ or _remote_.
-  Only a local store exposes a file system directory, typically `/nix/store`, to allow operating system processes to directly access store objects.
-  Local stores can be used for building [derivations][derivation].
-
   See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
 
   [local store]: #gloss-local-store

From bb2189235100a551ab416ff301bef6efd3adbc66 Mon Sep 17 00:00:00 2001
From: Eelco Dolstra 
Date: Wed, 7 Feb 2024 15:41:10 +0100
Subject: [PATCH 303/307] *.in files: Depend on config.status

---
 mk/templates.mk | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mk/templates.mk b/mk/templates.mk
index 866bdc17f..d5dae61c7 100644
--- a/mk/templates.mk
+++ b/mk/templates.mk
@@ -10,10 +10,10 @@ endef
 
 ifneq ($(MAKECMDGOALS), clean)
 
-$(buildprefix)%.h: %.h.in
+$(buildprefix)%.h: %.h.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
 
-$(buildprefix)%: %.in
+$(buildprefix)%: %.in $(buildprefix)config.status
 	$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
 
 endif
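
The practical effect, as a sketch (assuming the usual autoconf workflow): files generated from `*.in` templates are now refreshed after re-running `configure`, instead of remaining stale until deleted by hand:

```console
$ ./configure           # rewrites config.status
$ make                  # %.in-derived files (e.g. Makefile.config) are regenerated
```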

From f27205f743fcfd05126f5fa7cc83eefea7873f1f Mon Sep 17 00:00:00 2001
From: Valentin Gagarin 
Date: Tue, 13 Feb 2024 16:25:07 +0100
Subject: [PATCH 304/307] redirect local and chroot store to main page

---
 doc/manual/redirects.js    |  6 +++++-
 doc/manual/src/glossary.md | 11 +----------
 2 files changed, 6 insertions(+), 11 deletions(-)

diff --git a/doc/manual/redirects.js b/doc/manual/redirects.js
index d04f32b49..e25b17c76 100644
--- a/doc/manual/redirects.js
+++ b/doc/manual/redirects.js
@@ -358,7 +358,11 @@ const redirects = {
     "one-time-setup": "testing.html#one-time-setup",
     "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
     "characterization-testing": "#characterisation-testing-unit",
-  }
+  },
+  "glossary.html": {
+    "gloss-local-store": "store/types/local-store.html",
+    "gloss-chroot-store": "store/types/local-store.html",
+  },
 };
 
 // the following code matches the current page's URL against the set of redirects.
diff --git a/doc/manual/src/glossary.md b/doc/manual/src/glossary.md
index 6126b7e47..d257a8189 100644
--- a/doc/manual/src/glossary.md
+++ b/doc/manual/src/glossary.md
@@ -66,16 +66,6 @@
 
   [store]: #gloss-store
 
-- [local store]{#gloss-local-store}
-
-  See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
-
-  [local store]: #gloss-local-store
-
-- [chroot store]{#gloss-chroot-store}
-
-  A [local store] whose canonical path is anything other than `/nix/store`.
-
 - [binary cache]{#gloss-binary-cache}
 
   A *binary cache* is a Nix store which uses a different format: its
@@ -242,6 +232,7 @@
   - All paths in the store path's [closure] are valid.
 
   [validity]: #gloss-validity
+  [local store]: @docroot@/store/types/local-store.md
 
 - [user environment]{#gloss-user-env}
 

From 41dd9857c7dbd8a2df9c8da4b7cf8e0399088452 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 09:54:07 -0500
Subject: [PATCH 305/307] Proper `parse` and `render` functions for
 `FileIngestionMethod` and `ContentAddressMethod`

No outward facing behavior is changed.

Older methods with the same names that operate on a method + algo pair (for
old-style `:algo`) are renamed to `*WithAlgo`.

The functions are unit-tested in the same way the names for the hash
algorithms are tested.
---
 src/libstore/content-address.cc            | 31 +++++++++++++++----
 src/libstore/content-address.hh            | 22 +++++++++++---
 src/libstore/daemon.cc                     |  2 +-
 src/libstore/derivations.cc                | 12 ++++----
 src/libstore/remote-store.cc               |  2 +-
 src/libutil/file-content-address.cc        | 25 ++++++++++++++++
 src/libutil/file-content-address.hh        | 17 +++++++++++
 src/nix/add-to-store.cc                    | 13 +-------
 tests/unit/libstore/content-address.cc     | 35 ++++++++++++++++++++++
 tests/unit/libutil/file-content-address.cc | 33 ++++++++++++++++++++
 10 files changed, 162 insertions(+), 30 deletions(-)
 create mode 100644 tests/unit/libstore/content-address.cc
 create mode 100644 tests/unit/libutil/file-content-address.cc

diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index fc408f5af..2091f8e02 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -4,7 +4,7 @@
 
 namespace nix {
 
-std::string makeFileIngestionPrefix(FileIngestionMethod m)
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m)
 {
     switch (m) {
     case FileIngestionMethod::Flat:
@@ -16,10 +16,29 @@ std::string makeFileIngestionPrefix(FileIngestionMethod m)
     }
 }
 
-std::string ContentAddressMethod::renderPrefix() const
+std::string_view ContentAddressMethod::render() const
 {
     return std::visit(overloaded {
-        [](TextIngestionMethod) -> std::string { return "text:"; },
+        [](TextIngestionMethod) -> std::string_view { return "text"; },
+        [](FileIngestionMethod m2) {
+             /* Not prefixed for back compat with things that couldn't produce text before. */
+            return renderFileIngestionMethod(m2);
+        },
+    }, raw);
+}
+
+ContentAddressMethod ContentAddressMethod::parse(std::string_view m)
+{
+    if (m == "text")
+        return TextIngestionMethod {};
+    else
+        return parseFileIngestionMethod(m);
+}
+
+std::string_view ContentAddressMethod::renderPrefix() const
+{
+    return std::visit(overloaded {
+        [](TextIngestionMethod) -> std::string_view { return "text:"; },
         [](FileIngestionMethod m2) {
              /* Not prefixed for back compat with things that couldn't produce text before. */
             return makeFileIngestionPrefix(m2);
@@ -38,7 +57,7 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
     return FileIngestionMethod::Flat;
 }
 
-std::string ContentAddressMethod::render(HashAlgorithm ha) const
+std::string ContentAddressMethod::renderWithAlgo(HashAlgorithm ha) const
 {
     return std::visit(overloaded {
         [&](const TextIngestionMethod & th) {
@@ -133,7 +152,7 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
     };
 }
 
-std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
+std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parseWithAlgo(std::string_view caMethod)
 {
     std::string asPrefix = std::string{caMethod} + ":";
     // parseContentAddressMethodPrefix takes its argument by reference
@@ -155,7 +174,7 @@ std::string renderContentAddress(std::optional ca)
 
 std::string ContentAddress::printMethodAlgo() const
 {
-    return method.renderPrefix()
+    return std::string { method.renderPrefix() }
         + printHashAlgo(hash.algo);
 }
 
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index f0973412b..80538df50 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -36,7 +36,7 @@ struct TextIngestionMethod : std::monostate { };
  * Compute the prefix to the hash algorithm which indicates how the
  * files were ingested.
  */
-std::string makeFileIngestionPrefix(FileIngestionMethod m);
+std::string_view makeFileIngestionPrefix(FileIngestionMethod m);
 
 /**
  * An enumeration of all the ways we can content-address store objects.
@@ -59,6 +59,20 @@ struct ContentAddressMethod
 
     MAKE_WRAPPER_CONSTRUCTOR(ContentAddressMethod);
 
+    /**
+     * Parse a content addressing method (name).
+     *
+     * The inverse of `render`.
+     */
+    static ContentAddressMethod parse(std::string_view rawCaMethod);
+
+    /**
+     * Render a content addressing method (name).
+     *
+     * The inverse of `parse`.
+     */
+    std::string_view render() const;
+
     /**
      * Parse the prefix tag which indicates how the files
      * were ingested, with the fixed output case not prefixed for back
@@ -74,12 +88,12 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parsePrefix()`.
      */
-    std::string renderPrefix() const;
+    std::string_view renderPrefix() const;
 
     /**
      * Parse a content addressing method and hash type.
      */
-    static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);
+    static std::pair<ContentAddressMethod, HashAlgorithm> parseWithAlgo(std::string_view rawCaMethod);
 
     /**
      * Render a content addressing method and hash type in a
@@ -87,7 +101,7 @@ struct ContentAddressMethod
      *
      * The rough inverse of `parse()`.
      */
-    std::string render(HashAlgorithm ht) const;
+    std::string renderWithAlgo(HashAlgorithm ht) const;
 
     /**
      * Get the underlying way to content-address file system objects.
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index 8db93fa39..cf5020dfe 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -400,7 +400,7 @@ static void performOp(TunnelLogger * logger, ref store,
             logger->startWork();
             auto pathInfo = [&]() {
                 // NB: FramedSource must be out of scope before logger->stopWork();
-                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
+                auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parseWithAlgo(camStr);
                 auto hashAlgo = hashAlgo_; // work around clang bug
                 FramedSource source(from);
                 // TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 393806652..36042c06c 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -601,7 +601,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo));
                 s += ','; printUnquotedString(s, "");
             },
             [&](const DerivationOutput::Deferred &) {
@@ -612,7 +612,7 @@ std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
             [&](const DerivationOutput::Impure & doi) {
                 // FIXME
                 s += ','; printUnquotedString(s, "");
-                s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
+                s += ','; printUnquotedString(s, std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo));
                 s += ','; printUnquotedString(s, "impure");
             }
         }, i.second.raw);
@@ -984,7 +984,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::CAFloating & dof) {
                 out << ""
-                    << (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
+                    << (std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo))
                     << "";
             },
             [&](const DerivationOutput::Deferred &) {
@@ -994,7 +994,7 @@ void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDeriva
             },
             [&](const DerivationOutput::Impure & doi) {
                 out << ""
-                    << (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
+                    << (std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo))
                     << "impure";
             },
         }, i.second.raw);
@@ -1221,11 +1221,11 @@ nlohmann::json DerivationOutput::toJSON(
             // FIXME print refs?
         },
         [&](const DerivationOutput::CAFloating & dof) {
-            res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
+            res["hashAlgo"] = std::string { dof.method.renderPrefix() } + printHashAlgo(dof.hashAlgo);
         },
         [&](const DerivationOutput::Deferred &) {},
         [&](const DerivationOutput::Impure & doi) {
-            res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
+            res["hashAlgo"] = std::string { doi.method.renderPrefix() } + printHashAlgo(doi.hashAlgo);
             res["impure"] = true;
         },
     }, raw);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index ccf95beef..fadef45ff 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -435,7 +435,7 @@ ref RemoteStore::addCAToStore(
         conn->to
             << WorkerProto::Op::AddToStore
             << name
-            << caMethod.render(hashAlgo);
+            << caMethod.renderWithAlgo(hashAlgo);
         WorkerProto::write(*this, *conn, references);
         conn->to << repair;
 
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 9917986f6..6ea7b2ab4 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -3,6 +3,31 @@
 
 namespace nix {
 
+FileIngestionMethod parseFileIngestionMethod(std::string_view input)
+{
+    if (input == "flat") {
+        return FileIngestionMethod::Flat;
+    } else if (input == "nar") {
+        return FileIngestionMethod::Recursive;
+    } else {
+        throw UsageError("Unknown file ingestion method '%s', expect `flat` or `nar`", input);
+    }
+}
+
+
+std::string_view renderFileIngestionMethod(FileIngestionMethod method)
+{
+    switch (method) {
+    case FileIngestionMethod::Flat:
+        return "flat";
+    case FileIngestionMethod::Recursive:
+        return "nar";
+    default:
+        assert(false);
+    }
+}
+
+
 void dumpPath(
     SourceAccessor & accessor, const CanonPath & path,
     Sink & sink,
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
index 7f7544e41..41f23f2af 100644
--- a/src/libutil/file-content-address.hh
+++ b/src/libutil/file-content-address.hh
@@ -23,6 +23,23 @@ enum struct FileIngestionMethod : uint8_t {
     Recursive = 1,
 };
 
+/**
+ * Parse a `FileIngestionMethod` by name. Choice of:
+ *
+ *  - `flat`: `FileIngestionMethod::Flat`
+ *  - `nar`: `FileIngestionMethod::Recursive`
+ *
+ * Opposite of `renderFileIngestionMethod`.
+ */
+FileIngestionMethod parseFileIngestionMethod(std::string_view input);
+
+/**
+ * Render a `FileIngestionMethod` by name.
+ *
+ * Opposite of `parseFileIngestionMethod`.
+ */
+std::string_view renderFileIngestionMethod(FileIngestionMethod method);
+
 /**
  * Dump a serialization of the given file system object.
  */
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index d3e66dc21..9ea37ab4c 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -6,17 +6,6 @@
 
 using namespace nix;
 
-static FileIngestionMethod parseIngestionMethod(std::string_view input)
-{
-    if (input == "flat") {
-        return FileIngestionMethod::Flat;
-    } else if (input == "nar") {
-        return FileIngestionMethod::Recursive;
-    } else {
-        throw UsageError("Unknown hash mode '%s', expect `flat` or `nar`");
-    }
-}
-
 struct CmdAddToStore : MixDryRun, StoreCommand
 {
     Path path;
@@ -49,7 +38,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
             )",
             .labels = {"hash-mode"},
             .handler = {[this](std::string s) {
-                this->caMethod = parseIngestionMethod(s);
+                this->caMethod = parseFileIngestionMethod(s);
             }},
         });
 
diff --git a/tests/unit/libstore/content-address.cc b/tests/unit/libstore/content-address.cc
new file mode 100644
index 000000000..98c1eace3
--- /dev/null
+++ b/tests/unit/libstore/content-address.cc
@@ -0,0 +1,35 @@
+#include <gtest/gtest.h>
+
+#include "content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * ContentAddressMethod::parse, ContentAddressMethod::render
+ * --------------------------------------------------------------------------*/
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_1) {
+    for (const ContentAddressMethod & cam : {
+        ContentAddressMethod { TextIngestionMethod {} },
+        ContentAddressMethod { FileIngestionMethod::Flat },
+        ContentAddressMethod { FileIngestionMethod::Recursive },
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam);
+    }
+}
+
+TEST(ContentAddressMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view camS : {
+        "text",
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS);
+    }
+}
+
+TEST(ContentAddressMethod, testParseContentAddressMethodOptException) {
+    EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError);
+}
+
+}
diff --git a/tests/unit/libutil/file-content-address.cc b/tests/unit/libutil/file-content-address.cc
new file mode 100644
index 000000000..2e819ce40
--- /dev/null
+++ b/tests/unit/libutil/file-content-address.cc
@@ -0,0 +1,33 @@
+#include <gtest/gtest.h>
+
+#include "file-content-address.hh"
+
+namespace nix {
+
+/* ----------------------------------------------------------------------------
+ * parseFileIngestionMethod, renderFileIngestionMethod
+ * --------------------------------------------------------------------------*/
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
+    for (const FileIngestionMethod fim : {
+        FileIngestionMethod::Flat,
+        FileIngestionMethod::Recursive,
+    }) {
+        EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
+    }
+}
+
+TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
+    for (const std::string_view fimS : {
+        "flat",
+        "nar",
+    }) {
+        EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
+    }
+}
+
+TEST(FileIngestionMethod, testParseFileIngestionMethodOptException) {
+    EXPECT_THROW(parseFileIngestionMethod("narwhal"), UsageError);
+}
+
+}

From db41a0616a42f8fb52b189f7fd05c2f09764426f Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 11:14:49 -0500
Subject: [PATCH 306/307] Use `ContentAddressMethod::render` in one more place

Good to deduplicate the code.
---
 src/libfetchers/fetch-to-store.cc | 18 ++----------------
 1 file changed, 2 insertions(+), 16 deletions(-)

diff --git a/src/libfetchers/fetch-to-store.cc b/src/libfetchers/fetch-to-store.cc
index 196489e05..c27880662 100644
--- a/src/libfetchers/fetch-to-store.cc
+++ b/src/libfetchers/fetch-to-store.cc
@@ -21,23 +21,9 @@ StorePath fetchToStore(
         cacheKey = fetchers::Attrs{
             {"_what", "fetchToStore"},
             {"store", store.storeDir},
-            {"name", std::string(name)},
+            {"name", std::string{name}},
             {"fingerprint", *path.accessor->fingerprint},
-            {
-                "method",
-                std::visit(overloaded {
-                    [](const TextIngestionMethod &) {
-                        return "text";
-                    },
-                    [](const FileIngestionMethod & fim) {
-                        switch (fim) {
-                        case FileIngestionMethod::Flat: return "flat";
-                        case FileIngestionMethod::Recursive: return "nar";
-                        default: assert(false);
-                        }
-                    },
-                }, method.raw),
-            },
+            {"method", std::string{method.render()}},
             {"path", path.path.abs()}
         };
         if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {

From 9d64613dcac181f889f6831a08404e2483d41da4 Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Tue, 13 Feb 2024 12:50:10 -0500
Subject: [PATCH 307/307] Update src/libutil/file-content-address.cc

---
 src/libutil/file-content-address.cc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
index 6ea7b2ab4..6753e0f49 100644
--- a/src/libutil/file-content-address.cc
+++ b/src/libutil/file-content-address.cc
@@ -23,7 +23,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
     case FileIngestionMethod::Recursive:
         return "nar";
     default:
-        assert(false);
+        abort();
     }
 }