diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 39d595199..526fecabf 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -10,16 +10,8 @@
# This file
.github/CODEOWNERS @edolstra
-# Public documentation
-/doc @fricklerhandwerk
-*.md @fricklerhandwerk
-
# Documentation of built-in functions
-src/libexpr/primops.cc @fricklerhandwerk @roberth
-# Documentation on experimental features
-src/libutil/experimental-features.cc @fricklerhandwerk
-# Documentation on configuration settings
-src/libstore/globals.hh @fricklerhandwerk
+src/libexpr/primops.cc @roberth
# Libstore layer
/src/libstore @thufschmitt
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 217b19108..d12a4d36c 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -10,6 +10,8 @@
-# Priorities
+# Priorities and Process
Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
+
+The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol).
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 85ddcfad3..f003114ba 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -21,7 +21,7 @@ jobs:
fetch-depth: 0
- name: Create backport PRs
# should be kept in sync with `version`
- uses: zeebe-io/backport-action@v2.2.0
+ uses: zeebe-io/backport-action@v2.3.0
with:
# Config README: https://github.com/zeebe-io/backport-action#backport-action
github_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/Makefile b/Makefile
index 3dae8b394..1fdb6e897 100644
--- a/Makefile
+++ b/Makefile
@@ -24,7 +24,7 @@ makefiles = \
misc/upstart/local.mk
endif
-ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
+ifeq ($(ENABLE_UNIT_TESTS), yes)
makefiles += \
tests/unit/libutil/local.mk \
tests/unit/libutil-support/local.mk \
@@ -34,16 +34,13 @@ makefiles += \
tests/unit/libexpr-support/local.mk
endif
-ifeq ($(ENABLE_TESTS), yes)
+ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
makefiles += \
tests/functional/local.mk \
tests/functional/ca/local.mk \
tests/functional/dyn-drv/local.mk \
tests/functional/test-libstoreconsumer/local.mk \
tests/functional/plugins/local.mk
-else
-makefiles += \
- mk/disable-tests.mk
endif
OPTIMIZE = 1
@@ -57,11 +54,40 @@ endif
include mk/lib.mk
+# Must be included after `mk/lib.mk`, so that it isn't the default target.
+ifneq ($(ENABLE_UNIT_TESTS), yes)
+.PHONY: check
+check:
+ @echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'."
+ @exit 1
+endif
+
+ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes)
+.PHONY: installcheck
+installcheck:
+ @echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'."
+ @exit 1
+endif
+
# Must be included after `mk/lib.mk` so rules refer to variables defined
# by the library. Rules are not "lazy" like variables, unfortunately.
-ifeq ($(ENABLE_BUILD), yes)
+
+ifeq ($(ENABLE_DOC_GEN), yes)
$(eval $(call include-sub-makefile, doc/manual/local.mk))
+else
+.PHONY: manual-html manpages
+manual-html manpages:
+ @echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
+ @exit 1
endif
+
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
+else
+.PHONY: internal-api-html
+internal-api-html:
+ @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+ @exit 1
+endif
GLOBAL_CXXFLAGS += -g -Wall -include $(buildprefix)config.h -std=c++2a -I src
diff --git a/Makefile.config.in b/Makefile.config.in
index c85e028c2..21a9f41ec 100644
--- a/Makefile.config.in
+++ b/Makefile.config.in
@@ -9,8 +9,11 @@ CXXFLAGS = @CXXFLAGS@
CXXLTO = @CXXLTO@
EDITLINE_LIBS = @EDITLINE_LIBS@
ENABLE_BUILD = @ENABLE_BUILD@
+ENABLE_DOC_GEN = @ENABLE_DOC_GEN@
+ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@
+ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@
ENABLE_S3 = @ENABLE_S3@
-ENABLE_TESTS = @ENABLE_TESTS@
+ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@
GTEST_LIBS = @GTEST_LIBS@
HAVE_LIBCPUID = @HAVE_LIBCPUID@
HAVE_SECCOMP = @HAVE_SECCOMP@
@@ -36,12 +39,10 @@ checkbindir = @checkbindir@
checklibdir = @checklibdir@
datadir = @datadir@
datarootdir = @datarootdir@
-doc_generate = @doc_generate@
docdir = @docdir@
embedded_sandbox_shell = @embedded_sandbox_shell@
exec_prefix = @exec_prefix@
includedir = @includedir@
-internal_api_docs = @internal_api_docs@
libdir = @libdir@
libexecdir = @libexecdir@
localstatedir = @localstatedir@
diff --git a/configure.ac b/configure.ac
index a949f9df2..1bc4f17b0 100644
--- a/configure.ac
+++ b/configure.ac
@@ -138,20 +138,38 @@ AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
AC_SUBST(ENABLE_BUILD)
-# Building without tests is useful for bootstrapping with a smaller footprint
+# Building without unit tests is useful for bootstrapping with a smaller footprint
# or running the tests in a separate derivation. Otherwise, we do compile and
# run them.
-AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
- ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
-AC_SUBST(ENABLE_TESTS)
-# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
- internal_api_docs=$enableval, internal_api_docs=no)
-AC_SUBST(internal_api_docs)
+AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the unit tests]),
+ ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD)
+AC_SUBST(ENABLE_UNIT_TESTS)
AS_IF(
- [test "$ENABLE_BUILD" == "yes" || test "$ENABLE_TEST" == "yes"],
+ [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"],
+ [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])])
+
+AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the functional tests]),
+ ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes)
+AC_SUBST(ENABLE_FUNCTIONAL_TESTS)
+
+# documentation generation switch
+AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
+ ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD)
+AC_SUBST(ENABLE_DOC_GEN)
+
+AS_IF(
+ [test "$ENABLE_BUILD" == "no" && test "$ENABLE_GENERATED_DOCS" == "yes"],
+ [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])
+
+# Building without API docs is the default as Nix's C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+ ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no)
+AC_SUBST(ENABLE_INTERNAL_API_DOCS)
+
+AS_IF(
+ [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"],
[NEED_PROG(jq, jq)])
AS_IF([test "$ENABLE_BUILD" == "yes"],[
@@ -317,7 +335,7 @@ if test "$gc" = yes; then
AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
fi
-AS_IF([test "$ENABLE_TESTS" == "yes"],[
+AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[
# Look for gtest.
PKG_CHECK_MODULES([GTEST], [gtest_main])
@@ -349,11 +367,6 @@ AC_LANG_POP(C++)
# Look for nlohmann/json.
PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])
-# documentation generation switch
-AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
- doc_generate=$enableval, doc_generate=yes)
-AC_SUBST(doc_generate)
-
# Look for lowdown library.
PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
diff --git a/doc/internal-api/local.mk b/doc/internal-api/local.mk
index 890f341b7..bf2c4dede 100644
--- a/doc/internal-api/local.mk
+++ b/doc/internal-api/local.mk
@@ -1,19 +1,7 @@
-.PHONY: internal-api-html
-
-ifeq ($(internal_api_docs), yes)
-
$(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
mkdir -p $(docdir)/internal-api
{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -
# Generate the HTML API docs for Nix's unstable internal interfaces.
+.PHONY: internal-api-html
internal-api-html: $(docdir)/internal-api/html/index.html
-
-else
-
-# Make a nicer error message
-internal-api-html:
- @echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
- @exit 1
-
-endif
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 456000d3d..b77168885 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -1,5 +1,3 @@
-ifeq ($(doc_generate),yes)
-
# The version of Nix used to generate the doc. Can also be
# `$(nix_INSTALL_PATH)` or just `nix` (to grab ambient from the `PATH`),
# if one prefers.
@@ -180,6 +178,8 @@ manual-html: $(docdir)/manual/index.html
install: $(docdir)/manual/index.html
# Generate 'nix' manpages.
+.PHONY: manpages
+manpages: $(mandir)/man1/nix3-manpages
install: $(mandir)/man1/nix3-manpages
man: doc/manual/generated/man1/nix3-manpages
all: doc/manual/generated/man1/nix3-manpages
@@ -225,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
@rm -rf $(DESTDIR)$(docdir)/manual
@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
@rm -rf $(DESTDIR)$(docdir)/manual.tmp
-
-endif
diff --git a/doc/manual/rl-next/nix-profile-names.md b/doc/manual/rl-next/nix-profile-names.md
new file mode 100644
index 000000000..f5953bd72
--- /dev/null
+++ b/doc/manual/rl-next/nix-profile-names.md
@@ -0,0 +1,6 @@
+---
+synopsis: "`nix profile` now allows referring to elements by human-readable name"
+prs: 8678
+---
+
+[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Indices are deprecated and will be removed in a future version.
diff --git a/doc/manual/src/contributing/hacking.md b/doc/manual/src/contributing/hacking.md
index 4d3d66397..dce0422dc 100644
--- a/doc/manual/src/contributing/hacking.md
+++ b/doc/manual/src/contributing/hacking.md
@@ -64,6 +64,27 @@ $ nix build
You can also build Nix for one of the [supported platforms](#platforms).
+## Makefile variables
+
+You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run
+`make install`.
+
+You may want to set `MAKEFLAGS="-e -j $NIX_BUILD_CORES"` to allow environment
+variables to override `Makefile` variables.
+
+- `ENABLE_BUILD=yes` to enable building the C++ code.
+- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
+
+ The docs can take a while to build, so you may want to disable this for local development.
+- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
+- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
+- `OPTIMIZE=1` to enable optimizations.
+- `libraries=libutil programs=` to only build a specific library (this will
+ fail in the linking phase if you don't have the other libraries built, but is
+ useful for checking types).
+- `libraries= programs=nix` to only build a specific program (this will not, in
+ general, work, because the programs need the libraries).
+
## Building Nix
To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found:
diff --git a/doc/manual/src/installation/prerequisites-source.md b/doc/manual/src/installation/prerequisites-source.md
index d4babf1ea..807e82517 100644
--- a/doc/manual/src/installation/prerequisites-source.md
+++ b/doc/manual/src/installation/prerequisites-source.md
@@ -72,7 +72,7 @@
This is an optional dependency and can be disabled
by providing a `--disable-cpuid` to the `configure` script.
- - Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
+ - Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and
RapidCheck are required, which are available at
<https://github.com/google/googletest> and
<https://github.com/emil-e/rapidcheck> respectively.
diff --git a/flake.nix b/flake.nix
index 8c4436729..a8fc105e8 100644
--- a/flake.nix
+++ b/flake.nix
@@ -395,7 +395,7 @@
stdenvs)));
devShells = let
- makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; }).overrideAttrs (attrs: {
+ makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: {
installFlags = "sysconfdir=$(out)/etc";
shellHook = ''
PATH=$prefix/bin:$PATH
diff --git a/mk/disable-tests.mk b/mk/disable-tests.mk
deleted file mode 100644
index f72f84412..000000000
--- a/mk/disable-tests.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-# This file is only active for `./configure --disable-tests`.
-# Running `make check` or `make installcheck` would indicate a mistake in the
-# caller.
-
-installcheck:
- @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
- @exit 1
-
-# This currently has little effect.
-check:
- @echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
- @exit 1
diff --git a/package.nix b/package.nix
index 24395b484..b5ff45083 100644
--- a/package.nix
+++ b/package.nix
@@ -87,6 +87,9 @@
, test-daemon ? null
, test-client ? null
+# Avoid setting things that would interfere with a functioning devShell
+, forDevShell ? false
+
# Not a real argument, just the only way to approximate let-binding some
# stuff for argument defaults.
, __forDefaults ? {
@@ -104,30 +107,6 @@ let
inherit doBuild doCheck doInstallCheck;
};
- filesets = {
- baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
-
- configureFiles = fileset.unions [
- ./.version
- ./configure.ac
- ./m4
- # TODO: do we really need README.md? It doesn't seem used in the build.
- ./README.md
- ];
-
- topLevelBuildFiles = fileset.unions [
- ./local.mk
- ./Makefile
- ./Makefile.config.in
- ./mk
- ];
-
- functionalTestFiles = fileset.unions [
- ./tests/functional
- (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
- ];
- };
-
mkDerivation =
if withCoverageChecks
then
@@ -151,32 +130,44 @@ mkDerivation (finalAttrs: let
# to be run later, requires the unit tests to be built.
buildUnitTests = doCheck || installUnitTests;
- anySortOfTesting = buildUnitTests || doInstallCheck;
-
in {
inherit pname version;
src =
let
-
+ baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
in
fileset.toSource {
root = ./.;
- fileset = fileset.intersect filesets.baseFiles (fileset.unions ([
- filesets.configureFiles
- filesets.topLevelBuildFiles
- ./doc/internal-api
+ fileset = fileset.intersect baseFiles (fileset.unions ([
+ # For configure
+ ./.version
+ ./configure.ac
+ ./m4
+ # TODO: do we really need README.md? It doesn't seem used in the build.
+ ./README.md
+ # For make, regardless of what we are building
+ ./local.mk
+ ./Makefile
+ ./Makefile.config.in
+ ./mk
+ (fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
] ++ lib.optionals doBuild [
./boehmgc-coroutine-sp-fallback.diff
./doc
./misc
./precompiled-headers.h
./src
- ./tests/unit
./COPYING
./scripts/local.mk
- ] ++ lib.optionals anySortOfTesting [
- filesets.functionalTestFiles
+ ] ++ lib.optionals buildUnitTests [
+ ./doc/manual
+ ] ++ lib.optionals enableInternalAPIDocs [
+ ./doc/internal-api
+ ] ++ lib.optionals buildUnitTests [
+ ./tests/unit
+ ] ++ lib.optionals doInstallCheck [
+ ./tests/functional
]));
};
@@ -275,12 +266,14 @@ in {
);
configureFlags = [
- "--sysconfdir=/etc"
(lib.enableFeature doBuild "build")
- (lib.enableFeature anySortOfTesting "tests")
+ (lib.enableFeature buildUnitTests "unit-tests")
+ (lib.enableFeature doInstallCheck "functional-tests")
(lib.enableFeature enableInternalAPIDocs "internal-api-docs")
(lib.enableFeature enableManual "doc-gen")
(lib.enableFeature installUnitTests "install-unit-tests")
+ ] ++ lib.optionals (!forDevShell) [
+ "--sysconfdir=/etc"
] ++ lib.optionals installUnitTests [
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
@@ -310,10 +303,7 @@ in {
'';
postInstall = lib.optionalString doBuild (
- ''
- mkdir -p $doc/nix-support
- echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
- '' + lib.optionalString stdenv.hostPlatform.isStatic ''
+ lib.optionalString stdenv.hostPlatform.isStatic ''
mkdir -p $out/nix-support
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
'' + lib.optionalString stdenv.isDarwin ''
@@ -322,7 +312,10 @@ in {
$out/lib/libboost_context.dylib \
$out/lib/libnixutil.dylib
''
- ) + lib.optionalString enableInternalAPIDocs ''
+ ) + lib.optionalString enableManual ''
+ mkdir -p ''${!outputDoc}/nix-support
+ echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products
+ '' + lib.optionalString enableInternalAPIDocs ''
mkdir -p ''${!outputDoc}/nix-support
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
'';
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index 82c7db608..4964b8a34 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -13,6 +13,7 @@
#include "globals.hh"
#include "store-api.hh"
#include "crypto.hh"
+#include "posix-source-accessor.hh"
#include
#include
@@ -205,7 +206,10 @@ void importPaths(int fd, int dontCheckSigs)
SV * hashPath(char * algo, int base32, char * path)
PPCODE:
try {
- Hash h = hashPath(parseHashAlgo(algo), path).first;
+ PosixSourceAccessor accessor;
+ Hash h = hashPath(
+ accessor, CanonPath::fromCwd(path),
+ FileIngestionMethod::Recursive, parseHashAlgo(algo)).first;
auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
} catch (Error & e) {
@@ -281,7 +285,11 @@ SV * addToStore(char * srcPath, int recursive, char * algo)
PPCODE:
try {
auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
- auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashAlgo(algo));
+ PosixSourceAccessor accessor;
+ auto path = store()->addToStore(
+ std::string(baseNameOf(srcPath)),
+ accessor, CanonPath::fromCwd(srcPath),
+ method, parseHashAlgo(algo));
XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
} catch (Error & e) {
croak("%s", e.what());
diff --git a/scripts/install-darwin-multi-user.sh b/scripts/install-darwin-multi-user.sh
index 0326d3415..766f81bde 100644
--- a/scripts/install-darwin-multi-user.sh
+++ b/scripts/install-darwin-multi-user.sh
@@ -3,11 +3,13 @@
set -eu
set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}"
+export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
+
readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
# create by default; set 0 to DIY, use a symlink, etc.
readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default
-NIX_FIRST_BUILD_UID="301"
-NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
# caution: may update times on / if not run as normal non-root user
read_only_root() {
diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh
index a08f62333..ad3ee8881 100644
--- a/scripts/install-multi-user.sh
+++ b/scripts/install-multi-user.sh
@@ -25,9 +25,9 @@ readonly RED='\033[31m'
readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32}
readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}"
readonly NIX_BUILD_GROUP_NAME="nixbld"
-# darwin installer needs to override these
-NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
-NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+# each system specific installer must set these:
+# NIX_FIRST_BUILD_UID
+# NIX_BUILD_USER_NAME_TEMPLATE
# Please don't change this. We don't support it, because the
# default shell profile that comes with Nix doesn't support it.
readonly NIX_ROOT="/nix"
@@ -707,6 +707,12 @@ EOF
fi
}
+check_required_system_specific_settings() {
+ if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then
+ failure "Internal error: System specific installer for $(uname) ($1) does not export required settings."
+ fi
+}
+
welcome_to_nix() {
local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))"
local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}")
@@ -726,7 +732,9 @@ manager. This will happen in a few stages:
if you are ready to continue.
3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT}
- that the Nix daemon uses to run builds.
+ that the Nix daemon uses to run builds. To create system users
+ in a different range, exit and run this tool again with
+ NIX_FIRST_BUILD_UID set.
4. Perform the basic installation of the Nix files daemon.
@@ -968,13 +976,16 @@ main() {
if is_os_darwin; then
# shellcheck source=./install-darwin-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
+ check_required_system_specific_settings "install-darwin-multi-user.sh"
elif is_os_linux; then
# shellcheck source=./install-systemd-multi-user.sh
. "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
+ check_required_system_specific_settings "install-systemd-multi-user.sh"
else
failure "Sorry, I don't know what to do on $(uname)"
fi
+
welcome_to_nix
if ! is_root; then
diff --git a/scripts/install-systemd-multi-user.sh b/scripts/install-systemd-multi-user.sh
index 07b34033a..202a9bb54 100755
--- a/scripts/install-systemd-multi-user.sh
+++ b/scripts/install-systemd-multi-user.sh
@@ -3,6 +3,10 @@
set -eu
set -o pipefail
+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
+export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+
readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service
readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service
diff --git a/src/libcmd/installable-value.cc b/src/libcmd/installable-value.cc
index 08ad35105..bdc34bbe3 100644
--- a/src/libcmd/installable-value.cc
+++ b/src/libcmd/installable-value.cc
@@ -44,7 +44,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
std::optional InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{
if (v.type() == nPath) {
- auto storePath = v.path().fetchToStore(state->store);
+ auto storePath = v.path().fetchToStore(*state->store);
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 1552e3e92..c9c25c898 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -2317,7 +2317,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
auto dstPath = i != srcToStore.end()
? i->second
: [&]() {
- auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
+ auto dstPath = path.fetchToStore(*store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
allowPath(dstPath);
srcToStore.insert_or_assign(path, dstPath);
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
diff --git a/src/libexpr/flake/flakeref.cc b/src/libexpr/flake/flakeref.cc
index 8b0eb7460..86a0982f3 100644
--- a/src/libexpr/flake/flakeref.cc
+++ b/src/libexpr/flake/flakeref.cc
@@ -190,7 +190,7 @@ std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
- + "(?:#(" + queryRegex + "))?",
+ + "(?:#(" + fragmentRegex + "))?",
std::regex::ECMAScript);
if (std::regex_match(url, match, flakeRegex)) {
diff --git a/src/libexpr/flake/url-name.cc b/src/libexpr/flake/url-name.cc
new file mode 100644
index 000000000..7e51aa2e1
--- /dev/null
+++ b/src/libexpr/flake/url-name.cc
@@ -0,0 +1,48 @@
+#include "url-name.hh"
+#include <regex>
+#include
+
+namespace nix {
+
+static const std::string attributeNamePattern("[a-z0-9_-]+");
+static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
+static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
+static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
+static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
+static const std::regex gitProviderRegex("github|gitlab|sourcehut");
+static const std::regex gitSchemeRegex("git($|\\+.*)");
+static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
+
+std::optional<std::string> getNameFromURL(const ParsedURL & url)
+{
+ std::smatch match;
+
+ /* If there is a dir= argument, use its value */
+ if (url.query.count("dir") > 0)
+ return url.query.at("dir");
+
+ /* If the fragment is an attribute path whose last element isn't "default", use that element */
+ if (std::regex_match(url.fragment, match, lastAttributeRegex))
+ return match.str(1);
+
+ /* If this is a github/gitlab/sourcehut flake, use the repo name */
+ if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
+ return match.str(1);
+
+ /* If it is a regular git flake, use the directory name */
+ if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
+ return match.str(1);
+
+ /* If everything failed but there is a non-default fragment, use it in full */
+ if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
+ return url.fragment;
+
+ /* If there is no fragment, take the last element of the path */
+ if (std::regex_match(url.path, match, lastPathSegmentRegex))
+ return match.str(1);
+
+ /* If even that didn't work, the URL does not contain enough info to determine a useful name */
+ return {};
+}
+
+}
diff --git a/src/libexpr/flake/url-name.hh b/src/libexpr/flake/url-name.hh
new file mode 100644
index 000000000..6f32754d2
--- /dev/null
+++ b/src/libexpr/flake/url-name.hh
@@ -0,0 +1,20 @@
+#include "url.hh"
+#include "url-parts.hh"
+#include "util.hh"
+#include "split.hh"
+
+namespace nix {
+
+/**
+ * Try to extract a reasonably unique and meaningful, human-readable
+ * name of a flake output from a parsed URL.
+ * When nullopt is returned, the call site should use information available
+ * to it outside of the URL to determine a useful name.
+ * This is a heuristic approach intended for user interfaces.
+ * @return nullopt if the extracted name is not useful to identify a
+ * flake output, for example because it is empty or "default".
+ * Otherwise returns the extracted name.
+ */
+std::optional<std::string> getNameFromURL(const ParsedURL & url);
+
+}
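
A few hypothetical inputs and the heuristic each would hit, as a sketch only (the exact URL forms accepted depend on `parseURL`):

```cpp
#include "url.hh"
#include "url-name.hh"

using namespace nix;

void demo()
{
    // github scheme: second path segment, i.e. the repository -> "nix"
    auto repo = getNameFromURL(parseURL("github:NixOS/nix"));

    // Attribute-path fragment: last attribute, skipping "default" -> "hello"
    auto attr = getNameFromURL(parseURL(
        "github:NixOS/nixpkgs#packages.x86_64-linux.hello"));

    // An explicit dir= query takes precedence over everything else -> "subflake"
    auto dir = getNameFromURL(parseURL(
        "git+https://example.com/owner/repo?dir=subflake"));
}
```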
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index d78a28c73..1ca4a2541 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -438,9 +438,7 @@ static RegisterPrimOp primop_isNull({
.doc = R"(
Return `true` if *e* evaluates to `null`, and `false` otherwise.
- > **Warning**
- >
- > This function is *deprecated*; just write `e == null` instead.
+ This is equivalent to `e == null`.
)",
.fun = prim_isNull,
});
@@ -2072,8 +2070,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
}
auto storePath = settings.readOnlyMode
- ? state.store->computeStorePathForText(name, contents, refs)
- : state.store->addTextToStore(name, contents, refs, state.repair);
+ ? state.store->makeFixedOutputPathFromCA(name, TextInfo {
+ .hash = hashString(HashAlgorithm::SHA256, contents),
+ .references = std::move(refs),
+ })
+ : ({
+ StringSource s { contents };
+ state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
+ });
/* Note: we don't need to add `context' to the context of the
result, since `storePath' itself has references to the paths
@@ -2229,7 +2233,7 @@ static void addPath(
});
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
- auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
+ auto dstPath = path.fetchToStore(*state.store, name, method, filter.get(), state.repair);
if (expectedHash && expectedStorePath != dstPath)
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
state.allowAndSetStorePathString(dstPath, v);
diff --git a/src/libfetchers/cache.cc b/src/libfetchers/cache.cc
index 63b05bdab..e071b4717 100644
--- a/src/libfetchers/cache.cc
+++ b/src/libfetchers/cache.cc
@@ -106,7 +106,7 @@ struct CacheImpl : Cache
}
void add(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
@@ -115,13 +115,13 @@ struct CacheImpl : Cache
_state.lock()->add.use()
(attrsToJSON(inAttrs).dump())
(attrsToJSON(infoAttrs).dump())
- (store->printStorePath(storePath))
+ (store.printStorePath(storePath))
(locked)
(time(0)).exec();
}
std::optional<std::pair<Attrs, StorePath>> lookup(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs) override
{
if (auto res = lookupExpired(store, inAttrs)) {
@@ -134,7 +134,7 @@ struct CacheImpl : Cache
}
std::optional<Result> lookupExpired(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs) override
{
auto state(_state.lock());
@@ -148,19 +148,19 @@ struct CacheImpl : Cache
}
auto infoJSON = stmt.getStr(0);
- auto storePath = store->parseStorePath(stmt.getStr(1));
+ auto storePath = store.parseStorePath(stmt.getStr(1));
auto locked = stmt.getInt(2) != 0;
auto timestamp = stmt.getInt(3);
- store->addTempRoot(storePath);
- if (!store->isValidPath(storePath)) {
+ store.addTempRoot(storePath);
+ if (!store.isValidPath(storePath)) {
// FIXME: we could try to substitute 'storePath'.
debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
return {};
}
debug("using cache entry '%s' -> '%s', '%s'",
- inAttrsJSON, infoJSON, store->printStorePath(storePath));
+ inAttrsJSON, infoJSON, store.printStorePath(storePath));
return Result {
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
diff --git a/src/libfetchers/cache.hh b/src/libfetchers/cache.hh
index f70589267..791d77025 100644
--- a/src/libfetchers/cache.hh
+++ b/src/libfetchers/cache.hh
@@ -50,14 +50,14 @@ struct Cache
/* Old cache for things that have a store path. */
virtual void add(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
bool locked) = 0;
virtual std::optional<std::pair<Attrs, StorePath>> lookup(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs) = 0;
struct Result
@@ -68,7 +68,7 @@ struct Cache
};
virtual std::optional<Result> lookupExpired(
- ref<Store> store,
+ Store & store,
const Attrs & inAttrs) = 0;
};
diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc
index 7ec1f9802..f309e5993 100644
--- a/src/libfetchers/fetchers.cc
+++ b/src/libfetchers/fetchers.cc
@@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
- auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
+ auto storePath = SourcePath(accessor).fetchToStore(*store, input2.getName());
return {storePath, input2};
}
diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc
index 5dac66930..01cd28427 100644
--- a/src/libfetchers/git.cc
+++ b/src/libfetchers/git.cc
@@ -368,14 +368,14 @@ struct GitInputScheme : InputScheme
RepoInfo getRepoInfo(const Input & input) const
{
- auto checkHashType = [&](const std::optional<Hash> & hash)
+ auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
if (auto rev = input.getRev())
- checkHashType(rev);
+ checkHashAlgorithm(rev);
RepoInfo repoInfo;
diff --git a/src/libfetchers/input-accessor.cc b/src/libfetchers/input-accessor.cc
index 1f793bf1d..a647f5915 100644
--- a/src/libfetchers/input-accessor.cc
+++ b/src/libfetchers/input-accessor.cc
@@ -5,10 +5,10 @@
namespace nix {
StorePath InputAccessor::fetchToStore(
- ref<Store> store,
+ Store & store,
const CanonPath & path,
std::string_view name,
- FileIngestionMethod method,
+ ContentAddressMethod method,
PathFilter * filter,
RepairFlag repair)
{
@@ -20,10 +20,24 @@ StorePath InputAccessor::fetchToStore(
if (!filter && fingerprint) {
cacheKey = fetchers::Attrs{
{"_what", "fetchToStore"},
- {"store", store->storeDir},
+ {"store", store.storeDir},
{"name", std::string(name)},
{"fingerprint", *fingerprint},
- {"method", (uint8_t) method},
+ {
+ "method",
+ std::visit(overloaded {
+ [](const TextIngestionMethod &) {
+ return "text";
+ },
+ [](const FileIngestionMethod & fim) {
+ switch (fim) {
+ case FileIngestionMethod::Flat: return "flat";
+ case FileIngestionMethod::Recursive: return "nar";
+ default: assert(false);
+ }
+ },
+ }, method.raw),
+ },
{"path", path.abs()}
};
if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
@@ -35,17 +49,14 @@ StorePath InputAccessor::fetchToStore(
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
- auto source = sinkToSource([&](Sink & sink) {
- if (method == FileIngestionMethod::Recursive)
- dumpPath(path, sink, filter ? *filter : defaultPathFilter);
- else
- readFile(path, sink);
- });
+ auto filter2 = filter ? *filter : defaultPathFilter;
auto storePath =
settings.readOnlyMode
- ? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
- : store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
+ ? store.computeStorePath(
+ name, *this, path, method, HashAlgorithm::SHA256, {}, filter2).first
+ : store.addToStore(
+ name, *this, path, method, HashAlgorithm::SHA256, {}, filter2, repair);
if (cacheKey)
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@@ -60,9 +71,9 @@ std::ostream & operator << (std::ostream & str, const SourcePath & path)
}
StorePath SourcePath::fetchToStore(
- ref<Store> store,
+ Store & store,
std::string_view name,
- FileIngestionMethod method,
+ ContentAddressMethod method,
PathFilter * filter,
RepairFlag repair) const
{
diff --git a/src/libfetchers/input-accessor.hh b/src/libfetchers/input-accessor.hh
index f385e6231..d2a21cb4b 100644
--- a/src/libfetchers/input-accessor.hh
+++ b/src/libfetchers/input-accessor.hh
@@ -30,10 +30,10 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
- ref<Store> store,
+ Store & store,
const CanonPath & path,
std::string_view name = "source",
- FileIngestionMethod method = FileIngestionMethod::Recursive,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair);
};
@@ -116,9 +116,9 @@ struct SourcePath
* Copy this `SourcePath` to the Nix store.
*/
StorePath fetchToStore(
- ref<Store> store,
+ Store & store,
std::string_view name = "source",
- FileIngestionMethod method = FileIngestionMethod::Recursive,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair) const;
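
`SourcePath::fetchToStore` now takes the store by reference and a general `ContentAddressMethod`. A minimal sketch of the new calling convention, with an illustrative helper name:

```cpp
#include "input-accessor.hh"
#include "store-api.hh"

using namespace nix;

// Illustrative helper: call sites that hold a ref<Store> now pass `*store`,
// as the hunks above do; the defaults keep the old recursive/NAR behaviour.
StorePath copyToStore(Store & store, const SourcePath & src)
{
    return src.fetchToStore(store, src.baseName());
}
```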
diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc
index 6056b9a3c..9982389ab 100644
--- a/src/libfetchers/mercurial.cc
+++ b/src/libfetchers/mercurial.cc
@@ -6,6 +6,7 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
+#include "posix-source-accessor.hh"
#include "fetch-settings.hh"
@@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
- auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
+ PosixSourceAccessor accessor;
+ auto storePath = store->addToStore(
+ input.getName(),
+ accessor, CanonPath { actualPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
+ filter);
return {std::move(storePath), input};
}
@@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
- auto checkHashType = [&](const std::optional<Hash> & hash)
+ auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
@@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme
auto getLockedAttrs = [&]()
{
- checkHashType(input.getRev());
+ checkHashAlgorithm(input.getRev());
return Attrs({
{"type", "hg"},
@@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
};
if (input.getRev()) {
- if (auto res = getCache()->lookup(store, getLockedAttrs()))
+ if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
}
@@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme
{"ref", *input.getRef()},
});
- if (auto res = getCache()->lookup(store, unlockedAttrs)) {
+ if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
@@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);
- if (auto res = getCache()->lookup(store, getLockedAttrs()))
+ if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
Path tmpDir = createTempDir();
@@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme
deletePath(tmpDir + "/.hg_archival.txt");
- auto storePath = store->addToStore(name, tmpDir);
+ PosixSourceAccessor accessor;
+ auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
@@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme
if (!_input.getRev())
getCache()->add(
- store,
+ *store,
unlockedAttrs,
infoAttrs,
storePath,
false);
getCache()->add(
- store,
+ *store,
getLockedAttrs(),
infoAttrs,
storePath,
diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc
index 086366180..3b7709440 100644
--- a/src/libfetchers/tarball.cc
+++ b/src/libfetchers/tarball.cc
@@ -8,6 +8,7 @@
#include "tarfile.hh"
#include "types.hh"
#include "split.hh"
+#include "posix-source-accessor.hh"
namespace nix::fetchers {
@@ -26,7 +27,7 @@ DownloadFileResult downloadFile(
{"name", name},
});
- auto cached = getCache()->lookupExpired(store, inAttrs);
+ auto cached = getCache()->lookupExpired(*store, inAttrs);
auto useCached = [&]() -> DownloadFileResult
{
@@ -91,7 +92,7 @@ DownloadFileResult downloadFile(
}
getCache()->add(
- store,
+ *store,
inAttrs,
infoAttrs,
*storePath,
@@ -99,7 +100,7 @@ DownloadFileResult downloadFile(
if (url != res.effectiveUri)
getCache()->add(
- store,
+ *store,
{
{"type", "file"},
{"url", res.effectiveUri},
@@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball(
{"name", name},
});
- auto cached = getCache()->lookupExpired(store, inAttrs);
+ auto cached = getCache()->lookupExpired(*store, inAttrs);
if (cached && !cached->expired)
return {
@@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
- unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
+ PosixSourceAccessor accessor;
+ unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({
@@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball(
infoAttrs.emplace("immutableUrl", *res.immutableUrl);
getCache()->add(
- store,
+ *store,
inAttrs,
infoAttrs,
*unpackedStorePath,
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 2837e8934..8a3052433 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -12,6 +12,7 @@
#include "thread-pool.hh"
#include "callback.hh"
#include "signals.hh"
+#include "archive.hh"
#include
#include
@@ -300,24 +301,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}});
}
-StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath BinaryCacheStore::addToStoreFromDump(
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair)
{
- if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
- unsupported("addToStoreFromDump");
- return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
+ std::optional<Hash> caHash;
+ std::string nar;
+
+ if (auto * dump2p = dynamic_cast<StringSource *>(&dump)) {
+ auto & dump2 = *dump2p;
+ // Hack, this gives us a "replayable" source so we can compute
+ // multiple hashes more easily.
+ caHash = hashString(HashAlgorithm::SHA256, dump2.s);
+ switch (method.getFileIngestionMethod()) {
+ case FileIngestionMethod::Recursive:
+ // The dump is already NAR in this case, just use it.
+ nar = dump2.s;
+ break;
+ case FileIngestionMethod::Flat:
+ // The dump is Flat, so we need to convert it to NAR with a
+ // single file.
+ StringSink s;
+ dumpString(dump2.s, s);
+ nar = std::move(s.s);
+ break;
+ }
+ } else {
+ // Otherwise, we have to do the same hashing as NAR so our single
+ // hash will suffice for both purposes.
+ if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
+ unsupported("addToStoreFromDump");
+ }
+ StringSource narDump { nar };
+
+ // Use `narDump` if we wrote to `nar`.
+ Source & narDump2 = nar.size() > 0
+ ? static_cast<Source &>(narDump)
+ : dump;
+
+ return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
*this,
name,
- FixedOutputInfo {
- .method = method,
- .hash = nar.first,
- .references = {
+ ContentAddressWithReferences::fromParts(
+ method,
+ caHash ? *caHash : nar.first,
+ {
.others = references,
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
.self = false,
- },
- },
+ }),
nar.first,
};
info.narSize = nar.second;
@@ -399,72 +436,36 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}
StorePath BinaryCacheStore::addToStore(
- std::string_view name,
- const Path & srcPath,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- PathFilter & filter,
- RepairFlag repair,
- const StorePathSet & references)
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ PathFilter & filter,
+ RepairFlag repair)
{
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
implementation of this method in terms of addToStoreFromDump. */
- HashSink sink { hashAlgo };
- if (method == FileIngestionMethod::Recursive) {
- dumpPath(srcPath, sink, filter);
- } else {
- readFile(srcPath, sink);
- }
- auto h = sink.finish().first;
+ auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
auto source = sinkToSource([&](Sink & sink) {
- dumpPath(srcPath, sink, filter);
+ accessor.dumpPath(path, sink, filter);
});
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
*this,
name,
- FixedOutputInfo {
- .method = method,
- .hash = h,
- .references = {
+ ContentAddressWithReferences::fromParts(
+ method,
+ h,
+ {
.others = references,
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
.self = false,
- },
- },
- nar.first,
- };
- info.narSize = nar.second;
- return info;
- })->path;
-}
-
-StorePath BinaryCacheStore::addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair)
-{
- auto textHash = hashString(HashAlgorithm::SHA256, s);
- auto path = makeTextPath(name, TextInfo { { textHash }, references });
-
- if (!repair && isValidPath(path))
- return path;
-
- StringSink sink;
- dumpString(s, sink);
- StringSource source(sink.s);
- return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
- ValidPathInfo info {
- *this,
- std::string { name },
- TextInfo {
- .hash = textHash,
- .references = references,
- },
+ }),
nar.first,
};
info.narSize = nar.second;
diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh
index 395e1b479..98e43ee6a 100644
--- a/src/libstore/binary-cache-store.hh
+++ b/src/libstore/binary-cache-store.hh
@@ -123,22 +123,22 @@ public:
void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs) override;
- StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
+ StorePath addToStoreFromDump(
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair) override;
StorePath addToStore(
- std::string_view name,
- const Path & srcPath,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- PathFilter & filter,
- RepairFlag repair,
- const StorePathSet & references) override;
-
- StorePath addTextToStore(
std::string_view name,
- std::string_view s,
+ SourceAccessor & accessor,
+ const CanonPath & srcPath,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
const StorePathSet & references,
+ PathFilter & filter,
RepairFlag repair) override;
void registerDrvOutput(const Realisation & info) override;
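
With `addTextToStore` removed, callers push text through `addToStoreFromDump` behind a `StringSource`, which `BinaryCacheStore` treats as a replayable dump (see the special case above). A sketch of that pattern, mirroring the daemon change further down; the helper name is illustrative:

```cpp
#include "store-api.hh"
#include "serialise.hh"

using namespace nix;

// Illustrative replacement for an addTextToStore() call; note the new
// argument order: references before repair.
StorePath addText(Store & store, std::string_view name, std::string contents)
{
    StringSource source { contents };
    return store.addToStoreFromDump(
        source, name,
        TextIngestionMethod {}, HashAlgorithm::SHA256,
        /* references */ StorePathSet {}, NoRepair);
}
```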
diff --git a/src/libstore/build/local-derivation-goal.cc b/src/libstore/build/local-derivation-goal.cc
index 802b39f84..b01d9e237 100644
--- a/src/libstore/build/local-derivation-goal.cc
+++ b/src/libstore/build/local-derivation-goal.cc
@@ -20,6 +20,7 @@
#include "child.hh"
#include "unix-domain-socket.hh"
#include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"
#include
#include
@@ -1290,13 +1291,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
{ throw Error("queryPathFromHashPart"); }
StorePath addToStore(
- std::string_view name,
- const Path & srcPath,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- PathFilter & filter,
- RepairFlag repair,
- const StorePathSet & references) override
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & srcPath,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ PathFilter & filter,
+ RepairFlag repair) override
{ throw Error("addToStore"); }
void addToStore(const ValidPathInfo & info, Source & narSource,
@@ -1306,26 +1308,15 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
goal.addDependency(info.path);
}
- StorePath addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair = NoRepair) override
- {
- auto path = next->addTextToStore(name, s, references, repair);
- goal.addDependency(path);
- return path;
- }
-
StorePath addToStoreFromDump(
- Source & dump,
- std::string_view name,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- RepairFlag repair,
- const StorePathSet & references) override
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair) override
{
- auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
+ auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
goal.addDependency(path);
return path;
}
@@ -2453,8 +2444,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
throw BuildError(
"output path %1% without valid stats info",
actualPath);
- if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } ||
- outputHash.method == ContentAddressMethod { TextIngestionMethod {} })
+ if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
{
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)
@@ -2466,38 +2456,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites);
/* FIXME optimize and deduplicate with addToStore */
std::string oldHashPart { scratchPath->hashPart() };
- HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
- std::visit(overloaded {
- [&](const TextIngestionMethod &) {
- readFile(actualPath, caSink);
- },
- [&](const FileIngestionMethod & m2) {
- switch (m2) {
- case FileIngestionMethod::Recursive:
- dumpPath(actualPath, caSink);
- break;
- case FileIngestionMethod::Flat:
- readFile(actualPath, caSink);
- break;
- }
- },
- }, outputHash.method.raw);
- auto got = caSink.finish().first;
+ auto got = ({
+ HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
+ PosixSourceAccessor accessor;
+ dumpPath(
+ accessor, CanonPath { actualPath },
+ caSink,
+ outputHash.method.getFileIngestionMethod());
+ caSink.finish().first;
+ });
- auto optCA = ContentAddressWithReferences::fromPartsOpt(
- outputHash.method,
- std::move(got),
- rewriteRefs());
- if (!optCA) {
- // TODO track distinct failure modes separately (at the time of
- // writing there is just one but `nullopt` is unclear) so this
- // message can't get out of sync.
- throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing");
- }
ValidPathInfo newInfo0 {
worker.store,
outputPathName(drv->name, outputName),
- std::move(*optCA),
+ ContentAddressWithReferences::fromParts(
+ outputHash.method,
+ std::move(got),
+ rewriteRefs()),
Hash::dummy,
};
if (*scratchPath != newInfo0.path) {
@@ -2511,9 +2486,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string(newInfo0.path.hashPart())}});
}
- HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
- newInfo0.narHash = narHashAndSize.first;
- newInfo0.narSize = narHashAndSize.second;
+ {
+ PosixSourceAccessor accessor;
+ HashResult narHashAndSize = hashPath(
+ accessor, CanonPath { actualPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
+ newInfo0.narHash = narHashAndSize.first;
+ newInfo0.narSize = narHashAndSize.second;
+ }
assert(newInfo0.ca);
return newInfo0;
@@ -2531,7 +2511,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput(outputRewrites);
- auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
+ PosixSourceAccessor accessor;
+ HashResult narHashAndSize = hashPath(
+ accessor, CanonPath { actualPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs();
diff --git a/src/libstore/build/sandbox-defaults.sb b/src/libstore/build/sandbox-defaults.sb
index 77f013aea..25ec11285 100644
--- a/src/libstore/build/sandbox-defaults.sb
+++ b/src/libstore/build/sandbox-defaults.sb
@@ -68,6 +68,7 @@ R""(
(allow file*
(literal "/dev/null")
(literal "/dev/random")
+ (literal "/dev/stderr")
(literal "/dev/stdin")
(literal "/dev/stdout")
(literal "/dev/tty")
diff --git a/src/libstore/build/worker.cc b/src/libstore/build/worker.cc
index 9b8c36286..399ad47fd 100644
--- a/src/libstore/build/worker.cc
+++ b/src/libstore/build/worker.cc
@@ -519,7 +519,9 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
- HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
+ HashResult current = hashPath(
+ *store.getFSAccessor(), CanonPath { store.printStorePath(path) },
+ FileIngestionMethod::Recursive, info->narHash.algo);
Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc
index f42a13126..fc408f5af 100644
--- a/src/libstore/content-address.cc
+++ b/src/libstore/content-address.cc
@@ -50,6 +50,18 @@ std::string ContentAddressMethod::render(HashAlgorithm ha) const
}, raw);
}
+FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
+{
+ return std::visit(overloaded {
+ [&](const TextIngestionMethod & th) {
+ return FileIngestionMethod::Flat;
+ },
+ [&](const FileIngestionMethod & fim) {
+ return fim;
+ }
+ }, raw);
+}
+
std::string ContentAddress::render() const
{
return std::visit(overloaded {
@@ -79,7 +91,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
prefix = *optPrefix;
}
- auto parseHashType_ = [&](){
+ auto parseHashAlgorithm_ = [&](){
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form ':', but found: %s", wholeInput);
@@ -90,7 +102,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
// Switch on prefix
if (prefix == "text") {
// No parsing of the ingestion method, "text" only support flat.
- HashAlgorithm hashAlgo = parseHashType_();
+ HashAlgorithm hashAlgo = parseHashAlgorithm_();
return {
TextIngestionMethod {},
std::move(hashAlgo),
@@ -100,7 +112,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive;
- HashAlgorithm hashAlgo = parseHashType_();
+ HashAlgorithm hashAlgo = parseHashAlgorithm_();
return {
std::move(method),
std::move(hashAlgo),
@@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con
}, ca.method.raw);
}
-std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt(
- ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept
+ContentAddressWithReferences ContentAddressWithReferences::fromParts(
+ ContentAddressMethod method, Hash hash, StoreReferences refs)
{
return std::visit(overloaded {
- [&](TextIngestionMethod _) -> std::optional {
+ [&](TextIngestionMethod _) -> ContentAddressWithReferences {
if (refs.self)
- return std::nullopt;
+ throw Error("self-reference not allowed with text hashing");
return ContentAddressWithReferences {
TextInfo {
.hash = std::move(hash),
@@ -190,7 +202,7 @@ std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPa
}
};
},
- [&](FileIngestionMethod m2) -> std::optional {
+ [&](FileIngestionMethod m2) -> ContentAddressWithReferences {
return ContentAddressWithReferences {
FixedOutputInfo {
.method = m2,
diff --git a/src/libstore/content-address.hh b/src/libstore/content-address.hh
index 05234da38..f0973412b 100644
--- a/src/libstore/content-address.hh
+++ b/src/libstore/content-address.hh
@@ -4,6 +4,7 @@
#include <variant>
#include "hash.hh"
#include "path.hh"
+#include "file-content-address.hh"
#include "comparator.hh"
#include "variant-wrapper.hh"
@@ -31,22 +32,6 @@ namespace nix {
*/
struct TextIngestionMethod : std::monostate { };
-/**
- * An enumeration of the main ways we can serialize file system
- * objects.
- */
-enum struct FileIngestionMethod : uint8_t {
- /**
- * Flat-file hashing. Directly ingest the contents of a single file
- */
- Flat = 0,
- /**
- * Recursive (or NAR) hashing. Serializes the file-system object in Nix
- * Archive format and ingest that
- */
- Recursive = 1
-};
-
/**
* Compute the prefix to the hash algorithm which indicates how the
* files were ingested.
@@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t {
std::string makeFileIngestionPrefix(FileIngestionMethod m);
/**
- * An enumeration of all the ways we can serialize file system objects.
+ * An enumeration of all the ways we can content-address store objects.
*
* Just the type of a content address. Combine with the hash itself, and
* we have a `ContentAddress` as defined below. Combine that, in turn,
@@ -102,7 +87,15 @@ struct ContentAddressMethod
*
* The rough inverse of `parse()`.
*/
- std::string render(HashAlgorithm ha) const;
+ std::string render(HashAlgorithm ht) const;
+
+ /**
+ * Get the underlying way to content-address file system objects.
+ *
+ * Different ways of hashing store objects may use the same method
+ * for hashing file system objects.
+ */
+ FileIngestionMethod getFileIngestionMethod() const;
};
@@ -116,11 +109,11 @@ struct ContentAddressMethod
* serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
* following forms:
*
- * - ‘text:sha256:<sha256 hash of file contents>’: For paths
- * computed by Store::makeTextPath() / Store::addTextToStore().
+ * - `TextIngestionMethod`:
+ * ‘text:sha256:<sha256 hash of file contents>’
*
- * - ‘fixed:<r?>:<ht>:<h>’: For paths computed by
- * Store::makeFixedOutputPath() / Store::addToStore().
+ * - `FixedIngestionMethod`:
+ * ‘fixed:<r?>:<ht>:<h>’
*/
struct ContentAddress
{
@@ -266,11 +259,12 @@ struct ContentAddressWithReferences
*
* @param refs References to other store objects or oneself.
*
- * Do note that not all combinations are supported; `nullopt` is
- * returns for invalid combinations.
+ * @note Not all combinations are supported; this is a
+ * *partial function*, and exceptions are thrown for invalid
+ * combinations.
*/
- static std::optional fromPartsOpt(
- ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept;
+ static ContentAddressWithReferences fromParts(
+ ContentAddressMethod method, Hash hash, StoreReferences refs);
ContentAddressMethod getMethod() const;
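The `fromPartsOpt` → `fromParts` change above turns an `std::nullopt` result into a thrown `Error`. A sketch of the new caller-visible behaviour (hypothetical caller, assuming the nix headers):

```cpp
#include "content-address.hh"
#include "error.hh"

using namespace nix;

// Hypothetical caller: the text-hashing + self-reference combination is
// invalid, so fromParts() now throws instead of returning std::nullopt.
void fromPartsExample(Hash hash)
{
    try {
        auto ca = ContentAddressWithReferences::fromParts(
            TextIngestionMethod {},
            hash,
            StoreReferences { .others = {}, .self = true });
    } catch (Error & e) {
        // "self-reference not allowed with text hashing"
    }
}
```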
diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc
index a112d6d31..923ea6447 100644
--- a/src/libstore/daemon.cc
+++ b/src/libstore/daemon.cc
@@ -403,22 +403,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
auto hashAlgo = hashAlgo_; // work around clang bug
FramedSource source(from);
- // TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
- return std::visit(overloaded {
- [&](const TextIngestionMethod &) {
- if (hashAlgo != HashAlgorithm::SHA256)
- throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
- name, printHashAlgo(hashAlgo));
- // We could stream this by changing Store
- std::string contents = source.drain();
- auto path = store->addTextToStore(name, contents, refs, repair);
- return store->queryPathInfo(path);
- },
- [&](const FileIngestionMethod & fim) {
- auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
- return store->queryPathInfo(path);
- },
- }, contentAddressMethod.raw);
+        // TODO these two steps are essentially RemoteStore::addCAToStore. Move them up to Store.
+ auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
+ return store->queryPathInfo(path);
}();
logger->stopWork();
@@ -496,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
std::string s = readString(from);
auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
logger->startWork();
- auto path = store->addTextToStore(suffix, s, refs, NoRepair);
+ auto path = ({
+ StringSource source { s };
+ store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
+ });
logger->stopWork();
to << store->printStorePath(path);
break;
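The daemon hunk above shows the replacement pattern used throughout this patch: `addTextToStore(name, s, refs, repair)` becomes a call to the unified `addToStoreFromDump()` with `TextIngestionMethod {}` and SHA-256. As a free-standing sketch (function name hypothetical, assuming the nix headers):

```cpp
#include "store-api.hh"
#include "serialise.hh"

using namespace nix;

// Sketch of the recurring migration: text-hashed additions now go through
// the single addToStoreFromDump() entry point.
StorePath addTextExample(
    Store & store,
    std::string_view name,
    std::string_view contents,
    const StorePathSet & references)
{
    StringSource source { contents };
    return store.addToStoreFromDump(
        source, name,
        TextIngestionMethod {}, HashAlgorithm::SHA256,
        references, NoRepair);
}
```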
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index c35150b57..8a7d660ff 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -143,8 +143,14 @@ StorePath writeDerivation(Store & store,
auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(store, false);
return readOnly || settings.readOnlyMode
- ? store.computeStorePathForText(suffix, contents, references)
- : store.addTextToStore(suffix, contents, references, repair);
+ ? store.makeFixedOutputPathFromCA(suffix, TextInfo {
+ .hash = hashString(HashAlgorithm::SHA256, contents),
+ .references = std::move(references),
+ })
+ : ({
+ StringSource s { contents };
+ store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
+ });
}
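The `({ ... })` form used here (and in several later hunks) is a GNU statement expression, a GCC/Clang extension this codebase already relies on: the block's last expression is the value of the whole expression, which keeps temporaries like `StringSource s` scoped to the one call that needs them. A minimal stand-alone illustration:

```cpp
#include <cassert>

int main()
{
    // The value of a GNU statement expression is its final expression.
    int x = ({
        int tmp = 20;
        tmp + 22;
    });
    assert(x == 42);
}
```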
diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc
index 821cda399..f52a309d1 100644
--- a/src/libstore/dummy-store.cc
+++ b/src/libstore/dummy-store.cc
@@ -58,13 +58,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
RepairFlag repair, CheckSigsFlag checkSigs) override
{ unsupported("addToStore"); }
- StorePath addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair) override
- { unsupported("addTextToStore"); }
-
void narFromPath(const StorePath & path, Sink & sink) override
{ unsupported("narFromPath"); }
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index e28615cdc..b35dc37a1 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -631,7 +631,7 @@ public:
At least one of the following conditions must be met
for Nix to accept copying a store object from another
- Nix store (such as a substituter):
+ Nix store (such as a [substituter](#conf-substituters)):
- the store object has been signed using a key in the trusted keys list
- the [`require-sigs`](#conf-require-sigs) option has been set to `false`
diff --git a/src/libstore/legacy-ssh-store.hh b/src/libstore/legacy-ssh-store.hh
index c40c256bb..c5a3ce677 100644
--- a/src/libstore/legacy-ssh-store.hh
+++ b/src/libstore/legacy-ssh-store.hh
@@ -59,21 +59,15 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
{ unsupported("queryPathFromHashPart"); }
StorePath addToStore(
- std::string_view name,
- const Path & srcPath,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- PathFilter & filter,
- RepairFlag repair,
- const StorePathSet & references) override
- { unsupported("addToStore"); }
-
- StorePath addTextToStore(
std::string_view name,
- std::string_view s,
+ SourceAccessor & accessor,
+ const CanonPath & srcPath,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
const StorePathSet & references,
+ PathFilter & filter,
RepairFlag repair) override
- { unsupported("addTextToStore"); }
+ { unsupported("addToStore"); }
private:
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 7e82bae28..df1de7752 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -13,6 +13,7 @@
#include "compression.hh"
#include "signals.hh"
#include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"
#include <iostream>
#include <algorithm>
@@ -1088,11 +1089,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (info.ca) {
auto & specified = *info.ca;
- auto actualHash = hashCAPath(
- specified.method,
- specified.hash.algo,
- info.path
- );
+ auto actualHash = ({
+ HashModuloSink caSink {
+ specified.hash.algo,
+ std::string { info.path.hashPart() },
+ };
+ PosixSourceAccessor accessor;
+ dumpPath(
+ *getFSAccessor(false),
+ CanonPath { printStorePath(info.path) },
+ caSink,
+ specified.method.getFileIngestionMethod());
+ ContentAddress {
+ .method = specified.method,
+ .hash = caSink.finish().first,
+ };
+ });
if (specified.hash != actualHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
@@ -1115,8 +1127,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
}
-StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath LocalStore::addToStoreFromDump(
+ Source & source0,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair)
{
/* For computing the store path. */
auto hashSink = std::make_unique<HashSink>(hashAlgo);
@@ -1166,25 +1183,21 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
delTempDir = std::make_unique(tempDir);
tempPath = tempDir + "/x";
- if (method == FileIngestionMethod::Recursive)
- restorePath(tempPath, bothSource);
- else
- writeFile(tempPath, bothSource);
+ restorePath(tempPath, bothSource, method.getFileIngestionMethod());
dump.clear();
}
auto [hash, size] = hashSink->finish();
- ContentAddressWithReferences desc = FixedOutputInfo {
- .method = method,
- .hash = hash,
- .references = {
+ auto desc = ContentAddressWithReferences::fromParts(
+ method,
+ hash,
+ {
.others = references,
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
.self = false,
- },
- };
+ });
auto dstPath = makeFixedOutputPathFromCA(name, desc);
@@ -1207,11 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
if (inMemory) {
StringSource dumpSource { dump };
- /* Restore from the NAR in memory. */
- if (method == FileIngestionMethod::Recursive)
- restorePath(realPath, dumpSource);
- else
- writeFile(realPath, dumpSource);
+ /* Restore from the buffer in memory. */
+ restorePath(realPath, dumpSource, method.getFileIngestionMethod());
} else {
/* Move the temporary path we restored above. */
moveFile(tempPath, realPath);
@@ -1247,58 +1257,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
}
-StorePath LocalStore::addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references, RepairFlag repair)
-{
- auto hash = hashString(HashAlgorithm::SHA256, s);
- auto dstPath = makeTextPath(name, TextInfo {
- .hash = hash,
- .references = references,
- });
-
- addTempRoot(dstPath);
-
- if (repair || !isValidPath(dstPath)) {
-
- auto realPath = Store::toRealPath(dstPath);
-
- PathLocks outputLock({realPath});
-
- if (repair || !isValidPath(dstPath)) {
-
- deletePath(realPath);
-
- autoGC();
-
- writeFile(realPath, s);
-
- canonicalisePathMetaData(realPath, {});
-
- StringSink sink;
- dumpString(s, sink);
- auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
-
- optimisePath(realPath, repair);
-
- ValidPathInfo info { dstPath, narHash };
- info.narSize = sink.s.size();
- info.references = references;
- info.ca = {
- .method = TextIngestionMethod {},
- .hash = hash,
- };
- registerValidPath(info);
- }
-
- outputLock.setDeletion(true);
- }
-
- return dstPath;
-}
-
-
/* Create a temporary directory in the store that won't be
garbage-collected until the returned FD is closed. */
std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
@@ -1389,7 +1347,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
for (auto & link : readDirectory(linksDir)) {
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
Path linkPath = linksDir + "/" + link.name;
- std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
+ PosixSourceAccessor accessor;
+ std::string hash = hashPath(
+ accessor, CanonPath { linkPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
if (hash != link.name) {
printError("link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
@@ -1696,42 +1657,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
}
}
-ContentAddress LocalStore::hashCAPath(
- const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
- const StorePath & path)
-{
- return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
-}
-
-ContentAddress LocalStore::hashCAPath(
- const ContentAddressMethod & method,
- const HashAlgorithm & hashAlgo,
- const Path & path,
- const std::string_view pathHash
-)
-{
- HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
- std::visit(overloaded {
- [&](const TextIngestionMethod &) {
- readFile(path, caSink);
- },
- [&](const FileIngestionMethod & m2) {
- switch (m2) {
- case FileIngestionMethod::Recursive:
- dumpPath(path, caSink);
- break;
- case FileIngestionMethod::Flat:
- readFile(path, caSink);
- break;
- }
- },
- }, method.raw);
- return ContentAddress {
- .method = method,
- .hash = caSink.finish().first,
- };
-}
-
void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
assert(drvPath.isDerivation());
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index ee605b5a2..ba56d3ead 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -177,12 +177,11 @@ public:
void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override;
- StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
-
- StorePath addTextToStore(
+ StorePath addToStoreFromDump(
+ Source & dump,
std::string_view name,
- std::string_view s,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
const StorePathSet & references,
RepairFlag repair) override;
@@ -350,19 +349,6 @@ private:
void signPathInfo(ValidPathInfo & info);
void signRealisation(Realisation &);
- // XXX: Make a generic `Store` method
- ContentAddress hashCAPath(
- const ContentAddressMethod & method,
- const HashAlgorithm & hashAlgo,
- const StorePath & path);
-
- ContentAddress hashCAPath(
- const ContentAddressMethod & method,
- const HashAlgorithm & hashAlgo,
- const Path & path,
- const std::string_view pathHash
- );
-
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
friend struct LocalDerivationGoal;
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index b395453d1..a494e6ecc 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -2,6 +2,7 @@
#include "globals.hh"
#include "signals.hh"
#include "posix-fs-canonicalise.hh"
+#include "posix-source-accessor.hh"
#include <cstdlib>
#include <cstring>
@@ -146,7 +147,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
- Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
+ Hash hash = ({
+ PosixSourceAccessor accessor;
+ hashPath(
+ accessor, CanonPath { path },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+ });
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
/* Check if this is a known hash. */
@@ -156,7 +162,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
if (pathExists(linkPath)) {
auto stLink = lstat(linkPath);
if (st.st_size != stLink.st_size
- || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
+ || (repair && hash != ({
+ PosixSourceAccessor accessor;
+ hashPath(
+ accessor, CanonPath { linkPath },
+ FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
+ })))
{
// XXX: Consider overwriting linkPath with our valid version.
warn("removing corrupted link '%s'", linkPath);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index dd6347468..4d0113594 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -502,8 +502,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
}
-StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
+StorePath RemoteStore::addToStoreFromDump(
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ RepairFlag repair)
{
return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
}
@@ -603,16 +608,6 @@ void RemoteStore::addMultipleToStore(
}
-StorePath RemoteStore::addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair)
-{
- StringSource source(s);
- return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
-}
-
void RemoteStore::registerDrvOutput(const Realisation & info)
{
auto conn(getConnection());
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index f2e34c1a3..87704985b 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -82,10 +82,15 @@ public:
RepairFlag repair);
/**
- * Add a content-addressable store path. Does not support references. `dump` will be drained.
+ * Add a content-addressable store path. `dump` will be drained.
*/
- StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
+ StorePath addToStoreFromDump(
+ Source & dump,
+ std::string_view name,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
+ HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+ const StorePathSet & references = StorePathSet(),
+ RepairFlag repair = NoRepair) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;
@@ -101,12 +106,6 @@ public:
RepairFlag repair,
CheckSigsFlag checkSigs) override;
- StorePath addTextToStore(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair) override;
-
void registerDrvOutput(const Realisation & info) override;
void queryRealisationUncached(const DrvOutput &,
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 7f35e74af..c2516afb5 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -205,25 +205,19 @@ StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
}
-StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
-{
- assert(info.hash.algo == HashAlgorithm::SHA256);
- return makeStorePath(
- makeType(*this, "text", StoreReferences {
- .others = info.references,
- .self = false,
- }),
- info.hash,
- name);
-}
-
-
StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
{
// New template
return std::visit(overloaded {
[&](const TextInfo & ti) {
- return makeTextPath(name, ti);
+ assert(ti.hash.algo == HashAlgorithm::SHA256);
+ return makeStorePath(
+ makeType(*this, "text", StoreReferences {
+ .others = ti.references,
+ .self = false,
+ }),
+ ti.hash,
+ name);
},
[&](const FixedOutputInfo & foi) {
return makeFixedOutputPath(name, foi);
@@ -232,54 +226,45 @@ StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
}
-std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
- Source & dump,
- std::string_view name,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- const StorePathSet & references) const
-{
- HashSink sink(hashAlgo);
- dump.drainInto(sink);
- auto h = sink.finish().first;
- FixedOutputInfo caInfo {
- .method = method,
- .hash = h,
- .references = {},
- };
- return std::make_pair(makeFixedOutputPath(name, caInfo), h);
-}
-
-
-StorePath StoreDirConfig::computeStorePathForText(
+std::pair<StorePath, Hash> StoreDirConfig::computeStorePath(
std::string_view name,
- std::string_view s,
- const StorePathSet & references) const
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ PathFilter & filter) const
{
- return makeTextPath(name, TextInfo {
- .hash = hashString(HashAlgorithm::SHA256, s),
- .references = references,
- });
+ auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
+ return {
+ makeFixedOutputPathFromCA(
+ name,
+ ContentAddressWithReferences::fromParts(
+ method,
+ h,
+ {
+ .others = references,
+ .self = false,
+ })),
+ h,
+ };
}
StorePath Store::addToStore(
- std::string_view name,
- const Path & _srcPath,
- FileIngestionMethod method,
- HashAlgorithm hashAlgo,
- PathFilter & filter,
- RepairFlag repair,
- const StorePathSet & references)
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method,
+ HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+ PathFilter & filter,
+ RepairFlag repair)
{
- Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
- if (method == FileIngestionMethod::Recursive)
- dumpPath(srcPath, sink, filter);
- else
- readFile(srcPath, sink);
+ dumpPath(accessor, path, sink, method.getFileIngestionMethod(), filter);
});
- return addToStoreFromDump(*source, name, method, hashAlgo, repair, references);
+ return addToStoreFromDump(*source, name, method, hashAlgo, references, repair);
}
void Store::addMultipleToStore(
@@ -404,9 +389,13 @@ digraph graphname {
fileSink -> caHashSink
}
*/
-ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
- FileIngestionMethod method, HashAlgorithm hashAlgo,
-    std::optional<Hash> expectedCAHash)
+ValidPathInfo Store::addToStoreSlow(
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & srcPath,
+ ContentAddressMethod method, HashAlgorithm hashAlgo,
+ const StorePathSet & references,
+    std::optional<Hash> expectedCAHash)
{
HashSink narHashSink { HashAlgorithm::SHA256 };
HashSink caHashSink { hashAlgo };
@@ -425,7 +414,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
srcPath. The fact that we use scratchpadSink as a temporary buffer here
is an implementation detail. */
auto fileSource = sinkToSource([&](Sink & scratchpadSink) {
- dumpPath(srcPath, scratchpadSink);
+ accessor.dumpPath(srcPath, scratchpadSink);
});
/* tapped provides the same data as fileSource, but we also write all the
@@ -433,9 +422,11 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
TeeSource tapped { *fileSource, narSink };
NullParseSink blank;
- auto & parseSink = method == FileIngestionMethod::Flat
+ auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
? (ParseSink &) fileSink
- : (ParseSink &) blank;
+ : method.getFileIngestionMethod() == FileIngestionMethod::Recursive
+ ? (ParseSink &) blank
+          : (abort(), (ParseSink &)*(ParseSink *)nullptr); // unreachable: both cases handled above
/* The information that flows from tapped (besides being replicated in
narSink), is now put in parseSink. */
@@ -452,21 +443,24 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
if (expectedCAHash && expectedCAHash != hash)
throw Error("hash mismatch for '%s'", srcPath);
+
ValidPathInfo info {
*this,
name,
- FixedOutputInfo {
- .method = method,
- .hash = hash,
- .references = {},
- },
+ ContentAddressWithReferences::fromParts(
+ method,
+ hash,
+ {
+ .others = references,
+ .self = false,
+ }),
narHash,
};
info.narSize = narSize;
if (!isValidPath(info.path)) {
auto source = sinkToSource([&](Sink & scratchpadSink) {
- dumpPath(srcPath, scratchpadSink);
+ accessor.dumpPath(srcPath, scratchpadSink);
});
addToStore(info, *source);
}
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 2c883ce97..96a7ebd7b 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -427,22 +427,28 @@ public:
* libutil/archive.hh).
*/
virtual StorePath addToStore(
- std::string_view name,
- const Path & srcPath,
- FileIngestionMethod method = FileIngestionMethod::Recursive,
- HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
- PathFilter & filter = defaultPathFilter,
- RepairFlag repair = NoRepair,
- const StorePathSet & references = StorePathSet());
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
+ HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+ const StorePathSet & references = StorePathSet(),
+ PathFilter & filter = defaultPathFilter,
+ RepairFlag repair = NoRepair);
/**
* Copy the contents of a path to the store and register the
* validity the resulting path, using a constant amount of
* memory.
*/
- ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
-        std::optional<Hash> expectedCAHash = {});
+ ValidPathInfo addToStoreSlow(
+ std::string_view name,
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
+ HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+ const StorePathSet & references = StorePathSet(),
+        std::optional<Hash> expectedCAHash = {});
/**
* Like addToStore(), but the contents of the path are contained
@@ -453,20 +459,14 @@ public:
*
* \todo remove?
*/
- virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
- FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
- const StorePathSet & references = StorePathSet())
- { unsupported("addToStoreFromDump"); }
-
- /**
- * Like addToStore, but the contents written to the output path is a
- * regular file containing the given string.
- */
- virtual StorePath addTextToStore(
+ virtual StorePath addToStoreFromDump(
+ Source & dump,
std::string_view name,
- std::string_view s,
- const StorePathSet & references,
- RepairFlag repair = NoRepair) = 0;
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
+ HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
+ const StorePathSet & references = StorePathSet(),
+ RepairFlag repair = NoRepair)
+ { unsupported("addToStoreFromDump"); }
/**
* Add a mapping indicating that `deriver!outputName` maps to the output path
diff --git a/src/libstore/store-dir-config.hh b/src/libstore/store-dir-config.hh
index 8dafca096..7ca8c2665 100644
--- a/src/libstore/store-dir-config.hh
+++ b/src/libstore/store-dir-config.hh
@@ -86,41 +86,20 @@ struct StoreDirConfig : public Config
StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const;
- StorePath makeTextPath(std::string_view name, const TextInfo & info) const;
-
StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const;
/**
- * Read-only variant of addToStoreFromDump(). It returns the store
- * path to which a NAR or flat file would be written.
+ * Read-only variant of addToStore(). It returns the store
+ * path for the given file system object.
*/
- std::pair computeStorePathFromDump(
- Source & dump,
+    std::pair<StorePath, Hash> computeStorePath(
std::string_view name,
- FileIngestionMethod method = FileIngestionMethod::Recursive,
+ SourceAccessor & accessor,
+ const CanonPath & path,
+ ContentAddressMethod method = FileIngestionMethod::Recursive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
- const StorePathSet & references = {}) const;
-
- /**
- * Preparatory part of addTextToStore().
- *
- * !!! Computation of the path should take the references given to
- * addTextToStore() into account, otherwise we have a (relatively
- * minor) security hole: a caller can register a source file with
- * bogus references. If there are too many references, the path may
- * not be garbage collected when it has to be (not really a problem,
- * the caller could create a root anyway), or it may be garbage
- * collected when it shouldn't be (more serious).
- *
- * Hashing the references would solve this (bogus references would
- * simply yield a different store path, so other users wouldn't be
- * affected), but it has some backwards compatibility issues (the
- * hashing scheme changes), so I'm not doing that for now.
- */
- StorePath computeStorePathForText(
- std::string_view name,
- std::string_view s,
- const StorePathSet & references) const;
+ const StorePathSet & references = {},
+ PathFilter & filter = defaultPathFilter) const;
};
}
diff --git a/src/libutil/error.cc b/src/libutil/error.cc
index 72c346cb5..bc0194d59 100644
--- a/src/libutil/error.cc
+++ b/src/libutil/error.cc
@@ -173,10 +173,9 @@ static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").has_value();
static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<Pos> & pos) {
bool hasPos = pos && *pos;
if (hasPos) {
- oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
+ oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
if (auto loc = pos->getCodeLines()) {
- oss << "\n";
printCodeLines(oss, "", *pos, *loc);
oss << "\n";
}
diff --git a/src/libutil/file-content-address.cc b/src/libutil/file-content-address.cc
new file mode 100644
index 000000000..9917986f6
--- /dev/null
+++ b/src/libutil/file-content-address.cc
@@ -0,0 +1,49 @@
+#include "file-content-address.hh"
+#include "archive.hh"
+
+namespace nix {
+
+void dumpPath(
+ SourceAccessor & accessor, const CanonPath & path,
+ Sink & sink,
+ FileIngestionMethod method,
+ PathFilter & filter)
+{
+ switch (method) {
+ case FileIngestionMethod::Flat:
+ accessor.readFile(path, sink);
+ break;
+ case FileIngestionMethod::Recursive:
+ accessor.dumpPath(path, sink, filter);
+ break;
+ }
+}
+
+
+void restorePath(
+ const Path & path,
+ Source & source,
+ FileIngestionMethod method)
+{
+ switch (method) {
+ case FileIngestionMethod::Flat:
+ writeFile(path, source);
+ break;
+ case FileIngestionMethod::Recursive:
+ restorePath(path, source);
+ break;
+ }
+}
+
+
+HashResult hashPath(
+ SourceAccessor & accessor, const CanonPath & path,
+ FileIngestionMethod method, HashAlgorithm ht,
+ PathFilter & filter)
+{
+ HashSink sink { ht };
+ dumpPath(accessor, path, sink, method, filter);
+ return sink.finish();
+}
+
+}
diff --git a/src/libutil/file-content-address.hh b/src/libutil/file-content-address.hh
new file mode 100644
index 000000000..8e93f5847
--- /dev/null
+++ b/src/libutil/file-content-address.hh
@@ -0,0 +1,56 @@
+#pragma once
+///@file
+
+#include "source-accessor.hh"
+#include "fs-sink.hh"
+#include "util.hh"
+
+namespace nix {
+
+/**
+ * An enumeration of the main ways we can serialize file system
+ * objects.
+ */
+enum struct FileIngestionMethod : uint8_t {
+ /**
+ * Flat-file hashing. Directly ingest the contents of a single file
+ */
+ Flat = 0,
+ /**
+ * Recursive (or NAR) hashing. Serializes the file-system object in
+ * Nix Archive format and ingest that.
+ */
+ Recursive = 1,
+};
+
+/**
+ * Dump a serialization of the given file system object.
+ */
+void dumpPath(
+ SourceAccessor & accessor, const CanonPath & path,
+ Sink & sink,
+ FileIngestionMethod method,
+ PathFilter & filter = defaultPathFilter);
+
+/**
+ * Restore a serialization of the given file system object.
+ *
+ * @TODO use an arbitrary `ParseSink`.
+ */
+void restorePath(
+ const Path & path,
+ Source & source,
+ FileIngestionMethod method);
+
+/**
+ * Compute the hash of the given file system object according to the
+ * given method.
+ *
+ * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ */
+HashResult hashPath(
+ SourceAccessor & accessor, const CanonPath & path,
+ FileIngestionMethod method, HashAlgorithm ht,
+ PathFilter & filter = defaultPathFilter);
+
+}
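Taken together, the two new files provide method-parameterised counterparts of `dumpPath`/`restorePath`/`hashPath`. A usage sketch (function name hypothetical, assuming the headers above; `PosixSourceAccessor` is the accessor used by the call sites in this patch):

```cpp
#include "file-content-address.hh"
#include "posix-source-accessor.hh"

using namespace nix;

// Sketch: hash the same file system object under both ingestion methods.
// Per the doc comment, hashPath is (essentially)
// hashString(ht, dumpPath(path)) for the chosen method.
void hashBothWays(const CanonPath & path)
{
    PosixSourceAccessor accessor;
    auto [flatHash, flatSize] = hashPath(
        accessor, path, FileIngestionMethod::Flat, HashAlgorithm::SHA256);
    auto [narHash, narSize] = hashPath(
        accessor, path, FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
    // flatHash covers only the file's bytes; narHash covers the NAR
    // serialisation of the whole object (directories, symlinks, etc.).
}
```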
diff --git a/src/libutil/hash.cc b/src/libutil/hash.cc
index 30456ae5c..502afbda2 100644
--- a/src/libutil/hash.cc
+++ b/src/libutil/hash.cc
@@ -367,15 +367,6 @@ HashResult HashSink::currentHash()
}
-HashResult hashPath(
- HashAlgorithm ha, const Path & path, PathFilter & filter)
-{
- HashSink sink(ha);
- dumpPath(path, sink, filter);
- return sink.finish();
-}
-
-
Hash compressHash(const Hash & hash, unsigned int newSize)
{
Hash h(hash.algo);
diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh
index 7bed9e2bd..2fe9a53f5 100644
--- a/src/libutil/hash.hh
+++ b/src/libutil/hash.hh
@@ -168,14 +168,11 @@ Hash hashString(HashAlgorithm ha, std::string_view s);
Hash hashFile(HashAlgorithm ha, const Path & path);
/**
- * Compute the hash of the given path, serializing as a Nix Archive and
- * then hashing that.
+ * The final hash and the number of bytes digested.
*
- * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
+ * @todo Convert to proper struct
*/
typedef std::pair<Hash, uint64_t> HashResult;
-HashResult hashPath(HashAlgorithm ha, const Path & path,
- PathFilter & filter = defaultPathFilter);
/**
* Compress a hash to the specified number of bytes by cyclically
diff --git a/src/libutil/url-parts.hh b/src/libutil/url-parts.hh
index 07bc8d0cd..1ddc6a536 100644
--- a/src/libutil/url-parts.hh
+++ b/src/libutil/url-parts.hh
@@ -19,13 +19,15 @@ const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncod
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
+const static std::string fragmentRegex = "(?:" + pcharRegex + "|[/? \"^])*";
const static std::string segmentRegex = "(?:" + pcharRegex + "*)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
/// A Git ref (i.e. branch or tag name).
/// \todo check that this is correct.
-const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@-]*";
+/// This regex is incomplete. See https://git-scm.com/docs/git-check-ref-format
+const static std::string refRegexS = "[a-zA-Z0-9@][a-zA-Z0-9_.\\/@+-]*";
extern std::regex refRegex;
/// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
diff --git a/src/libutil/url.cc b/src/libutil/url.cc
index 152c06d8e..c6561441d 100644
--- a/src/libutil/url.cc
+++ b/src/libutil/url.cc
@@ -16,7 +16,7 @@ ParsedURL parseURL(const std::string & url)
"((" + schemeNameRegex + "):"
+ "(?:(?://(" + authorityRegex + ")(" + absPathRegex + "))|(/?" + pathRegex + ")))"
+ "(?:\\?(" + queryRegex + "))?"
- + "(?:#(" + queryRegex + "))?",
+ + "(?:#(" + fragmentRegex + "))?",
std::regex::ECMAScript);
std::smatch match;
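The effect of the new `fragmentRegex` is that characters like `^` are now accepted in URL fragments, which flake output specifiers rely on. A sketch (the example URL is illustrative):

```cpp
#include "url.hh"

#include <cassert>

using namespace nix;

// Sketch: previously the fragment reused queryRegex; with the dedicated
// fragmentRegex, a '^' in the fragment parses and is preserved verbatim.
void fragmentExample()
{
    auto url = parseURL("github:NixOS/nixpkgs#hello^out");
    assert(url.fragment == "hello^out");
}
```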
diff --git a/src/nix-env/user-env.cc b/src/nix-env/user-env.cc
index 34f6bd005..5d01fbf10 100644
--- a/src/nix-env/user-env.cc
+++ b/src/nix-env/user-env.cc
@@ -104,10 +104,15 @@ bool createUserEnv(EvalState & state, DrvInfos & elems,
/* Also write a copy of the list of user environment elements to
the store; we need it for future modifications of the
environment. */
- std::ostringstream str;
- manifest.print(state.symbols, str, true);
- auto manifestFile = state.store->addTextToStore("env-manifest.nix",
- str.str(), references);
+ auto manifestFile = ({
+ std::ostringstream str;
+ manifest.print(state.symbols, str, true);
+        // TODO with C++20 we can use str.view() instead and avoid a copy.
+ std::string str2 = str.str();
+ StringSource source { str2 };
+ state.store->addToStoreFromDump(
+ source, "env-manifest.nix", TextIngestionMethod {}, HashAlgorithm::SHA256, references);
+ });
/* Get the environment builder expression. */
Value envBuilder;
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index d361dc0ac..0a0a3ab1a 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -13,6 +13,7 @@
#include "shared.hh"
#include "graphml.hh"
#include "legacy.hh"
+#include "posix-source-accessor.hh"
#include "path-with-outputs.hh"
#include "posix-fs-canonicalise.hh"
@@ -175,8 +176,12 @@ static void opAdd(Strings opFlags, Strings opArgs)
{
if (!opFlags.empty()) throw UsageError("unknown flag");
+ PosixSourceAccessor accessor;
for (auto & i : opArgs)
- cout << fmt("%s\n", store->printStorePath(store->addToStore(std::string(baseNameOf(i)), i)));
+ cout << fmt("%s\n", store->printStorePath(store->addToStore(
+ std::string(baseNameOf(i)),
+ accessor,
+ CanonPath::fromCwd(i))));
}
@@ -196,8 +201,14 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
HashAlgorithm hashAlgo = parseHashAlgo(opArgs.front());
opArgs.pop_front();
+ PosixSourceAccessor accessor;
for (auto & i : opArgs)
- std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(baseNameOf(i), i, method, hashAlgo).path));
+ std::cout << fmt("%s\n", store->printStorePath(store->addToStoreSlow(
+ baseNameOf(i),
+ accessor,
+ CanonPath::fromCwd(i),
+ method,
+ hashAlgo).path));
}
@@ -541,7 +552,10 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
if (canonicalise)
canonicalisePathMetaData(store->printStorePath(info->path), {});
if (!hashGiven) {
- HashResult hash = hashPath(HashAlgorithm::SHA256, store->printStorePath(info->path));
+        HashResult hash = hashPath(
+            *store->getFSAccessor(false), CanonPath { store->printStorePath(info->path) },
+            FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
info->narHash = hash.first;
info->narSize = hash.second;
}
diff --git a/src/nix/add-to-store.cc b/src/nix/add-to-store.cc
index 02de796b5..64a43ecfa 100644
--- a/src/nix/add-to-store.cc
+++ b/src/nix/add-to-store.cc
@@ -2,6 +2,7 @@
#include "common-args.hh"
#include "store-api.hh"
#include "archive.hh"
+#include "posix-source-accessor.hh"
using namespace nix;
@@ -20,7 +21,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
Path path;
std::optional<std::string> namePart;
- FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive;
+ ContentAddressMethod caMethod = FileIngestionMethod::Recursive;
CmdAddToStore()
{
@@ -48,7 +49,7 @@ struct CmdAddToStore : MixDryRun, StoreCommand
)",
.labels = {"hash-mode"},
.handler = {[this](std::string s) {
- this->ingestionMethod = parseIngestionMethod(s);
+ this->caMethod = parseIngestionMethod(s);
}},
});
}
@@ -57,36 +58,17 @@ struct CmdAddToStore : MixDryRun, StoreCommand
{
if (!namePart) namePart = baseNameOf(path);
- StringSink sink;
- dumpPath(path, sink);
+ PosixSourceAccessor accessor;
- auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
+ auto path2 = CanonPath::fromCwd(path);
- Hash hash = narHash;
- if (ingestionMethod == FileIngestionMethod::Flat) {
- HashSink hsink(HashAlgorithm::SHA256);
- readFile(path, hsink);
- hash = hsink.finish().first;
- }
+ auto storePath = dryRun
+ ? store->computeStorePath(
+ *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).first
+ : store->addToStoreSlow(
+ *namePart, accessor, path2, caMethod, HashAlgorithm::SHA256, {}).path;
- ValidPathInfo info {
- *store,
- std::move(*namePart),
- FixedOutputInfo {
- .method = std::move(ingestionMethod),
- .hash = std::move(hash),
- .references = {},
- },
- narHash,
- };
- info.narSize = sink.s.size();
-
- if (!dryRun) {
- auto source = StringSource(sink.s);
- store->addToStore(info, source);
- }
-
- logger->cout("%s", store->printStorePath(info.path));
+ logger->cout("%s", store->printStorePath(storePath));
}
};
@@ -110,7 +92,7 @@ struct CmdAddFile : CmdAddToStore
{
CmdAddFile()
{
- ingestionMethod = FileIngestionMethod::Flat;
+ caMethod = FileIngestionMethod::Flat;
}
std::string description() override
diff --git a/src/nix/develop.cc b/src/nix/develop.cc
index 606b044b0..8db2de491 100644
--- a/src/nix/develop.cc
+++ b/src/nix/develop.cc
@@ -223,7 +223,11 @@ static StorePath getDerivationEnvironment(ref<Store> store, ref<Store> evalStore)
if (builder != "bash")
throw Error("'nix develop' only works on derivations that use 'bash' as their builder");
- auto getEnvShPath = evalStore->addTextToStore("get-env.sh", getEnvSh, {});
+ auto getEnvShPath = ({
+ StringSource source { getEnvSh };
+ evalStore->addToStoreFromDump(
+ source, "get-env.sh", TextIngestionMethod {}, HashAlgorithm::SHA256, {});
+ });
drv.args = {store->printStorePath(getEnvShPath)};
diff --git a/src/nix/hash.cc b/src/nix/hash.cc
index 0bba3b7d2..83694306e 100644
--- a/src/nix/hash.cc
+++ b/src/nix/hash.cc
@@ -5,6 +5,7 @@
#include "shared.hh"
#include "references.hh"
#include "archive.hh"
+#include "posix-source-accessor.hh"
using namespace nix;
@@ -88,14 +89,8 @@ struct CmdHashBase : Command
else
hashSink = std::make_unique<HashSink>(ha);
- switch (mode) {
- case FileIngestionMethod::Flat:
- readFile(path, *hashSink);
- break;
- case FileIngestionMethod::Recursive:
- dumpPath(path, *hashSink);
- break;
- }
+ PosixSourceAccessor accessor;
+ dumpPath(accessor, CanonPath::fromCwd(path), *hashSink, mode);
Hash h = hashSink->finish().first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
diff --git a/src/nix/prefetch.cc b/src/nix/prefetch.cc
index bbfeb8aa4..b5d619006 100644
--- a/src/nix/prefetch.cc
+++ b/src/nix/prefetch.cc
@@ -9,6 +9,7 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "legacy.hh"
+#include "posix-source-accessor.hh"
#include <nlohmann/json.hpp>
@@ -122,7 +123,11 @@ std::tuple<StorePath, Hash> prefetchFile(
Activity act(*logger, lvlChatty, actUnknown,
fmt("adding '%s' to the store", url));
- auto info = store->addToStoreSlow(*name, tmpFile, ingestionMethod, hashAlgo, expectedHash);
+ PosixSourceAccessor accessor;
+ auto info = store->addToStoreSlow(
+ *name,
+ accessor, CanonPath::fromCwd(tmpFile),
+ ingestionMethod, hashAlgo, {}, expectedHash);
storePath = info.path;
assert(info.ca);
hash = info.ca->hash;
diff --git a/src/nix/profile-list.md b/src/nix/profile-list.md
index 5d7fcc0ec..facfdf0d6 100644
--- a/src/nix/profile-list.md
+++ b/src/nix/profile-list.md
@@ -6,12 +6,14 @@ R""(
```console
# nix profile list
+ Name: gdb
Index: 0
Flake attribute: legacyPackages.x86_64-linux.gdb
Original flake URL: flake:nixpkgs
Locked flake URL: github:NixOS/nixpkgs/7b38b03d76ab71bdc8dc325e3f6338d984cc35ca
Store paths: /nix/store/indzcw5wvlhx6vwk7k4iq29q15chvr3d-gdb-11.1
+ Name: blender-bin
Index: 1
Flake attribute: packages.x86_64-linux.default
Original flake URL: flake:blender-bin
@@ -26,7 +28,7 @@ R""(
# nix build github:edolstra/nix-warez/91f2ffee657bf834e4475865ae336e2379282d34?dir=blender#packages.x86_64-linux.default
```
- will build the package with index 1 shown above.
+ will build the package `blender-bin` shown above.
# Description
@@ -34,10 +36,14 @@ This command shows what packages are currently installed in a
profile. For each installed package, it shows the following
information:
-* `Index`: An integer that can be used to unambiguously identify the
+* `Name`: A unique name used to unambiguously identify the
package in invocations of `nix profile remove` and `nix profile
upgrade`.
+* `Index`: An integer that can be used to unambiguously identify the
+ package in invocations of `nix profile remove` and `nix profile upgrade`.
+ (*Deprecated, will be removed in a future version in favor of `Name`.*)
+
* `Flake attribute`: The flake output attribute path that provides the
package (e.g. `packages.x86_64-linux.hello`).
diff --git a/src/nix/profile-remove.md b/src/nix/profile-remove.md
index ba85441d8..c994b79bd 100644
--- a/src/nix/profile-remove.md
+++ b/src/nix/profile-remove.md
@@ -2,18 +2,19 @@ R""(
# Examples
-* Remove a package by position:
+* Remove a package by name:
+
+ ```console
+ # nix profile remove hello
+ ```
+
+* Remove a package by index
+ *(deprecated, will be removed in a future version)*:
```console
# nix profile remove 3
```
-* Remove a package by attribute path:
-
- ```console
- # nix profile remove packages.x86_64-linux.hello
- ```
-
* Remove all packages:
```console
diff --git a/src/nix/profile-upgrade.md b/src/nix/profile-upgrade.md
index 39cca428b..47103edfc 100644
--- a/src/nix/profile-upgrade.md
+++ b/src/nix/profile-upgrade.md
@@ -9,18 +9,16 @@ R""(
# nix profile upgrade '.*'
```
-* Upgrade a specific package:
+* Upgrade a specific package by name:
```console
- # nix profile upgrade packages.x86_64-linux.hello
+ # nix profile upgrade hello
```
-* Upgrade a specific profile element by number:
+* Upgrade a specific package by index
+ *(deprecated, will be removed in a future version)*:
```console
- # nix profile list
- 0 flake:nixpkgs#legacyPackages.x86_64-linux.spotify …
-
# nix profile upgrade 0
```
diff --git a/src/nix/profile.cc b/src/nix/profile.cc
index 9d9492da9..abd56e4f4 100644
--- a/src/nix/profile.cc
+++ b/src/nix/profile.cc
@@ -10,6 +10,8 @@
#include "../nix-env/user-env.hh"
#include "profiles.hh"
#include "names.hh"
+#include "url.hh"
+#include "flake/url-name.hh"
#include <nlohmann/json.hpp>
#include <regex>
@@ -43,6 +45,7 @@ const int defaultPriority = 5;
struct ProfileElement
{
StorePathSet storePaths;
+ std::string name;
std::optional<ProfileElementSource> source;
bool active = true;
int priority = defaultPriority;
@@ -116,6 +119,8 @@ struct ProfileManifest
if (pathExists(manifestPath)) {
auto json = nlohmann::json::parse(readFile(manifestPath));
+        /* Keep track of names already seen, to prevent duplicates. */
+        std::set<std::string> foundNames;
auto version = json.value("version", 0);
std::string sUrl;
@@ -149,6 +154,25 @@ struct ProfileManifest
e["outputs"].get()
};
}
+
+ std::string nameCandidate = element.identifier();
+ if (e.contains("name")) {
+ nameCandidate = e["name"];
+ }
+ else if (element.source) {
+ auto url = parseURL(element.source->to_string());
+ auto name = getNameFromURL(url);
+ if (name)
+ nameCandidate = *name;
+ }
+
+ auto finalName = nameCandidate;
+ for (int i = 1; foundNames.contains(finalName); ++i) {
+ finalName = nameCandidate + std::to_string(i);
+ }
+ element.name = finalName;
+ foundNames.insert(element.name);
+
elements.emplace_back(std::move(element));
}
}
@@ -163,6 +187,7 @@ struct ProfileManifest
for (auto & drvInfo : drvInfos) {
ProfileElement element;
element.storePaths = {drvInfo.queryOutPath()};
+ element.name = element.identifier();
elements.emplace_back(std::move(element));
}
}
@@ -451,15 +476,25 @@ public:
{
std::vector<Matcher> res;
+ auto anyIndexMatchers = false;
+
for (auto & s : _matchers) {
-            if (auto n = string2Int<size_t>(s))
+            if (auto n = string2Int<size_t>(s)) {
res.push_back(*n);
+ anyIndexMatchers = true;
+ }
else if (store->isStorePath(s))
res.push_back(s);
else
res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
}
+ if (anyIndexMatchers) {
+ warn("Indices are deprecated and will be removed in a future version!\n"
+ " Refer to packages by their `Name` as printed by `nix profile list`.\n"
+ " See https://github.com/NixOS/nix/issues/9171 for more information.");
+ }
+
return res;
}
@@ -471,8 +506,7 @@ public:
} else if (auto path = std::get_if<Path>(&matcher)) {
if (element.storePaths.count(store.parseStorePath(*path))) return true;
} else if (auto regex = std::get_if<RegexPattern>(&matcher)) {
- if (element.source
- && std::regex_match(element.source->attrPath, regex->reg))
+ if (std::regex_match(element.name, regex->reg))
return true;
}
}
@@ -556,62 +590,83 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProfile
Installables installables;
std::vector<size_t> indices;
+ auto matchedCount = 0;
auto upgradedCount = 0;
for (size_t i = 0; i < manifest.elements.size(); ++i) {
auto & element(manifest.elements[i]);
- if (element.source
- && !element.source->originalRef.input.isLocked()
- && matches(*store, element, i, matchers))
- {
- upgradedCount++;
-
- Activity act(*logger, lvlChatty, actUnknown,
- fmt("checking '%s' for updates", element.source->attrPath));
-
-            auto installable = make_ref<InstallableFlake>(
- this,
- getEvalState(),
- FlakeRef(element.source->originalRef),
- "",
- element.source->outputs,
- Strings{element.source->attrPath},
- Strings{},
- lockFlags);
-
- auto derivedPaths = installable->toDerivedPaths();
- if (derivedPaths.empty()) continue;
-            auto * infop = dynamic_cast<ExtraPathInfoFlake *>(&*derivedPaths[0].info);
- // `InstallableFlake` should use `ExtraPathInfoFlake`.
- assert(infop);
- auto & info = *infop;
-
- if (element.source->lockedRef == info.flake.lockedRef) continue;
-
- printInfo("upgrading '%s' from flake '%s' to '%s'",
- element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
-
- element.source = ProfileElementSource {
- .originalRef = installable->flakeRef,
- .lockedRef = info.flake.lockedRef,
- .attrPath = info.value.attrPath,
- .outputs = installable->extendedOutputsSpec,
- };
-
- installables.push_back(installable);
- indices.push_back(i);
+ if (!matches(*store, element, i, matchers)) {
+ continue;
}
+
+ matchedCount++;
+
+ if (!element.source) {
+ warn(
+ "Found package '%s', but it was not installed from a flake, so it can't be checked for upgrades!",
+ element.identifier()
+ );
+ continue;
+ }
+ if (element.source->originalRef.input.isLocked()) {
+ warn(
+ "Found package '%s', but it was installed from a locked flake reference so it can't be upgraded!",
+ element.identifier()
+ );
+ continue;
+ }
+
+ upgradedCount++;
+
+ Activity act(*logger, lvlChatty, actUnknown,
+ fmt("checking '%s' for updates", element.source->attrPath));
+
+        auto installable = make_ref<InstallableFlake>(
+ this,
+ getEvalState(),
+ FlakeRef(element.source->originalRef),
+ "",
+ element.source->outputs,
+ Strings{element.source->attrPath},
+ Strings{},
+ lockFlags);
+
+ auto derivedPaths = installable->toDerivedPaths();
+ if (derivedPaths.empty()) continue;
+        auto * infop = dynamic_cast<ExtraPathInfoFlake *>(&*derivedPaths[0].info);
+ // `InstallableFlake` should use `ExtraPathInfoFlake`.
+ assert(infop);
+ auto & info = *infop;
+
+ if (element.source->lockedRef == info.flake.lockedRef) continue;
+
+ printInfo("upgrading '%s' from flake '%s' to '%s'",
+ element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
+
+ element.source = ProfileElementSource {
+ .originalRef = installable->flakeRef,
+ .lockedRef = info.flake.lockedRef,
+ .attrPath = info.value.attrPath,
+ .outputs = installable->extendedOutputsSpec,
+ };
+
+ installables.push_back(installable);
+ indices.push_back(i);
}
if (upgradedCount == 0) {
- for (auto & matcher : matchers) {
-            if (const size_t * index = std::get_if<size_t>(&matcher)){
-                warn("'%d' is not a valid index", *index);
-            } else if (const Path * path = std::get_if<Path>(&matcher)){
-                warn("'%s' does not match any paths", *path);
-            } else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)){
- warn("'%s' does not match any packages", regex->pattern);
+ if (matchedCount == 0) {
+ for (auto & matcher : matchers) {
+                if (const size_t * index = std::get_if<size_t>(&matcher)){
+                    warn("'%d' is not a valid index", *index);
+                } else if (const Path * path = std::get_if<Path>(&matcher)){
+                    warn("'%s' does not match any paths", *path);
+                } else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)){
+ warn("'%s' does not match any packages", regex->pattern);
+ }
}
+ } else {
+ warn("Found some packages but none of them could be upgraded.");
}
warn ("Use 'nix profile list' to see the current profile.");
}
@@ -657,9 +712,10 @@ struct CmdProfileList : virtual EvalCommand, virtual StoreCommand, MixDefaultProfile
for (size_t i = 0; i < manifest.elements.size(); ++i) {
auto & element(manifest.elements[i]);
if (i) logger->cout("");
- logger->cout("Index: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
- i,
+ logger->cout("Name: " ANSI_BOLD "%s" ANSI_NORMAL "%s",
+ element.name,
element.active ? "" : " " ANSI_RED "(inactive)" ANSI_NORMAL);
+ logger->cout("Index: %s", i);
if (element.source) {
logger->cout("Flake attribute: %s%s", element.source->attrPath, element.source->outputs.to_string());
logger->cout("Original flake URL: %s", element.source->originalRef.to_string());
diff --git a/tests/functional/lang/eval-fail-abort.err.exp b/tests/functional/lang/eval-fail-abort.err.exp
index 345232d3f..20e7b9e18 100644
--- a/tests/functional/lang/eval-fail-abort.err.exp
+++ b/tests/functional/lang/eval-fail-abort.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'abort' builtin
-
at /pwd/lang/eval-fail-abort.nix:1:14:
-
1| if true then abort "this should fail" else 1
| ^
2|
diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp
index ad91a22aa..37e0bd9ee 100644
--- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp
+++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-empty-context.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
-
at /pwd/lang/eval-fail-addDrvOutputDependencies-empty-context.nix:1:1:
-
1| builtins.addDrvOutputDependencies ""
| ^
2|
diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp
index bb389db4e..6828e03c8 100644
--- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp
+++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
-
at /pwd/lang/eval-fail-addDrvOutputDependencies-multi-elem-context.nix:18:4:
-
17|
18| in builtins.addDrvOutputDependencies combo-path
| ^
diff --git a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp
index 070381118..72b5e6368 100644
--- a/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp
+++ b/tests/functional/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'addDrvOutputDependencies' builtin
-
at /pwd/lang/eval-fail-addDrvOutputDependencies-wrong-element-kind.nix:9:4:
-
8|
9| in builtins.addDrvOutputDependencies drv.outPath
| ^
diff --git a/tests/functional/lang/eval-fail-assert.err.exp b/tests/functional/lang/eval-fail-assert.err.exp
index aeecd8167..0656ec81c 100644
--- a/tests/functional/lang/eval-fail-assert.err.exp
+++ b/tests/functional/lang/eval-fail-assert.err.exp
@@ -1,35 +1,27 @@
error:
… while evaluating the attribute 'body'
-
at /pwd/lang/eval-fail-assert.nix:4:3:
-
3|
4| body = x "x";
| ^
5| }
… from call site
-
at /pwd/lang/eval-fail-assert.nix:4:10:
-
3|
4| body = x "x";
| ^
5| }
… while calling 'x'
-
at /pwd/lang/eval-fail-assert.nix:2:7:
-
1| let {
2| x = arg: assert arg == "y"; 123;
| ^
3|
error: assertion '(arg == "y")' failed
-
at /pwd/lang/eval-fail-assert.nix:2:12:
-
1| let {
2| x = arg: assert arg == "y"; 123;
| ^
diff --git a/tests/functional/lang/eval-fail-attr-name-type.err.exp b/tests/functional/lang/eval-fail-attr-name-type.err.exp
index 5f9a073dd..23cceb58a 100644
--- a/tests/functional/lang/eval-fail-attr-name-type.err.exp
+++ b/tests/functional/lang/eval-fail-attr-name-type.err.exp
@@ -1,17 +1,13 @@
error:
… while evaluating the attribute 'puppy."${key}"'
-
at /pwd/lang/eval-fail-attr-name-type.nix:3:5:
-
2| attrs = {
3| puppy.doggy = {};
| ^
4| };
… while evaluating an attribute name
-
at /pwd/lang/eval-fail-attr-name-type.nix:7:17:
-
6| in
7| attrs.puppy.${key}
| ^
diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp
index eb73e9a52..b461b2e02 100644
--- a/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp
+++ b/tests/functional/lang/eval-fail-bad-string-interpolation-1.err.exp
@@ -1,8 +1,6 @@
error:
… while evaluating a path segment
-
at /pwd/lang/eval-fail-bad-string-interpolation-1.nix:1:2:
-
1| "${x: x}"
| ^
2|
diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp
index ac14f329b..95f4c2460 100644
--- a/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp
+++ b/tests/functional/lang/eval-fail-bad-string-interpolation-3.err.exp
@@ -1,8 +1,6 @@
error:
… while evaluating a path segment
-
at /pwd/lang/eval-fail-bad-string-interpolation-3.nix:1:3:
-
1| ''${x: x}''
| ^
2|
diff --git a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp
index 07843a480..4950f8ddb 100644
--- a/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp
+++ b/tests/functional/lang/eval-fail-bad-string-interpolation-4.err.exp
@@ -1,8 +1,6 @@
error:
… while evaluating a path segment
-
at /pwd/lang/eval-fail-bad-string-interpolation-4.nix:9:3:
-
8| # The error message should not be too long.
9| ''${pkgs}''
| ^
diff --git a/tests/functional/lang/eval-fail-blackhole.err.exp b/tests/functional/lang/eval-fail-blackhole.err.exp
index f0618d8ac..95e33a5fe 100644
--- a/tests/functional/lang/eval-fail-blackhole.err.exp
+++ b/tests/functional/lang/eval-fail-blackhole.err.exp
@@ -1,17 +1,13 @@
error:
… while evaluating the attribute 'body'
-
at /pwd/lang/eval-fail-blackhole.nix:2:3:
-
1| let {
2| body = x;
| ^
3| x = y;
error: infinite recursion encountered
-
at /pwd/lang/eval-fail-blackhole.nix:3:7:
-
2| body = x;
3| x = y;
| ^
diff --git a/tests/functional/lang/eval-fail-call-primop.err.exp b/tests/functional/lang/eval-fail-call-primop.err.exp
index 19b407c47..ae5b55ed4 100644
--- a/tests/functional/lang/eval-fail-call-primop.err.exp
+++ b/tests/functional/lang/eval-fail-call-primop.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'length' builtin
-
at /pwd/lang/eval-fail-call-primop.nix:1:1:
-
1| builtins.length 1
| ^
2|
diff --git a/tests/functional/lang/eval-fail-deepseq.err.exp b/tests/functional/lang/eval-fail-deepseq.err.exp
index 5e204ba73..11b62340d 100644
--- a/tests/functional/lang/eval-fail-deepseq.err.exp
+++ b/tests/functional/lang/eval-fail-deepseq.err.exp
@@ -1,24 +1,18 @@
error:
… while calling the 'deepSeq' builtin
-
at /pwd/lang/eval-fail-deepseq.nix:1:1:
-
1| builtins.deepSeq { x = abort "foo"; } 456
| ^
2|
… while evaluating the attribute 'x'
-
at /pwd/lang/eval-fail-deepseq.nix:1:20:
-
1| builtins.deepSeq { x = abort "foo"; } 456
| ^
2|
… while calling the 'abort' builtin
-
at /pwd/lang/eval-fail-deepseq.nix:1:24:
-
1| builtins.deepSeq { x = abort "foo"; } 456
| ^
2|
diff --git a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp
index c5fa67523..834f9c67b 100644
--- a/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp
+++ b/tests/functional/lang/eval-fail-dup-dynamic-attrs.err.exp
@@ -1,17 +1,13 @@
error:
… while evaluating the attribute 'set'
-
at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:3:
-
1| {
2| set = { "${"" + "b"}" = 1; };
| ^
3| set = { "${"b" + ""}" = 2; };
error: dynamic attribute 'b' already defined at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:2:11
-
at /pwd/lang/eval-fail-dup-dynamic-attrs.nix:3:11:
-
2| set = { "${"" + "b"}" = 1; };
3| set = { "${"b" + ""}" = 2; };
| ^
diff --git a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
index 0069285fb..7cb08af8a 100644
--- a/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
+++ b/tests/functional/lang/eval-fail-foldlStrict-strict-op-application.err.exp
@@ -1,35 +1,27 @@
error:
… while calling the 'foldl'' builtin
-
at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:2:1:
-
1| # Tests that the result of applying op is forced even if the value is never used
2| builtins.foldl'
| ^
3| (_: f: f null)
… while calling anonymous lambda
-
at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:7:
-
2| builtins.foldl'
3| (_: f: f null)
| ^
4| null
… from call site
-
at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:3:10:
-
2| builtins.foldl'
3| (_: f: f null)
| ^
4| null
… while calling anonymous lambda
-
at /pwd/lang/eval-fail-foldlStrict-strict-op-application.nix:5:6:
-
4| null
5| [ (_: throw "Not the final value, but is still forced!") (_: 23) ]
| ^
diff --git a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp
index 5b60d253d..73f9df8cc 100644
--- a/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp
+++ b/tests/functional/lang/eval-fail-fromTOML-timestamps.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'fromTOML' builtin
-
at /pwd/lang/eval-fail-fromTOML-timestamps.nix:1:1:
-
1| builtins.fromTOML ''
| ^
2| key = "value"
diff --git a/tests/functional/lang/eval-fail-hashfile-missing.err.exp b/tests/functional/lang/eval-fail-hashfile-missing.err.exp
index 6d38608c0..1e4653927 100644
--- a/tests/functional/lang/eval-fail-hashfile-missing.err.exp
+++ b/tests/functional/lang/eval-fail-hashfile-missing.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'toString' builtin
-
at /pwd/lang/eval-fail-hashfile-missing.nix:4:3:
-
3| in
4| toString (builtins.concatLists (map (hash: map (builtins.hashFile hash) paths) ["md5" "sha1" "sha256" "sha512"]))
| ^
diff --git a/tests/functional/lang/eval-fail-list.err.exp b/tests/functional/lang/eval-fail-list.err.exp
index 24d682118..4320fc022 100644
--- a/tests/functional/lang/eval-fail-list.err.exp
+++ b/tests/functional/lang/eval-fail-list.err.exp
@@ -1,8 +1,6 @@
error:
… while evaluating one of the elements to concatenate
-
at /pwd/lang/eval-fail-list.nix:1:2:
-
1| 8++1
| ^
2|
diff --git a/tests/functional/lang/eval-fail-missing-arg.err.exp b/tests/functional/lang/eval-fail-missing-arg.err.exp
index 61fabf0d5..3b162fe1b 100644
--- a/tests/functional/lang/eval-fail-missing-arg.err.exp
+++ b/tests/functional/lang/eval-fail-missing-arg.err.exp
@@ -1,16 +1,12 @@
error:
… from call site
-
at /pwd/lang/eval-fail-missing-arg.nix:1:1:
-
1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";}
| ^
2|
error: function 'anonymous lambda' called without required argument 'y'
-
at /pwd/lang/eval-fail-missing-arg.nix:1:2:
-
1| ({x, y, z}: x + y + z) {x = "foo"; z = "bar";}
| ^
2|
diff --git a/tests/functional/lang/eval-fail-not-throws.err.exp b/tests/functional/lang/eval-fail-not-throws.err.exp
index b290afb0a..fc81f7277 100644
--- a/tests/functional/lang/eval-fail-not-throws.err.exp
+++ b/tests/functional/lang/eval-fail-not-throws.err.exp
@@ -1,16 +1,12 @@
error:
… in the argument of the not operator
-
at /pwd/lang/eval-fail-not-throws.nix:1:4:
-
1| ! (throw "uh oh!")
| ^
2|
… while calling the 'throw' builtin
-
at /pwd/lang/eval-fail-not-throws.nix:1:4:
-
1| ! (throw "uh oh!")
| ^
2|
diff --git a/tests/functional/lang/eval-fail-path-slash.err.exp b/tests/functional/lang/eval-fail-path-slash.err.exp
index f0011c97f..e3531d352 100644
--- a/tests/functional/lang/eval-fail-path-slash.err.exp
+++ b/tests/functional/lang/eval-fail-path-slash.err.exp
@@ -1,7 +1,5 @@
error: path has a trailing slash
-
at /pwd/lang/eval-fail-path-slash.nix:6:12:
-
5| # and https://nixos.org/nix-dev/2016-June/020829.html
6| /nix/store/
| ^
diff --git a/tests/functional/lang/eval-fail-recursion.err.exp b/tests/functional/lang/eval-fail-recursion.err.exp
index af64133cb..19380dc65 100644
--- a/tests/functional/lang/eval-fail-recursion.err.exp
+++ b/tests/functional/lang/eval-fail-recursion.err.exp
@@ -1,16 +1,12 @@
error:
… in the right operand of the update (//) operator
-
at /pwd/lang/eval-fail-recursion.nix:1:12:
-
1| let a = {} // a; in a.foo
| ^
2|
error: infinite recursion encountered
-
at /pwd/lang/eval-fail-recursion.nix:1:15:
-
1| let a = {} // a; in a.foo
| ^
2|
diff --git a/tests/functional/lang/eval-fail-remove.err.exp b/tests/functional/lang/eval-fail-remove.err.exp
index e82cdac98..292b3c3f3 100644
--- a/tests/functional/lang/eval-fail-remove.err.exp
+++ b/tests/functional/lang/eval-fail-remove.err.exp
@@ -1,17 +1,13 @@
error:
… while evaluating the attribute 'body'
-
at /pwd/lang/eval-fail-remove.nix:4:3:
-
3|
4| body = (removeAttrs attrs ["x"]).x;
| ^
5| }
error: attribute 'x' missing
-
at /pwd/lang/eval-fail-remove.nix:4:10:
-
3|
4| body = (removeAttrs attrs ["x"]).x;
| ^
diff --git a/tests/functional/lang/eval-fail-scope-5.err.exp b/tests/functional/lang/eval-fail-scope-5.err.exp
index 22b6166f8..b0b05cad7 100644
--- a/tests/functional/lang/eval-fail-scope-5.err.exp
+++ b/tests/functional/lang/eval-fail-scope-5.err.exp
@@ -1,35 +1,27 @@
error:
… while evaluating the attribute 'body'
-
at /pwd/lang/eval-fail-scope-5.nix:8:3:
-
7|
8| body = f {};
| ^
9|
… from call site
-
at /pwd/lang/eval-fail-scope-5.nix:8:10:
-
7|
8| body = f {};
| ^
9|
… while calling 'f'
-
at /pwd/lang/eval-fail-scope-5.nix:6:7:
-
5|
6| f = {x ? y, y ? x}: x + y;
| ^
7|
error: infinite recursion encountered
-
at /pwd/lang/eval-fail-scope-5.nix:6:12:
-
5|
6| f = {x ? y, y ? x}: x + y;
| ^
diff --git a/tests/functional/lang/eval-fail-seq.err.exp b/tests/functional/lang/eval-fail-seq.err.exp
index 33a7e9491..3e3d71b15 100644
--- a/tests/functional/lang/eval-fail-seq.err.exp
+++ b/tests/functional/lang/eval-fail-seq.err.exp
@@ -1,16 +1,12 @@
error:
… while calling the 'seq' builtin
-
at /pwd/lang/eval-fail-seq.nix:1:1:
-
1| builtins.seq (abort "foo") 2
| ^
2|
… while calling the 'abort' builtin
-
at /pwd/lang/eval-fail-seq.nix:1:15:
-
1| builtins.seq (abort "foo") 2
| ^
2|
diff --git a/tests/functional/lang/eval-fail-set.err.exp b/tests/functional/lang/eval-fail-set.err.exp
index 0d0140508..6dd646e11 100644
--- a/tests/functional/lang/eval-fail-set.err.exp
+++ b/tests/functional/lang/eval-fail-set.err.exp
@@ -1,7 +1,5 @@
error: undefined variable 'x'
-
at /pwd/lang/eval-fail-set.nix:1:3:
-
1| 8.x
| ^
2|
diff --git a/tests/functional/lang/eval-fail-substring.err.exp b/tests/functional/lang/eval-fail-substring.err.exp
index 5c58be29a..0457a826e 100644
--- a/tests/functional/lang/eval-fail-substring.err.exp
+++ b/tests/functional/lang/eval-fail-substring.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'substring' builtin
-
at /pwd/lang/eval-fail-substring.nix:1:1:
-
1| builtins.substring (builtins.sub 0 1) 1 "x"
| ^
2|
diff --git a/tests/functional/lang/eval-fail-to-path.err.exp b/tests/functional/lang/eval-fail-to-path.err.exp
index 4ffa2cf6d..d6b17be99 100644
--- a/tests/functional/lang/eval-fail-to-path.err.exp
+++ b/tests/functional/lang/eval-fail-to-path.err.exp
@@ -1,8 +1,6 @@
error:
… while calling the 'toPath' builtin
-
at /pwd/lang/eval-fail-to-path.nix:1:1:
-
1| builtins.toPath "foo/bar"
| ^
2|
diff --git a/tests/functional/lang/eval-fail-toJSON.err.exp b/tests/functional/lang/eval-fail-toJSON.err.exp
index 4e618c203..4f6003437 100644
--- a/tests/functional/lang/eval-fail-toJSON.err.exp
+++ b/tests/functional/lang/eval-fail-toJSON.err.exp
@@ -1,25 +1,19 @@
error:
… while calling the 'toJSON' builtin
-
at /pwd/lang/eval-fail-toJSON.nix:1:1:
-
1| builtins.toJSON {
| ^
2| a.b = [
… while evaluating attribute 'a'
-
at /pwd/lang/eval-fail-toJSON.nix:2:3:
-
1| builtins.toJSON {
2| a.b = [
| ^
3| true
… while evaluating attribute 'b'
-
at /pwd/lang/eval-fail-toJSON.nix:2:3:
-
1| builtins.toJSON {
2| a.b = [
| ^
@@ -28,27 +22,21 @@ error:
… while evaluating list element at index 3
… while evaluating attribute 'c'
-
at /pwd/lang/eval-fail-toJSON.nix:7:7:
-
6| {
7| c.d = throw "hah no";
| ^
8| }
… while evaluating attribute 'd'
-
at /pwd/lang/eval-fail-toJSON.nix:7:7:
-
6| {
7| c.d = throw "hah no";
| ^
8| }
… while calling the 'throw' builtin
-
at /pwd/lang/eval-fail-toJSON.nix:7:13:
-
6| {
7| c.d = throw "hah no";
| ^
diff --git a/tests/functional/lang/eval-fail-undeclared-arg.err.exp b/tests/functional/lang/eval-fail-undeclared-arg.err.exp
index 30db743c7..6e13a138e 100644
--- a/tests/functional/lang/eval-fail-undeclared-arg.err.exp
+++ b/tests/functional/lang/eval-fail-undeclared-arg.err.exp
@@ -1,16 +1,12 @@
error:
… from call site
-
at /pwd/lang/eval-fail-undeclared-arg.nix:1:1:
-
1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";}
| ^
2|
error: function 'anonymous lambda' called with unexpected argument 'y'
-
at /pwd/lang/eval-fail-undeclared-arg.nix:1:2:
-
1| ({x, z}: x + z) {x = "foo"; y = "bla"; z = "bar";}
| ^
2|
diff --git a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp
index 811d01b03..0a4f56ac5 100644
--- a/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp
+++ b/tests/functional/lang/eval-fail-using-set-as-attr-name.err.exp
@@ -1,8 +1,6 @@
error:
… while evaluating an attribute name
-
at /pwd/lang/eval-fail-using-set-as-attr-name.nix:5:10:
-
4| in
5| attr.${key}
| ^
diff --git a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
index 4fe6b7a1f..6c3a3510c 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-1.err.exp
@@ -1,7 +1,5 @@
error: attribute 'x' already defined at «stdin»:1:3
-
at «stdin»:3:3:
-
2| y = 456;
3| x = 789;
| ^
diff --git a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
index 3aba2891f..fecdece20 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-2.err.exp
@@ -1,7 +1,5 @@
error: attribute 'x' already defined at «stdin»:9:5
-
at «stdin»:10:17:
-
9| x = 789;
10| inherit (as) x;
| ^
diff --git a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
index 3aba2891f..fecdece20 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-3.err.exp
@@ -1,7 +1,5 @@
error: attribute 'x' already defined at «stdin»:9:5
-
at «stdin»:10:17:
-
9| x = 789;
10| inherit (as) x;
| ^
diff --git a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
index ff68446a1..f85ffea51 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-4.err.exp
@@ -1,7 +1,5 @@
error: attribute 'services.ssh.port' already defined at «stdin»:2:3
-
at «stdin»:3:3:
-
2| services.ssh.port = 22;
3| services.ssh.port = 23;
| ^
diff --git a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
index 512a499ca..98cea9dae 100644
--- a/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
+++ b/tests/functional/lang/parse-fail-dup-attrs-7.err.exp
@@ -1,7 +1,5 @@
error: attribute 'x' already defined at «stdin»:6:12
-
at «stdin»:7:12:
-
6| inherit x;
7| inherit x;
| ^
diff --git a/tests/functional/lang/parse-fail-dup-formals.err.exp b/tests/functional/lang/parse-fail-dup-formals.err.exp
index 1d566fb33..d7c7e0237 100644
--- a/tests/functional/lang/parse-fail-dup-formals.err.exp
+++ b/tests/functional/lang/parse-fail-dup-formals.err.exp
@@ -1,6 +1,4 @@
error: duplicate formal function argument 'x'
-
at «stdin»:1:8:
-
1| {x, y, x}: x
| ^
diff --git a/tests/functional/lang/parse-fail-eof-in-string.err.exp b/tests/functional/lang/parse-fail-eof-in-string.err.exp
index f9fa72312..b28d35950 100644
--- a/tests/functional/lang/parse-fail-eof-in-string.err.exp
+++ b/tests/functional/lang/parse-fail-eof-in-string.err.exp
@@ -1,7 +1,5 @@
error: syntax error, unexpected end of file, expecting '"'
-
at «stdin»:3:5:
-
2| # Note that this file must not end with a newline.
3| a 1"$
| ^
diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp
index 32f776795..a4472156b 100644
--- a/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp
+++ b/tests/functional/lang/parse-fail-mixed-nested-attrs1.err.exp
@@ -1,7 +1,5 @@
error: attribute 'z' already defined at «stdin»:3:16
-
at «stdin»:2:3:
-
1| {
2| x.z = 3;
| ^
diff --git a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp
index 0437cd50c..ead1f0dbd 100644
--- a/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp
+++ b/tests/functional/lang/parse-fail-mixed-nested-attrs2.err.exp
@@ -1,7 +1,5 @@
error: attribute 'y' already defined at «stdin»:3:9
-
at «stdin»:2:3:
-
1| {
2| x.y.y = 3;
| ^
diff --git a/tests/functional/lang/parse-fail-patterns-1.err.exp b/tests/functional/lang/parse-fail-patterns-1.err.exp
index 634a04aaa..6ba39d884 100644
--- a/tests/functional/lang/parse-fail-patterns-1.err.exp
+++ b/tests/functional/lang/parse-fail-patterns-1.err.exp
@@ -1,7 +1,5 @@
error: duplicate formal function argument 'args'
-
at «stdin»:1:1:
-
1| args@{args, x, y, z}: x
| ^
2|
diff --git a/tests/functional/lang/parse-fail-regression-20060610.err.exp b/tests/functional/lang/parse-fail-regression-20060610.err.exp
index 167d01e85..d8875a6a5 100644
--- a/tests/functional/lang/parse-fail-regression-20060610.err.exp
+++ b/tests/functional/lang/parse-fail-regression-20060610.err.exp
@@ -1,7 +1,5 @@
error: undefined variable 'gcc'
-
at «stdin»:8:12:
-
7|
8| body = ({
| ^
diff --git a/tests/functional/lang/parse-fail-undef-var-2.err.exp b/tests/functional/lang/parse-fail-undef-var-2.err.exp
index 77c96bbd2..a58d8dca4 100644
--- a/tests/functional/lang/parse-fail-undef-var-2.err.exp
+++ b/tests/functional/lang/parse-fail-undef-var-2.err.exp
@@ -1,7 +1,5 @@
error: syntax error, unexpected ':', expecting '}'
-
at «stdin»:3:13:
-
2|
3| f = {x, y :
| ^
diff --git a/tests/functional/lang/parse-fail-undef-var.err.exp b/tests/functional/lang/parse-fail-undef-var.err.exp
index 48e88747f..3d143d9af 100644
--- a/tests/functional/lang/parse-fail-undef-var.err.exp
+++ b/tests/functional/lang/parse-fail-undef-var.err.exp
@@ -1,7 +1,5 @@
error: undefined variable 'y'
-
at «stdin»:1:4:
-
1| x: y
| ^
2|
diff --git a/tests/functional/lang/parse-fail-utf8.err.exp b/tests/functional/lang/parse-fail-utf8.err.exp
index 6087479a3..e83abdb9e 100644
--- a/tests/functional/lang/parse-fail-utf8.err.exp
+++ b/tests/functional/lang/parse-fail-utf8.err.exp
@@ -1,6 +1,4 @@
error: syntax error, unexpected invalid token, expecting end of file
-
at «stdin»:1:5:
-
1| 123 Ã
| ^
diff --git a/tests/functional/logging.sh b/tests/functional/logging.sh
index 1481b9b36..1ccc21d0b 100644
--- a/tests/functional/logging.sh
+++ b/tests/functional/logging.sh
@@ -15,7 +15,7 @@ nix-build dependencies.nix --no-out-link --compress-build-log
[ "$(nix-store -l $path)" = FOO ]
# test whether empty logs work fine with `nix log`.
-builder="$(mktemp)"
+builder="$(realpath "$(mktemp)")"
echo -e "#!/bin/sh\nmkdir \$out" > "$builder"
outp="$(nix-build -E \
'with import ./config.nix; mkDerivation { name = "fnord"; builder = '"$builder"'; }' \
diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh
index 7c478a0cd..eced4d3f1 100644
--- a/tests/functional/nix-profile.sh
+++ b/tests/functional/nix-profile.sh
@@ -47,7 +47,7 @@ cp ./config.nix $flake1Dir/
# Test upgrading from nix-env.
nix-env -f ./user-envs.nix -i foo-1.0
-nix profile list | grep -A2 'Index:.*0' | grep 'Store paths:.*foo-1.0'
+nix profile list | grep -A2 'Name:.*foo' | grep 'Store paths:.*foo-1.0'
nix profile install $flake1Dir -L
nix profile list | grep -A4 'Index:.*1' | grep 'Locked flake URL:.*narHash'
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello World" ]]
@@ -81,7 +81,7 @@ nix profile rollback
# Test uninstall.
[ -e $TEST_HOME/.nix-profile/bin/foo ]
-nix profile remove 0
+nix profile remove foo
(! [ -e $TEST_HOME/.nix-profile/bin/foo ])
nix profile history | grep 'foo: 1.0 -> ∅'
nix profile diff-closures | grep 'Version 3 -> 4'
@@ -93,6 +93,13 @@ nix profile remove 1
nix profile install $(nix-build --no-out-link ./simple.nix)
[[ $(cat $TEST_HOME/.nix-profile/hello) = "Hello World!" ]]
+# Test packages with same name from different sources
+mkdir $TEST_ROOT/simple-too
+cp ./simple.nix ./config.nix simple.builder.sh $TEST_ROOT/simple-too
+nix profile install --file $TEST_ROOT/simple-too/simple.nix ''
+nix profile list | grep -A4 'Name:.*simple' | grep 'Name:.*simple1'
+nix profile remove simple1
+
# Test wipe-history.
nix profile wipe-history
[[ $(nix profile history | grep Version | wc -l) -eq 1 ]]
@@ -104,7 +111,7 @@ nix profile upgrade 0
nix profile history | grep "packages.$system.default: 1.0, 1.0-man -> 3.0, 3.0-man"
# Test new install of CA package.
-nix profile remove 0
+nix profile remove flake1
printf 4.0 > $flake1Dir/version
printf Utrecht > $flake1Dir/who
nix profile install $flake1Dir
@@ -112,26 +119,27 @@ nix profile install $flake1Dir
[[ $(nix path-info --json $(realpath $TEST_HOME/.nix-profile/bin/hello) | jq -r .[].ca) =~ fixed:r:sha256: ]]
# Override the outputs.
-nix profile remove 0 1
+nix profile remove simple flake1
nix profile install "$flake1Dir^*"
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Utrecht" ]]
[ -e $TEST_HOME/.nix-profile/share/man ]
[ -e $TEST_HOME/.nix-profile/include ]
printf Nix > $flake1Dir/who
-nix profile upgrade 0
+nix profile list
+nix profile upgrade flake1
[[ $($TEST_HOME/.nix-profile/bin/hello) = "Hello Nix" ]]
[ -e $TEST_HOME/.nix-profile/share/man ]
[ -e $TEST_HOME/.nix-profile/include ]
-nix profile remove 0
+nix profile remove flake1
nix profile install "$flake1Dir^man"
(! [ -e $TEST_HOME/.nix-profile/bin/hello ])
[ -e $TEST_HOME/.nix-profile/share/man ]
(! [ -e $TEST_HOME/.nix-profile/include ])
# test priority
-nix profile remove 0
+nix profile remove flake1
# Make another flake.
flake2Dir=$TEST_ROOT/flake2
diff --git a/tests/nixos/github-flakes.nix b/tests/nixos/github-flakes.nix
index 62ae8871b..a51689445 100644
--- a/tests/nixos/github-flakes.nix
+++ b/tests/nixos/github-flakes.nix
@@ -144,7 +144,7 @@ in
virtualisation.memorySize = 4096;
nix.settings.substituters = lib.mkForce [ ];
nix.extraOptions = "experimental-features = nix-command flakes";
- networking.hosts.${(builtins.head nodes.github.config.networking.interfaces.eth1.ipv4.addresses).address} =
+ networking.hosts.${(builtins.head nodes.github.networking.interfaces.eth1.ipv4.addresses).address} =
[ "channels.nixos.org" "api.github.com" "github.com" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
diff --git a/tests/nixos/nix-copy.nix b/tests/nixos/nix-copy.nix
index 2981cc2b8..7db5197aa 100644
--- a/tests/nixos/nix-copy.nix
+++ b/tests/nixos/nix-copy.nix
@@ -36,7 +36,7 @@ in {
server =
{ config, pkgs, ... }:
{ services.openssh.enable = true;
- services.openssh.permitRootLogin = "yes";
+ services.openssh.settings.PermitRootLogin = "yes";
users.users.root.password = "foobar";
virtualisation.writableStore = true;
virtualisation.additionalPaths = [ pkgB pkgC ];
diff --git a/tests/nixos/nss-preload.nix b/tests/nixos/nss-preload.nix
index cef62e95b..00505d114 100644
--- a/tests/nixos/nss-preload.nix
+++ b/tests/nixos/nss-preload.nix
@@ -84,8 +84,8 @@ in
client = { lib, nodes, pkgs, ... }: {
networking.useDHCP = false;
networking.nameservers = [
- (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv6.addresses).address
- (lib.head nodes.http_dns.config.networking.interfaces.eth1.ipv4.addresses).address
+ (lib.head nodes.http_dns.networking.interfaces.eth1.ipv6.addresses).address
+ (lib.head nodes.http_dns.networking.interfaces.eth1.ipv4.addresses).address
];
networking.interfaces.eth1.ipv6.addresses = [
{ address = "fd21::10"; prefixLength = 64; }
diff --git a/tests/nixos/remote-builds-ssh-ng.nix b/tests/nixos/remote-builds-ssh-ng.nix
index b59dde9bf..20a43803d 100644
--- a/tests/nixos/remote-builds-ssh-ng.nix
+++ b/tests/nixos/remote-builds-ssh-ng.nix
@@ -81,7 +81,7 @@ in
client.succeed(f"ssh -o StrictHostKeyChecking=no {builder.name} 'echo hello world'")
# Perform a build
- out = client.succeed("nix-build ${expr nodes.client.config 1} 2> build-output")
+ out = client.succeed("nix-build ${expr nodes.client 1} 2> build-output")
# Verify that the build was done on the builder
builder.succeed(f"test -e {out.strip()}")
diff --git a/tests/nixos/remote-builds.nix b/tests/nixos/remote-builds.nix
index 1c96cc787..ad7f509db 100644
--- a/tests/nixos/remote-builds.nix
+++ b/tests/nixos/remote-builds.nix
@@ -90,22 +90,22 @@ in
# Perform a build and check that it was performed on the builder.
out = client.succeed(
- "nix-build ${expr nodes.client.config 1} 2> build-output",
+ "nix-build ${expr nodes.client 1} 2> build-output",
"grep -q Hello build-output"
)
builder1.succeed(f"test -e {out}")
# And a parallel build.
- paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client.config 2})\!out $(nix-instantiate ${expr nodes.client.config 3})\!out')
+ paths = client.succeed(r'nix-store -r $(nix-instantiate ${expr nodes.client 2})\!out $(nix-instantiate ${expr nodes.client 3})\!out')
out1, out2 = paths.split()
builder1.succeed(f"test -e {out1} -o -e {out2}")
builder2.succeed(f"test -e {out1} -o -e {out2}")
# And a failing build.
- client.fail("nix-build ${expr nodes.client.config 5}")
+ client.fail("nix-build ${expr nodes.client 5}")
# Test whether the build hook automatically skips unavailable builders.
builder1.block()
- client.succeed("nix-build ${expr nodes.client.config 4}")
+ client.succeed("nix-build ${expr nodes.client 4}")
'';
}
diff --git a/tests/nixos/sourcehut-flakes.nix b/tests/nixos/sourcehut-flakes.nix
index 6e8d884a0..04f3590e1 100644
--- a/tests/nixos/sourcehut-flakes.nix
+++ b/tests/nixos/sourcehut-flakes.nix
@@ -108,7 +108,7 @@ in
flake-registry = https://git.sr.ht/~NixOS/flake-registry/blob/master/flake-registry.json
'';
environment.systemPackages = [ pkgs.jq ];
- networking.hosts.${(builtins.head nodes.sourcehut.config.networking.interfaces.eth1.ipv4.addresses).address} =
+ networking.hosts.${(builtins.head nodes.sourcehut.networking.interfaces.eth1.ipv4.addresses).address} =
[ "git.sr.ht" ];
security.pki.certificateFiles = [ "${cert}/ca.crt" ];
};
diff --git a/tests/unit/libexpr/flakeref.cc b/tests/unit/libexpr/flake/flakeref.cc
similarity index 100%
rename from tests/unit/libexpr/flakeref.cc
rename to tests/unit/libexpr/flake/flakeref.cc
diff --git a/tests/unit/libexpr/flake/url-name.cc b/tests/unit/libexpr/flake/url-name.cc
new file mode 100644
index 000000000..84d32837c
--- /dev/null
+++ b/tests/unit/libexpr/flake/url-name.cc
@@ -0,0 +1,67 @@
+#include "flake/url-name.hh"
+#include <gtest/gtest.h>
+
+namespace nix {
+
+/* ----------- tests for url-name.hh --------------------------------------------------*/
+
+ TEST(getNameFromURL, getsNameFromURL) {
+ ASSERT_EQ(getNameFromURL(parseURL("path:/home/user/project")), "project");
+ ASSERT_EQ(getNameFromURL(parseURL("path:~/repos/nixpkgs#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("path:.#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop");
+ ASSERT_EQ(getNameFromURL(parseURL("path:./repos/myflake#nonStandardAttr.mylaptop")), "nonStandardAttr.mylaptop");
+ ASSERT_EQ(getNameFromURL(parseURL("path:./nixpkgs#packages.x86_64-linux.complex^bin,man")), "complex");
+ ASSERT_EQ(getNameFromURL(parseURL("path:./myproj#packages.x86_64-linux.default^*")), "myproj");
+
+ ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#packages.x86_64-linux.default")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix#")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("github:NixOS/nix")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("github:cachix/devenv/main#packages.x86_64-linux.default")), "devenv");
+ ASSERT_EQ(getNameFromURL(parseURL("github:edolstra/nix-warez?rev=1234&dir=blender&ref=master")), "blender");
+
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nixpkgs#hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#packages.x86_64-linux.default")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix#")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:NixOS/nix")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("gitlab:cachix/devenv/main#packages.x86_64-linux.default")), "devenv");
+
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nixpkgs#hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#packages.x86_64-linux.default")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix#")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:NixOS/nix")), "nix");
+ ASSERT_EQ(getNameFromURL(parseURL("sourcehut:cachix/devenv/main#packages.x86_64-linux.default")), "devenv");
+
+ ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/dwarffs")), "dwarffs");
+ ASSERT_EQ(getNameFromURL(parseURL("git://github.com/edolstra/nix-warez?dir=blender")), "blender");
+ ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project")), "project");
+ ASSERT_EQ(getNameFromURL(parseURL("git+file:///home/user/project?ref=fa1e2d23a22")), "project");
+ ASSERT_EQ(getNameFromURL(parseURL("git+ssh://git@github.com/someuser/my-repo#")), "my-repo");
+ ASSERT_EQ(getNameFromURL(parseURL("git+git://github.com/someuser/my-repo?rev=v1.2.3")), "my-repo");
+ ASSERT_EQ(getNameFromURL(parseURL("git+ssh:///home/user/project?dir=subproject&rev=v2.4")), "subproject");
+ ASSERT_EQ(getNameFromURL(parseURL("git+http://not-even-real#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("git+https://not-even-real#packages.aarch64-darwin.hello")), "hello");
+
+ ASSERT_EQ(getNameFromURL(parseURL("tarball+http://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.jq")), "jq");
+ ASSERT_EQ(getNameFromURL(parseURL("tarball+https://github.com/NixOS/nix/archive/refs/tags/2.18.1#packages.x86_64-linux.hg")), "hg");
+ ASSERT_EQ(getNameFromURL(parseURL("tarball+file:///home/user/Downloads/nixpkgs-2.18.1#packages.aarch64-darwin.ripgrep")), "ripgrep");
+
+ ASSERT_EQ(getNameFromURL(parseURL("https://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv");
+ ASSERT_EQ(getNameFromURL(parseURL("http://github.com/NixOS/nix/archive/refs/tags/2.18.1.tar.gz#packages.x86_64-linux.pv")), "pv");
+
+ ASSERT_EQ(getNameFromURL(parseURL("file:///home/user/project?ref=fa1e2d23a22")), "project");
+ ASSERT_EQ(getNameFromURL(parseURL("file+file:///home/user/project?ref=fa1e2d23a22")), "project");
+ ASSERT_EQ(getNameFromURL(parseURL("file+http://not-even-real#packages.x86_64-linux.hello")), "hello");
+ ASSERT_EQ(getNameFromURL(parseURL("file+http://gitfantasy.com/org/user/notaflake")), "notaflake");
+ ASSERT_EQ(getNameFromURL(parseURL("file+https://not-even-real#packages.aarch64-darwin.hello")), "hello");
+
+ ASSERT_EQ(getNameFromURL(parseURL("https://www.github.com/")), std::nullopt);
+ ASSERT_EQ(getNameFromURL(parseURL("path:.")), std::nullopt);
+ ASSERT_EQ(getNameFromURL(parseURL("file:.#")), std::nullopt);
+ ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default")), std::nullopt);
+ ASSERT_EQ(getNameFromURL(parseURL("path:.#packages.x86_64-linux.default^*")), std::nullopt);
+ }
+}
diff --git a/tests/unit/libexpr/local.mk b/tests/unit/libexpr/local.mk
index 5743880d7..25810ad9c 100644
--- a/tests/unit/libexpr/local.mk
+++ b/tests/unit/libexpr/local.mk
@@ -16,7 +16,8 @@ endif
libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \
- $(wildcard $(d)/value/*.cc)
+ $(wildcard $(d)/value/*.cc) \
+ $(wildcard $(d)/flake/*.cc)
libexpr-tests_EXTRA_INCLUDES = \
-I tests/unit/libexpr-support \
diff --git a/tests/unit/libexpr/primops.cc b/tests/unit/libexpr/primops.cc
index 31b1b49ae..6d7649b3c 100644
--- a/tests/unit/libexpr/primops.cc
+++ b/tests/unit/libexpr/primops.cc
@@ -604,7 +604,7 @@ namespace nix {
ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
}
- TEST_F(PrimOpTest, hashStringInvalidHashType) {
+ TEST_F(PrimOpTest, hashStringInvalidHashAlgorithm) {
ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
}